Compare commits

...

103 Commits

Author SHA1 Message Date
Jeremy
83b361ae57 Merge pull request #938 from Wikid82/nightly
Weekly: Promote nightly to main (2026-04-13)
2026-04-13 08:48:06 -04:00
Jeremy
7bd3a73bcf Merge pull request #935 from Wikid82/bot/update-geolite2-checksum
chore(docker): update GeoLite2-Country.mmdb checksum
2026-04-13 00:42:15 -04:00
Wikid82
00ba5b3650 chore(docker): update GeoLite2-Country.mmdb checksum
Automated checksum update for GeoLite2-Country.mmdb database.

Old: f5e80a9a3129d46e75c8cccd66bfac725b0449a6c89ba5093a16561d58f20bda
New: b018842033872f19ed9ccefb863ec954f8024db2ae913d0d4ea14e35ace4eba1

Auto-generated by: .github/workflows/update-geolite2.yml
2026-04-13 02:59:03 +00:00
Jeremy
062b86642d Merge pull request #927 from Wikid82/feature/beta-release
fix: dependency updates, CVE suppression management, and Renovate version constraints
2026-04-10 16:32:51 -04:00
GitHub Actions
a5724aecf9 fix: update indirect dependencies for golang.org/x/arch, modernc.org/libc, and modernc.org/sqlite to latest versions 2026-04-10 19:22:04 +00:00
GitHub Actions
53dccbe82b fix: update baseline-browser-mapping and call-bind versions for security and compatibility 2026-04-10 19:22:04 +00:00
Jeremy
8d6645415a Merge pull request #926 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update non-major-updates (feature/beta-release)
2026-04-10 15:21:01 -04:00
renovate[bot]
4cfcc9aa02 chore(deps): update non-major-updates 2026-04-10 19:18:28 +00:00
Jeremy
5d384e4afa Merge pull request #925 from Wikid82/renovate/feature/beta-release-actions-github-script-9.x
chore(deps): update actions/github-script action to v9 (feature/beta-release)
2026-04-10 15:17:21 -04:00
Jeremy
5bf25fdebc Merge pull request #924 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-10 15:17:05 -04:00
Jeremy
253d1ddd29 Merge branch 'feature/beta-release' into renovate/feature/beta-release-actions-github-script-9.x 2026-04-10 11:58:09 -04:00
Jeremy
5eab41b559 Merge branch 'feature/beta-release' into renovate/feature/beta-release-non-major-updates 2026-04-10 11:57:50 -04:00
GitHub Actions
a076bb3265 chore(security): re-evaluate overdue CVE suppressions 2026-04-10 15:56:43 +00:00
Jeremy
9c85d9e737 Merge branch 'feature/beta-release' into renovate/feature/beta-release-actions-github-script-9.x 2026-04-10 11:41:55 -04:00
Jeremy
1de4ce6729 Merge branch 'feature/beta-release' into renovate/feature/beta-release-non-major-updates 2026-04-10 11:41:36 -04:00
GitHub Actions
8e0f88e8bd fix: add suppression for CVE-2026-32286 due to pgproto3/v2 buffer overflow vulnerability 2026-04-10 15:39:52 +00:00
GitHub Actions
36460a884e fix: enforce version constraints for pgx and go-jose modules to maintain compatibility 2026-04-10 15:26:36 +00:00
renovate[bot]
585ae9494d chore(deps): update actions/github-script action to v9 2026-04-10 15:11:56 +00:00
renovate[bot]
ed9d6fe5d8 fix(deps): update non-major-updates 2026-04-10 15:11:47 +00:00
Jeremy
f0147b1315 Merge pull request #921 from Wikid82/feature/beta-release
fix: resolve 5 HIGH-severity CVEs blocking nightly container image scan
2026-04-09 21:13:01 -04:00
GitHub Actions
615e5a95f5 fix: downgrade pgx/v4 to v4.18.3 to address buffer overflow vulnerability 2026-04-09 19:09:25 +00:00
Jeremy
5b85d18217 Merge pull request #923 from Wikid82/renovate/feature/beta-release-github.com-jackc-pgx-v4-5.x
chore(deps): update module github.com/jackc/pgx/v4 to v5 (feature/beta-release)
2026-04-09 14:31:42 -04:00
renovate[bot]
f05c24dd66 chore(deps): update module github.com/jackc/pgx/v4 to v5 2026-04-09 18:24:32 +00:00
GitHub Actions
fd11279aa3 fix: update security policy for CVE-2026-31790 and CVE-2026-2673 vulnerabilities 2026-04-09 17:59:56 +00:00
GitHub Actions
59282952b0 fix(ci): provide Go 1.26.2 toolchain for Renovate dependency lookups 2026-04-09 17:55:51 +00:00
GitHub Actions
8742c76d52 fix: add Grype ignore for unfixed Alpine OpenSSL CVE-2026-31790
No upstream fix available for libcrypto3/libssl3 in Alpine 3.23.3.
Accepted risk documented in SECURITY.md. Monitoring Alpine security
advisories for patch availability.
2026-04-09 17:52:04 +00:00
Jeremy
9c0193e812 Merge pull request #922 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-09 13:42:52 -04:00
renovate[bot]
64465e1cd9 fix(deps): update non-major-updates 2026-04-09 17:27:46 +00:00
GitHub Actions
580e20d573 fix: resolve 5 HIGH-severity CVEs blocking nightly container image scan
Patch vulnerable transitive dependencies across all three compiled
binaries in the Docker image (backend, Caddy, CrowdSec):

- go-jose/v3 and v4: JOSE/JWT validation bypass (CVE-2026-34986)
- otel/sdk: resource leak in OpenTelemetry SDK (CVE-2026-39883)
- pgproto3/v2: buffer overflow via pgx/v4 bump (CVE-2026-32286)
- AWS SDK v2: event stream injection in CrowdSec deps (GHSA-xmrv-pmrh-hhx2)
- OTel HTTP exporters: request smuggling (CVE-2026-39882)
- gRPC: bumped to v1.80.0 for transitive go-jose/v4 resolution

All Dockerfile patches include Renovate annotations for automated
future tracking. Renovate config extended to cover Go version and
GitHub Action refs in skill example workflows, preventing version
drift in non-CI files. SECURITY.md updated with pre-existing Alpine
base image CVE (no upstream fix available).

Nightly Go stdlib CVEs (1.26.1) self-heal on next development sync;
example workflow pinned to 1.26.2 for correctness.
2026-04-09 17:24:25 +00:00
GitHub Actions
bb496daae3 fix(ci): improve health check for Charon container in nightly build 2026-04-09 14:08:19 +00:00
GitHub Actions
4cd568b0e5 fix(deps): update multiple dependencies in package-lock.json 2026-04-09 14:04:00 +00:00
GitHub Actions
efd70cd651 fix(deps): update golang.org/x/text to v0.36.0 and other dependencies 2026-04-09 14:01:05 +00:00
GitHub Actions
3d4a63b515 fix(go): update Go version to 1.26.2 2026-04-09 13:58:24 +00:00
Jeremy
42cec9e8c3 Merge pull request #919 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-09 09:41:24 -04:00
renovate[bot]
73565e0e0d fix(deps): update non-major-updates 2026-04-09 09:20:57 +00:00
Jeremy
6dddc5db43 Merge pull request #918 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-06 20:30:42 -04:00
renovate[bot]
ef90d1c0d7 fix(deps): update non-major-updates 2026-04-06 21:48:29 +00:00
Jeremy
0354f5cecf Merge pull request #917 from Wikid82/nightly
Weekly: Promote nightly to main (2026-04-06)
2026-04-06 12:14:47 -04:00
Jeremy
2d923246a9 Merge pull request #916 from Wikid82/development
Propagate changes from development into feature/beta-release
2026-04-06 01:24:35 -04:00
Jeremy
241c0d1b35 Merge pull request #914 from Wikid82/renovate/development-non-major-updates
chore(deps): update non-major-updates (development)
2026-04-06 01:08:26 -04:00
Jeremy
a9767baa69 Merge branch 'development' into renovate/development-non-major-updates 2026-04-06 01:08:07 -04:00
Jeremy
79f0080c80 Merge pull request #915 from Wikid82/main
Propagate changes from main into development
2026-04-06 01:07:49 -04:00
renovate[bot]
bfa6fc0920 chore(deps): update non-major-updates 2026-04-06 04:42:28 +00:00
Jeremy
a5c6eb95c6 Merge pull request #887 from Wikid82/feature/beta-release
Feature: CrowdSec Dashboard Integration with Observable Metrics
2026-04-06 00:37:46 -04:00
GitHub Actions
47d306b44b fix(docker): ensure CrowdSec hub index and collections bootstrap on every startup 2026-04-05 05:16:26 +00:00
GitHub Actions
5e73ba7bd0 fix(security): add temporary ignore rules for transitive HIGH vulnerabilities 2026-04-05 04:18:54 +00:00
GitHub Actions
32a30434b1 fix(security): prevent client injection of enrichment fields on decisions 2026-04-05 02:51:54 +00:00
GitHub Actions
138426311f fix(models): prevent zero-date serialization for optional ExpiresAt 2026-04-05 02:51:54 +00:00
GitHub Actions
a8ef9dd6ce fix(crowdsec): use read lock for non-mutating cache lookups 2026-04-05 02:51:54 +00:00
GitHub Actions
b48794df14 fix(deps): update smol-toml version constraint to ensure compatibility 2026-04-05 02:51:54 +00:00
GitHub Actions
85a80568b2 fix(ci): load Grype ignore config in supply chain verification 2026-04-05 02:51:54 +00:00
GitHub Actions
fc0e31df56 fix(deps): update tldts and tldts-core to version 7.0.28 for compatibility improvements 2026-04-05 02:51:54 +00:00
Jeremy
cb4ae8367c Merge pull request #910 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update dependency tldts to ^7.0.28 (feature/beta-release)
2026-04-04 22:32:58 -04:00
Jeremy
de020d9901 Merge pull request #909 from Wikid82/renovate/feature/beta-release-react-i18next-17.x
fix(deps): update dependency react-i18next to v17 (feature/beta-release)
2026-04-04 22:24:07 -04:00
renovate[bot]
0634357ee9 fix(deps): update dependency tldts to ^7.0.28 2026-04-05 02:04:41 +00:00
renovate[bot]
9753a13001 fix(deps): update dependency react-i18next to v17 2026-04-04 01:09:32 +00:00
Jeremy
d0deef1537 Merge branch 'development' into feature/beta-release 2026-04-03 21:08:07 -04:00
Jeremy
4603b57224 Merge pull request #908 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-04-03 21:07:43 -04:00
Jeremy
bb64ca64e2 Merge branch 'feature/beta-release' into renovate/feature/beta-release-non-major-updates 2026-04-03 21:07:31 -04:00
Jeremy
ce4a9c5626 Merge pull request #896 from Wikid82/renovate/feature/beta-release-react-i18next-17.x
fix(deps): update dependency react-i18next to v17 (feature/beta-release)
2026-04-03 21:07:00 -04:00
renovate[bot]
b45861090d fix(deps): update non-major-updates 2026-04-04 00:58:06 +00:00
Jeremy
4a3f655a49 Merge pull request #907 from Wikid82/main
Propagate changes from main into development
2026-04-03 20:58:02 -04:00
Jeremy
29e069ac94 Merge branch 'feature/beta-release' into renovate/feature/beta-release-react-i18next-17.x 2026-04-03 20:56:19 -04:00
GitHub Actions
625fcf8e5c fix: update Trivy action version and extend vulnerability review dates in configuration files 2026-04-04 00:54:55 +00:00
GitHub Actions
2b8ed06c3c fix: remediate axios supply chain compromise and harden CI workflow permissions 2026-04-04 00:05:27 +00:00
GitHub Actions
34d73ad6ed fix: update dependencies for @emnapi/core, @emnapi/runtime, @emnapi/wasi-threads, @playwright/test, and dotenv for compatibility improvements 2026-04-03 23:20:41 +00:00
GitHub Actions
e06a8cb676 fix: update go-sqlite3 and other dependencies for compatibility and improvements 2026-04-03 22:57:25 +00:00
GitHub Actions
5ba8cd60c8 fix: add npmDedupe to postUpdateOptions for improved dependency management 2026-04-03 22:55:15 +00:00
GitHub Actions
29985714a3 fix: update CORAZA_CADDY_VERSION to 2.4.0 for compatibility improvements 2026-04-03 22:39:40 +00:00
GitHub Actions
64c9d7adbe fix: update CADDY_SECURITY_VERSION to 1.1.61 for security improvements 2026-04-03 22:38:28 +00:00
Jeremy
8d56760c64 Merge branch 'feature/beta-release' into renovate/feature/beta-release-react-i18next-17.x 2026-03-30 21:19:47 -04:00
Jeremy
087ae9cc0d Merge pull request #890 from Wikid82/renovate/feature/beta-release-non-major-updates
fix(deps): update non-major-updates (feature/beta-release)
2026-03-30 21:19:28 -04:00
Jeremy
35b003ae5e Merge branch 'feature/beta-release' into renovate/feature/beta-release-non-major-updates 2026-03-30 21:19:14 -04:00
Jeremy
cab3c68508 Merge pull request #895 from Wikid82/renovate/feature/beta-release-i18next-26.x
fix(deps): update dependency i18next to v26 (feature/beta-release)
2026-03-30 21:17:42 -04:00
Jeremy
b6558d4165 Merge pull request #894 from Wikid82/renovate/feature/beta-release-eslint-plugin-unicorn-64.x
chore(deps): update dependency eslint-plugin-unicorn to v64 (feature/beta-release)
2026-03-30 21:17:25 -04:00
Jeremy
64cbe5a74d Merge pull request #893 from Wikid82/renovate/feature/beta-release-eslint-markdown-8.x
chore(deps): update dependency @eslint/markdown to v8 (feature/beta-release)
2026-03-30 21:17:10 -04:00
Jeremy
1d3e60b4f8 Merge pull request #892 from Wikid82/renovate/feature/beta-release-codecov-codecov-action-6.x
chore(deps): update codecov/codecov-action action to v6 (feature/beta-release)
2026-03-30 21:16:50 -04:00
Jeremy
07e6ad2d09 Merge pull request #891 from Wikid82/renovate/feature/beta-release-actions-deploy-pages-5.x
chore(deps): update actions/deploy-pages action to v5 (feature/beta-release)
2026-03-30 21:16:33 -04:00
renovate[bot]
543388b5a4 fix(deps): update non-major-updates 2026-03-31 01:08:59 +00:00
Jeremy
e2774cccf7 Merge branch 'feature/beta-release' into renovate/feature/beta-release-react-i18next-17.x 2026-03-30 18:42:13 -04:00
Jeremy
bf4dd17792 Merge branch 'feature/beta-release' into renovate/feature/beta-release-i18next-26.x 2026-03-30 18:41:56 -04:00
Jeremy
4abc29406f Merge branch 'feature/beta-release' into renovate/feature/beta-release-eslint-plugin-unicorn-64.x 2026-03-30 18:41:41 -04:00
Jeremy
b75f92a88b Merge branch 'feature/beta-release' into renovate/feature/beta-release-eslint-markdown-8.x 2026-03-30 18:41:22 -04:00
Jeremy
237a3a4d80 Merge branch 'feature/beta-release' into renovate/feature/beta-release-codecov-codecov-action-6.x 2026-03-30 18:40:59 -04:00
Jeremy
3e926298f2 Merge branch 'feature/beta-release' into renovate/feature/beta-release-actions-deploy-pages-5.x 2026-03-30 18:40:36 -04:00
GitHub Actions
e84df69cb6 fix: add vulnerability suppressions for Docker AuthZ plugin bypass and Moby privilege validation issues 2026-03-30 22:38:33 +00:00
GitHub Actions
0a43a76a4a fix: update CROWDSEC_VERSION to 1.7.7 for compatibility improvements 2026-03-30 22:20:37 +00:00
GitHub Actions
c852838644 fix: update CORAZA_CADDY_VERSION to 2.3.0 for compatibility improvements 2026-03-30 22:19:49 +00:00
GitHub Actions
9740ddb813 fix: update CADDY_SECURITY_VERSION to 1.1.57 for security improvements 2026-03-30 22:19:07 +00:00
renovate[bot]
5abd01f61c fix(deps): update dependency react-i18next to v17 2026-03-30 22:01:12 +00:00
renovate[bot]
e40a241d62 fix(deps): update dependency i18next to v26 2026-03-30 22:01:05 +00:00
renovate[bot]
a72e587d29 chore(deps): update dependency eslint-plugin-unicorn to v64 2026-03-30 22:00:58 +00:00
renovate[bot]
976ae0272b chore(deps): update dependency @eslint/markdown to v8 2026-03-30 22:00:51 +00:00
renovate[bot]
ccd3081d09 chore(deps): update codecov/codecov-action action to v6 2026-03-30 22:00:43 +00:00
renovate[bot]
844c800cd9 chore(deps): update actions/deploy-pages action to v5 2026-03-30 22:00:38 +00:00
GitHub Actions
e6c4e46dd8 chore: Refactor test setup for Gin framework
- Removed redundant `gin.SetMode(gin.TestMode)` calls from individual test files.
- Introduced a centralized `TestMain` function in `testmain_test.go` to set the Gin mode for all tests.
- Ensured consistent test environment setup across various handler test files.
2026-03-25 22:00:07 +00:00
GitHub Actions
f40fca844f fix: update CADDY_SECURITY_VERSION to 1.1.53 for security improvements 2026-03-25 20:47:46 +00:00
GitHub Actions
c7daa4ac46 chore(deps): update electron-to-chromium, lucide-react, and undici to latest versions 2026-03-25 19:36:52 +00:00
GitHub Actions
0a4ac41242 fix: update CADDY_SECURITY_VERSION to 1.1.52 for security improvements 2026-03-25 19:34:48 +00:00
GitHub Actions
3336aae2a0 chore: enforce local patch coverage as a blocking DoD gate
- Added ~40 backend tests covering uncovered branches in CrowdSec
  dashboard handlers (error paths, validation, export edge cases)
- Patch coverage improved from 81.5% to 98.3%, exceeding 90% threshold
- Fixed DoD ordering: coverage tests now run before the patch report
  (the report requires coverage artifacts as input)
- Rewrote the local patch coverage DoD step in both the Management agent
  and testing instructions to clarify purpose, prerequisites, required
  action on findings, and blocking gate semantics
- Eliminated ambiguous "advisory" language that allowed agents to skip
  acting on uncovered lines
2026-03-25 19:33:19 +00:00
GitHub Actions
1fe69c2a15 feat: add Top Attacking IPs chart component and integrate into CrowdSec configuration page
- Implemented TopAttackingIPsChart component for visualizing top attacking IPs.
- Created hooks for fetching CrowdSec dashboard data including summary, timeline, top IPs, scenarios, and alerts.
- Added tests for the new hooks to ensure data fetching works as expected.
- Updated translation files for new dashboard terms in multiple languages.
- Refactored CrowdSecConfig page to include a tabbed interface for configuration and dashboard views.
- Added end-to-end tests for CrowdSec dashboard functionality including tab navigation, data display, and interaction with time range and refresh features.
2026-03-25 17:19:15 +00:00
Jeremy
846eedeab0 Merge pull request #885 from Wikid82/renovate/feature/beta-release-non-major-updates
chore(deps): update dependency knip to ^6.0.5 (feature/beta-release)
2026-03-25 08:33:19 -04:00
renovate[bot]
37c7c4aeb8 chore(deps): update dependency knip to ^6.0.5 2026-03-25 11:35:17 +00:00
187 changed files with 9187 additions and 2743 deletions

View File

@@ -310,10 +310,11 @@ ACQUIS_EOF
echo "✗ WARNING: LAPI port configuration may be incorrect"
fi
# Update hub index to ensure CrowdSec can start
if [ ! -f "/etc/crowdsec/hub/.index.json" ]; then
echo "Updating CrowdSec hub index..."
timeout 60s cscli hub update 2>/dev/null || echo "⚠️ Hub update timed out or failed, continuing..."
# Always refresh hub index on startup (stale index causes hash mismatch errors on collection install)
echo "Updating CrowdSec hub index..."
if ! timeout 60s cscli hub update 2>&1; then
echo "⚠️ Hub index update failed (network issue?). Collections may fail to install."
echo " CrowdSec will still start with whatever index is cached."
fi
# Ensure local machine is registered (auto-heal for volume/config mismatch)
@@ -321,12 +322,11 @@ ACQUIS_EOF
echo "Registering local machine..."
cscli machines add -a --force 2>/dev/null || echo "Warning: Machine registration may have failed"
# Install hub items (parsers, scenarios, collections) if local mode enabled
if [ "$SECURITY_CROWDSEC_MODE" = "local" ]; then
echo "Installing CrowdSec hub items..."
if [ -x /usr/local/bin/install_hub_items.sh ]; then
/usr/local/bin/install_hub_items.sh 2>/dev/null || echo "Warning: Some hub items may not have installed"
fi
# Always ensure required collections are present (idempotent — already-installed items are skipped).
# Collections are just config files with zero runtime cost when CrowdSec is disabled.
echo "Ensuring CrowdSec hub items are installed..."
if [ -x /usr/local/bin/install_hub_items.sh ]; then
/usr/local/bin/install_hub_items.sh || echo "⚠️ Some hub items may not have installed. CrowdSec can still start."
fi
# Fix ownership AFTER cscli commands (they run as root and create root-owned files)

View File

@@ -167,23 +167,27 @@ The task is not complete until ALL of the following pass with zero issues:
- **Base URL**: Uses `PLAYWRIGHT_BASE_URL` or default from `playwright.config.js`
- All E2E tests must pass before proceeding to unit tests
2. **Local Patch Coverage Preflight (MANDATORY - Before Unit/Coverage Tests)**:
- Ensure the local patch report is run first via VS Code task `Test: Local Patch Report` or `bash scripts/local-patch-report.sh`.
- Verify both artifacts exist: `test-results/local-patch-report.md` and `test-results/local-patch-report.json`.
- Use this report to identify changed files needing coverage before running backend/frontend coverage suites.
3. **Coverage Tests (MANDATORY - Verify Explicitly)**:
2. **Coverage Tests (MANDATORY - Verify Explicitly)**:
- **Backend**: Ensure `Backend_Dev` ran VS Code task "Test: Backend with Coverage" or `scripts/go-test-coverage.sh`
- **Frontend**: Ensure `Frontend_Dev` ran VS Code task "Test: Frontend with Coverage" or `scripts/frontend-test-coverage.sh`
- **Why**: These are in manual stage of pre-commit for performance. Subagents MUST run them via VS Code tasks or scripts.
- Minimum coverage: 85% for both backend and frontend.
- All tests must pass with zero failures.
- **Outputs**: `backend/coverage.txt` and `frontend/coverage/lcov.info` — these are required inputs for step 3.
3. **Local Patch Coverage Report (MANDATORY - After Coverage Tests)**:
- **Purpose**: Identify uncovered lines in files modified by this task so missing tests are written before declaring Done. This is the bridge between "overall coverage is fine" and "the actual lines I changed are tested."
- **Prerequisites**: `backend/coverage.txt` and `frontend/coverage/lcov.info` must exist (generated by step 2). If missing, run coverage tests first.
- **Run**: VS Code task `Test: Local Patch Report` or `bash scripts/local-patch-report.sh`.
- **Verify artifacts**: Both `test-results/local-patch-report.md` and `test-results/local-patch-report.json` must exist with non-empty results.
- **Act on findings**: If patch coverage for any changed file is below **90%**, delegate to the responsible agent (`Backend_Dev` or `Frontend_Dev`) to add targeted tests covering the uncovered lines. Re-run coverage (step 2) and this report until the threshold is met.
- **Blocking gate**: 90% overall patch coverage. Do not proceed to pre-commit or security scans until resolved or explicitly waived by the user.
4. **Type Safety (Frontend)**:
- Ensure `Frontend_Dev` ran VS Code task "Lint: TypeScript Check" or `npm run type-check`
- **Why**: This check is in manual stage of pre-commit for performance. Subagents MUST run it explicitly.
5. **Pre-commit Hooks**: Ensure `QA_Security` ran `pre-commit run --all-files` (fast hooks only; coverage was verified in step 3)
5. **Pre-commit Hooks**: Ensure `QA_Security` ran `pre-commit run --all-files` (fast hooks only; coverage was verified in step 2)
6. **Security Scans**: Ensure `QA_Security` ran the following with zero Critical or High severity issues:
- **Trivy Filesystem Scan**: Fast scan of source code and dependencies

View File

@@ -12,9 +12,19 @@ instruction files take precedence over agent files and operator documentation.
**MANDATORY**: Before running unit tests, verify the application UI/UX functions correctly end-to-end.
## 0.5 Local Patch Coverage Preflight (Before Unit Tests)
## 0.5 Local Patch Coverage Report (After Coverage Tests)
**MANDATORY**: After E2E and before backend/frontend unit coverage runs, generate a local patch report so uncovered changed lines are visible early.
**MANDATORY**: After running backend and frontend coverage tests (which generate
`backend/coverage.txt` and `frontend/coverage/lcov.info`), run the local patch
report to identify uncovered lines in changed files.
**Purpose**: Overall coverage can be healthy while the specific lines you changed
are untested. This step catches that gap. If uncovered lines are found in
feature code, add targeted tests before completing the task.
**Prerequisites**: Coverage artifacts must exist before running the report:
- `backend/coverage.txt` — generated by `scripts/go-test-coverage.sh`
- `frontend/coverage/lcov.info` — generated by `scripts/frontend-test-coverage.sh`
Run one of the following from `/projects/Charon`:
@@ -26,11 +36,14 @@ Test: Local Patch Report
bash scripts/local-patch-report.sh
```
Required artifacts:
Required output artifacts:
- `test-results/local-patch-report.md`
- `test-results/local-patch-report.json`
This preflight is advisory for thresholds during rollout, but artifact generation is required in DoD.
**Action on results**: If patch coverage for any changed file is below 90%, add
tests targeting the uncovered changed lines. Re-run coverage and this report to
verify improvement. Artifact generation is required for DoD regardless of
threshold results.
### PREREQUISITE: Start E2E Environment

.github/renovate.json (vendored) — 34 lines changed
View File

@@ -11,6 +11,7 @@
"development"
],
"postUpdateOptions": ["npmDedupe"],
"timezone": "America/New_York",
"dependencyDashboard": true,
"dependencyDashboardApproval": true,
@@ -231,9 +232,24 @@
"datasourceTemplate": "github-releases",
"versioningTemplate": "semver",
"extractVersionTemplate": "^v(?<version>.*)$"
},
{
"customType": "regex",
"description": "Track go-version in skill example workflows",
"managerFilePatterns": ["/^\\.github/skills/examples/.*\\.yml$/"],
"matchStrings": [
"go-version: [\"']?(?<currentValue>[\\d\\.]+)[\"']?"
],
"depNameTemplate": "golang/go",
"datasourceTemplate": "golang-version",
"versioningTemplate": "semver"
}
],
"github-actions": {
"fileMatch": ["^\\.github/skills/examples/.*\\.ya?ml$"]
},
"packageRules": [
{
"description": "THE MEGAZORD: Group ALL non-major updates (NPM, Docker, Go, Actions) into one PR",
@@ -276,6 +292,24 @@
"matchPackageNames": ["caddy"],
"allowedVersions": "<3.0.0"
},
{
"description": "Go: keep pgx within v4 (CrowdSec requires pgx/v4 module path)",
"matchDatasources": ["go"],
"matchPackageNames": ["github.com/jackc/pgx/v4"],
"allowedVersions": "<5.0.0"
},
{
"description": "Go: keep go-jose/v3 within v3 (v4 is a different Go module path)",
"matchDatasources": ["go"],
"matchPackageNames": ["github.com/go-jose/go-jose/v3"],
"allowedVersions": "<4.0.0"
},
{
"description": "Go: keep go-jose/v4 within v4 (v5 would be a different Go module path)",
"matchDatasources": ["go"],
"matchPackageNames": ["github.com/go-jose/go-jose/v4"],
"allowedVersions": "<5.0.0"
},
{
"description": "Safety: Keep MAJOR updates separate and require manual review",
"matchUpdateTypes": ["major"],

View File

@@ -25,7 +25,7 @@ jobs:
- name: Setup Go
uses: actions/setup-go@v5
with:
go-version: "1.26.1"
go-version: "1.26.2"
- name: Run GORM Security Scanner
id: gorm-scan

View File

@@ -35,7 +35,7 @@ fi
# Check Grype
if ! command -v grype >/dev/null 2>&1; then
log_error "Grype not found - install from: https://github.com/anchore/grype"
log_error "Installation: curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.110.0"
log_error "Installation: curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.111.0"
error_exit "Grype is required for vulnerability scanning" 2
fi
@@ -50,8 +50,8 @@ SYFT_INSTALLED_VERSION=$(syft version | grep -oP 'Version:\s*\Kv?[0-9]+\.[0-9]+\
GRYPE_INSTALLED_VERSION=$(grype version | grep -oP 'Version:\s*\Kv?[0-9]+\.[0-9]+\.[0-9]+' | head -1 || echo "unknown")
# Set defaults matching CI workflow
set_default_env "SYFT_VERSION" "v1.42.3"
set_default_env "GRYPE_VERSION" "v0.110.0"
set_default_env "SYFT_VERSION" "v1.42.4"
set_default_env "GRYPE_VERSION" "v0.111.0"
set_default_env "IMAGE_TAG" "charon:local"
set_default_env "FAIL_ON_SEVERITY" "Critical,High"
@@ -139,7 +139,10 @@ log_info "This may take 30-60 seconds on first run (database download)"
# Run Grype against the SBOM (generated from image, not filesystem)
# This matches exactly what CI does in supply-chain-pr.yml
# --config ensures .grype.yaml ignore rules are applied, separating
# ignored matches from actionable ones in the JSON output
if grype sbom:sbom.cyclonedx.json \
--config .grype.yaml \
--output json \
--file grype-results.json; then
log_success "Vulnerability scan complete"
@@ -149,6 +152,7 @@ fi
# Generate SARIF output for GitHub Security (matches CI)
grype sbom:sbom.cyclonedx.json \
--config .grype.yaml \
--output sarif \
--file grype-results.sarif 2>/dev/null || true

View File

@@ -8,6 +8,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number || github.event.pull_request.number }}
cancel-in-progress: false
permissions:
contents: read
jobs:
add-to-project:
runs-on: ubuntu-latest

View File

@@ -12,6 +12,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.workflow_run.head_branch || github.head_ref || github.ref_name }}
cancel-in-progress: true
permissions:
contents: write
jobs:
update-draft:
runs-on: ubuntu-latest
@@ -21,6 +24,6 @@ jobs:
with:
ref: ${{ github.event.workflow_run.head_sha || github.sha }}
- name: Draft Release
uses: release-drafter/release-drafter@139054aeaa9adc52ab36ddf67437541f039b88e2 # v7
uses: release-drafter/release-drafter@5de93583980a40bd78603b6dfdcda5b4df377b32 # v7
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -8,6 +8,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number }}
cancel-in-progress: true
permissions:
contents: read
jobs:
auto-label:
runs-on: ubuntu-latest
@@ -15,7 +18,7 @@ jobs:
issues: write
steps:
- name: Auto-label based on title and body
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const issue = context.payload.issue;

View File

@@ -12,7 +12,7 @@ concurrency:
cancel-in-progress: true
env:
GO_VERSION: '1.26.1'
GO_VERSION: '1.26.2'
GOTOOLCHAIN: auto
# Minimal permissions at workflow level; write permissions granted at job level for push only
@@ -35,7 +35,7 @@ jobs:
ref: ${{ github.event.workflow_run.head_sha || github.sha }}
- name: Set up Go
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version: ${{ env.GO_VERSION }}

View File

@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check for Caddy v3 and open issue
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const upstream = { owner: 'caddyserver', repo: 'caddy' };

View File

@@ -20,6 +20,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.workflow_run.event || github.event_name }}-${{ github.event.workflow_run.head_branch || github.ref }}
cancel-in-progress: true
permissions:
contents: read
jobs:
cerberus-integration:
name: Cerberus Security Stack Integration

View File

@@ -23,7 +23,7 @@ concurrency:
cancel-in-progress: true
env:
GO_VERSION: '1.26.1'
GO_VERSION: '1.26.2'
NODE_VERSION: '24.12.0'
GOTOOLCHAIN: auto
@@ -45,7 +45,7 @@ jobs:
ref: ${{ github.sha }}
- name: Set up Go
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version: ${{ env.GO_VERSION }}
@@ -139,14 +139,14 @@ jobs:
- name: Upload test output artifact
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: backend-test-output
path: backend/test-output.txt
retention-days: 7
- name: Upload backend coverage to Codecov
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./backend/coverage.txt
@@ -183,7 +183,7 @@ jobs:
exit "${PIPESTATUS[0]}"
- name: Upload frontend coverage to Codecov
uses: codecov/codecov-action@1af58845a975a7985b0beb0cbe6fbbb71a41dbad # v5
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: ./frontend/coverage

View File

@@ -15,7 +15,7 @@ concurrency:
env:
GOTOOLCHAIN: auto
GO_VERSION: '1.26.1'
GO_VERSION: '1.26.2'
permissions:
contents: read
@@ -52,7 +52,7 @@ jobs:
run: bash scripts/ci/check-codeql-parity.sh
- name: Initialize CodeQL
uses: github/codeql-action/init@38697555549f1db7851b81482ff19f1fa5c4fedc # v4
uses: github/codeql-action/init@c10b8064de6f491fea524254123dbe5e09572f13 # v4
with:
languages: ${{ matrix.language }}
queries: security-and-quality
@@ -63,7 +63,7 @@ jobs:
- name: Setup Go
if: matrix.language == 'go'
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version: ${{ env.GO_VERSION }}
cache-dependency-path: backend/go.sum
@@ -92,10 +92,10 @@ jobs:
run: mkdir -p sarif-results
- name: Autobuild
uses: github/codeql-action/autobuild@38697555549f1db7851b81482ff19f1fa5c4fedc # v4
uses: github/codeql-action/autobuild@c10b8064de6f491fea524254123dbe5e09572f13 # v4
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@38697555549f1db7851b81482ff19f1fa5c4fedc # v4
uses: github/codeql-action/analyze@c10b8064de6f491fea524254123dbe5e09572f13 # v4
with:
category: "/language:${{ matrix.language }}"
output: sarif-results/${{ matrix.language }}

View File

@@ -88,7 +88,7 @@ jobs:
- name: Upload GHCR prune artifacts
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: prune-ghcr-log-${{ github.run_id }}
path: |
@@ -159,7 +159,7 @@ jobs:
- name: Upload Docker Hub prune artifacts
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: prune-dockerhub-log-${{ github.run_id }}
path: |

View File

@@ -8,6 +8,9 @@ concurrency:
group: ${{ github.workflow }}
cancel-in-progress: false
permissions:
contents: read
jobs:
create-labels:
runs-on: ubuntu-latest
@@ -15,7 +18,7 @@ jobs:
issues: write
steps:
- name: Create all project labels
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const labels = [

View File

@@ -20,6 +20,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.workflow_run.event || github.event_name }}-${{ github.event.workflow_run.head_branch || github.ref }}
cancel-in-progress: true
permissions:
contents: read
jobs:
crowdsec-integration:
name: CrowdSec Bouncer Integration

View File

@@ -33,6 +33,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.head_ref || github.ref_name }}
cancel-in-progress: true
permissions:
contents: read
env:
GHCR_REGISTRY: ghcr.io
DOCKERHUB_REGISTRY: docker.io
@@ -130,7 +133,7 @@ jobs:
- name: Log in to GitHub Container Registry
if: steps.skip.outputs.skip_build != 'true'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.GHCR_REGISTRY }}
username: ${{ github.actor }}
@@ -138,7 +141,7 @@ jobs:
- name: Log in to Docker Hub
if: steps.skip.outputs.skip_build != 'true' && env.HAS_DOCKERHUB_TOKEN == 'true'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: docker.io
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -344,7 +347,7 @@ jobs:
- name: Upload Image Artifact
if: success() && steps.skip.outputs.skip_build != 'true' && env.TRIGGER_EVENT == 'pull_request'
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: ${{ env.TRIGGER_EVENT == 'pull_request' && format('pr-image-{0}', env.TRIGGER_PR_NUMBER) || 'push-image' }}
path: /tmp/charon-pr-image.tar
@@ -565,7 +568,7 @@ jobs:
- name: Upload Trivy results
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.trivy-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1
uses: github/codeql-action/upload-sarif@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1
with:
sarif_file: 'trivy-results.sarif'
category: '.github/workflows/docker-build.yml:build-and-push'
@@ -594,7 +597,7 @@ jobs:
# Install Cosign for keyless signing
- name: Install Cosign
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
uses: sigstore/cosign-installer@ba7bc0a3fef59531c69a25acd34668d6d3fe6f22 # v4.1.0
uses: sigstore/cosign-installer@cad07c2e89fa2edd6e2d7bab4c1aa38e53f76003 # v4.1.1
# Sign GHCR image with keyless signing (Sigstore/Fulcio)
- name: Sign GHCR Image
@@ -660,7 +663,7 @@ jobs:
echo "image_ref=${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:${PR_TAG}" >> "$GITHUB_OUTPUT"
- name: Log in to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.GHCR_REGISTRY }}
username: ${{ github.actor }}
@@ -724,14 +727,14 @@ jobs:
- name: Upload Trivy scan results
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1
uses: github/codeql-action/upload-sarif@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1
with:
sarif_file: 'trivy-pr-results.sarif'
category: 'docker-pr-image'
- name: Upload Trivy compatibility results (docker-build category)
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1
uses: github/codeql-action/upload-sarif@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1
with:
sarif_file: 'trivy-pr-results.sarif'
category: '.github/workflows/docker-build.yml:build-and-push'
@@ -739,7 +742,7 @@ jobs:
- name: Upload Trivy compatibility results (docker-publish alias)
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1
uses: github/codeql-action/upload-sarif@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1
with:
sarif_file: 'trivy-pr-results.sarif'
category: '.github/workflows/docker-publish.yml:build-and-push'
@@ -747,7 +750,7 @@ jobs:
- name: Upload Trivy compatibility results (nightly alias)
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1
uses: github/codeql-action/upload-sarif@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1
with:
sarif_file: 'trivy-pr-results.sarif'
category: 'trivy-nightly'

View File

@@ -53,7 +53,7 @@ jobs:
- name: Detect changed files
id: changes
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
env:
COMMIT_SHA: ${{ github.event.workflow_run.head_sha || github.sha }}
with:
@@ -95,7 +95,7 @@ jobs:
- name: Process issue files
id: process
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
env:
DRY_RUN: ${{ github.event.inputs.dry_run || 'false' }}
with:

View File

@@ -372,7 +372,7 @@ jobs:
# Deploy to GitHub Pages
- name: 🚀 Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4
uses: actions/deploy-pages@cd2ce8fcbc39b97be8ca5fce6e763baed58fa128 # v5
# Create a summary
- name: 📋 Create deployment summary

View File

@@ -83,7 +83,7 @@ on:
env:
NODE_VERSION: '20'
GO_VERSION: '1.26.1'
GO_VERSION: '1.26.2'
GOTOOLCHAIN: auto
DOCKERHUB_REGISTRY: docker.io
IMAGE_NAME: ${{ github.repository_owner }}/charon
@@ -142,7 +142,7 @@ jobs:
- name: Set up Go
if: steps.resolve-image.outputs.image_source == 'build'
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version: ${{ env.GO_VERSION }}
@@ -175,7 +175,7 @@ jobs:
- name: Build Docker image
id: build-image
if: steps.resolve-image.outputs.image_source == 'build'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7
with:
context: .
file: ./Dockerfile
@@ -191,7 +191,7 @@ jobs:
- name: Upload Docker image artifact
if: steps.resolve-image.outputs.image_source == 'build'
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: docker-image
path: charon-e2e-image.tar
@@ -233,7 +233,7 @@ jobs:
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.DOCKERHUB_REGISTRY }}
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -348,7 +348,7 @@ jobs:
- name: Upload HTML report (Chromium Security)
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: playwright-report-chromium-security
path: playwright-report/
@@ -356,7 +356,7 @@ jobs:
- name: Upload Chromium Security coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-coverage-chromium-security
path: coverage/e2e/
@@ -364,7 +364,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: traces-chromium-security
path: test-results/**/*.zip
@@ -383,7 +383,7 @@ jobs:
- name: Upload diagnostics
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-diagnostics-chromium-security
path: diagnostics/
@@ -396,7 +396,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: docker-logs-chromium-security
path: docker-logs-chromium-security.txt
@@ -435,7 +435,7 @@ jobs:
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.DOCKERHUB_REGISTRY }}
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -558,7 +558,7 @@ jobs:
- name: Upload HTML report (Firefox Security)
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: playwright-report-firefox-security
path: playwright-report/
@@ -566,7 +566,7 @@ jobs:
- name: Upload Firefox Security coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-coverage-firefox-security
path: coverage/e2e/
@@ -574,7 +574,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: traces-firefox-security
path: test-results/**/*.zip
@@ -593,7 +593,7 @@ jobs:
- name: Upload diagnostics
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-diagnostics-firefox-security
path: diagnostics/
@@ -606,7 +606,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: docker-logs-firefox-security
path: docker-logs-firefox-security.txt
@@ -645,7 +645,7 @@ jobs:
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.DOCKERHUB_REGISTRY }}
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -768,7 +768,7 @@ jobs:
- name: Upload HTML report (WebKit Security)
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: playwright-report-webkit-security
path: playwright-report/
@@ -776,7 +776,7 @@ jobs:
- name: Upload WebKit Security coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-coverage-webkit-security
path: coverage/e2e/
@@ -784,7 +784,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: traces-webkit-security
path: test-results/**/*.zip
@@ -803,7 +803,7 @@ jobs:
- name: Upload diagnostics
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-diagnostics-webkit-security
path: diagnostics/
@@ -816,7 +816,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: docker-logs-webkit-security
path: docker-logs-webkit-security.txt
@@ -899,7 +899,7 @@ jobs:
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.DOCKERHUB_REGISTRY }}
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -1004,7 +1004,7 @@ jobs:
- name: Upload HTML report (Chromium shard ${{ matrix.shard }})
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: playwright-report-chromium-shard-${{ matrix.shard }}
path: playwright-report/
@@ -1012,7 +1012,7 @@ jobs:
- name: Upload Playwright output (Chromium shard ${{ matrix.shard }})
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: playwright-output-chromium-shard-${{ matrix.shard }}
path: playwright-output/chromium-shard-${{ matrix.shard }}/
@@ -1020,7 +1020,7 @@ jobs:
- name: Upload Chromium coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-coverage-chromium-shard-${{ matrix.shard }}
path: coverage/e2e/
@@ -1028,7 +1028,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: traces-chromium-shard-${{ matrix.shard }}
path: test-results/**/*.zip
@@ -1047,7 +1047,7 @@ jobs:
- name: Upload diagnostics
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-diagnostics-chromium-shard-${{ matrix.shard }}
path: diagnostics/
@@ -1060,7 +1060,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: docker-logs-chromium-shard-${{ matrix.shard }}
path: docker-logs-chromium-shard-${{ matrix.shard }}.txt
@@ -1136,7 +1136,7 @@ jobs:
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.DOCKERHUB_REGISTRY }}
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -1249,7 +1249,7 @@ jobs:
- name: Upload HTML report (Firefox shard ${{ matrix.shard }})
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: playwright-report-firefox-shard-${{ matrix.shard }}
path: playwright-report/
@@ -1257,7 +1257,7 @@ jobs:
- name: Upload Playwright output (Firefox shard ${{ matrix.shard }})
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: playwright-output-firefox-shard-${{ matrix.shard }}
path: playwright-output/firefox-shard-${{ matrix.shard }}/
@@ -1265,7 +1265,7 @@ jobs:
- name: Upload Firefox coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-coverage-firefox-shard-${{ matrix.shard }}
path: coverage/e2e/
@@ -1273,7 +1273,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: traces-firefox-shard-${{ matrix.shard }}
path: test-results/**/*.zip
@@ -1292,7 +1292,7 @@ jobs:
- name: Upload diagnostics
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: e2e-diagnostics-firefox-shard-${{ matrix.shard }}
path: diagnostics/
@@ -1305,7 +1305,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: docker-logs-firefox-shard-${{ matrix.shard }}
path: docker-logs-firefox-shard-${{ matrix.shard }}.txt
@@ -1381,7 +1381,7 @@ jobs:
- name: Log in to Docker Hub
if: needs.build.outputs.image_source == 'registry'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.DOCKERHUB_REGISTRY }}
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -1494,7 +1494,7 @@ jobs:
- name: Upload HTML report (WebKit shard ${{ matrix.shard }})
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: playwright-report-webkit-shard-${{ matrix.shard }}
path: playwright-report/
@@ -1502,7 +1502,7 @@ jobs:
- name: Upload Playwright output (WebKit shard ${{ matrix.shard }})
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: playwright-output-webkit-shard-${{ matrix.shard }}
path: playwright-output/webkit-shard-${{ matrix.shard }}/
@@ -1510,7 +1510,7 @@ jobs:
- name: Upload WebKit coverage (if enabled)
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: e2e-coverage-webkit-shard-${{ matrix.shard }}
path: coverage/e2e/
@@ -1518,7 +1518,7 @@ jobs:
- name: Upload test traces on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: traces-webkit-shard-${{ matrix.shard }}
path: test-results/**/*.zip
@@ -1537,7 +1537,7 @@ jobs:
- name: Upload diagnostics
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: e2e-diagnostics-webkit-shard-${{ matrix.shard }}
path: diagnostics/
@@ -1550,7 +1550,7 @@ jobs:
- name: Upload Docker logs on failure
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: docker-logs-webkit-shard-${{ matrix.shard }}
path: docker-logs-webkit-shard-${{ matrix.shard }}.txt
@@ -1606,7 +1606,7 @@ jobs:
steps:
- name: Check test results
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
env:
EFFECTIVE_BROWSER: ${{ inputs.browser || 'all' }}
EFFECTIVE_CATEGORY: ${{ inputs.test_category || 'all' }}

View File

@@ -7,6 +7,9 @@ on:
required: true
type: string
permissions:
contents: read
jobs:
cleanup:
runs-on: ubuntu-latest

View File

@@ -9,6 +9,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.workflow_run.head_branch || github.head_ref || github.ref_name }}
cancel-in-progress: true
permissions:
contents: read
jobs:
test:
runs-on: ubuntu-latest

View File

@@ -15,13 +15,16 @@ on:
default: "false"
env:
GO_VERSION: '1.26.1'
GO_VERSION: '1.26.2'
NODE_VERSION: '24.12.0'
GOTOOLCHAIN: auto
GHCR_REGISTRY: ghcr.io
DOCKERHUB_REGISTRY: docker.io
IMAGE_NAME: wikid82/charon
permissions:
contents: read
jobs:
sync-development-to-nightly:
runs-on: ubuntu-latest
@@ -86,7 +89,7 @@ jobs:
contents: read
steps:
- name: Dispatch Missing Nightly Validation Workflows
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const owner = context.repo.owner;
@@ -178,7 +181,7 @@ jobs:
echo "image=${ALPINE_IMAGE_REF}" >> "$GITHUB_OUTPUT"
- name: Log in to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.GHCR_REGISTRY }}
username: ${{ github.actor }}
@@ -186,7 +189,7 @@ jobs:
- name: Log in to Docker Hub
if: env.HAS_DOCKERHUB_TOKEN == 'true'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: docker.io
username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -209,7 +212,7 @@ jobs:
- name: Build and push Docker image
id: build
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
with:
context: .
platforms: linux/amd64,linux/arm64
@@ -282,7 +285,7 @@ jobs:
echo "Primary SBOM generation failed or produced missing/invalid output; using deterministic Syft fallback"
SYFT_VERSION="v1.42.3"
SYFT_VERSION="v1.42.4"
OS="$(uname -s | tr '[:upper:]' '[:lower:]')"
ARCH="$(uname -m)"
case "$ARCH" in
@@ -325,7 +328,7 @@ jobs:
' sbom-nightly.json >/dev/null
- name: Upload SBOM artifact
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: sbom-nightly
path: sbom-nightly.json
@@ -333,7 +336,7 @@ jobs:
# Install Cosign for keyless signing
- name: Install Cosign
uses: sigstore/cosign-installer@ba7bc0a3fef59531c69a25acd34668d6d3fe6f22 # v4.1.0
uses: sigstore/cosign-installer@cad07c2e89fa2edd6e2d7bab4c1aa38e53f76003 # v4.1.1
# Sign GHCR image with keyless signing (Sigstore/Fulcio)
- name: Sign GHCR Image
@@ -375,7 +378,7 @@ jobs:
run: echo "IMAGE_NAME_LC=${IMAGE_NAME,,}" >> "$GITHUB_ENV"
- name: Log in to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.GHCR_REGISTRY }}
username: ${{ github.actor }}
@@ -391,14 +394,28 @@ jobs:
-p 8080:8080 \
"${IMAGE_REF}"
# Wait for container to start
sleep 10
# Wait for container to become healthy
echo "⏳ Waiting for Charon to be healthy..."
MAX_ATTEMPTS=30
ATTEMPT=0
while [[ ${ATTEMPT} -lt ${MAX_ATTEMPTS} ]]; do
ATTEMPT=$((ATTEMPT + 1))
echo "Attempt ${ATTEMPT}/${MAX_ATTEMPTS}..."
if docker exec charon-nightly wget -qO- http://127.0.0.1:8080/health > /dev/null 2>&1; then
echo "✅ Charon is healthy!"
docker exec charon-nightly wget -qO- http://127.0.0.1:8080/health
break
fi
sleep 2
done
# Check container is running
docker ps | grep charon-nightly
# Basic health check
curl -f http://localhost:8080/health || exit 1
if [[ ${ATTEMPT} -ge ${MAX_ATTEMPTS} ]]; then
echo "❌ Health check failed after ${MAX_ATTEMPTS} attempts"
docker logs charon-nightly
docker stop charon-nightly
docker rm charon-nightly
exit 1
fi
# Cleanup
docker stop charon-nightly
@@ -451,7 +468,7 @@ jobs:
trivyignores: '.trivyignore'
- name: Upload Trivy results
uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1
uses: github/codeql-action/upload-sarif@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1
with:
sarif_file: 'trivy-nightly.sarif'
category: 'trivy-nightly'

View File

@@ -12,6 +12,10 @@ concurrency:
group: ${{ github.workflow }}-${{ inputs.pr_number || github.event.pull_request.number }}
cancel-in-progress: true
permissions:
contents: read
pull-requests: write
jobs:
validate:
name: Validate history-rewrite checklist (conditional)
@@ -21,7 +25,7 @@ jobs:
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Validate PR checklist (only for history-rewrite changes)
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
env:
PR_NUMBER: ${{ inputs.pr_number }}
with:

View File

@@ -33,7 +33,7 @@ jobs:
node-version: ${{ env.NODE_VERSION }}
- name: Propagate Changes
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
env:
CURRENT_BRANCH: ${{ github.event.workflow_run.head_branch || github.ref_name }}
CURRENT_SHA: ${{ github.event.workflow_run.head_sha || github.sha }}

View File

@@ -16,7 +16,7 @@ permissions:
checks: write
env:
GO_VERSION: '1.26.1'
GO_VERSION: '1.26.2'
NODE_VERSION: '24.12.0'
GOTOOLCHAIN: auto
@@ -31,7 +31,7 @@ jobs:
ref: ${{ github.sha }}
- name: Set up Go
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0
with:
go-version: ${{ env.GO_VERSION }}
@@ -138,7 +138,7 @@ jobs:
} >> "$GITHUB_ENV"
- name: Set up Go
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6.4.0
with:
go-version: ${{ env.GO_VERSION }}
@@ -161,7 +161,7 @@ jobs:
- name: Upload test output artifact
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: backend-test-output
path: backend/test-output.txt
@@ -268,6 +268,12 @@ jobs:
cache: 'npm'
cache-dependency-path: frontend/package-lock.json
- name: Verify lockfile integrity and audit dependencies
working-directory: frontend
run: |
npm ci --ignore-scripts
npm audit --audit-level=critical
- name: Check if frontend was modified in PR
id: check-frontend
run: |

View File

@@ -20,6 +20,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.workflow_run.event || github.event_name }}-${{ github.event.workflow_run.head_branch || github.ref }}
cancel-in-progress: true
permissions:
contents: read
jobs:
rate-limit-integration:
name: Rate Limiting Integration

View File

@@ -10,7 +10,7 @@ concurrency:
cancel-in-progress: false
env:
GO_VERSION: '1.26.1'
GO_VERSION: '1.26.2'
NODE_VERSION: '24.12.0'
GOTOOLCHAIN: auto
@@ -45,7 +45,7 @@ jobs:
fi
- name: Set up Go
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version: ${{ env.GO_VERSION }}

View File

@@ -14,6 +14,9 @@ permissions:
pull-requests: write
issues: write
env:
GO_VERSION: '1.26.2'
jobs:
renovate:
runs-on: ubuntu-latest
@@ -24,8 +27,13 @@ jobs:
with:
fetch-depth: 1
- name: Set up Go
uses: actions/setup-go@4a3601121dd01d1626a1e23e37211e3254c1c06c # v6
with:
go-version: ${{ env.GO_VERSION }}
- name: Run Renovate
uses: renovatebot/github-action@68a3ea99af6ad249940b5a9fdf44fc6d7f14378b # v46.1.6
uses: renovatebot/github-action@b67590ea780158ccd13192c22a3655a5231f869d # v46.1.8
with:
configurationFile: .github/renovate.json
token: ${{ secrets.RENOVATE_TOKEN || secrets.GITHUB_TOKEN }}

View File

@@ -30,7 +30,7 @@ jobs:
echo "GITHUB_TOKEN=${{ secrets.CHARON_TOKEN }}" >> "$GITHUB_ENV"
fi
- name: Prune renovate branches
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
github-token: ${{ env.GITHUB_TOKEN }}
script: |

View File

@@ -9,6 +9,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref_name }}
cancel-in-progress: true
permissions:
contents: read
jobs:
repo_health:
name: Repo health
@@ -34,7 +37,7 @@ jobs:
- name: Upload health output
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: repo-health-output
path: |

View File

@@ -22,6 +22,9 @@ concurrency:
group: security-pr-${{ github.event_name == 'workflow_run' && github.event.workflow_run.event || github.event_name }}-${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref }}
cancel-in-progress: true
permissions:
contents: read
jobs:
security-scan:
name: Trivy Binary Scan
@@ -361,7 +364,7 @@ jobs:
- name: Run Trivy filesystem scan (SARIF output)
if: steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request'
# aquasecurity/trivy-action v0.33.1
# aquasecurity/trivy-action 0.35.0
uses: aquasecurity/trivy-action@57a97c7e7821a5776cebc9bb87c984fa69cba8f1
with:
scan-type: 'fs'
@@ -385,7 +388,7 @@ jobs:
- name: Upload Trivy SARIF to GitHub Security
if: always() && steps.trivy-sarif-check.outputs.exists == 'true'
# github/codeql-action v4
uses: github/codeql-action/upload-sarif@eedab83377f873ae39009d167a89b7a5aab4638b
uses: github/codeql-action/upload-sarif@34950e1b113b30df4edee1a6d3a605242df0c40b
with:
sarif_file: 'trivy-binary-results.sarif'
category: ${{ steps.pr-info.outputs.is_push == 'true' && format('security-scan-{0}', github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref_name) || format('security-scan-pr-{0}', steps.pr-info.outputs.pr_number) }}
@@ -393,7 +396,7 @@ jobs:
- name: Run Trivy filesystem scan (fail on CRITICAL/HIGH)
if: steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request'
# aquasecurity/trivy-action v0.33.1
# aquasecurity/trivy-action 0.35.0
uses: aquasecurity/trivy-action@57a97c7e7821a5776cebc9bb87c984fa69cba8f1
with:
scan-type: 'fs'

View File

@@ -19,6 +19,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: false
permissions:
contents: read
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository_owner }}/charon
@@ -61,7 +64,7 @@ jobs:
echo "Base image digest: $DIGEST"
- name: Log in to Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
@@ -77,7 +80,7 @@ jobs:
- name: Build Docker image (NO CACHE)
id: build
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7
with:
context: .
platforms: linux/amd64
@@ -113,7 +116,7 @@ jobs:
version: 'v0.69.3'
- name: Upload Trivy results to GitHub Security
uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1
uses: github/codeql-action/upload-sarif@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1
with:
sarif_file: 'trivy-weekly-results.sarif'
@@ -127,7 +130,7 @@ jobs:
version: 'v0.69.3'
- name: Upload Trivy JSON results
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
with:
name: trivy-weekly-scan-${{ github.run_number }}
path: trivy-weekly-results.json

View File

@@ -281,19 +281,19 @@ jobs:
echo "component_count=${COMPONENT_COUNT}" >> "$GITHUB_OUTPUT"
echo "✅ SBOM generated with ${COMPONENT_COUNT} components"
# Scan for vulnerabilities using manual Grype installation (pinned to v0.107.1)
# Scan for vulnerabilities using manual Grype installation (pinned to v0.110.0)
- name: Install Grype
if: steps.set-target.outputs.image_name != ''
run: |
curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.110.0
curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.111.0
- name: Scan for vulnerabilities
if: steps.set-target.outputs.image_name != ''
id: grype-scan
run: |
echo "🔍 Scanning SBOM for vulnerabilities..."
grype sbom:sbom.cyclonedx.json -o json > grype-results.json
grype sbom:sbom.cyclonedx.json -o sarif > grype-results.sarif
grype sbom:sbom.cyclonedx.json --config .grype.yaml -o json > grype-results.json
grype sbom:sbom.cyclonedx.json --config .grype.yaml -o sarif > grype-results.sarif
- name: Debug Output Files
if: steps.set-target.outputs.image_name != ''
@@ -362,7 +362,7 @@ jobs:
- name: Upload SARIF to GitHub Security
if: steps.check-artifact.outputs.artifact_found == 'true'
uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4
uses: github/codeql-action/upload-sarif@c10b8064de6f491fea524254123dbe5e09572f13 # v4
continue-on-error: true
with:
sarif_file: grype-results.sarif

View File

@@ -144,7 +144,7 @@ jobs:
- name: Upload SBOM Artifact
if: steps.image-check.outputs.exists == 'true' && always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: sbom-${{ steps.tag.outputs.tag }}
path: sbom-verify.cyclonedx.json
@@ -324,7 +324,7 @@ jobs:
- name: Upload Vulnerability Scan Artifact
if: steps.validate-sbom.outputs.valid == 'true' && always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1
with:
name: vulnerability-scan-${{ steps.tag.outputs.tag }}
path: |
@@ -362,7 +362,7 @@ jobs:
if: |
github.event_name == 'pull_request' ||
(github.event_name == 'workflow_run' && github.event.workflow_run.event == 'pull_request')
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
with:
result-encoding: string
script: |

View File

@@ -105,7 +105,7 @@ jobs:
- name: Create Pull Request
if: steps.checksum.outputs.needs_update == 'true'
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
uses: peter-evans/create-pull-request@5f6978faf089d4d20b00c7766989d076bb2fc7f1 # v8.1.1
with:
title: "chore(docker): update GeoLite2-Country.mmdb checksum"
body: |
@@ -160,7 +160,7 @@ jobs:
- name: Report failure via GitHub Issue
if: failure()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const errorType = '${{ steps.checksum.outputs.error }}' || 'unknown';

View File

@@ -20,6 +20,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.event.workflow_run.event || github.event_name }}-${{ github.event.workflow_run.head_branch || github.ref }}
cancel-in-progress: true
permissions:
contents: read
jobs:
waf-integration:
name: Coraza WAF Integration

View File

@@ -47,7 +47,7 @@ jobs:
steps:
- name: Check Nightly Workflow Status
id: check
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const skipCheck = '${{ inputs.skip_workflow_check }}' === 'true';
@@ -274,7 +274,7 @@ jobs:
- name: Check for Existing PR
id: existing-pr
if: steps.check-diff.outputs.skipped != 'true'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const { data: pulls } = await github.rest.pulls.list({
@@ -297,7 +297,7 @@ jobs:
- name: Create Promotion PR
id: create-pr
if: steps.check-diff.outputs.skipped != 'true' && steps.existing-pr.outputs.exists != 'true'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const fs = require('fs');
@@ -399,7 +399,7 @@ jobs:
- name: Update Existing PR
if: steps.check-diff.outputs.skipped != 'true' && steps.existing-pr.outputs.exists == 'true'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const prNumber = ${{ steps.existing-pr.outputs.pr_number }};
@@ -425,7 +425,7 @@ jobs:
contents: read
steps:
- name: Dispatch missing required workflows on nightly head
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const owner = context.repo.owner;
@@ -483,7 +483,7 @@ jobs:
steps:
- name: Create Failure Issue
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9
with:
script: |
const isHealthy = '${{ needs.check-nightly-health.outputs.is_healthy }}';

View File

@@ -32,7 +32,8 @@ ignore:
#
# Review:
# - Reviewed 2026-03-18 (initial suppression): no upstream fix available. Set 30-day review.
# - Next review: 2026-04-18. Remove suppression immediately once upstream fixes.
# - Extended 2026-04-04: Alpine 3.23 still ships 3.5.5-r0. No upstream fix available.
# - Next review: 2026-05-18. Remove suppression immediately once upstream fixes.
#
# Removal Criteria:
# - Alpine publishes a patched version of libcrypto3 and libssl3
@@ -52,7 +53,7 @@ ignore:
No upstream fix: Alpine 3.23 still ships libcrypto3 3.5.5-r0 as of 2026-03-18. Charon
terminates TLS at the Caddy layer; the Go backend does not act as a raw TLS 1.3 server.
Risk accepted pending Alpine upstream patch.
expiry: "2026-04-18" # Initial 30-day review period. Extend in 1430 day increments with documented justification.
expiry: "2026-05-18" # Extended 2026-04-04: Alpine 3.23 still ships 3.5.5-r0. Next review 2026-05-18.
# Action items when this suppression expires:
# 1. Check Alpine security tracker: https://security.alpinelinux.org/vuln/CVE-2026-2673
@@ -74,7 +75,130 @@ ignore:
No upstream fix: Alpine 3.23 still ships libssl3 3.5.5-r0 as of 2026-03-18. Charon
terminates TLS at the Caddy layer; the Go backend does not act as a raw TLS 1.3 server.
Risk accepted pending Alpine upstream patch.
expiry: "2026-04-18" # Initial 30-day review period. See libcrypto3 entry above for action items.
expiry: "2026-05-18" # Extended 2026-04-04: see libcrypto3 entry above for action items.
# CVE-2026-31790: OpenSSL vulnerability in Alpine base image packages
# Severity: HIGH
# Packages: libcrypto3 3.5.5-r0 and libssl3 3.5.5-r0 (Alpine apk)
# Status: No upstream fix available — Alpine 3.23 still ships libcrypto3/libssl3 3.5.5-r0 as of 2026-04-09
#
# Root Cause (No Fix Available):
# - Alpine upstream has not published a patched libcrypto3/libssl3 for Alpine 3.23.
# - Checked: Alpine 3.23 still ships libcrypto3/libssl3 3.5.5-r0 as of 2026-04-09.
# - Fix path: once Alpine publishes a patched libcrypto3/libssl3, rebuild the Docker image
# and remove this suppression.
#
# Risk Assessment: ACCEPTED (No upstream fix; documented in SECURITY.md)
# - Charon terminates TLS at the Caddy layer — the Go backend does not act as a raw TLS server.
# - Container-level isolation reduces the attack surface further.
#
# Mitigation (active while suppression is in effect):
# - Monitor Alpine security advisories: https://security.alpinelinux.org/vuln/CVE-2026-31790
# - Weekly CI security rebuild (security-weekly-rebuild.yml) flags any new CVEs in the full image.
#
# Review:
# - Reviewed 2026-04-09 (initial suppression): no upstream fix available. Set 30-day review.
# - Next review: 2026-05-09. Remove suppression immediately once upstream fixes.
#
# Removal Criteria:
# - Alpine publishes a patched version of libcrypto3 and libssl3
# - Rebuild Docker image and verify CVE-2026-31790 no longer appears in grype-results.json
# - Remove both these entries and the corresponding .trivyignore entry simultaneously
#
# References:
# - CVE-2026-31790: https://nvd.nist.gov/vuln/detail/CVE-2026-31790
# - Alpine security tracker: https://security.alpinelinux.org/vuln/CVE-2026-31790
- vulnerability: CVE-2026-31790
package:
name: libcrypto3
version: "3.5.5-r0"
type: apk
reason: |
HIGH — OpenSSL vulnerability in libcrypto3 3.5.5-r0 (Alpine base image).
No upstream fix: Alpine 3.23 still ships libcrypto3 3.5.5-r0 as of 2026-04-09. Charon
terminates TLS at the Caddy layer; the Go backend does not act as a raw TLS server.
Risk accepted pending Alpine upstream patch. Documented in SECURITY.md.
expiry: "2026-05-09" # Reviewed 2026-04-09: no upstream fix available. Next review 2026-05-09.
# Action items when this suppression expires:
# 1. Check Alpine security tracker: https://security.alpinelinux.org/vuln/CVE-2026-31790
# 2. If a patched Alpine package is now available:
# a. Rebuild Docker image without suppression
# b. Run local security-scan-docker-image and confirm CVE is resolved
# c. Remove this suppression entry, the libssl3 entry below, and the .trivyignore entry
# 3. If no fix yet: Extend expiry by 1430 days and update the review comment above
# 4. If extended 3+ times: Open an issue to track the upstream status formally
# CVE-2026-31790 (libssl3) — see full justification in the libcrypto3 entry above
- vulnerability: CVE-2026-31790
package:
name: libssl3
version: "3.5.5-r0"
type: apk
reason: |
HIGH — OpenSSL vulnerability in libssl3 3.5.5-r0 (Alpine base image).
No upstream fix: Alpine 3.23 still ships libssl3 3.5.5-r0 as of 2026-04-09. Charon
terminates TLS at the Caddy layer; the Go backend does not act as a raw TLS server.
Risk accepted pending Alpine upstream patch. Documented in SECURITY.md.
expiry: "2026-05-09" # Reviewed 2026-04-09: see libcrypto3 entry above for action items.
# GHSA-69x3-g4r3-p962 / CVE-2026-25793: Nebula ECDSA Signature Malleability
# Severity: HIGH (CVSS 8.1)
# Package: github.com/slackhq/nebula v1.9.7 (embedded in /usr/bin/caddy via smallstep/certificates)
# Status: Fix exists in nebula v1.10.3 — smallstep/certificates cannot compile against v1.10+ APIs
#
# Vulnerability Details:
# - ECDSA signature malleability in nebula allows potential authentication bypass via
# crafted certificate signatures (CWE-347).
# - CVSSv3: AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:H/A:N (CVSS 8.1)
#
# Root Cause (Third-Party Binary + Upstream API Incompatibility):
# - Charon does not use nebula directly. The library is compiled into the Caddy binary
# via the caddy-security plugin → smallstep/certificates dependency chain.
# - Nebula v1.10.3 patches the vulnerability but removes legacy APIs that
# smallstep/certificates (through v0.30.2) depends on, causing compile failures.
# - Fix path: once smallstep/certificates releases a version compatible with nebula >= v1.10.3,
# update the Dockerfile and remove this suppression.
#
# Risk Assessment: ACCEPTED (No direct use + upstream API incompatibility blocks fix)
# - Charon does not use Nebula VPN PKI by default. The vulnerable code path is only
# reachable if Nebula-based certificate provisioning is explicitly configured.
# - The attack requires network access and a crafted certificate, which is not part of
# standard Charon deployment.
#
# Mitigation (active while suppression is in effect):
# - Monitor smallstep/certificates releases: https://github.com/smallstep/certificates/releases
# - Monitor nebula releases: https://github.com/slackhq/nebula/releases
# - Weekly CI security rebuild flags the moment a compatible upstream ships.
#
# Review:
# - Reviewed 2026-02-19 (initial suppression in .trivyignore): certificates v0.27.5 pins nebula v1.9.x.
# - Re-evaluated 2026-04-10: nebula v1.10.3 has the fix but certificates (through v0.30.2)
# uses legacy APIs removed in v1.10+. Still blocked. Set 30-day review.
# - Next review: 2026-05-10. Remove suppression once certificates ships with nebula >= v1.10.3.
#
# Removal Criteria:
# - smallstep/certificates releases a version compatible with nebula >= v1.10.3
# - Update Dockerfile nebula pin, rebuild, run security-scan-docker-image, confirm resolved
# - Remove this entry and the corresponding .trivyignore entry simultaneously
#
# References:
# - GHSA-69x3-g4r3-p962: https://github.com/advisories/GHSA-69x3-g4r3-p962
# - CVE-2026-25793: https://nvd.nist.gov/vuln/detail/CVE-2026-25793
# - Nebula releases: https://github.com/slackhq/nebula/releases
# - smallstep/certificates releases: https://github.com/smallstep/certificates/releases
- vulnerability: CVE-2026-25793
package:
name: github.com/slackhq/nebula
version: "v1.9.7"
type: go-module
reason: |
HIGH — ECDSA signature malleability in nebula v1.9.7 embedded in /usr/bin/caddy.
Fix exists in nebula v1.10.3 but smallstep/certificates (through v0.30.2) uses legacy APIs
removed in v1.10+, causing compile failures. Charon does not use Nebula VPN PKI by default.
Risk accepted; no remediation until smallstep/certificates ships with nebula >= v1.10.3.
Re-evaluated 2026-04-10: still blocked by upstream API incompatibility.
expiry: "2026-05-10" # Re-evaluated 2026-04-10: certificates through v0.30.2 incompatible with nebula v1.10+.
# GHSA-6g7g-w4f8-9c9x: buger/jsonparser Delete panic on malformed JSON (DoS)
# Severity: HIGH (CVSS 7.5)
@@ -105,7 +229,8 @@ ignore:
#
# Review:
# - Reviewed 2026-03-19 (initial suppression): no upstream fix exists. Set 30-day review.
# - Next review: 2026-04-19. Remove suppression once buger/jsonparser ships a fix and
# - Extended 2026-04-04: no upstream fix available. buger/jsonparser issue #275 still open.
# - Next review: 2026-05-19. Remove suppression once buger/jsonparser ships a fix and
# CrowdSec updates their dependency.
#
# Removal Criteria:
@@ -130,7 +255,7 @@ ignore:
Charon does not use this package directly; the vector requires reaching CrowdSec's internal
JSON processing pipeline. Risk accepted; no remediation path until upstream ships a fix.
Reviewed 2026-03-19: no patched release available.
expiry: "2026-04-19" # 30-day review: no fix exists. Extend in 30-day increments with documented justification.
expiry: "2026-05-19" # Extended 2026-04-04: no upstream fix. Next review 2026-05-19.
# Action items when this suppression expires:
# 1. Check buger/jsonparser releases: https://github.com/buger/jsonparser/releases
@@ -174,7 +299,8 @@ ignore:
# Review:
# - Reviewed 2026-03-19 (initial suppression): pgproto3/v2 is EOL; no fix exists or will exist.
# Waiting on CrowdSec to migrate to pgx/v5. Set 30-day review.
# - Next review: 2026-04-19. Remove suppression once CrowdSec ships with pgx/v5.
# - Extended 2026-04-04: CrowdSec has not migrated to pgx/v5 yet.
# - Next review: 2026-05-19. Remove suppression once CrowdSec ships with pgx/v5.
#
# Removal Criteria:
# - CrowdSec releases a version with pgx/v5 (pgproto3/v3) replacing pgproto3/v2
@@ -197,7 +323,7 @@ ignore:
Charon uses SQLite, not PostgreSQL; this code path is not reachable in a standard deployment.
Risk accepted; no remediation until CrowdSec ships with pgx/v5.
Reviewed 2026-03-19: pgproto3/v2 EOL confirmed; CrowdSec has not migrated to pgx/v5 yet.
expiry: "2026-04-19" # 30-day review: no fix path until CrowdSec migrates to pgx/v5.
expiry: "2026-05-19" # Extended 2026-04-04: no fix path until CrowdSec migrates to pgx/v5.
# Action items when this suppression expires:
# 1. Check CrowdSec releases for pgx/v5 migration:
@@ -245,7 +371,8 @@ ignore:
# - Reviewed 2026-03-21 (initial suppression): pgproto3/v2 is EOL; no fix exists or will exist.
# Waiting on CrowdSec to migrate to pgx/v5. Set 30-day review. Sibling GHSA-jqcq-xjh3-6g23
# was already suppressed; this alias surfaced as a separate Grype match via NVD/Red Hat tracking.
# - Next review: 2026-04-21. Remove suppression once CrowdSec ships with pgx/v5.
# - Extended 2026-04-04: CrowdSec has not migrated to pgx/v5 yet.
# - Next review: 2026-05-21. Remove suppression once CrowdSec ships with pgx/v5.
#
# Removal Criteria:
# - Same as GHSA-jqcq-xjh3-6g23: CrowdSec releases a version with pgx/v5 replacing pgproto3/v2
@@ -271,7 +398,7 @@ ignore:
Charon uses SQLite, not PostgreSQL; this code path is not reachable in a standard deployment.
Risk accepted; no remediation until CrowdSec ships with pgx/v5.
Reviewed 2026-03-21: pgproto3/v2 EOL confirmed; CrowdSec has not migrated to pgx/v5 yet.
expiry: "2026-04-21" # 30-day review: no fix path until CrowdSec migrates to pgx/v5.
expiry: "2026-05-21" # Extended 2026-04-04: no fix path until CrowdSec migrates to pgx/v5.
# Action items when this suppression expires:
# 1. Check CrowdSec releases for pgx/v5 migration:
@@ -284,6 +411,278 @@ ignore:
# 4. If not yet migrated: Extend expiry by 30 days and update the review comment above
# 5. If extended 3+ times: Open an upstream issue on crowdsecurity/crowdsec requesting pgx/v5 migration
# CVE-2026-32286: pgproto3/v2 buffer overflow in DataRow handling (DoS)
# Severity: HIGH (CVSS 7.5)
# Package: github.com/jackc/pgproto3/v2 v2.3.3 (embedded in /usr/local/bin/crowdsec and /usr/local/bin/cscli)
# Status: NO fix in pgproto3/v2 (archived/EOL) — fix path requires CrowdSec to migrate to pgx/v5
#
# Vulnerability Details:
# - Buffer overflow in pgproto3/v2 DataRow handling allows a malicious or compromised PostgreSQL
# server to trigger a denial of service via crafted protocol messages (CWE-120).
# - CVSSv3: AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H (CVSS 7.5)
#
# Root Cause (EOL Module + Third-Party Binary):
# - Same affected module as GHSA-jqcq-xjh3-6g23 and GHSA-x6gf-mpr2-68h6 — pgproto3/v2 v2.3.3
# is the final release (repository archived Jul 12, 2025). No fix will be released.
# - Charon does not use pgproto3/v2 directly nor communicate with PostgreSQL. The package
# is compiled into CrowdSec binaries for their internal database communication.
# - Fix exists only in pgproto3/v3 (used by pgx/v5). CrowdSec v1.7.7 (latest) still depends
# on pgx/v4 → pgproto3/v2. Dockerfile already applies best-effort mitigation (pgx/v4@v4.18.3).
# - Fix path: once CrowdSec migrates to pgx/v5, rebuild the Docker image and remove this suppression.
#
# Risk Assessment: ACCEPTED (Non-exploitable in Charon context + no upstream fix path)
# - The vulnerability requires a malicious PostgreSQL server response. Charon uses SQLite
# internally and does not run PostgreSQL. CrowdSec's database path is not exposed to
# external traffic in a standard Charon deployment.
# - CrowdSec's PostgreSQL code path is not directly exposed to untrusted network input in
# Charon's deployment.
#
# Mitigation (active while suppression is in effect):
# - Monitor CrowdSec releases for pgx/v5 migration:
# https://github.com/crowdsecurity/crowdsec/releases
# - Weekly CI security rebuild flags the moment a fixed image ships.
#
# Review:
# - Reviewed 2026-04-10 (initial suppression): pgproto3/v2 is EOL; no fix exists or will exist.
# Waiting on CrowdSec to migrate to pgx/v5. Set 90-day review.
# - Next review: 2026-07-09. Remove suppression once CrowdSec ships with pgx/v5.
#
# Removal Criteria:
# - Same as GHSA-jqcq-xjh3-6g23: CrowdSec releases a version with pgx/v5 replacing pgproto3/v2
# - Rebuild Docker image, run security-scan-docker-image, confirm all pgproto3/v2 advisories are resolved
# - Remove this entry, GHSA-jqcq-xjh3-6g23 entry, GHSA-x6gf-mpr2-68h6 entry, and all .trivyignore entries simultaneously
#
# References:
# - CVE-2026-32286: https://nvd.nist.gov/vuln/detail/CVE-2026-32286
# - pgproto3/v2 archive notice: https://github.com/jackc/pgproto3
# - pgx/v5 (replacement): https://github.com/jackc/pgx
# - CrowdSec releases: https://github.com/crowdsecurity/crowdsec/releases
- vulnerability: CVE-2026-32286
package:
name: github.com/jackc/pgproto3/v2
version: "v2.3.3"
type: go-module
reason: |
HIGH — Buffer overflow in pgproto3/v2 v2.3.3 DataRow handling, embedded in CrowdSec binaries.
pgproto3/v2 v2.3.3 is the final release (archived Jul 2025); no fix will be released.
Fix exists only in pgproto3/v3 (pgx/v5). CrowdSec v1.7.7 still depends on pgx/v4 → pgproto3/v2.
Charon uses SQLite, not PostgreSQL; this code path is not reachable in a standard deployment.
Risk accepted; no remediation until CrowdSec ships with pgx/v5.
Reviewed 2026-04-10: pgproto3/v2 EOL confirmed; CrowdSec has not migrated to pgx/v5 yet.
expiry: "2026-07-09" # Reviewed 2026-04-10: no fix path until CrowdSec migrates to pgx/v5. 90-day expiry.
# Action items when this suppression expires:
# 1. Check CrowdSec releases for pgx/v5 migration:
# https://github.com/crowdsecurity/crowdsec/releases
# 2. Verify with: `go version -m /path/to/crowdsec | grep pgproto3`
# Expected: pgproto3/v3 (or no pgproto3 reference if fully replaced)
# 3. If CrowdSec has migrated:
# a. Rebuild Docker image and run local security-scan-docker-image
# b. Remove this entry, GHSA-jqcq-xjh3-6g23 entry, GHSA-x6gf-mpr2-68h6 entry, and all .trivyignore entries
# 4. If not yet migrated: Extend expiry by 30 days and update the review comment above
# 5. If extended 3+ times: Open an upstream issue on crowdsecurity/crowdsec requesting pgx/v5 migration
# GHSA-x744-4wpc-v9h2 / CVE-2026-34040: Docker AuthZ plugin bypass via oversized request body
# Severity: HIGH (CVSS 8.8)
# CVSS Vector: CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:C/C:H/I:H/A:H
# CWE: CWE-863 (Incorrect Authorization)
# Package: github.com/docker/docker v28.5.2+incompatible (go-module)
# Status: Fixed in moby/moby v29.3.1 — NO fix available for docker/docker import path
#
# Vulnerability Details:
# - Incomplete fix for Docker AuthZ plugin bypass (CVE-2024-41110). An attacker can send an
# oversized request body to the Docker daemon, causing it to forward the request to the AuthZ
# plugin without the body, allowing unauthorized approvals.
#
# Root Cause (No Fix Available for Import Path):
# - The fix exists in moby/moby v29.3.1, but not for the docker/docker import path that Charon uses.
# - Migration to moby/moby/v2 is not practical: currently beta with breaking changes.
# - Fix path: once docker/docker publishes a patched version or moby/moby/v2 stabilizes,
# update the dependency and remove this suppression.
#
# Risk Assessment: ACCEPTED (Not exploitable in Charon context)
# - Charon uses the Docker client SDK only (list containers). The vulnerability is server-side
# in the Docker daemon's AuthZ plugin handler.
# - Charon does not run a Docker daemon or use AuthZ plugins.
# - The attack vector requires local access to the Docker daemon socket with AuthZ plugins enabled.
#
# Mitigation (active while suppression is in effect):
# - Monitor docker/docker releases: https://github.com/moby/moby/releases
# - Monitor moby/moby/v2 stabilization: https://github.com/moby/moby
# - Weekly CI security rebuild flags the moment a fixed version ships.
#
# Review:
# - Reviewed 2026-03-30 (initial suppression): no fix for docker/docker import path. Set 30-day review.
# - Next review: 2026-04-30. Remove suppression once a fix is available for the docker/docker import path.
#
# Removal Criteria:
# - docker/docker publishes a patched version OR moby/moby/v2 stabilizes and migration is feasible
# - Update dependency, rebuild, run security-scan-docker-image, confirm finding is resolved
# - Remove this entry, the GHSA-pxq6-2prw-chj9 entry, and the corresponding .trivyignore entries simultaneously
#
# References:
# - GHSA-x744-4wpc-v9h2: https://github.com/advisories/GHSA-x744-4wpc-v9h2
# - CVE-2026-34040: https://nvd.nist.gov/vuln/detail/CVE-2026-34040
# - CVE-2024-41110 (original): https://nvd.nist.gov/vuln/detail/CVE-2024-41110
# - moby/moby releases: https://github.com/moby/moby/releases
- vulnerability: GHSA-x744-4wpc-v9h2
package:
name: github.com/docker/docker
version: "v28.5.2+incompatible"
type: go-module
reason: |
HIGH — Docker AuthZ plugin bypass via oversized request body in docker/docker v28.5.2+incompatible.
Incomplete fix for CVE-2024-41110. Fixed in moby/moby v29.3.1 but no fix for docker/docker import path.
Charon uses Docker client SDK only (list containers); the vulnerability is server-side in the Docker
daemon's AuthZ plugin handler. Charon does not run a Docker daemon or use AuthZ plugins.
Risk accepted; no remediation path until docker/docker publishes a fix or moby/moby/v2 stabilizes.
Reviewed 2026-03-30: no patched release available for docker/docker import path.
expiry: "2026-04-30" # 30-day review: no fix for docker/docker import path. Extend in 30-day increments with documented justification.
# Action items when this suppression expires:
# 1. Check docker/docker and moby/moby releases: https://github.com/moby/moby/releases
# 2. Check if moby/moby/v2 has stabilized: https://github.com/moby/moby
# 3. If a fix has shipped for docker/docker import path OR moby/moby/v2 is stable:
# a. Update the dependency and rebuild Docker image
# b. Run local security-scan-docker-image and confirm finding is resolved
# c. Remove this entry, GHSA-pxq6-2prw-chj9 entry, and all corresponding .trivyignore entries
# 4. If no fix yet: Extend expiry by 30 days and update the review comment above
# 5. If extended 3+ times: Open an issue to track moby/moby/v2 migration feasibility
# GHSA-pxq6-2prw-chj9 / CVE-2026-33997: Moby off-by-one error in plugin privilege validation
# Severity: MEDIUM (CVSS 6.8)
# Package: github.com/docker/docker v28.5.2+incompatible (go-module)
# Status: Fixed in moby/moby v29.3.1 — NO fix available for docker/docker import path
#
# Vulnerability Details:
# - Off-by-one error in Moby's plugin privilege validation allows potential privilege escalation
# via crafted plugin configurations.
#
# Root Cause (No Fix Available for Import Path):
# - Same import path issue as GHSA-x744-4wpc-v9h2. The fix exists in moby/moby v29.3.1 but not
# for the docker/docker import path that Charon uses.
# - Fix path: same as GHSA-x744-4wpc-v9h2 — wait for docker/docker patch or moby/moby/v2 stabilization.
#
# Risk Assessment: ACCEPTED (Not exploitable in Charon context)
# - Charon uses the Docker client SDK only (list containers). The vulnerability is in Docker's
# plugin privilege validation, which is server-side functionality.
# - Charon does not run a Docker daemon, install Docker plugins, or interact with plugin privileges.
#
# Mitigation (active while suppression is in effect):
# - Monitor docker/docker releases: https://github.com/moby/moby/releases
# - Weekly CI security rebuild flags the moment a fixed version ships.
#
# Review:
# - Reviewed 2026-03-30 (initial suppression): no fix for docker/docker import path. Set 30-day review.
# - Next review: 2026-04-30. Remove suppression once a fix is available for the docker/docker import path.
#
# Removal Criteria:
# - Same as GHSA-x744-4wpc-v9h2: docker/docker publishes a patched version OR moby/moby/v2 stabilizes
# - Update dependency, rebuild, run security-scan-docker-image, confirm finding is resolved
# - Remove this entry, GHSA-x744-4wpc-v9h2 entry, and all corresponding .trivyignore entries simultaneously
#
# References:
# - GHSA-pxq6-2prw-chj9: https://github.com/advisories/GHSA-pxq6-2prw-chj9
# - CVE-2026-33997: https://nvd.nist.gov/vuln/detail/CVE-2026-33997
# - moby/moby releases: https://github.com/moby/moby/releases
- vulnerability: GHSA-pxq6-2prw-chj9
package:
name: github.com/docker/docker
version: "v28.5.2+incompatible"
type: go-module
reason: |
MEDIUM — Off-by-one error in Moby plugin privilege validation in docker/docker v28.5.2+incompatible.
Fixed in moby/moby v29.3.1 but no fix for docker/docker import path.
Charon uses Docker client SDK only (list containers); the vulnerability is in Docker's server-side
plugin privilege validation. Charon does not run a Docker daemon or install Docker plugins.
Risk accepted; no remediation path until docker/docker publishes a fix or moby/moby/v2 stabilizes.
Reviewed 2026-03-30: no patched release available for docker/docker import path.
expiry: "2026-04-30" # 30-day review: no fix for docker/docker import path. Extend in 30-day increments with documented justification.
# Action items when this suppression expires:
# 1. Check docker/docker and moby/moby releases: https://github.com/moby/moby/releases
# 2. Check if moby/moby/v2 has stabilized: https://github.com/moby/moby
# 3. If a fix has shipped for docker/docker import path OR moby/moby/v2 is stable:
# a. Update the dependency and rebuild Docker image
# b. Run local security-scan-docker-image and confirm finding is resolved
# c. Remove this entry, GHSA-x744-4wpc-v9h2 entry, and all corresponding .trivyignore entries
# 4. If no fix yet: Extend expiry by 30 days and update the review comment above
# 5. If extended 3+ times: Open an issue to track moby/moby/v2 migration feasibility
# GHSA-78h2-9frx-2jm8: go-jose JWE decryption panic (DoS)
# Severity: HIGH
# Packages: github.com/go-jose/go-jose/v3 v3.0.4 and github.com/go-jose/go-jose/v4 v4.1.3
# (embedded in /usr/bin/caddy)
# Status: Fix available in go-jose/v3 v3.0.5 and go-jose/v4 v4.1.4 — requires upstream Caddy rebuild
#
# Vulnerability Details:
# - JWE decryption can trigger a panic due to improper input validation, causing
# a denial-of-service condition (runtime crash).
#
# Root Cause (Third-Party Binary):
# - Charon does not use go-jose directly. The library is compiled into the Caddy binary
# shipped in the Docker image.
# - Fixes are available upstream (v3.0.5 and v4.1.4) but require a Caddy rebuild to pick up.
# - Fix path: once the upstream Caddy release includes the patched go-jose versions,
# rebuild the Docker image and remove these suppressions.
#
# Risk Assessment: ACCEPTED (No direct use + fix requires upstream rebuild)
# - Charon does not import or call go-jose functions; the library is only present as a
# transitive dependency inside the Caddy binary.
# - The attack vector requires crafted JWE input reaching Caddy's internal JWT handling,
# which is limited to authenticated admin-API paths not exposed in Charon deployments.
#
# Mitigation (active while suppression is in effect):
# - Monitor Caddy releases: https://github.com/caddyserver/caddy/releases
# - Weekly CI security rebuild flags the moment a fixed image ships.
#
# Review:
# - Reviewed 2026-04-05 (initial suppression): fix available upstream but not yet in Caddy release.
# Set 30-day review.
# - Next review: 2026-05-05. Remove suppression once Caddy ships with patched go-jose.
#
# Removal Criteria:
# - Caddy releases a version built with go-jose/v3 >= v3.0.5 and go-jose/v4 >= v4.1.4
# - Rebuild Docker image, run security-scan-docker-image, confirm finding is resolved
# - Remove both entries (v3 and v4) and any corresponding .trivyignore entries simultaneously
#
# References:
# - GHSA-78h2-9frx-2jm8: https://github.com/advisories/GHSA-78h2-9frx-2jm8
# - go-jose releases: https://github.com/go-jose/go-jose/releases
# - Caddy releases: https://github.com/caddyserver/caddy/releases
- vulnerability: GHSA-78h2-9frx-2jm8
package:
name: github.com/go-jose/go-jose/v3
version: "v3.0.4"
type: go-module
reason: |
HIGH — JWE decryption panic in go-jose v3.0.4 embedded in /usr/bin/caddy.
Fix available in v3.0.5 but requires upstream Caddy rebuild. Charon does not use go-jose
directly. Deferring to next Caddy release.
expiry: "2026-05-05" # 30-day review: remove once Caddy ships with go-jose/v3 >= v3.0.5.
# Action items when this suppression expires:
# 1. Check Caddy releases: https://github.com/caddyserver/caddy/releases
# 2. Verify with: `go version -m /usr/bin/caddy | grep go-jose`
# Expected: go-jose/v3 >= v3.0.5
# 3. If Caddy has updated:
# a. Rebuild Docker image and run local security-scan-docker-image
# b. Remove this entry, the v4 entry below, and any corresponding .trivyignore entries
# 4. If not yet updated: Extend expiry by 30 days and update the review comment above
# 5. If extended 3+ times: Open an upstream issue on caddyserver/caddy requesting go-jose update
# GHSA-78h2-9frx-2jm8 (go-jose/v4) — see full justification in the go-jose/v3 entry above
- vulnerability: GHSA-78h2-9frx-2jm8
package:
name: github.com/go-jose/go-jose/v4
version: "v4.1.3"
type: go-module
reason: |
HIGH — JWE decryption panic in go-jose v4.1.3 embedded in /usr/bin/caddy.
Fix available in v4.1.4 but requires upstream Caddy rebuild. Charon does not use go-jose
directly. Deferring to next Caddy release.
expiry: "2026-05-05" # 30-day review: see go-jose/v3 entry above for action items.
# Match exclusions (patterns to ignore during scanning)
# Use sparingly - prefer specific CVE suppressions above
match:

View File

@@ -3,24 +3,19 @@ playwright/.auth/
# GHSA-69x3-g4r3-p962 / CVE-2026-25793: Nebula ECDSA Signature Malleability
# Severity: HIGH (CVSS 8.1) — Package: github.com/slackhq/nebula v1.9.7 in /usr/bin/caddy
# Cannot upgrade: smallstep/certificates v0.27.5 (latest stable as of 2026-02-19) still pins nebula v1.9.x.
# Charon does not use Nebula VPN PKI by default. Review by: 2026-03-05
# Fix exists in nebula v1.10.3, but smallstep/certificates (through v0.30.2) uses legacy nebula
# APIs removed in v1.10+, causing compile failures. Waiting on certificates upstream update.
# Charon does not use Nebula VPN PKI by default. Review by: 2026-05-10
# See also: .grype.yaml for full justification
# exp: 2026-05-10
CVE-2026-25793
# CVE-2026-22184: zlib Global Buffer Overflow in untgz utility
# Severity: CRITICAL (CVSS 9.8) — Package: zlib 1.3.1-r2 in Alpine base image
# No upstream fix available: Alpine 3.23 (including edge) still ships zlib 1.3.1-r2.
# Charon does not use untgz or process untrusted tar archives. Review by: 2026-03-14
# See also: .grype.yaml for full justification
CVE-2026-22184
# CVE-2026-27171: zlib CPU spin via crc32_combine64 infinite loop (DoS)
# Severity: MEDIUM (CVSS 5.5 NVD / 2.9 MITRE) — Package: zlib 1.3.1-r2 in Alpine base image
# Fix requires zlib >= 1.3.2. No upstream fix available: Alpine 3.23 still ships zlib 1.3.1-r2.
# Attack requires local access (AV:L); the vulnerable code path is not reachable via Charon's
# network-facing surface. Non-blocking by CI policy (MEDIUM). Review by: 2026-04-21
# exp: 2026-04-21
# network-facing surface. Non-blocking by CI policy (MEDIUM). Review by: 2026-05-21
# exp: 2026-05-21
CVE-2026-27171
# CVE-2026-2673: OpenSSL TLS 1.3 server key exchange group downgrade (libcrypto3/libssl3)
@@ -28,45 +23,47 @@ CVE-2026-27171
# No upstream fix available: Alpine 3.23 still ships libcrypto3/libssl3 3.5.5-r0 as of 2026-03-18.
# When DEFAULT is in TLS 1.3 group config, server may select a weaker key exchange group.
# Charon terminates TLS at the Caddy layer — the Go backend does not act as a raw TLS 1.3 server.
# Review by: 2026-04-18
# Review by: 2026-05-18
# See also: .grype.yaml for full justification
# exp: 2026-04-18
# exp: 2026-05-18
CVE-2026-2673
# CVE-2026-33186 / GHSA-p77j-4mvh-x3m3: gRPC-Go authorization bypass via missing leading slash
# Severity: CRITICAL (CVSS 9.1) — Package: google.golang.org/grpc, embedded in CrowdSec (v1.74.2) and Caddy (v1.79.1)
# Fix exists at v1.79.3 — Charon's own dep is patched. Waiting on CrowdSec and Caddy upstream releases.
# CrowdSec's and Caddy's grpc servers are not exposed externally in a standard Charon deployment.
# Review by: 2026-04-02
# Suppressed for CrowdSec/Caddy embedded binaries only — Charon's direct deps are fixed (v1.79.3).
# Review by: 2026-05-04
# See also: .grype.yaml for full justification
# exp: 2026-04-02
# exp: 2026-05-04
CVE-2026-33186
# GHSA-479m-364c-43vc: goxmldsig XML signature validation bypass (loop variable capture)
# Severity: HIGH (CVSS 7.5) — Package: github.com/russellhaering/goxmldsig v1.5.0, embedded in /usr/bin/caddy
# Fix exists at v1.6.0 — waiting on Caddy upstream (or caddy-security plugin) to release with patched goxmldsig.
# Charon does not configure SAML-based SSO by default; the vulnerable path is not reachable in a standard deployment.
# Review by: 2026-04-02
# Awaiting Caddy upstream update to include goxmldsig v1.6.0.
# Review by: 2026-05-04
# See also: .grype.yaml for full justification
# exp: 2026-04-02
# exp: 2026-05-04
GHSA-479m-364c-43vc
# GHSA-6g7g-w4f8-9c9x: buger/jsonparser Delete panic on malformed JSON (DoS)
# Severity: HIGH (CVSS 7.5) — Package: github.com/buger/jsonparser v1.1.1, embedded in CrowdSec binaries
# No upstream fix available as of 2026-03-19 (issue #275 open, golang/vulndb #4514 open).
# Charon does not use this package; the vector requires reaching CrowdSec's internal processing pipeline.
# Review by: 2026-04-19
# Review by: 2026-05-19
# See also: .grype.yaml for full justification
# exp: 2026-04-19
# exp: 2026-05-19
GHSA-6g7g-w4f8-9c9x
# GHSA-jqcq-xjh3-6g23: pgproto3/v2 DataRow.Decode panic on negative field length (DoS)
# Severity: HIGH (CVSS 7.5) — Package: github.com/jackc/pgproto3/v2 v2.3.3, embedded in CrowdSec binaries
# pgproto3/v2 is archived/EOL — no fix will be released. Fix path requires CrowdSec to migrate to pgx/v5.
# Charon uses SQLite; the PostgreSQL code path is not reachable in a standard deployment.
# Review by: 2026-04-19
# Review by: 2026-05-19
# See also: .grype.yaml for full justification
# exp: 2026-04-19
# exp: 2026-05-19
GHSA-jqcq-xjh3-6g23
# GHSA-x6gf-mpr2-68h6 / CVE-2026-4427: pgproto3/v2 DataRow.Decode panic on negative field length (DoS)
@@ -74,7 +71,52 @@ GHSA-jqcq-xjh3-6g23
# NVD/Red Hat alias (CVE-2026-4427) for the same underlying bug as GHSA-jqcq-xjh3-6g23.
# pgproto3/v2 is archived/EOL — no fix will be released. Fix path requires CrowdSec to migrate to pgx/v5.
# Charon uses SQLite; the PostgreSQL code path is not reachable in a standard deployment.
# Review by: 2026-04-21
# Review by: 2026-05-21
# See also: .grype.yaml for full justification
# exp: 2026-04-21
# exp: 2026-05-21
GHSA-x6gf-mpr2-68h6
# CVE-2026-32286: pgproto3/v2 buffer overflow in DataRow handling (DoS)
# Severity: HIGH (CVSS 7.5) — Package: github.com/jackc/pgproto3/v2 v2.3.3, embedded in CrowdSec binaries
# pgproto3/v2 v2.3.3 is the final release — repository archived Jul 12, 2025. No fix will be released.
# Fix exists only in pgproto3/v3 (used by pgx/v5). CrowdSec v1.7.7 (latest) still depends on pgx/v4 → pgproto3/v2.
# Dockerfile already applies best-effort mitigation (pgx/v4@v4.18.3).
# Charon uses SQLite; the PostgreSQL code path is not reachable in a standard deployment.
# Review by: 2026-07-09
# See also: .grype.yaml for full justification
# exp: 2026-07-09
CVE-2026-32286
# CVE-2026-34040 / GHSA-x744-4wpc-v9h2: Docker AuthZ plugin bypass via oversized request body
# Severity: HIGH (CVSS 8.8) — Package: github.com/docker/docker v28.5.2+incompatible
# Incomplete fix for CVE-2024-41110. Fixed in moby/moby v29.3.1 but no fix for docker/docker import path.
# Charon uses Docker client SDK only (list containers); the vulnerability is server-side in the Docker daemon.
# Review by: 2026-04-30
# See also: .grype.yaml for full justification
# exp: 2026-04-30
CVE-2026-34040
# GHSA-x744-4wpc-v9h2: Docker AuthZ plugin bypass via oversized request body (GHSA alias)
# Severity: HIGH (CVSS 8.8) — Package: github.com/docker/docker v28.5.2+incompatible
# GHSA alias for CVE-2026-34040. See CVE-2026-34040 entry above for full details.
# Review by: 2026-04-30
# See also: .grype.yaml for full justification
# exp: 2026-04-30
GHSA-x744-4wpc-v9h2
# CVE-2026-33997 / GHSA-pxq6-2prw-chj9: Moby off-by-one error in plugin privilege validation
# Severity: MEDIUM (CVSS 6.8) — Package: github.com/docker/docker v28.5.2+incompatible
# Fixed in moby/moby v29.3.1 but no fix for docker/docker import path.
# Charon uses Docker client SDK only (list containers); plugin privilege validation is server-side.
# Review by: 2026-04-30
# See also: .grype.yaml for full justification
# exp: 2026-04-30
CVE-2026-33997
# GHSA-pxq6-2prw-chj9: Moby off-by-one error in plugin privilege validation (GHSA alias)
# Severity: MEDIUM (CVSS 6.8) — Package: github.com/docker/docker v28.5.2+incompatible
# GHSA alias for CVE-2026-33997. See CVE-2026-33997 entry above for full details.
# Review by: 2026-04-30
# See also: .grype.yaml for full justification
# exp: 2026-04-30
GHSA-pxq6-2prw-chj9

View File

@@ -9,6 +9,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
- **CrowdSec Dashboard**: Visual analytics for CrowdSec security data within the Security section
- Summary cards showing total bans, active bans, unique IPs, and top scenario
- Interactive charts: ban timeline (area), top attacking IPs (bar), scenario breakdown (pie)
- Configurable time range selector (1h, 6h, 24h, 7d, 30d)
- Active decisions table with IP, scenario, duration, type, and time remaining
- Alerts feed with pagination sourced from CrowdSec LAPI
- CSV and JSON export for decisions data
- Server-side caching (30-60s TTL) for fast dashboard loads
- Full i18n support across all 5 locales (en, de, fr, es, zh)
- Keyboard navigable, screen-reader compatible (WCAG 2.2 AA)
- **Notifications:** Added Ntfy notification provider with support for self-hosted and cloud instances, optional Bearer token authentication, and JSON template customization
- **Certificate Deletion**: Clean up expired and unused certificates directly from the Certificates page

View File

@@ -10,14 +10,14 @@ ARG BUILD_DEBUG=0
# ---- Pinned Toolchain Versions ----
# renovate: datasource=docker depName=golang versioning=docker
ARG GO_VERSION=1.26.1
ARG GO_VERSION=1.26.2
# renovate: datasource=docker depName=alpine versioning=docker
ARG ALPINE_IMAGE=alpine:3.23.3@sha256:25109184c71bdad752c8312a8623239686a9a2071e8825f20acb8f2198c3f659
# ---- Shared CrowdSec Version ----
# renovate: datasource=github-releases depName=crowdsecurity/crowdsec
ARG CROWDSEC_VERSION=1.7.6
ARG CROWDSEC_VERSION=1.7.7
# CrowdSec fallback tarball checksum (v${CROWDSEC_VERSION})
ARG CROWDSEC_RELEASE_SHA256=704e37121e7ac215991441cef0d8732e33fa3b1a2b2b88b53a0bfe5e38f863bd
@@ -25,7 +25,7 @@ ARG CROWDSEC_RELEASE_SHA256=704e37121e7ac215991441cef0d8732e33fa3b1a2b2b88b53a0b
# renovate: datasource=go depName=github.com/expr-lang/expr
ARG EXPR_LANG_VERSION=1.17.8
# renovate: datasource=go depName=golang.org/x/net
ARG XNET_VERSION=0.52.0
ARG XNET_VERSION=0.53.0
# renovate: datasource=go depName=github.com/smallstep/certificates
ARG SMALLSTEP_CERTIFICATES_VERSION=0.30.0
# renovate: datasource=npm depName=npm
@@ -43,9 +43,9 @@ ARG CADDY_CANDIDATE_VERSION=2.11.2
ARG CADDY_USE_CANDIDATE=0
ARG CADDY_PATCH_SCENARIO=B
# renovate: datasource=go depName=github.com/greenpau/caddy-security
ARG CADDY_SECURITY_VERSION=1.1.51
ARG CADDY_SECURITY_VERSION=1.1.61
# renovate: datasource=go depName=github.com/corazawaf/coraza-caddy
ARG CORAZA_CADDY_VERSION=2.2.0
ARG CORAZA_CADDY_VERSION=2.4.0
## When an official caddy image tag isn't available on the host, use a
## plain Alpine base image and overwrite its caddy binary with our
## xcaddy-built binary in the later COPY step. This avoids relying on
@@ -92,7 +92,7 @@ RUN --mount=type=cache,target=/root/.cache/go-build \
# ---- Frontend Builder ----
# Build the frontend using the BUILDPLATFORM to avoid arm64 musl Rollup native issues
# renovate: datasource=docker depName=node
FROM --platform=$BUILDPLATFORM node:24.14.0-alpine@sha256:7fddd9ddeae8196abf4a3ef2de34e11f7b1a722119f91f28ddf1e99dcafdf114 AS frontend-builder
FROM --platform=$BUILDPLATFORM node:24.14.1-alpine@sha256:01743339035a5c3c11a373cd7c83aeab6ed1457b55da6a69e014a95ac4e4700b AS frontend-builder
WORKDIR /app/frontend
# Copy frontend package files
@@ -282,11 +282,27 @@ RUN --mount=type=cache,target=/root/.cache/go-build \
# renovate: datasource=go depName=github.com/hslatman/ipstore
go get github.com/hslatman/ipstore@v0.4.0; \
go get golang.org/x/net@v${XNET_VERSION}; \
# CVE-2026-33186 (GHSA-p77j-4mvh-x3m3): gRPC-Go auth bypass via missing leading slash
# Fix available at v1.79.3. Pin here so the Caddy binary is patched immediately;
# remove once Caddy ships a release built with grpc >= v1.79.3.
# CVE-2026-33186: gRPC-Go auth bypass (fixed in v1.79.3)
# CVE-2026-34986: go-jose/v4 transitive fix (requires grpc >= v1.80.0)
# Pin here so the Caddy binary is patched immediately;
# remove once Caddy ships a release built with grpc >= v1.80.0.
# renovate: datasource=go depName=google.golang.org/grpc
go get google.golang.org/grpc@v1.79.3; \
go get google.golang.org/grpc@v1.80.0; \
# CVE-2026-34986: go-jose JOSE/JWT validation bypass
# renovate: datasource=go depName=github.com/go-jose/go-jose/v3
go get github.com/go-jose/go-jose/v3@v3.0.5; \
# renovate: datasource=go depName=github.com/go-jose/go-jose/v4
go get github.com/go-jose/go-jose/v4@v4.1.4; \
# CVE-2026-39883: OTel SDK resource leak
# renovate: datasource=go depName=go.opentelemetry.io/otel/sdk
go get go.opentelemetry.io/otel/sdk@v1.43.0; \
# CVE-2026-39882: OTel HTTP exporter request smuggling
# renovate: datasource=go depName=go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp
go get go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp@v0.19.0; \
# renovate: datasource=go depName=go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp
go get go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp@v1.43.0; \
# renovate: datasource=go depName=go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp
go get go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp@v1.43.0; \
# GHSA-479m-364c-43vc: goxmldsig XML signature validation bypass (loop variable capture)
# Fix available at v1.6.0. Pin here so the Caddy binary is patched immediately;
# remove once caddy-security ships a release built with goxmldsig >= v1.6.0.
@@ -364,7 +380,19 @@ RUN go get github.com/expr-lang/expr@v${EXPR_LANG_VERSION} && \
# CVE-2026-33186 fixed at v1.79.3; pinning v1.80.0 to also pick up the go-jose/v4 transitive fix
# (CVE-2026-34986), matching the Caddy stage pin. Pin here so the CrowdSec binary is patched immediately;
# remove once CrowdSec ships a release built with grpc >= v1.80.0.
# renovate: datasource=go depName=google.golang.org/grpc
go get google.golang.org/grpc@v1.79.3 && \
go get google.golang.org/grpc@v1.80.0 && \
# CVE-2026-32286: pgproto3/v2 buffer overflow (no v2 fix exists; bump pgx/v4 to latest patch)
# renovate: datasource=go depName=github.com/jackc/pgx/v4
go get github.com/jackc/pgx/v4@v4.18.3 && \
# GHSA-xmrv-pmrh-hhx2: AWS SDK v2 event stream injection
# renovate: datasource=go depName=github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream
go get github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream@v1.7.8 && \
# renovate: datasource=go depName=github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs
go get github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs@v1.68.0 && \
# renovate: datasource=go depName=github.com/aws/aws-sdk-go-v2/service/kinesis
go get github.com/aws/aws-sdk-go-v2/service/kinesis@v1.43.5 && \
# renovate: datasource=go depName=github.com/aws/aws-sdk-go-v2/service/s3
go get github.com/aws/aws-sdk-go-v2/service/s3@v1.99.0 && \
go mod tidy
# Fix compatibility issues with expr-lang v1.17.7
@@ -458,7 +486,7 @@ SHELL ["/bin/ash", "-o", "pipefail", "-c"]
# Note: In production, users should provide their own MaxMind license key
# This uses the publicly available GeoLite2 database
# In CI, timeout quickly rather than retrying to save build time
ARG GEOLITE2_COUNTRY_SHA256=f5e80a9a3129d46e75c8cccd66bfac725b0449a6c89ba5093a16561d58f20bda
ARG GEOLITE2_COUNTRY_SHA256=b018842033872f19ed9ccefb863ec954f8024db2ae913d0d4ea14e35ace4eba1
RUN mkdir -p /app/data/geoip && \
if [ "$CI" = "true" ] || [ "$CI" = "1" ]; then \
echo "⏱️ CI detected - quick download (10s timeout, no retries)"; \

View File

@@ -54,7 +54,7 @@ If you can use a website, you can run Charon.
Charon includes security features that normally require multiple tools:
- Web Application Firewall (WAF)
- CrowdSec intrusion detection
- CrowdSec intrusion detection with analytics dashboard
- Access Control Lists (ACLs)
- Rate limiting
- Emergency recovery tools
@@ -148,7 +148,7 @@ Secure all your subdomains with a single *.example.com certificate. Supports 15+
### 🛡️ **Enterprise-Grade Security Built In**
Web Application Firewall, rate limiting, geographic blocking, access control lists, and intrusion detection via CrowdSec. Protection that "just works."
Web Application Firewall, rate limiting, geographic blocking, access control lists, and intrusion detection via CrowdSec—with a built-in analytics dashboard showing attack trends, top offenders, and ban history. Protection that "just works."
### 🔐 **Supply Chain Security**

View File

@@ -27,7 +27,91 @@ public disclosure.
## Known Vulnerabilities
Last reviewed: 2026-03-24
Last reviewed: 2026-04-09
### [HIGH] CVE-2026-31790 · OpenSSL Vulnerability in Alpine Base Image
| Field | Value |
|--------------|-------|
| **ID** | CVE-2026-31790 (affects `libcrypto3` and `libssl3`) |
| **Severity** | High · CVSS pending |
| **Status** | Awaiting Upstream |
**What**
An OpenSSL vulnerability in the Alpine base image system packages `libcrypto3` and `libssl3`.
This is a pre-existing issue in the Alpine base image and was not introduced by Charon.
**Who**
- Discovered by: Automated scan (Grype)
- Reported: 2026-04-09
- Affects: Container runtime environment; does not affect Charon application code directly
**Where**
- Component: Alpine base image (`libcrypto3`, `libssl3`)
- Versions affected: Current Alpine base image OpenSSL packages
**When**
- Discovered: 2026-04-09
- Disclosed (if public): Public
- Target fix: When Alpine Security publishes a patched OpenSSL APK
**How**
The vulnerability resides in Alpine's system OpenSSL library and affects TLS operations at
the OS level. Charon's application code does not directly invoke these libraries. Practical
exploitability depends on direct TLS usage through the system OpenSSL, which is limited to
the container runtime environment.
**Planned Remediation**
Monitor <https://security.alpinelinux.org/> for a patched Alpine APK. No upstream fix
available as of 2026-04-09. Once available, update the pinned `ALPINE_IMAGE` digest in the
Dockerfile.
---
### [HIGH] CVE-2026-34040 · Docker AuthZ Plugin Bypass via Oversized Request Body
| Field | Value |
|--------------|-------|
| **ID** | CVE-2026-34040 (GHSA-x744-4wpc-v9h2) |
| **Severity** | High · 8.8 |
| **Status** | Awaiting Upstream |
**What**
Docker Engine AuthZ plugins can be bypassed when an API request body exceeds a
certain size threshold. Charon uses the Docker client SDK only; this is a
server-side vulnerability in the Docker daemon's authorization plugin handler.
**Who**
- Discovered by: Automated scan (govulncheck, Grype)
- Reported: 2026-04-04
- Affects: Docker Engine daemon operators; Charon application is not directly vulnerable
**Where**
- Component: `github.com/docker/docker` v28.5.2+incompatible (Docker client SDK)
- Versions affected: Docker Engine < 29.3.1
**When**
- Discovered: 2026-04-04
- Disclosed (if public): Public
- Target fix: When moby/moby/v2 stabilizes or docker/docker import path is updated
**How**
The vulnerability requires an attacker to send oversized API request bodies to the
Docker daemon. Charon uses the Docker client SDK for container management operations
only and does not expose the Docker socket externally. The attack vector is limited
to the Docker daemon host, not the Charon application.
**Planned Remediation**
Monitor moby/moby/v2 module stabilization. The `docker/docker` import path has no
fix available. When a compatible module path exists, migrate the Docker SDK import.
---
### [HIGH] CVE-2026-2673 · OpenSSL TLS 1.3 Key Exchange Group Downgrade
@@ -73,6 +157,48 @@ available, update the pinned `ALPINE_IMAGE` digest in the Dockerfile, or add an
---
### [MEDIUM] CVE-2026-33997 · Docker Off-by-One Plugin Privilege Validation
| Field | Value |
|--------------|-------|
| **ID** | CVE-2026-33997 (GHSA-pxq6-2prw-chj9) |
| **Severity** | Medium · 6.8 |
| **Status** | Awaiting Upstream |
**What**
An off-by-one error in Docker Engine's plugin privilege validation could allow
a malicious plugin to escalate privileges. Charon uses the Docker client SDK
for container management and does not install or manage Docker plugins.
**Who**
- Discovered by: Automated scan (govulncheck, Grype)
- Reported: 2026-04-04
- Affects: Docker Engine plugin operators; Charon application is not directly vulnerable
**Where**
- Component: `github.com/docker/docker` v28.5.2+incompatible (Docker client SDK)
- Versions affected: Docker Engine < 29.3.1
**When**
- Discovered: 2026-04-04
- Disclosed (if public): Public
- Target fix: When moby/moby/v2 stabilizes or docker/docker import path is updated
**How**
The vulnerability is in Docker Engine's plugin privilege validation at the
daemon level. Charon does not use Docker plugins — it only manages containers
via the Docker client SDK. The attack requires a malicious Docker plugin to be
installed on the host, which is outside Charon's operational scope.
**Planned Remediation**
Same as CVE-2026-34040: monitor moby/moby/v2 module stabilization. No fix
available for the current `docker/docker` import path.
---
### [MEDIUM] CVE-2025-60876 · BusyBox wget HTTP Request Smuggling
| Field | Value |
@@ -111,15 +237,15 @@ attacker-controlled URLs. Charon's application logic does not use busybox wget.
Monitor Alpine 3.23 for a patched busybox APK. No immediate action required. Practical risk to
Charon users is negligible since the vulnerable code path is not exercised.
---
## Patched Vulnerabilities
### [LOW] CVE-2026-26958 · edwards25519 MultiScalarMult Invalid Results
### [LOW] CVE-2026-26958 · edwards25519 MultiScalarMult Invalid Results
| Field | Value |
|--------------|-------|
| **ID** | CVE-2026-26958 (GHSA-fw7p-63qq-7hpr) |
| **Severity** | Low · 1.7 |
| **Status** | Patched |
| **Patched** | 2026-04-04 |
**What**
`filippo.io/edwards25519` v1.1.0 `MultiScalarMult` produces invalid results or undefined
@@ -130,8 +256,6 @@ CrowdSec to rebuild.
- Discovered by: Automated scan (Grype)
- Reported: 2026-03-24
- Affects: CrowdSec Agent component within the container; not directly exposed through Charon's
primary application interface
**Where**
@@ -141,21 +265,19 @@ CrowdSec to rebuild.
**When**
- Discovered: 2026-03-24
- Disclosed (if public): Public
- Target fix: When CrowdSec releases a build with updated dependency
- Patched: 2026-04-04
- Time to patch: 11 days
**How**
This is a rarely used advanced API within the edwards25519 library. CrowdSec does not directly
expose MultiScalarMult to external input. EPSS score is 0.00018 (0.04 percentile).
**Planned Remediation**
Awaiting CrowdSec upstream release with updated dependency. No action available for Charon
maintainers.
**Resolution**
Dependency no longer present in Charon's dependency tree. CrowdSec binaries no longer bundle
affected version.
---
## Patched Vulnerabilities
### ✅ [CRITICAL] CVE-2025-68121 · Go Stdlib Critical in CrowdSec Bundled Binaries
| Field | Value |

View File

@@ -1,24 +1,24 @@
module github.com/Wikid82/charon/backend
go 1.26.1
go 1.26.2
require (
github.com/docker/docker v28.5.2+incompatible
github.com/gin-contrib/gzip v1.2.5
github.com/gin-contrib/gzip v1.2.6
github.com/gin-gonic/gin v1.12.0
github.com/glebarez/sqlite v1.11.0
github.com/golang-jwt/jwt/v5 v5.3.1
github.com/google/uuid v1.6.0
github.com/gorilla/websocket v1.5.3
github.com/mattn/go-sqlite3 v1.14.37
github.com/mattn/go-sqlite3 v1.14.42
github.com/oschwald/geoip2-golang/v2 v2.1.0
github.com/prometheus/client_golang v1.23.2
github.com/robfig/cron/v3 v3.0.1
github.com/sirupsen/logrus v1.9.4
github.com/stretchr/testify v1.11.1
golang.org/x/crypto v0.49.0
golang.org/x/net v0.52.0
golang.org/x/text v0.35.0
golang.org/x/crypto v0.50.0
golang.org/x/net v0.53.0
golang.org/x/text v0.36.0
golang.org/x/time v0.15.0
gopkg.in/natefinch/lumberjack.v2 v2.2.1
gorm.io/driver/sqlite v1.6.0
@@ -30,7 +30,7 @@ require (
github.com/beorn7/perks v1.0.1 // indirect
github.com/bytedance/gopkg v0.1.4 // indirect
github.com/bytedance/sonic v1.15.0 // indirect
github.com/bytedance/sonic/loader v0.5.0 // indirect
github.com/bytedance/sonic/loader v0.5.1 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cloudwego/base64x v0.1.6 // indirect
github.com/containerd/errdefs v1.0.0 // indirect
@@ -43,13 +43,13 @@ require (
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/gabriel-vasile/mimetype v1.4.13 // indirect
github.com/gin-contrib/sse v1.1.0 // indirect
github.com/gin-contrib/sse v1.1.1 // indirect
github.com/glebarez/go-sqlite v1.22.0 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.30.1 // indirect
github.com/go-playground/validator/v10 v10.30.2 // indirect
github.com/goccy/go-json v0.10.6 // indirect
github.com/goccy/go-yaml v1.19.2 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
@@ -58,7 +58,7 @@ require (
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-isatty v0.0.21 // indirect
github.com/moby/docker-image-spec v1.3.1 // indirect
github.com/moby/sys/atomicwriter v0.1.0 // indirect
github.com/moby/term v0.5.2 // indirect
@@ -84,20 +84,19 @@ require (
github.com/ugorji/go/codec v1.3.1 // indirect
go.mongodb.org/mongo-driver/v2 v2.5.0 // indirect
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0 // indirect
go.opentelemetry.io/otel v1.42.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.42.0 // indirect
go.opentelemetry.io/otel/metric v1.42.0 // indirect
go.opentelemetry.io/otel/trace v1.42.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.68.0 // indirect
go.opentelemetry.io/otel v1.43.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.43.0 // indirect
go.opentelemetry.io/otel/metric v1.43.0 // indirect
go.opentelemetry.io/otel/trace v1.43.0 // indirect
go.yaml.in/yaml/v2 v2.4.4 // indirect
golang.org/x/arch v0.25.0 // indirect
golang.org/x/sys v0.42.0 // indirect
google.golang.org/grpc v1.79.3 // indirect
golang.org/x/arch v0.26.0 // indirect
golang.org/x/sys v0.43.0 // indirect
google.golang.org/protobuf v1.36.11 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
gotest.tools/v3 v3.5.2 // indirect
modernc.org/libc v1.70.0 // indirect
modernc.org/libc v1.72.0 // indirect
modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.11.0 // indirect
modernc.org/sqlite v1.47.0 // indirect
modernc.org/sqlite v1.48.2 // indirect
)

View File

@@ -8,8 +8,8 @@ github.com/bytedance/gopkg v0.1.4 h1:oZnQwnX82KAIWb7033bEwtxvTqXcYMxDBaQxo5JJHWM
github.com/bytedance/gopkg v0.1.4/go.mod h1:v1zWfPm21Fb+OsyXN2VAHdL6TBb2L88anLQgdyje6R4=
github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE=
github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k=
github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE=
github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo=
github.com/bytedance/sonic/loader v0.5.1 h1:Ygpfa9zwRCCKSlrp5bBP/b/Xzc3VxsAW+5NIYXrOOpI=
github.com/bytedance/sonic/loader v0.5.1/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo=
github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM=
github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
@@ -39,10 +39,10 @@ github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/gabriel-vasile/mimetype v1.4.13 h1:46nXokslUBsAJE/wMsp5gtO500a4F3Nkz9Ufpk2AcUM=
github.com/gabriel-vasile/mimetype v1.4.13/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
github.com/gin-contrib/gzip v1.2.5 h1:fIZs0S+l17pIu1P5XRJOo/YNqfIuPCrZZ3TWB7pjckI=
github.com/gin-contrib/gzip v1.2.5/go.mod h1:aomRgR7ftdZV3uWY0gW/m8rChfxau0n8YVvwlOHONzw=
github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w=
github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM=
github.com/gin-contrib/gzip v1.2.6 h1:OtN8DplD5DNZCSLAnQ5HxRkD2qZ5VU+JhOrcfJrcRvg=
github.com/gin-contrib/gzip v1.2.6/go.mod h1:BQy8/+JApnRjAVUplSGZiVtD2k8GmIE2e9rYu/hLzzU=
github.com/gin-contrib/sse v1.1.1 h1:uGYpNwTacv5R68bSGMapo62iLTRa9l5zxGCps4hK6ko=
github.com/gin-contrib/sse v1.1.1/go.mod h1:QXzuVkA0YO7o/gun03UI1Q+FTI8ZV/n5t03kIQAI89s=
github.com/gin-gonic/gin v1.12.0 h1:b3YAbrZtnf8N//yjKeU2+MQsh2mY5htkZidOM7O0wG8=
github.com/gin-gonic/gin v1.12.0/go.mod h1:VxccKfsSllpKshkBWgVgRniFFAzFb9csfngsqANjnLc=
github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ=
@@ -60,8 +60,8 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.30.1 h1:f3zDSN/zOma+w6+1Wswgd9fLkdwy06ntQJp0BBvFG0w=
github.com/go-playground/validator/v10 v10.30.1/go.mod h1:oSuBIQzuJxL//3MelwSLD5hc2Tu889bF0Idm9Dg26cM=
github.com/go-playground/validator/v10 v10.30.2 h1:JiFIMtSSHb2/XBUbWM4i/MpeQm9ZK2xqPNk8vgvu5JQ=
github.com/go-playground/validator/v10 v10.30.2/go.mod h1:mAf2pIOVXjTEBrwUMGKkCWKKPs9NheYGabeB04txQSc=
github.com/goccy/go-json v0.10.6 h1:p8HrPJzOakx/mn/bQtjgNjdTcN+/S6FcG2CTtQOrHVU=
github.com/goccy/go-json v0.10.6/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-yaml v1.19.2 h1:PmFC1S6h8ljIz6gMRBopkjP1TVT7xuwrButHID66PoM=
@@ -99,10 +99,10 @@ github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.37 h1:3DOZp4cXis1cUIpCfXLtmlGolNLp2VEqhiB/PARNBIg=
github.com/mattn/go-sqlite3 v1.14.37/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mattn/go-isatty v0.0.21 h1:xYae+lCNBP7QuW4PUnNG61ffM4hVIfm+zUzDuSzYLGs=
github.com/mattn/go-isatty v0.0.21/go.mod h1:ZXfXG4SQHsB/w3ZeOYbR0PrPwLy+n6xiMrJlRFqopa4=
github.com/mattn/go-sqlite3 v1.14.42 h1:MigqEP4ZmHw3aIdIT7T+9TLa90Z6smwcthx+Azv4Cgo=
github.com/mattn/go-sqlite3 v1.14.42/go.mod h1:pjEuOr8IwzLJP2MfGeTb0A35jauH+C2kbHKBr7yXKVQ=
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw=
@@ -177,55 +177,54 @@ go.mongodb.org/mongo-driver/v2 v2.5.0 h1:yXUhImUjjAInNcpTcAlPHiT7bIXhshCTL3jVBkF
go.mongodb.org/mongo-driver/v2 v2.5.0/go.mod h1:yOI9kBsufol30iFsl1slpdq1I0eHPzybRWdyYUs8K/0=
go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64=
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0 h1:OyrsyzuttWTSur2qN/Lm0m2a8yqyIjUVBZcxFPuXq2o=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0/go.mod h1:C2NGBr+kAB4bk3xtMXfZ94gqFDtg/GkI7e9zqGh5Beg=
go.opentelemetry.io/otel v1.42.0 h1:lSQGzTgVR3+sgJDAU/7/ZMjN9Z+vUip7leaqBKy4sho=
go.opentelemetry.io/otel v1.42.0/go.mod h1:lJNsdRMxCUIWuMlVJWzecSMuNjE7dOYyWlqOXWkdqCc=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.42.0 h1:THuZiwpQZuHPul65w4WcwEnkX2QIuMT+UFoOrygtoJw=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.42.0/go.mod h1:J2pvYM5NGHofZ2/Ru6zw/TNWnEQp5crgyDeSrYpXkAw=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.42.0 h1:uLXP+3mghfMf7XmV4PkGfFhFKuNWoCvvx5wP/wOXo0o=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.42.0/go.mod h1:v0Tj04armyT59mnURNUJf7RCKcKzq+lgJs6QSjHjaTc=
go.opentelemetry.io/otel/metric v1.42.0 h1:2jXG+3oZLNXEPfNmnpxKDeZsFI5o4J+nz6xUlaFdF/4=
go.opentelemetry.io/otel/metric v1.42.0/go.mod h1:RlUN/7vTU7Ao/diDkEpQpnz3/92J9ko05BIwxYa2SSI=
go.opentelemetry.io/otel/sdk v1.42.0 h1:LyC8+jqk6UJwdrI/8VydAq/hvkFKNHZVIWuslJXYsDo=
go.opentelemetry.io/otel/sdk v1.42.0/go.mod h1:rGHCAxd9DAph0joO4W6OPwxjNTYWghRWmkHuGbayMts=
go.opentelemetry.io/otel/sdk/metric v1.42.0 h1:D/1QR46Clz6ajyZ3G8SgNlTJKBdGp84q9RKCAZ3YGuA=
go.opentelemetry.io/otel/sdk/metric v1.42.0/go.mod h1:Ua6AAlDKdZ7tdvaQKfSmnFTdHx37+J4ba8MwVCYM5hc=
go.opentelemetry.io/otel/trace v1.42.0 h1:OUCgIPt+mzOnaUTpOQcBiM/PLQ/Op7oq6g4LenLmOYY=
go.opentelemetry.io/otel/trace v1.42.0/go.mod h1:f3K9S+IFqnumBkKhRJMeaZeNk9epyhnCmQh/EysQCdc=
go.opentelemetry.io/proto/otlp v1.9.0 h1:l706jCMITVouPOqEnii2fIAuO3IVGBRPV5ICjceRb/A=
go.opentelemetry.io/proto/otlp v1.9.0/go.mod h1:xE+Cx5E/eEHw+ISFkwPLwCZefwVjY+pqKg1qcK03+/4=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.68.0 h1:CqXxU8VOmDefoh0+ztfGaymYbhdB/tT3zs79QaZTNGY=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.68.0/go.mod h1:BuhAPThV8PBHBvg8ZzZ/Ok3idOdhWIodywz2xEcRbJo=
go.opentelemetry.io/otel v1.43.0 h1:mYIM03dnh5zfN7HautFE4ieIig9amkNANT+xcVxAj9I=
go.opentelemetry.io/otel v1.43.0/go.mod h1:JuG+u74mvjvcm8vj8pI5XiHy1zDeoCS2LB1spIq7Ay0=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.43.0 h1:88Y4s2C8oTui1LGM6bTWkw0ICGcOLCAI5l6zsD1j20k=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.43.0/go.mod h1:Vl1/iaggsuRlrHf/hfPJPvVag77kKyvrLeD10kpMl+A=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.43.0 h1:3iZJKlCZufyRzPzlQhUIWVmfltrXuGyfjREgGP3UUjc=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.43.0/go.mod h1:/G+nUPfhq2e+qiXMGxMwumDrP5jtzU+mWN7/sjT2rak=
go.opentelemetry.io/otel/metric v1.43.0 h1:d7638QeInOnuwOONPp4JAOGfbCEpYb+K6DVWvdxGzgM=
go.opentelemetry.io/otel/metric v1.43.0/go.mod h1:RDnPtIxvqlgO8GRW18W6Z/4P462ldprJtfxHxyKd2PY=
go.opentelemetry.io/otel/sdk v1.43.0 h1:pi5mE86i5rTeLXqoF/hhiBtUNcrAGHLKQdhg4h4V9Dg=
go.opentelemetry.io/otel/sdk v1.43.0/go.mod h1:P+IkVU3iWukmiit/Yf9AWvpyRDlUeBaRg6Y+C58QHzg=
go.opentelemetry.io/otel/sdk/metric v1.43.0 h1:S88dyqXjJkuBNLeMcVPRFXpRw2fuwdvfCGLEo89fDkw=
go.opentelemetry.io/otel/sdk/metric v1.43.0/go.mod h1:C/RJtwSEJ5hzTiUz5pXF1kILHStzb9zFlIEe85bhj6A=
go.opentelemetry.io/otel/trace v1.43.0 h1:BkNrHpup+4k4w+ZZ86CZoHHEkohws8AY+WTX09nk+3A=
go.opentelemetry.io/otel/trace v1.43.0/go.mod h1:/QJhyVBUUswCphDVxq+8mld+AvhXZLhe+8WVFxiFff0=
go.opentelemetry.io/proto/otlp v1.10.0 h1:IQRWgT5srOCYfiWnpqUYz9CVmbO8bFmKcwYxpuCSL2g=
go.opentelemetry.io/proto/otlp v1.10.0/go.mod h1:/CV4QoCR/S9yaPj8utp3lvQPoqMtxXdzn7ozvvozVqk=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y=
go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU=
go.yaml.in/yaml/v2 v2.4.4 h1:tuyd0P+2Ont/d6e2rl3be67goVK4R6deVxCUX5vyPaQ=
go.yaml.in/yaml/v2 v2.4.4/go.mod h1:gMZqIpDtDqOfM0uNfy0SkpRhvUryYH0Z6wdMYcacYXQ=
golang.org/x/arch v0.25.0 h1:qnk6Ksugpi5Bz32947rkUgDt9/s5qvqDPl/gBKdMJLE=
golang.org/x/arch v0.25.0/go.mod h1:0X+GdSIP+kL5wPmpK7sdkEVTt2XoYP0cSjQSbZBwOi8=
golang.org/x/crypto v0.49.0 h1:+Ng2ULVvLHnJ/ZFEq4KdcDd/cfjrrjjNSXNzxg0Y4U4=
golang.org/x/crypto v0.49.0/go.mod h1:ErX4dUh2UM+CFYiXZRTcMpEcN8b/1gxEuv3nODoYtCA=
golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8=
golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w=
golang.org/x/net v0.52.0 h1:He/TN1l0e4mmR3QqHMT2Xab3Aj3L9qjbhRm78/6jrW0=
golang.org/x/net v0.52.0/go.mod h1:R1MAz7uMZxVMualyPXb+VaqGSa3LIaUqk0eEt3w36Sw=
golang.org/x/arch v0.26.0 h1:jZ6dpec5haP/fUv1kLCbuJy6dnRrfX6iVK08lZBFpk4=
golang.org/x/arch v0.26.0/go.mod h1:0X+GdSIP+kL5wPmpK7sdkEVTt2XoYP0cSjQSbZBwOi8=
golang.org/x/crypto v0.50.0 h1:zO47/JPrL6vsNkINmLoo/PH1gcxpls50DNogFvB5ZGI=
golang.org/x/crypto v0.50.0/go.mod h1:3muZ7vA7PBCE6xgPX7nkzzjiUq87kRItoJQM1Yo8S+Q=
golang.org/x/mod v0.34.0 h1:xIHgNUUnW6sYkcM5Jleh05DvLOtwc6RitGHbDk4akRI=
golang.org/x/mod v0.34.0/go.mod h1:ykgH52iCZe79kzLLMhyCUzhMci+nQj+0XkbXpNYtVjY=
golang.org/x/net v0.53.0 h1:d+qAbo5L0orcWAr0a9JweQpjXF19LMXJE8Ey7hwOdUA=
golang.org/x/net v0.53.0/go.mod h1:JvMuJH7rrdiCfbeHoo3fCQU24Lf5JJwT9W3sJFulfgs=
golang.org/x/sync v0.20.0 h1:e0PTpb7pjO8GAtTs2dQ6jYa5BWYlMuX047Dco/pItO4=
golang.org/x/sync v0.20.0/go.mod h1:9xrNwdLfx4jkKbNva9FpL6vEN7evnE43NNNJQ2LF3+0=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.42.0 h1:omrd2nAlyT5ESRdCLYdm3+fMfNFE/+Rf4bDIQImRJeo=
golang.org/x/sys v0.42.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw=
golang.org/x/text v0.35.0 h1:JOVx6vVDFokkpaq1AEptVzLTpDe9KGpj5tR4/X+ybL8=
golang.org/x/text v0.35.0/go.mod h1:khi/HExzZJ2pGnjenulevKNX1W67CUy0AsXcNubPGCA=
golang.org/x/sys v0.43.0 h1:Rlag2XtaFTxp19wS8MXlJwTvoh8ArU6ezoyFsMyCTNI=
golang.org/x/sys v0.43.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw=
golang.org/x/text v0.36.0 h1:JfKh3XmcRPqZPKevfXVpI1wXPTqbkE5f7JA92a55Yxg=
golang.org/x/text v0.36.0/go.mod h1:NIdBknypM8iqVmPiuco0Dh6P5Jcdk8lJL0CUebqK164=
golang.org/x/time v0.15.0 h1:bbrp8t3bGUeFOx08pvsMYRTCVSMk89u4tKbNOZbp88U=
golang.org/x/time v0.15.0/go.mod h1:Y4YMaQmXwGQZoFaVFk4YpCt4FLQMYKZe9oeV/f4MSno=
golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k=
golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0=
google.golang.org/genproto/googleapis/api v0.0.0-20260209200024-4cfbd4190f57 h1:JLQynH/LBHfCTSbDWl+py8C+Rg/k1OVH3xfcaiANuF0=
google.golang.org/genproto/googleapis/api v0.0.0-20260209200024-4cfbd4190f57/go.mod h1:kSJwQxqmFXeo79zOmbrALdflXQeAYcUbgS7PbpMknCY=
google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 h1:mWPCjDEyshlQYzBpMNHaEof6UX1PmHcaUODUywQ0uac=
google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ=
google.golang.org/grpc v1.79.3 h1:sybAEdRIEtvcD68Gx7dmnwjZKlyfuc61Dyo9pGXXkKE=
google.golang.org/grpc v1.79.3/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ=
golang.org/x/tools v0.43.0 h1:12BdW9CeB3Z+J/I/wj34VMl8X+fEXBxVR90JeMX5E7s=
golang.org/x/tools v0.43.0/go.mod h1:uHkMso649BX2cZK6+RpuIPXS3ho2hZo4FVwfoy1vIk0=
google.golang.org/genproto/googleapis/api v0.0.0-20260401024825-9d38bb4040a9 h1:VPWxll4HlMw1Vs/qXtN7BvhZqsS9cdAittCNvVENElA=
google.golang.org/genproto/googleapis/api v0.0.0-20260401024825-9d38bb4040a9/go.mod h1:7QBABkRtR8z+TEnmXTqIqwJLlzrZKVfAUm7tY3yGv0M=
google.golang.org/genproto/googleapis/rpc v0.0.0-20260401024825-9d38bb4040a9 h1:m8qni9SQFH0tJc1X0vmnpw/0t+AImlSvp30sEupozUg=
google.golang.org/genproto/googleapis/rpc v0.0.0-20260401024825-9d38bb4040a9/go.mod h1:4Hqkh8ycfw05ld/3BWL7rJOSfebL2Q+DVDeRgYgxUU8=
google.golang.org/grpc v1.80.0 h1:Xr6m2WmWZLETvUNvIUmeD5OAagMw3FiKmMlTdViWsHM=
google.golang.org/grpc v1.80.0/go.mod h1:ho/dLnxwi3EDJA4Zghp7k2Ec1+c2jqup0bFkw07bwF4=
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -242,10 +241,10 @@ gorm.io/gorm v1.31.1 h1:7CA8FTFz/gRfgqgpeKIBcervUn3xSyPUmr6B2WXJ7kg=
gorm.io/gorm v1.31.1/go.mod h1:XyQVbO2k6YkOis7C2437jSit3SsDK72s7n7rsSHd+Gs=
gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q=
gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA=
modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
modernc.org/cc/v4 v4.27.1/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
modernc.org/ccgo/v4 v4.32.0 h1:hjG66bI/kqIPX1b2yT6fr/jt+QedtP2fqojG2VrFuVw=
modernc.org/ccgo/v4 v4.32.0/go.mod h1:6F08EBCx5uQc38kMGl+0Nm0oWczoo1c7cgpzEry7Uc0=
modernc.org/cc/v4 v4.27.3 h1:uNCgn37E5U09mTv1XgskEVUJ8ADKpmFMPxzGJ0TSo+U=
modernc.org/cc/v4 v4.27.3/go.mod h1:3YjcbCqhoTTHPycJDRl2WZKKFj0nwcOIPBfEZK0Hdk8=
modernc.org/ccgo/v4 v4.32.4 h1:L5OB8rpEX4ZsXEQwGozRfJyJSFHbbNVOoQ59DU9/KuU=
modernc.org/ccgo/v4 v4.32.4/go.mod h1:lY7f+fiTDHfcv6YlRgSkxYfhs+UvOEEzj49jAn2TOx0=
modernc.org/fileutil v1.4.0 h1:j6ZzNTftVS054gi281TyLjHPp6CPHr2KCxEXjEbD6SM=
modernc.org/fileutil v1.4.0/go.mod h1:EqdKFDxiByqxLk8ozOxObDSfcVOv/54xDs/DUHdvCUU=
modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=
@@ -254,8 +253,8 @@ modernc.org/gc/v3 v3.1.2 h1:ZtDCnhonXSZexk/AYsegNRV1lJGgaNZJuKjJSWKyEqo=
modernc.org/gc/v3 v3.1.2/go.mod h1:HFK/6AGESC7Ex+EZJhJ2Gni6cTaYpSMmU/cT9RmlfYY=
modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks=
modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI=
modernc.org/libc v1.70.0 h1:U58NawXqXbgpZ/dcdS9kMshu08aiA6b7gusEusqzNkw=
modernc.org/libc v1.70.0/go.mod h1:OVmxFGP1CI/Z4L3E0Q3Mf1PDE0BucwMkcXjjLntvHJo=
modernc.org/libc v1.72.0 h1:IEu559v9a0XWjw0DPoVKtXpO2qt5NVLAnFaBbjq+n8c=
modernc.org/libc v1.72.0/go.mod h1:tTU8DL8A+XLVkEY3x5E/tO7s2Q/q42EtnNWda/L5QhQ=
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
@@ -264,8 +263,8 @@ modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
modernc.org/sqlite v1.47.0 h1:R1XyaNpoW4Et9yly+I2EeX7pBza/w+pmYee/0HJDyKk=
modernc.org/sqlite v1.47.0/go.mod h1:hWjRO6Tj/5Ik8ieqxQybiEOUXy0NJFNp2tpvVpKlvig=
modernc.org/sqlite v1.48.2 h1:5CnW4uP8joZtA0LedVqLbZV5GD7F/0x91AXeSyjoh5c=
modernc.org/sqlite v1.48.2/go.mod h1:hWjRO6Tj/5Ik8ieqxQybiEOUXy0NJFNp2tpvVpKlvig=
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=

View File

@@ -121,7 +121,6 @@ func TestAccessListHandler_List_DBError(t *testing.T) {
db, _ := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
// Don't migrate the table to cause error
gin.SetMode(gin.TestMode)
router := gin.New()
handler := NewAccessListHandler(db)
@@ -138,7 +137,6 @@ func TestAccessListHandler_Get_DBError(t *testing.T) {
db, _ := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
// Don't migrate the table to cause error
gin.SetMode(gin.TestMode)
router := gin.New()
handler := NewAccessListHandler(db)
@@ -157,7 +155,6 @@ func TestAccessListHandler_Delete_InternalError(t *testing.T) {
// Migrate AccessList but not ProxyHost to cause internal error on delete
_ = db.AutoMigrate(&models.AccessList{})
gin.SetMode(gin.TestMode)
router := gin.New()
handler := NewAccessListHandler(db)
@@ -285,7 +282,6 @@ func TestAccessListHandler_TestIP_InternalError(t *testing.T) {
db, _ := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
// Don't migrate - this causes a "no such table" error which is an internal error
gin.SetMode(gin.TestMode)
router := gin.New()
handler := NewAccessListHandler(db)

View File

@@ -21,7 +21,6 @@ func setupAccessListTestRouter(t *testing.T) (*gin.Engine, *gorm.DB) {
err = db.AutoMigrate(&models.AccessList{}, &models.ProxyHost{})
assert.NoError(t, err)
gin.SetMode(gin.TestMode)
router := gin.New()
handler := NewAccessListHandler(db)

View File

@@ -27,7 +27,6 @@ func setupImportCoverageDB(t *testing.T) *gorm.DB {
}
func TestImportHandler_Commit_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -44,7 +43,6 @@ func TestImportHandler_Commit_InvalidJSON(t *testing.T) {
}
func TestImportHandler_Commit_InvalidSessionUUID(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -67,7 +65,6 @@ func TestImportHandler_Commit_InvalidSessionUUID(t *testing.T) {
}
func TestImportHandler_Commit_SessionNotFound(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -98,7 +95,6 @@ func setupRemoteServerCoverageDB2(t *testing.T) *gorm.DB {
}
func TestRemoteServerHandler_TestConnection_Unreachable(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupRemoteServerCoverageDB2(t)
svc := services.NewRemoteServerService(db)
h := NewRemoteServerHandler(svc, nil)
@@ -137,7 +133,6 @@ func setupSecurityCoverageDB3(t *testing.T) *gorm.DB {
}
func TestSecurityHandler_GetConfig_InternalError(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
h := NewSecurityHandler(config.SecurityConfig{}, db, nil)
@@ -157,7 +152,6 @@ func TestSecurityHandler_GetConfig_InternalError(t *testing.T) {
}
func TestSecurityHandler_UpdateConfig_ApplyCaddyError(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
// Create handler with nil caddy manager (ApplyConfig will be called but is nil)
@@ -181,7 +175,6 @@ func TestSecurityHandler_UpdateConfig_ApplyCaddyError(t *testing.T) {
}
func TestSecurityHandler_GenerateBreakGlass_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
h := NewSecurityHandler(config.SecurityConfig{}, db, nil)
@@ -201,7 +194,6 @@ func TestSecurityHandler_GenerateBreakGlass_Error(t *testing.T) {
}
func TestSecurityHandler_ListDecisions_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
h := NewSecurityHandler(config.SecurityConfig{}, db, nil)
@@ -220,7 +212,6 @@ func TestSecurityHandler_ListDecisions_Error(t *testing.T) {
}
func TestSecurityHandler_ListRuleSets_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
h := NewSecurityHandler(config.SecurityConfig{}, db, nil)
@@ -239,7 +230,6 @@ func TestSecurityHandler_ListRuleSets_Error(t *testing.T) {
}
func TestSecurityHandler_UpsertRuleSet_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
h := NewSecurityHandler(config.SecurityConfig{}, db, nil)
@@ -265,7 +255,6 @@ func TestSecurityHandler_UpsertRuleSet_Error(t *testing.T) {
}
func TestSecurityHandler_CreateDecision_LogError(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
h := NewSecurityHandler(config.SecurityConfig{}, db, nil)
@@ -291,7 +280,6 @@ func TestSecurityHandler_CreateDecision_LogError(t *testing.T) {
}
func TestSecurityHandler_DeleteRuleSet_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSecurityCoverageDB3(t)
h := NewSecurityHandler(config.SecurityConfig{}, db, nil)
@@ -313,7 +301,6 @@ func TestSecurityHandler_DeleteRuleSet_Error(t *testing.T) {
// CrowdSec ImportConfig additional coverage tests
func TestCrowdsec_ImportConfig_EmptyUpload(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -344,7 +331,6 @@ func TestCrowdsec_ImportConfig_EmptyUpload(t *testing.T) {
// Backup Handler additional coverage tests
func TestBackupHandler_List_DBError(t *testing.T) {
gin.SetMode(gin.TestMode)
// Use a non-writable temp dir to simulate errors
tmpDir := t.TempDir()
@@ -370,7 +356,6 @@ func TestBackupHandler_List_DBError(t *testing.T) {
// ImportHandler UploadMulti coverage tests
func TestImportHandler_UploadMulti_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -387,7 +372,6 @@ func TestImportHandler_UploadMulti_InvalidJSON(t *testing.T) {
}
func TestImportHandler_UploadMulti_MissingCaddyfile(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -411,7 +395,6 @@ func TestImportHandler_UploadMulti_MissingCaddyfile(t *testing.T) {
}
func TestImportHandler_UploadMulti_EmptyContent(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -435,7 +418,6 @@ func TestImportHandler_UploadMulti_EmptyContent(t *testing.T) {
}
func TestImportHandler_UploadMulti_PathTraversal(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -481,7 +463,6 @@ func setupLogsDownloadTest(t *testing.T) (h *LogsHandler, logsDir string) {
}
func TestLogsHandler_Download_PathTraversal(t *testing.T) {
gin.SetMode(gin.TestMode)
h, _ := setupLogsDownloadTest(t)
w := httptest.NewRecorder()
@@ -496,7 +477,6 @@ func TestLogsHandler_Download_PathTraversal(t *testing.T) {
}
func TestLogsHandler_Download_NotFound(t *testing.T) {
gin.SetMode(gin.TestMode)
h, _ := setupLogsDownloadTest(t)
w := httptest.NewRecorder()
@@ -511,7 +491,6 @@ func TestLogsHandler_Download_NotFound(t *testing.T) {
}
func TestLogsHandler_Download_Success(t *testing.T) {
gin.SetMode(gin.TestMode)
h, logsDir := setupLogsDownloadTest(t)
// Create a log file to download
@@ -531,7 +510,6 @@ func TestLogsHandler_Download_Success(t *testing.T) {
// Import Handler Upload error tests
func TestImportHandler_Upload_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -548,7 +526,6 @@ func TestImportHandler_Upload_InvalidJSON(t *testing.T) {
}
func TestImportHandler_Upload_EmptyContent(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -571,7 +548,6 @@ func TestImportHandler_Upload_EmptyContent(t *testing.T) {
// Additional Backup Handler tests
func TestBackupHandler_List_ServiceError(t *testing.T) {
gin.SetMode(gin.TestMode)
// Create a temp dir with invalid permission for backup dir
tmpDir := t.TempDir()
@@ -608,7 +584,6 @@ func TestBackupHandler_List_ServiceError(t *testing.T) {
}
func TestBackupHandler_Delete_PathTraversal(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
dataDir := filepath.Join(tmpDir, "data")
@@ -639,7 +614,6 @@ func TestBackupHandler_Delete_PathTraversal(t *testing.T) {
}
func TestBackupHandler_Delete_InternalError2(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
dataDir := filepath.Join(tmpDir, "data")
@@ -689,7 +663,6 @@ func TestBackupHandler_Delete_InternalError2(t *testing.T) {
// Remote Server TestConnection error paths
func TestRemoteServerHandler_TestConnection_NotFound2(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupRemoteServerCoverageDB2(t)
svc := services.NewRemoteServerService(db)
h := NewRemoteServerHandler(svc, nil)
@@ -704,7 +677,6 @@ func TestRemoteServerHandler_TestConnection_NotFound2(t *testing.T) {
}
func TestRemoteServerHandler_TestConnectionCustom_Unreachable2(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupRemoteServerCoverageDB2(t)
svc := services.NewRemoteServerService(db)
h := NewRemoteServerHandler(svc, nil)
@@ -735,7 +707,6 @@ func setupAuthCoverageDB(t *testing.T) *gorm.DB {
}
func TestAuthHandler_Register_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuthCoverageDB(t)
cfg := config.Config{JWTSecret: "test-secret"}
@@ -755,7 +726,6 @@ func TestAuthHandler_Register_InvalidJSON(t *testing.T) {
// Health handler coverage
func TestHealthHandler_Basic(t *testing.T) {
gin.SetMode(gin.TestMode)
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
@@ -771,7 +741,6 @@ func TestHealthHandler_Basic(t *testing.T) {
// Backup Create error coverage
func TestBackupHandler_Create_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
// Use a path where database file doesn't exist
tmpDir := t.TempDir()
@@ -811,7 +780,6 @@ func setupSettingsCoverageDB(t *testing.T) *gorm.DB {
}
func TestSettingsHandler_GetSettings_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSettingsCoverageDB(t)
h := NewSettingsHandler(db)
@@ -830,7 +798,6 @@ func TestSettingsHandler_GetSettings_Error(t *testing.T) {
}
func TestSettingsHandler_UpdateSetting_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSettingsCoverageDB(t)
h := NewSettingsHandler(db)
@@ -849,7 +816,6 @@ func TestSettingsHandler_UpdateSetting_InvalidJSON(t *testing.T) {
// Additional remote server TestConnection tests
func TestRemoteServerHandler_TestConnection_Reachable(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupRemoteServerCoverageDB2(t)
svc := services.NewRemoteServerService(db)
h := NewRemoteServerHandler(svc, nil)
@@ -873,7 +839,6 @@ func TestRemoteServerHandler_TestConnection_Reachable(t *testing.T) {
}
func TestRemoteServerHandler_TestConnection_EmptyHost(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupRemoteServerCoverageDB2(t)
svc := services.NewRemoteServerService(db)
h := NewRemoteServerHandler(svc, nil)
@@ -900,7 +865,6 @@ func TestRemoteServerHandler_TestConnection_EmptyHost(t *testing.T) {
// Additional UploadMulti test with valid Caddyfile content
func TestImportHandler_UploadMulti_ValidCaddyfile(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")
@@ -925,7 +889,6 @@ func TestImportHandler_UploadMulti_ValidCaddyfile(t *testing.T) {
}
func TestImportHandler_UploadMulti_SubdirFile(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageDB(t)
h := NewImportHandler(db, "", t.TempDir(), "")

View File

@@ -30,7 +30,6 @@ func setupAuditLogTestDB(t *testing.T) *gorm.DB {
}
func TestAuditLogHandler_List(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -130,7 +129,6 @@ func TestAuditLogHandler_List(t *testing.T) {
}
func TestAuditLogHandler_Get(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -198,7 +196,6 @@ func TestAuditLogHandler_Get(t *testing.T) {
}
func TestAuditLogHandler_ListByProvider(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -286,7 +283,6 @@ func TestAuditLogHandler_ListByProvider(t *testing.T) {
}
func TestAuditLogHandler_ListWithDateFilters(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -371,7 +367,6 @@ func TestAuditLogHandler_ListWithDateFilters(t *testing.T) {
// TestAuditLogHandler_ServiceErrors tests error handling when service layer fails
func TestAuditLogHandler_ServiceErrors(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -422,7 +417,6 @@ func TestAuditLogHandler_ServiceErrors(t *testing.T) {
// TestAuditLogHandler_List_PaginationBoundaryEdgeCases tests pagination boundary edge cases
func TestAuditLogHandler_List_PaginationBoundaryEdgeCases(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -513,7 +507,6 @@ func TestAuditLogHandler_List_PaginationBoundaryEdgeCases(t *testing.T) {
// TestAuditLogHandler_ListByProvider_PaginationBoundaryEdgeCases tests pagination boundary edge cases for provider list
func TestAuditLogHandler_ListByProvider_PaginationBoundaryEdgeCases(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -583,7 +576,6 @@ func TestAuditLogHandler_ListByProvider_PaginationBoundaryEdgeCases(t *testing.T
// TestAuditLogHandler_List_InvalidDateFormats tests handling of invalid date formats
func TestAuditLogHandler_List_InvalidDateFormats(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupAuditLogTestDB(t)
securityService := services.NewSecurityService(db)
defer securityService.Close()
@@ -624,7 +616,6 @@ func TestAuditLogHandler_List_InvalidDateFormats(t *testing.T) {
// TestAuditLogHandler_Get_InternalError tests Get when service returns internal error
func TestAuditLogHandler_Get_InternalError(t *testing.T) {
gin.SetMode(gin.TestMode)
// Create a fresh DB and immediately close it to simulate internal error
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})

View File

@@ -6,7 +6,6 @@ import (
"encoding/json"
"net/http"
"net/http/httptest"
"os"
"testing"
"github.com/Wikid82/charon/backend/internal/api/middleware"
@@ -45,7 +44,6 @@ func TestAuthHandler_Login(t *testing.T) {
_ = user.SetPassword("password123")
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/login", handler.Login)
@@ -65,9 +63,6 @@ func TestAuthHandler_Login(t *testing.T) {
}
func TestSetSecureCookie_HTTPS_Strict(t *testing.T) {
gin.SetMode(gin.TestMode)
_ = os.Setenv("CHARON_ENV", "production")
defer func() { _ = os.Unsetenv("CHARON_ENV") }()
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "https://example.com/login", http.NoBody)
@@ -83,7 +78,6 @@ func TestSetSecureCookie_HTTPS_Strict(t *testing.T) {
func TestSetSecureCookie_HTTP_Lax(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "http://192.0.2.10/login", http.NoBody)
@@ -100,7 +94,6 @@ func TestSetSecureCookie_HTTP_Lax(t *testing.T) {
func TestSetSecureCookie_HTTP_Loopback_Insecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "http://127.0.0.1:8080/login", http.NoBody)
@@ -118,9 +111,6 @@ func TestSetSecureCookie_HTTP_Loopback_Insecure(t *testing.T) {
func TestSetSecureCookie_ForwardedHTTPS_LocalhostForcesInsecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
_ = os.Setenv("CHARON_ENV", "production")
defer func() { _ = os.Unsetenv("CHARON_ENV") }()
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
@@ -139,9 +129,6 @@ func TestSetSecureCookie_ForwardedHTTPS_LocalhostForcesInsecure(t *testing.T) {
func TestSetSecureCookie_ForwardedHTTPS_LoopbackForcesInsecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
_ = os.Setenv("CHARON_ENV", "production")
defer func() { _ = os.Unsetenv("CHARON_ENV") }()
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
@@ -160,9 +147,6 @@ func TestSetSecureCookie_ForwardedHTTPS_LoopbackForcesInsecure(t *testing.T) {
func TestSetSecureCookie_ForwardedHostLocalhostForcesInsecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
_ = os.Setenv("CHARON_ENV", "production")
defer func() { _ = os.Unsetenv("CHARON_ENV") }()
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
@@ -182,9 +166,6 @@ func TestSetSecureCookie_ForwardedHostLocalhostForcesInsecure(t *testing.T) {
func TestSetSecureCookie_OriginLoopbackForcesInsecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
_ = os.Setenv("CHARON_ENV", "production")
defer func() { _ = os.Unsetenv("CHARON_ENV") }()
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
@@ -204,7 +185,6 @@ func TestSetSecureCookie_OriginLoopbackForcesInsecure(t *testing.T) {
func TestSetSecureCookie_HTTP_PrivateIP_Insecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "http://192.168.1.50:8080/login", http.NoBody)
@@ -222,7 +202,6 @@ func TestSetSecureCookie_HTTP_PrivateIP_Insecure(t *testing.T) {
func TestSetSecureCookie_HTTP_10Network_Insecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "http://10.0.0.5:8080/login", http.NoBody)
@@ -240,7 +219,6 @@ func TestSetSecureCookie_HTTP_10Network_Insecure(t *testing.T) {
func TestSetSecureCookie_HTTP_172Network_Insecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "http://172.16.0.1:8080/login", http.NoBody)
@@ -258,7 +236,6 @@ func TestSetSecureCookie_HTTP_172Network_Insecure(t *testing.T) {
func TestSetSecureCookie_HTTPS_PrivateIP_Secure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "https://192.168.1.50:8080/login", http.NoBody)
@@ -276,7 +253,6 @@ func TestSetSecureCookie_HTTPS_PrivateIP_Secure(t *testing.T) {
func TestSetSecureCookie_HTTP_IPv6ULA_Insecure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "http://[fd12::1]:8080/login", http.NoBody)
@@ -294,7 +270,6 @@ func TestSetSecureCookie_HTTP_IPv6ULA_Insecure(t *testing.T) {
func TestSetSecureCookie_HTTP_PublicIP_Secure(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
req := httptest.NewRequest("POST", "http://203.0.113.5:8080/login", http.NoBody)
@@ -322,7 +297,6 @@ func TestIsProduction(t *testing.T) {
}
func TestRequestScheme(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Run("forwarded proto first value wins", func(t *testing.T) {
recorder := httptest.NewRecorder()
@@ -393,7 +367,6 @@ func TestHostHelpers(t *testing.T) {
}
func TestIsLocalRequest(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Run("forwarded host list includes localhost", func(t *testing.T) {
recorder := httptest.NewRecorder()
@@ -428,7 +401,6 @@ func TestIsLocalRequest(t *testing.T) {
}
func TestClearSecureCookie(t *testing.T) {
gin.SetMode(gin.TestMode)
recorder := httptest.NewRecorder()
ctx, _ := gin.CreateTestContext(recorder)
ctx.Request = httptest.NewRequest("POST", "http://example.com/logout", http.NoBody)
@@ -445,7 +417,6 @@ func TestClearSecureCookie(t *testing.T) {
func TestAuthHandler_Login_Errors(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/login", handler.Login)
@@ -473,7 +444,6 @@ func TestAuthHandler_Register(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/register", handler.Register)
@@ -497,7 +467,6 @@ func TestAuthHandler_Register_Duplicate(t *testing.T) {
handler, db := setupAuthHandler(t)
db.Create(&models.User{UUID: uuid.NewString(), Email: "dup@example.com", Name: "Dup"})
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/register", handler.Register)
@@ -519,7 +488,6 @@ func TestAuthHandler_Logout(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/logout", handler.Logout)
@@ -548,7 +516,6 @@ func TestAuthHandler_Me(t *testing.T) {
}
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
// Simulate middleware
r.Use(func(c *gin.Context) {
@@ -574,7 +541,6 @@ func TestAuthHandler_Me(t *testing.T) {
func TestAuthHandler_Me_NotFound(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", uint(999)) // Non-existent ID
@@ -602,7 +568,6 @@ func TestAuthHandler_ChangePassword(t *testing.T) {
_ = user.SetPassword("oldpassword")
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
// Simulate middleware
r.Use(func(c *gin.Context) {
@@ -637,7 +602,6 @@ func TestAuthHandler_ChangePassword_WrongOld(t *testing.T) {
_ = user.SetPassword("correct")
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -661,7 +625,6 @@ func TestAuthHandler_ChangePassword_WrongOld(t *testing.T) {
func TestAuthHandler_ChangePassword_Errors(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/change-password", handler.ChangePassword)
@@ -708,7 +671,6 @@ func TestNewAuthHandlerWithDB(t *testing.T) {
func TestAuthHandler_Verify_NoCookie(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/verify", handler.Verify)
@@ -723,7 +685,6 @@ func TestAuthHandler_Verify_NoCookie(t *testing.T) {
func TestAuthHandler_Verify_InvalidToken(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/verify", handler.Verify)
@@ -753,7 +714,6 @@ func TestAuthHandler_Verify_ValidToken(t *testing.T) {
// Generate token
token, _ := handler.authService.GenerateToken(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/verify", handler.Verify)
@@ -783,7 +743,6 @@ func TestAuthHandler_Verify_BearerToken(t *testing.T) {
token, _ := handler.authService.GenerateToken(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/verify", handler.Verify)
@@ -813,7 +772,6 @@ func TestAuthHandler_Verify_DisabledUser(t *testing.T) {
token, _ := handler.authService.GenerateToken(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/verify", handler.Verify)
@@ -853,7 +811,6 @@ func TestAuthHandler_Verify_ForwardAuthDenied(t *testing.T) {
token, _ := handler.authService.GenerateToken(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/verify", handler.Verify)
@@ -869,7 +826,6 @@ func TestAuthHandler_Verify_ForwardAuthDenied(t *testing.T) {
func TestAuthHandler_VerifyStatus_NotAuthenticated(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/status", handler.VerifyStatus)
@@ -886,7 +842,6 @@ func TestAuthHandler_VerifyStatus_NotAuthenticated(t *testing.T) {
func TestAuthHandler_VerifyStatus_InvalidToken(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/status", handler.VerifyStatus)
@@ -917,7 +872,6 @@ func TestAuthHandler_VerifyStatus_Authenticated(t *testing.T) {
token, _ := handler.authService.GenerateToken(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/status", handler.VerifyStatus)
@@ -951,7 +905,6 @@ func TestAuthHandler_VerifyStatus_DisabledUser(t *testing.T) {
token, _ := handler.authService.GenerateToken(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/status", handler.VerifyStatus)
@@ -969,7 +922,6 @@ func TestAuthHandler_VerifyStatus_DisabledUser(t *testing.T) {
func TestAuthHandler_GetAccessibleHosts_Unauthorized(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/hosts", handler.GetAccessibleHosts)
@@ -1000,7 +952,6 @@ func TestAuthHandler_GetAccessibleHosts_AllowAll(t *testing.T) {
}
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -1037,7 +988,6 @@ func TestAuthHandler_GetAccessibleHosts_DenyAll(t *testing.T) {
}
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -1077,7 +1027,6 @@ func TestAuthHandler_GetAccessibleHosts_PermittedHosts(t *testing.T) {
}
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -1100,7 +1049,6 @@ func TestAuthHandler_GetAccessibleHosts_UserNotFound(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", uint(99999))
@@ -1118,7 +1066,6 @@ func TestAuthHandler_GetAccessibleHosts_UserNotFound(t *testing.T) {
func TestAuthHandler_CheckHostAccess_Unauthorized(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/hosts/:hostId/access", handler.CheckHostAccess)
@@ -1136,7 +1083,6 @@ func TestAuthHandler_CheckHostAccess_InvalidHostID(t *testing.T) {
user := &models.User{UUID: uuid.NewString(), Email: "check@example.com", Enabled: true}
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -1166,7 +1112,6 @@ func TestAuthHandler_CheckHostAccess_Allowed(t *testing.T) {
}
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -1199,7 +1144,6 @@ func TestAuthHandler_CheckHostAccess_Denied(t *testing.T) {
}
db.Create(user)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -1276,7 +1220,6 @@ func TestAuthHandler_Me_RequiresUserContext(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/me", handler.Me)
@@ -1360,7 +1303,6 @@ func TestAuthHandler_Refresh(t *testing.T) {
require.NoError(t, user.SetPassword("password123"))
require.NoError(t, db.Create(user).Error)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/refresh", func(c *gin.Context) {
c.Set("userID", user.ID)
@@ -1381,7 +1323,6 @@ func TestAuthHandler_Refresh_Unauthorized(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/refresh", handler.Refresh)
@@ -1396,7 +1337,6 @@ func TestAuthHandler_Register_BadRequest(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.POST("/register", handler.Register)
@@ -1412,7 +1352,6 @@ func TestAuthHandler_Logout_InvalidateSessionsFailure(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", uint(999999))
@@ -1456,7 +1395,6 @@ func TestAuthHandler_Verify_UsesOriginalHostFallback(t *testing.T) {
token, err := handler.authService.GenerateToken(user)
require.NoError(t, err)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/verify", handler.Verify)
@@ -1474,7 +1412,6 @@ func TestAuthHandler_GetAccessibleHosts_DatabaseUnavailable(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", uint(1))
@@ -1494,7 +1431,6 @@ func TestAuthHandler_CheckHostAccess_DatabaseUnavailable(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandler(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", uint(1))
@@ -1514,7 +1450,6 @@ func TestAuthHandler_CheckHostAccess_UserNotFound(t *testing.T) {
t.Parallel()
handler, _ := setupAuthHandlerWithDB(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("userID", uint(999999))

View File

@@ -16,7 +16,6 @@ import (
)
func TestBackupHandlerSanitizesFilename(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
// prepare a fake "database"
dbPath := filepath.Join(tmpDir, "db.sqlite")

View File

@@ -21,7 +21,6 @@ import (
)
func init() {
gin.SetMode(gin.TestMode)
}
// TestCerberusLogsHandler_NewHandler verifies handler creation.

View File

@@ -16,7 +16,6 @@ func TestCertificateHandler_List_DBError(t *testing.T) {
db := OpenTestDB(t)
// Don't migrate to cause error
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -33,7 +32,6 @@ func TestCertificateHandler_List_DBError(t *testing.T) {
func TestCertificateHandler_Delete_InvalidID(t *testing.T) {
db := OpenTestDBWithMigrations(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -50,7 +48,6 @@ func TestCertificateHandler_Delete_InvalidID(t *testing.T) {
func TestCertificateHandler_Delete_NotFound(t *testing.T) {
db := OpenTestDBWithMigrations(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -71,7 +68,6 @@ func TestCertificateHandler_Delete_NoBackupService(t *testing.T) {
cert := models.SSLCertificate{UUID: "test-cert-no-backup", Name: "no-backup-cert", Provider: "custom", Domains: "nobackup.example.com"}
db.Create(&cert)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -97,7 +93,6 @@ func TestCertificateHandler_Delete_CheckUsageDBError(t *testing.T) {
cert := models.SSLCertificate{UUID: "test-cert-db-err", Name: "db-error-cert", Provider: "custom", Domains: "dberr.example.com"}
db.Create(&cert)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -118,7 +113,6 @@ func TestCertificateHandler_List_WithCertificates(t *testing.T) {
db.Create(&models.SSLCertificate{UUID: "cert-1", Name: "Cert 1", Provider: "custom", Domains: "one.example.com"})
db.Create(&models.SSLCertificate{UUID: "cert-2", Name: "Cert 2", Provider: "custom", Domains: "two.example.com"})
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -139,7 +133,6 @@ func TestCertificateHandler_Delete_ZeroID(t *testing.T) {
// DELETE /api/certificates/0 should return 400 Bad Request
db := OpenTestDBWithMigrations(t)
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -173,7 +166,6 @@ func TestCertificateHandler_DBSetupOrdering(t *testing.T) {
t.Fatalf("expected proxy_hosts table to exist before service initialization")
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())

View File

@@ -25,7 +25,6 @@ func TestCertificateHandler_Delete_RequiresAuth(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
// Add a middleware that rejects all unauthenticated requests
r.Use(func(c *gin.Context) {
@@ -55,7 +54,6 @@ func TestCertificateHandler_List_RequiresAuth(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
// Add a middleware that rejects all unauthenticated requests
r.Use(func(c *gin.Context) {
@@ -85,7 +83,6 @@ func TestCertificateHandler_Upload_RequiresAuth(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
// Add a middleware that rejects all unauthenticated requests
r.Use(func(c *gin.Context) {
@@ -126,7 +123,6 @@ func TestCertificateHandler_Delete_DiskSpaceCheck(t *testing.T) {
t.Fatalf("failed to create cert: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -179,7 +175,6 @@ func TestCertificateHandler_Delete_NotificationRateLimiting(t *testing.T) {
t.Fatalf("failed to create cert2: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)

View File

@@ -36,7 +36,6 @@ func mockAuthMiddleware() gin.HandlerFunc {
func setupCertTestRouter(t *testing.T, db *gorm.DB) *gin.Engine {
t.Helper()
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
@@ -110,7 +109,6 @@ func TestDeleteCertificate_CreatesBackup(t *testing.T) {
t.Fatalf("failed to create cert: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -164,7 +162,6 @@ func TestDeleteCertificate_BackupFailure(t *testing.T) {
t.Fatalf("failed to create cert: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -218,7 +215,6 @@ func TestDeleteCertificate_InUse_NoBackup(t *testing.T) {
t.Fatalf("failed to create proxy host: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -296,7 +292,6 @@ func TestCertificateHandler_List(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
r.Use(mockAuthMiddleware())
@@ -324,7 +319,6 @@ func TestCertificateHandler_Upload_MissingName(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -352,7 +346,6 @@ func TestCertificateHandler_Upload_MissingCertFile(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -383,7 +376,6 @@ func TestCertificateHandler_Upload_MissingKeyFile(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -410,7 +402,6 @@ func TestCertificateHandler_Upload_MissingKeyFile_MultipartWithCert(t *testing.T
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -450,7 +441,6 @@ func TestCertificateHandler_Upload_Success(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
@@ -525,7 +515,6 @@ func TestCertificateHandler_Upload_WithNotificationService(t *testing.T) {
require.NoError(t, err)
require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}, &models.Setting{}, &models.NotificationProvider{}))
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
@@ -564,7 +553,6 @@ func TestDeleteCertificate_InvalidID(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -590,7 +578,6 @@ func TestDeleteCertificate_ZeroID(t *testing.T) {
t.Fatalf("failed to migrate: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -622,7 +609,6 @@ func TestDeleteCertificate_LowDiskSpace(t *testing.T) {
t.Fatalf("failed to create cert: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -671,7 +657,6 @@ func TestDeleteCertificate_DiskSpaceCheckError(t *testing.T) {
t.Fatalf("failed to create cert: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -730,7 +715,6 @@ func TestDeleteCertificate_ExpiredLetsEncrypt_NotInUse(t *testing.T) {
t.Fatalf("failed to create cert: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -789,7 +773,6 @@ func TestDeleteCertificate_ValidLetsEncrypt_NotInUse(t *testing.T) {
t.Fatalf("failed to create cert: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -835,7 +818,6 @@ func TestDeleteCertificate_UsageCheckError(t *testing.T) {
t.Fatalf("failed to create cert: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)
@@ -873,7 +855,6 @@ func TestDeleteCertificate_NotificationRateLimit(t *testing.T) {
t.Fatalf("failed to create cert2: %v", err)
}
gin.SetMode(gin.TestMode)
r := gin.New()
r.Use(mockAuthMiddleware())
svc := services.NewCertificateService("/tmp", db)

View File

@@ -129,7 +129,6 @@ func Test_mapCrowdsecStatus(t *testing.T) {
// Test actorFromContext helper function
func Test_actorFromContext(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Run("with userID in context", func(t *testing.T) {
c, _ := gin.CreateTestContext(httptest.NewRecorder())
@@ -157,7 +156,6 @@ func Test_actorFromContext(t *testing.T) {
// Test hubEndpoints helper function
func Test_hubEndpoints(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Run("nil Hub returns nil", func(t *testing.T) {
h := &CrowdsecHandler{Hub: nil}
@@ -193,7 +191,6 @@ func TestRealCommandExecutor_Execute(t *testing.T) {
// Test isCerberusEnabled helper
func Test_isCerberusEnabled(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.Setting{}))
@@ -243,7 +240,6 @@ func Test_isCerberusEnabled(t *testing.T) {
// Test isConsoleEnrollmentEnabled helper
func Test_isConsoleEnrollmentEnabled(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.Setting{}))
@@ -293,7 +289,6 @@ func Test_isConsoleEnrollmentEnabled(t *testing.T) {
// Test CrowdsecHandler.ExportConfig
func TestCrowdsecHandler_ExportConfig(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}))
@@ -320,7 +315,6 @@ func TestCrowdsecHandler_ExportConfig(t *testing.T) {
// Test CrowdsecHandler.CheckLAPIHealth
func TestCrowdsecHandler_CheckLAPIHealth(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}))
@@ -340,7 +334,6 @@ func TestCrowdsecHandler_CheckLAPIHealth(t *testing.T) {
// Test CrowdsecHandler Console endpoints
func TestCrowdsecHandler_ConsoleStatus(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}, &models.CrowdsecConsoleEnrollment{}))
@@ -362,7 +355,6 @@ func TestCrowdsecHandler_ConsoleStatus(t *testing.T) {
}
func TestCrowdsecHandler_ConsoleEnroll_Disabled(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -385,7 +377,6 @@ func TestCrowdsecHandler_ConsoleEnroll_Disabled(t *testing.T) {
}
func TestCrowdsecHandler_DeleteConsoleEnrollment(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -405,7 +396,6 @@ func TestCrowdsecHandler_DeleteConsoleEnrollment(t *testing.T) {
// Test CrowdsecHandler.BanIP and UnbanIP
func TestCrowdsecHandler_BanIP(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -434,7 +424,6 @@ func TestCrowdsecHandler_BanIP(t *testing.T) {
}
func TestCrowdsecHandler_UnbanIP(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -460,7 +449,6 @@ func TestCrowdsecHandler_UnbanIP(t *testing.T) {
// Test CrowdsecHandler.UpdateAcquisitionConfig
func TestCrowdsecHandler_UpdateAcquisitionConfig(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -540,7 +528,6 @@ func Test_safeFloat64ToUint(t *testing.T) {
// Test CrowdsecHandler_DiagnosticsConnectivity
func TestCrowdsecHandler_DiagnosticsConnectivity(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}, &models.CrowdsecConsoleEnrollment{}))
@@ -569,7 +556,6 @@ func TestCrowdsecHandler_DiagnosticsConnectivity(t *testing.T) {
// Test CrowdsecHandler_DiagnosticsConfig
func TestCrowdsecHandler_DiagnosticsConfig(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -595,7 +581,6 @@ func TestCrowdsecHandler_DiagnosticsConfig(t *testing.T) {
// Test CrowdsecHandler_ConsoleHeartbeat
func TestCrowdsecHandler_ConsoleHeartbeat(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}, &models.CrowdsecConsoleEnrollment{}))
@@ -623,7 +608,6 @@ func TestCrowdsecHandler_ConsoleHeartbeat(t *testing.T) {
// Test CrowdsecHandler_ConsoleHeartbeat_Disabled
func TestCrowdsecHandler_ConsoleHeartbeat_Disabled(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))

View File

@@ -33,7 +33,6 @@ func createValidSQLiteDB(t *testing.T, dbPath string) error {
// Use a real BackupService, but point it at tmpDir for isolation
func TestBackupHandlerQuick(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
// Create a valid SQLite database for backup operations
dbPath := filepath.Join(tmpDir, "db.sqlite")

View File

@@ -31,7 +31,6 @@ func setupCredentialHandlerTest(t *testing.T) (*gin.Engine, *gorm.DB, *models.DN
_ = os.Unsetenv("CHARON_ENCRYPTION_KEY")
})
gin.SetMode(gin.TestMode)
router := gin.New()
// Use test name for unique database with WAL mode to avoid locking issues

View File

@@ -251,7 +251,6 @@ func TestConfigArchiveValidator_RequiredFiles(t *testing.T) {
// TestImportConfig_Validation tests the enhanced ImportConfig handler with validation.
func TestImportConfig_Validation(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -320,7 +319,6 @@ func TestImportConfig_Validation(t *testing.T) {
// TestImportConfig_Rollback tests backup restoration on validation failure.
func TestImportConfig_Rollback(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()

View File

@@ -16,7 +16,6 @@ import (
// TestListPresetsShowsCachedStatus verifies the /presets endpoint marks cached presets.
func TestListPresetsShowsCachedStatus(t *testing.T) {
gin.SetMode(gin.TestMode)
cacheDir := t.TempDir()
dataDir := t.TempDir()

View File

@@ -16,7 +16,6 @@ import (
// ============================================
func TestUpdateAcquisitionConfigMissingContent(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -33,7 +32,6 @@ func TestUpdateAcquisitionConfigMissingContent(t *testing.T) {
}
func TestUpdateAcquisitionConfigInvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -49,7 +47,6 @@ func TestUpdateAcquisitionConfigInvalidJSON(t *testing.T) {
}
func TestGetLAPIDecisionsWithIPFilter(t *testing.T) {
gin.SetMode(gin.TestMode)
mockExec := &mockCommandExecutor{output: []byte(`[]`), err: nil}
h := &CrowdsecHandler{
CmdExec: mockExec,
@@ -68,7 +65,6 @@ func TestGetLAPIDecisionsWithIPFilter(t *testing.T) {
}
func TestGetLAPIDecisionsWithScopeFilter(t *testing.T) {
gin.SetMode(gin.TestMode)
mockExec := &mockCommandExecutor{output: []byte(`[]`), err: nil}
h := &CrowdsecHandler{
CmdExec: mockExec,
@@ -86,7 +82,6 @@ func TestGetLAPIDecisionsWithScopeFilter(t *testing.T) {
}
func TestGetLAPIDecisionsWithTypeFilter(t *testing.T) {
gin.SetMode(gin.TestMode)
mockExec := &mockCommandExecutor{output: []byte(`[]`), err: nil}
h := &CrowdsecHandler{
CmdExec: mockExec,
@@ -104,7 +99,6 @@ func TestGetLAPIDecisionsWithTypeFilter(t *testing.T) {
}
func TestGetLAPIDecisionsWithMultipleFilters(t *testing.T) {
gin.SetMode(gin.TestMode)
mockExec := &mockCommandExecutor{output: []byte(`[]`), err: nil}
h := &CrowdsecHandler{
CmdExec: mockExec,

View File

@@ -32,7 +32,6 @@ func (m *MockCommandExecutor) ExecuteWithEnv(ctx context.Context, name string, a
// TestConsoleEnrollMissingKey covers the "enrollment_key required" branch
func TestConsoleEnrollMissingKey(t *testing.T) {
gin.SetMode(gin.TestMode)
mockExec := new(MockCommandExecutor)
@@ -59,7 +58,6 @@ func TestConsoleEnrollMissingKey(t *testing.T) {
// TestGetCachedPreset_ValidationAndMiss covers path param validation empty check (if any) and cache miss
func TestGetCachedPreset_ValidationAndMiss(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
cache, _ := crowdsec.NewHubCache(tmpDir, time.Hour)
@@ -86,7 +84,6 @@ func TestGetCachedPreset_ValidationAndMiss(t *testing.T) {
}
func TestGetCachedPreset_SlugRequired(t *testing.T) {
gin.SetMode(gin.TestMode)
h := &CrowdsecHandler{}
t.Setenv("FEATURE_CERBERUS_ENABLED", "1")

View File

@@ -22,7 +22,6 @@ import (
// TestUpdateAcquisitionConfigSuccess tests successful config update
func TestUpdateAcquisitionConfigSuccess(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
// Create fake acquis.yaml path in tmp
@@ -50,7 +49,6 @@ func TestUpdateAcquisitionConfigSuccess(t *testing.T) {
// TestRegisterBouncerScriptPathError tests script not found
func TestRegisterBouncerScriptPathError(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -92,7 +90,6 @@ func (f *fakeExecWithOutput) Status(ctx context.Context, configDir string) (runn
// TestGetLAPIDecisionsRequestError tests request creation error
func TestGetLAPIDecisionsEmptyResponse(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -109,7 +106,6 @@ func TestGetLAPIDecisionsEmptyResponse(t *testing.T) {
// TestGetLAPIDecisionsWithFilters tests query parameter handling
func TestGetLAPIDecisionsIPQueryParam(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -124,7 +120,6 @@ func TestGetLAPIDecisionsIPQueryParam(t *testing.T) {
// TestGetLAPIDecisionsScopeParam tests scope parameter
func TestGetLAPIDecisionsScopeParam(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -139,7 +134,6 @@ func TestGetLAPIDecisionsScopeParam(t *testing.T) {
// TestGetLAPIDecisionsTypeParam tests type parameter
func TestGetLAPIDecisionsTypeParam(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -154,7 +148,6 @@ func TestGetLAPIDecisionsTypeParam(t *testing.T) {
// TestGetLAPIDecisionsCombinedParams tests multiple query params
func TestGetLAPIDecisionsCombinedParams(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -169,7 +162,6 @@ func TestGetLAPIDecisionsCombinedParams(t *testing.T) {
// TestCheckLAPIHealthTimeout tests health check
func TestCheckLAPIHealthRequest(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -223,7 +215,6 @@ func TestGetLAPIKeyAlternative(t *testing.T) {
// TestStatusContextTimeout tests context handling
func TestStatusRequest(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")
@@ -238,7 +229,6 @@ func TestStatusRequest(t *testing.T) {
// TestRegisterBouncerExecutionSuccess tests successful registration
func TestRegisterBouncerFlow(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
// Create fake script
@@ -267,7 +257,6 @@ func TestRegisterBouncerFlow(t *testing.T) {
// TestRegisterBouncerWithError tests execution error
func TestRegisterBouncerExecutionFailure(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
// Create fake script
@@ -294,7 +283,6 @@ func TestRegisterBouncerExecutionFailure(t *testing.T) {
// TestGetAcquisitionConfigFileError tests file read error
func TestGetAcquisitionConfigNotPresent(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
r := gin.New()
g := r.Group("/api/v1")

View File

@@ -0,0 +1,632 @@
package handlers
import (
"context"
"encoding/csv"
"encoding/json"
"fmt"
"math"
"net/http"
"net/url"
"strconv"
"strings"
"time"
"github.com/Wikid82/charon/backend/internal/logger"
"github.com/Wikid82/charon/backend/internal/models"
"github.com/Wikid82/charon/backend/internal/network"
"github.com/gin-gonic/gin"
)
// Cache TTL constants for dashboard endpoints.
const (
	// Summary cards change frequently; keep the cache short-lived.
	dashSummaryTTL = 30 * time.Second
	// Time-bucketed aggregates tolerate a slightly staler cache.
	dashTimelineTTL = 60 * time.Second
	dashTopIPsTTL = 60 * time.Second
	dashScenariosTTL = 60 * time.Second
	// Alerts are user-facing and refreshed more aggressively.
	dashAlertsTTL = 30 * time.Second
	// Hard cap on rows emitted by export endpoints to bound memory/response size.
	exportMaxRows = 100_000
)
// parseTimeRange converts a range string to a start time. Empty string defaults to 24h.
// Valid values are "1h", "6h", "24h", "7d", and "30d"; anything else yields an error.
func parseTimeRange(rangeStr string) (time.Time, error) {
	lookback := map[string]time.Duration{
		"1h":  time.Hour,
		"6h":  6 * time.Hour,
		"":    24 * time.Hour, // empty range defaults to the last 24 hours
		"24h": 24 * time.Hour,
		"7d":  7 * 24 * time.Hour,
		"30d": 30 * 24 * time.Hour,
	}
	d, ok := lookback[rangeStr]
	if !ok {
		return time.Time{}, fmt.Errorf("invalid range: %s (valid: 1h, 6h, 24h, 7d, 30d)", rangeStr)
	}
	return time.Now().UTC().Add(-d), nil
}
// normalizeRange returns the canonical range string (defaults empty to "24h").
func normalizeRange(r string) string {
	switch r {
	case "":
		return "24h"
	default:
		return r
	}
}
// intervalForRange selects the default time-bucket interval for a given range.
// Unknown ranges (and the empty string via its own entry) bucket hourly.
func intervalForRange(rangeStr string) string {
	defaults := map[string]string{
		"1h":  "5m",
		"6h":  "15m",
		"":    "1h",
		"24h": "1h",
		"7d":  "6h",
		"30d": "1d",
	}
	if iv, ok := defaults[rangeStr]; ok {
		return iv
	}
	return "1h"
}
// intervalToStrftime maps an interval string to the SQLite strftime expression
// used for time bucketing. Sub-hour and sub-day buckets are derived by integer
// division of the minute/hour component; unknown intervals fall back to hourly.
func intervalToStrftime(interval string) string {
	exprs := map[string]string{
		"5m":  `strftime('%Y-%m-%dT%H:', created_at) || printf('%02d:00Z', (CAST(strftime('%M', created_at) AS INTEGER) / 5) * 5)`,
		"15m": `strftime('%Y-%m-%dT%H:', created_at) || printf('%02d:00Z', (CAST(strftime('%M', created_at) AS INTEGER) / 15) * 15)`,
		"1h":  `strftime('%Y-%m-%dT%H:00:00Z', created_at)`,
		"6h":  `strftime('%Y-%m-%dT', created_at) || printf('%02d:00:00Z', (CAST(strftime('%H', created_at) AS INTEGER) / 6) * 6)`,
		"1d":  `strftime('%Y-%m-%dT00:00:00Z', created_at)`,
	}
	if expr, ok := exprs[interval]; ok {
		return expr
	}
	return `strftime('%Y-%m-%dT%H:00:00Z', created_at)`
}
// validInterval checks whether the provided interval is one of the known values.
func validInterval(interval string) bool {
	known := [...]string{"5m", "15m", "1h", "6h", "1d"}
	for _, v := range known {
		if interval == v {
			return true
		}
	}
	return false
}
// sanitizeCSVField prefixes fields starting with formula-trigger characters
// to prevent CSV injection (CWE-1236). The leading apostrophe forces
// spreadsheet applications to treat the cell as text rather than a formula.
func sanitizeCSVField(field string) string {
	if field == "" {
		return field
	}
	// All trigger bytes are ASCII, so a byte→rune comparison is exact here.
	if strings.ContainsRune("=+-@\t\r", rune(field[0])) {
		return "'" + field
	}
	return field
}
// DashboardSummary returns aggregate counts for the dashboard summary cards.
//
// Query param "range" is one of 1h, 6h, 24h (default), 7d, 30d. Responses are
// served from the in-memory dashboard cache when fresh; note the cached
// payload was stored with "cached": false, so hits are not distinguishable
// from misses in the response body.
func (h *CrowdsecHandler) DashboardSummary(c *gin.Context) {
	rangeStr := normalizeRange(c.Query("range"))
	cacheKey := "dashboard:summary:" + rangeStr
	if cached, ok := h.dashCache.Get(cacheKey); ok {
		c.JSON(http.StatusOK, cached)
		return
	}
	since, err := parseTimeRange(rangeStr)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	// Historical metrics from SQLite. Query errors are not checked; on
	// failure the counters simply keep their zero values.
	var totalDecisions int64
	h.DB.Model(&models.SecurityDecision{}).
		Where("source = ? AND created_at >= ?", "crowdsec", since).
		Count(&totalDecisions)
	var uniqueIPs int64
	h.DB.Model(&models.SecurityDecision{}).
		Where("source = ? AND created_at >= ?", "crowdsec", since).
		Distinct("ip").Count(&uniqueIPs)
	// Most frequent non-empty scenario within the window.
	var topScenario struct {
		Scenario string
		Cnt      int64
	}
	h.DB.Model(&models.SecurityDecision{}).
		Select("scenario, COUNT(*) as cnt").
		Where("source = ? AND created_at >= ? AND scenario != ''", "crowdsec", since).
		Group("scenario").
		Order("cnt DESC").
		Limit(1).
		Scan(&topScenario)
	// Trend calculation: compare current period vs previous equal-length period
	duration := time.Since(since)
	previousSince := since.Add(-duration)
	var previousCount int64
	h.DB.Model(&models.SecurityDecision{}).
		Where("source = ? AND created_at >= ? AND created_at < ?", "crowdsec", previousSince, since).
		Count(&previousCount)
	// Trend: percentage change vs. the previous equal-length period.
	// Formula: round((current - previous) / previous * 100, 1)
	// Special cases: no previous data → 0; no current data → -100%.
	var trend float64
	if previousCount == 0 {
		trend = 0.0
	} else if totalDecisions == 0 && previousCount > 0 {
		trend = -100.0
	} else {
		// *1000 then /10 rounds the percentage to one decimal place.
		trend = math.Round(float64(totalDecisions-previousCount)/float64(previousCount)*1000) / 10
	}
	// Active decisions from LAPI (real-time); -1 means LAPI was unreachable.
	activeDecisions := h.fetchActiveDecisionCount(c.Request.Context())
	result := gin.H{
		"total_decisions":  totalDecisions,
		"active_decisions": activeDecisions,
		"unique_ips":       uniqueIPs,
		"top_scenario":     topScenario.Scenario,
		"decisions_trend":  trend,
		"range":            rangeStr,
		"cached":           false,
		"generated_at":     time.Now().UTC().Format(time.RFC3339),
	}
	h.dashCache.Set(cacheKey, result, dashSummaryTTL)
	c.JSON(http.StatusOK, result)
}
// fetchActiveDecisionCount queries LAPI for active decisions count.
// Returns -1 when LAPI is unreachable — or when the URL fails validation,
// the request cannot be built, the response is non-200, or the body cannot
// be decoded as a JSON array.
func (h *CrowdsecHandler) fetchActiveDecisionCount(ctx context.Context) int64 {
	// Default LAPI address; overridden by the configured CrowdSecAPIURL when
	// the security service has one.
	lapiURL := "http://127.0.0.1:8085"
	if h.Security != nil {
		cfg, err := h.Security.Get()
		if err == nil && cfg != nil && cfg.CrowdSecAPIURL != "" {
			lapiURL = cfg.CrowdSecAPIURL
		}
	}
	// Validate against the internal-service allowlist (SSRF guard).
	baseURL, err := h.resolveLAPIURLValidator(lapiURL)
	if err != nil {
		return -1
	}
	endpoint := baseURL.ResolveReference(&url.URL{Path: "/v1/decisions"})
	reqURL := endpoint.String()
	apiKey := getLAPIKey()
	// Bound the request to 10s regardless of the parent context.
	reqCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	req, err := http.NewRequestWithContext(reqCtx, http.MethodGet, reqURL, http.NoBody)
	if err != nil {
		return -1
	}
	if apiKey != "" {
		req.Header.Set("X-Api-Key", apiKey)
	}
	req.Header.Set("Accept", "application/json")
	client := network.NewInternalServiceHTTPClient(10 * time.Second)
	resp, err := client.Do(req)
	if err != nil {
		return -1
	}
	defer func() { _ = resp.Body.Close() }()
	if resp.StatusCode != http.StatusOK {
		return -1
	}
	// The count is simply the length of the returned array; the decision
	// objects themselves are not inspected.
	var decisions []interface{}
	if decErr := json.NewDecoder(resp.Body).Decode(&decisions); decErr != nil {
		return -1
	}
	return int64(len(decisions))
}
// DashboardTimeline returns time-bucketed decision counts for the timeline chart.
//
// Query params:
//   - range:    one of 1h, 6h, 24h (default), 7d, 30d.
//   - interval: bucket width (5m, 15m, 1h, 6h, 1d); defaults per range.
func (h *CrowdsecHandler) DashboardTimeline(c *gin.Context) {
	rangeStr := normalizeRange(c.Query("range"))
	interval := c.Query("interval")
	if interval == "" {
		interval = intervalForRange(rangeStr)
	}
	// Whitelist the interval before it influences SQL construction below.
	if !validInterval(interval) {
		c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("invalid interval: %s (valid: 5m, 15m, 1h, 6h, 1d)", interval)})
		return
	}
	cacheKey := fmt.Sprintf("dashboard:timeline:%s:%s", rangeStr, interval)
	if cached, ok := h.dashCache.Get(cacheKey); ok {
		c.JSON(http.StatusOK, cached)
		return
	}
	since, err := parseTimeRange(rangeStr)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	// bucketExpr is one of the fixed strftime expressions returned by
	// intervalToStrftime — never raw user input — so interpolating it with
	// fmt.Sprintf below does not open a SQL injection path.
	bucketExpr := intervalToStrftime(interval)
	// bucketRow mirrors the SELECT aliases (bucket, bans, captchas).
	type bucketRow struct {
		Bucket   string
		Bans     int64
		Captchas int64
	}
	var rows []bucketRow
	h.DB.Model(&models.SecurityDecision{}).
		Select(fmt.Sprintf("(%s) as bucket, SUM(CASE WHEN action = 'block' THEN 1 ELSE 0 END) as bans, SUM(CASE WHEN action = 'challenge' THEN 1 ELSE 0 END) as captchas", bucketExpr)).
		Where("source = ? AND created_at >= ?", "crowdsec", since).
		Group("bucket").
		Order("bucket ASC").
		Scan(&rows)
	// Reshape rows into the JSON structure the chart consumes. Buckets with
	// no decisions are absent rather than zero-filled.
	buckets := make([]gin.H, 0, len(rows))
	for _, r := range rows {
		buckets = append(buckets, gin.H{
			"timestamp": r.Bucket,
			"bans":      r.Bans,
			"captchas":  r.Captchas,
		})
	}
	result := gin.H{
		"buckets":  buckets,
		"range":    rangeStr,
		"interval": interval,
		"cached":   false,
	}
	h.dashCache.Set(cacheKey, result, dashTimelineTTL)
	c.JSON(http.StatusOK, result)
}
// DashboardTopIPs returns top attacking IPs ranked by decision count.
//
// Query params:
//   - range: one of 1h, 6h, 24h (default), 7d, 30d.
//   - limit: clamped to 1..50; invalid or missing values default to 10.
func (h *CrowdsecHandler) DashboardTopIPs(c *gin.Context) {
	rangeStr := normalizeRange(c.Query("range"))
	limitStr := c.DefaultQuery("limit", "10")
	limit, err := strconv.Atoi(limitStr)
	if err != nil || limit < 1 {
		limit = 10
	}
	if limit > 50 {
		limit = 50
	}
	cacheKey := fmt.Sprintf("dashboard:top-ips:%s:%d", rangeStr, limit)
	if cached, ok := h.dashCache.Get(cacheKey); ok {
		c.JSON(http.StatusOK, cached)
		return
	}
	since, err := parseTimeRange(rangeStr)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	// ipRow mirrors the SELECT aliases below. MAX(country) picks the
	// lexicographically greatest country among an IP's rows — effectively an
	// arbitrary-but-deterministic representative value.
	type ipRow struct {
		IP       string
		Count    int64
		LastSeen time.Time
		Country  string
	}
	var rows []ipRow
	h.DB.Model(&models.SecurityDecision{}).
		Select("ip, COUNT(*) as count, MAX(created_at) as last_seen, MAX(country) as country").
		Where("source = ? AND created_at >= ?", "crowdsec", since).
		Group("ip").
		Order("count DESC").
		Limit(limit).
		Scan(&rows)
	ips := make([]gin.H, 0, len(rows))
	for _, r := range rows {
		ips = append(ips, gin.H{
			"ip":        r.IP,
			"count":     r.Count,
			"last_seen": r.LastSeen.UTC().Format(time.RFC3339),
			"country":   r.Country,
		})
	}
	result := gin.H{
		"ips":    ips,
		"range":  rangeStr,
		"cached": false,
	}
	h.dashCache.Set(cacheKey, result, dashTopIPsTTL)
	c.JSON(http.StatusOK, result)
}
// DashboardScenarios returns scenario breakdown with counts and percentages.
//
// Query param "range" is one of 1h, 6h, 24h (default), 7d, 30d. At most the
// top 50 scenarios are returned; percentages are computed against the total
// of those returned rows, not the whole table.
func (h *CrowdsecHandler) DashboardScenarios(c *gin.Context) {
	rangeStr := normalizeRange(c.Query("range"))
	cacheKey := "dashboard:scenarios:" + rangeStr
	if cached, ok := h.dashCache.Get(cacheKey); ok {
		c.JSON(http.StatusOK, cached)
		return
	}
	since, err := parseTimeRange(rangeStr)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	// scenarioRow mirrors the SELECT aliases (name, count).
	type scenarioRow struct {
		Name  string
		Count int64
	}
	var rows []scenarioRow
	h.DB.Model(&models.SecurityDecision{}).
		Select("scenario as name, COUNT(*) as count").
		Where("source = ? AND created_at >= ? AND scenario != ''", "crowdsec", since).
		Group("scenario").
		Order("count DESC").
		Limit(50).
		Scan(&rows)
	// Sum over the returned (top-50) rows only.
	var total int64
	for _, r := range rows {
		total += r.Count
	}
	scenarios := make([]gin.H, 0, len(rows))
	for _, r := range rows {
		pct := 0.0
		if total > 0 {
			// *1000 then /10 rounds the percentage to one decimal place.
			pct = math.Round(float64(r.Count)/float64(total)*1000) / 10
		}
		scenarios = append(scenarios, gin.H{
			"name":       r.Name,
			"count":      r.Count,
			"percentage": pct,
		})
	}
	result := gin.H{
		"scenarios": scenarios,
		"total":     total,
		"range":     rangeStr,
		"cached":    false,
	}
	h.dashCache.Set(cacheKey, result, dashScenariosTTL)
	c.JSON(http.StatusOK, result)
}
// ListAlerts wraps the CrowdSec LAPI /v1/alerts endpoint.
//
// Query params:
//   - range:    one of 1h, 6h, 24h (default), 7d, 30d.
//   - scenario: optional scenario filter (whitespace-trimmed).
//   - limit:    clamped to 1..200; invalid or missing values default to 50.
//   - offset:   >= 0; invalid values default to 0.
//
// Results come from LAPI with a cscli fallback (see fetchLAPIAlerts); the
// cache key includes every input so distinct filter combinations are cached
// independently.
func (h *CrowdsecHandler) ListAlerts(c *gin.Context) {
	rangeStr := normalizeRange(c.Query("range"))
	scenario := strings.TrimSpace(c.Query("scenario"))
	limitStr := c.DefaultQuery("limit", "50")
	offsetStr := c.DefaultQuery("offset", "0")
	limit, err := strconv.Atoi(limitStr)
	if err != nil || limit < 1 {
		limit = 50
	}
	if limit > 200 {
		limit = 200
	}
	offset, err := strconv.Atoi(offsetStr)
	if err != nil || offset < 0 {
		offset = 0
	}
	cacheKey := fmt.Sprintf("dashboard:alerts:%s:%s:%d:%d", rangeStr, scenario, limit, offset)
	if cached, ok := h.dashCache.Get(cacheKey); ok {
		c.JSON(http.StatusOK, cached)
		return
	}
	since, tErr := parseTimeRange(rangeStr)
	if tErr != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": tErr.Error()})
		return
	}
	// source reports which backend actually served the data ("lapi"/"cscli").
	alerts, total, source := h.fetchLAPIAlerts(c.Request.Context(), since, scenario, limit, offset)
	result := gin.H{
		"alerts": alerts,
		"total":  total,
		"source": source,
		"cached": false,
	}
	h.dashCache.Set(cacheKey, result, dashAlertsTTL)
	c.JSON(http.StatusOK, result)
}
// fetchLAPIAlerts attempts to get alerts from LAPI, falling back to cscli.
//
// Any failure along the way — blocked URL, request construction, transport
// error, non-200 status, or an undecodable body — falls back to
// fetchAlertsCscli. The returned source is "lapi" or "cscli" accordingly.
//
// NOTE(review): the LAPI request is issued with limit=limit, but offset is
// applied client-side afterwards. When offset > 0 the server may already have
// truncated the result set, so later pages can be short or empty and total is
// capped at limit. Consider requesting limit+offset rows — confirm against
// LAPI pagination semantics before changing.
func (h *CrowdsecHandler) fetchLAPIAlerts(ctx context.Context, since time.Time, scenario string, limit, offset int) (alerts []interface{}, total int, source string) {
	// Default LAPI address; overridden by the configured CrowdSecAPIURL.
	lapiURL := "http://127.0.0.1:8085"
	if h.Security != nil {
		cfg, err := h.Security.Get()
		if err == nil && cfg != nil && cfg.CrowdSecAPIURL != "" {
			lapiURL = cfg.CrowdSecAPIURL
		}
	}
	// Validate against the internal-service allowlist (SSRF guard).
	baseURL, err := h.resolveLAPIURLValidator(lapiURL)
	if err != nil {
		return h.fetchAlertsCscli(ctx, scenario, limit)
	}
	q := url.Values{}
	q.Set("since", since.Format(time.RFC3339))
	if scenario != "" {
		q.Set("scenario", scenario)
	}
	q.Set("limit", strconv.Itoa(limit))
	endpoint := baseURL.ResolveReference(&url.URL{Path: "/v1/alerts"})
	endpoint.RawQuery = q.Encode()
	reqURL := endpoint.String()
	apiKey := getLAPIKey()
	// Bound the request to 10s regardless of the parent context.
	reqCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	req, reqErr := http.NewRequestWithContext(reqCtx, http.MethodGet, reqURL, http.NoBody)
	if reqErr != nil {
		return h.fetchAlertsCscli(ctx, scenario, limit)
	}
	if apiKey != "" {
		req.Header.Set("X-Api-Key", apiKey)
	}
	req.Header.Set("Accept", "application/json")
	client := network.NewInternalServiceHTTPClient(10 * time.Second)
	resp, doErr := client.Do(req)
	if doErr != nil {
		return h.fetchAlertsCscli(ctx, scenario, limit)
	}
	defer func() { _ = resp.Body.Close() }()
	if resp.StatusCode != http.StatusOK {
		return h.fetchAlertsCscli(ctx, scenario, limit)
	}
	var rawAlerts []interface{}
	if decErr := json.NewDecoder(resp.Body).Decode(&rawAlerts); decErr != nil {
		return h.fetchAlertsCscli(ctx, scenario, limit)
	}
	// Capture full count before slicing for correct pagination semantics
	fullTotal := len(rawAlerts)
	// Apply offset for pagination
	if offset > 0 && offset < len(rawAlerts) {
		rawAlerts = rawAlerts[offset:]
	} else if offset >= len(rawAlerts) {
		// Offset past the end (including the empty-result case): no page.
		rawAlerts = nil
	}
	if limit < len(rawAlerts) {
		rawAlerts = rawAlerts[:limit]
	}
	return rawAlerts, fullTotal, "lapi"
}
// fetchAlertsCscli falls back to using cscli to list alerts. On any execution
// or decode failure it returns an empty slice with zero total; source is
// always "cscli".
func (h *CrowdsecHandler) fetchAlertsCscli(ctx context.Context, scenario string, limit int) (alerts []interface{}, total int, source string) {
	source = "cscli"
	args := []string{"alerts", "list", "-o", "json"}
	if scenario != "" {
		args = append(args, "-s", scenario)
	}
	args = append(args, "-l", strconv.Itoa(limit))
	output, err := h.CmdExec.Execute(ctx, "cscli", args...)
	if err != nil {
		logger.Log().WithError(err).Warn("Failed to list alerts via cscli")
		return []interface{}{}, 0, source
	}
	if unmarshalErr := json.Unmarshal(output, &alerts); unmarshalErr != nil {
		return []interface{}{}, 0, source
	}
	return alerts, len(alerts), source
}
// ExportDecisions exports decisions as downloadable CSV or JSON.
//
// Query params:
//   - format: "csv" (default) or "json".
//   - range:  one of 1h, 6h, 24h (default), 7d, 30d.
//   - source: crowdsec, waf, ratelimit, manual, or all (default).
//
// Output is capped at exportMaxRows rows, newest first. Free-text CSV fields
// pass through sanitizeCSVField to defuse spreadsheet formula injection
// (CWE-1236).
func (h *CrowdsecHandler) ExportDecisions(c *gin.Context) {
	format := strings.ToLower(c.DefaultQuery("format", "csv"))
	rangeStr := normalizeRange(c.Query("range"))
	source := strings.ToLower(c.DefaultQuery("source", "all"))
	if format != "csv" && format != "json" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid format: must be csv or json"})
		return
	}
	validSources := map[string]bool{"crowdsec": true, "waf": true, "ratelimit": true, "manual": true, "all": true}
	if !validSources[source] {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid source: must be crowdsec, waf, ratelimit, manual, or all"})
		return
	}
	since, err := parseTimeRange(rangeStr)
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	var decisions []models.SecurityDecision
	q := h.DB.Where("created_at >= ?", since)
	if source != "all" {
		// "all" applies no source filter.
		q = q.Where("source = ?", source)
	}
	q.Order("created_at DESC").Limit(exportMaxRows).Find(&decisions)
	// Timestamped filename so repeated exports don't collide.
	ts := time.Now().UTC().Format("20060102-150405")
	switch format {
	case "csv":
		filename := fmt.Sprintf("crowdsec-decisions-%s.csv", ts)
		c.Header("Content-Type", "text/csv; charset=utf-8")
		c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=%s", filename))
		w := csv.NewWriter(c.Writer)
		// Per-row Write errors are ignored here; csv.Writer latches the first
		// error, which is surfaced once via w.Error() after Flush below.
		_ = w.Write([]string{"uuid", "ip", "action", "source", "scenario", "rule_id", "host", "country", "created_at", "expires_at"})
		for _, d := range decisions {
			_ = w.Write([]string{
				d.UUID,
				sanitizeCSVField(d.IP),
				d.Action,
				d.Source,
				sanitizeCSVField(d.Scenario),
				sanitizeCSVField(d.RuleID),
				sanitizeCSVField(d.Host),
				sanitizeCSVField(d.Country),
				d.CreatedAt.UTC().Format(time.RFC3339),
				// ExpiresAt is optional: emit the empty string for decisions
				// with no expiry.
				func() string {
					if d.ExpiresAt != nil {
						return d.ExpiresAt.UTC().Format(time.RFC3339)
					}
					return ""
				}(),
			})
		}
		w.Flush()
		if err := w.Error(); err != nil {
			logger.Log().WithError(err).Warn("CSV export write error")
		}
	case "json":
		filename := fmt.Sprintf("crowdsec-decisions-%s.json", ts)
		c.Header("Content-Type", "application/json")
		c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=%s", filename))
		c.JSON(http.StatusOK, decisions)
	}
}

View File

@@ -0,0 +1,70 @@
package handlers
import (
"strings"
"sync"
"time"
)
// cacheEntry is one cached dashboard payload together with its absolute
// expiry time.
type cacheEntry struct {
	data      interface{} // cached response payload
	expiresAt time.Time   // instant after which the entry is stale
}

// dashboardCache is a TTL-based in-memory cache keyed by string and guarded
// by a read/write mutex. Expired entries are evicted lazily during Get; there
// is no background sweeper, so keys that are never read again persist until
// overwritten by Set or removed by Invalidate.
type dashboardCache struct {
	mu      sync.RWMutex
	entries map[string]*cacheEntry
}
// newDashboardCache creates an empty cache ready for concurrent use.
func newDashboardCache() *dashboardCache {
	cache := &dashboardCache{}
	cache.entries = make(map[string]*cacheEntry)
	return cache
}
// Get returns the cached value for key when present and still fresh.
//
// The fast path holds only the read lock. When an expired entry is found,
// the read lock is dropped and the write lock taken so the stale entry can
// be evicted; the entry is re-checked under the write lock because another
// goroutine may have refreshed it in between (in that case the fresh entry
// is kept but this call still reports a miss, which is harmless).
//
// Fix: the eviction re-check previously used time.Now().After(expiresAt),
// the complement of which does not match the freshness check
// time.Now().Before(expiresAt) — at the exact expiry instant the entry was
// treated as stale yet never deleted. Using !Before makes the two
// predicates exact complements.
func (c *dashboardCache) Get(key string) (interface{}, bool) {
	c.mu.RLock()
	entry, ok := c.entries[key]
	if !ok {
		c.mu.RUnlock()
		return nil, false
	}
	if time.Now().Before(entry.expiresAt) {
		data := entry.data
		c.mu.RUnlock()
		return data, true
	}
	c.mu.RUnlock()
	// Lazily evict the expired entry under the write lock.
	c.mu.Lock()
	defer c.mu.Unlock()
	entry, ok = c.entries[key]
	if ok && !time.Now().Before(entry.expiresAt) {
		delete(c.entries, key)
	}
	return nil, false
}
// Set stores data under key with the given time-to-live, replacing any
// existing entry for that key.
func (c *dashboardCache) Set(key string, data interface{}, ttl time.Duration) {
	entry := &cacheEntry{
		data:      data,
		expiresAt: time.Now().Add(ttl),
	}
	c.mu.Lock()
	c.entries[key] = entry
	c.mu.Unlock()
}
// Invalidate removes every entry whose key starts with any of the given
// prefixes. With no prefixes it is a no-op.
func (c *dashboardCache) Invalidate(prefixes ...string) {
	c.mu.Lock()
	defer c.mu.Unlock()
keys:
	for key := range c.entries {
		for _, prefix := range prefixes {
			if !strings.HasPrefix(key, prefix) {
				continue
			}
			delete(c.entries, key)
			continue keys
		}
	}
}

File diff suppressed because it is too large Load Diff

View File

@@ -28,7 +28,6 @@ func (m *mockCommandExecutor) Execute(ctx context.Context, name string, args ...
}
func TestListDecisions_Success(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -67,7 +66,6 @@ func TestListDecisions_Success(t *testing.T) {
}
func TestListDecisions_EmptyList(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -98,7 +96,6 @@ func TestListDecisions_EmptyList(t *testing.T) {
}
func TestListDecisions_CscliError(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -130,7 +127,6 @@ func TestListDecisions_CscliError(t *testing.T) {
}
func TestListDecisions_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -154,7 +150,6 @@ func TestListDecisions_InvalidJSON(t *testing.T) {
}
func TestBanIP_Success(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -205,7 +200,6 @@ func TestBanIP_Success(t *testing.T) {
}
func TestBanIP_DefaultDuration(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -245,7 +239,6 @@ func TestBanIP_DefaultDuration(t *testing.T) {
}
func TestBanIP_MissingIP(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -268,7 +261,6 @@ func TestBanIP_MissingIP(t *testing.T) {
}
func TestBanIP_EmptyIP(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -293,7 +285,6 @@ func TestBanIP_EmptyIP(t *testing.T) {
}
func TestBanIP_CscliError(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -323,7 +314,6 @@ func TestBanIP_CscliError(t *testing.T) {
}
func TestUnbanIP_Success(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -357,7 +347,6 @@ func TestUnbanIP_Success(t *testing.T) {
}
func TestUnbanIP_CscliError(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -381,7 +370,6 @@ func TestUnbanIP_CscliError(t *testing.T) {
}
func TestListDecisions_MultipleDecisions(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -430,7 +418,6 @@ func TestListDecisions_MultipleDecisions(t *testing.T) {
}
func TestBanIP_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()

View File

@@ -66,6 +66,12 @@ type CrowdsecHandler struct {
CaddyManager *caddy.Manager // For config reload after bouncer registration
LAPIMaxWait time.Duration // For testing; 0 means 60s default
LAPIPollInterval time.Duration // For testing; 0 means 500ms default
dashCache *dashboardCache
// validateLAPIURL validates and parses a LAPI base URL.
// This field allows tests to inject a permissive validator for mock servers
// without mutating package-level state (which causes data races).
validateLAPIURL func(string) (*url.URL, error)
// registrationMutex protects concurrent bouncer registration attempts
registrationMutex sync.Mutex
@@ -84,6 +90,14 @@ const (
bouncerName = "caddy-bouncer"
)
// resolveLAPIURLValidator returns the handler's validator or the default.
func (h *CrowdsecHandler) resolveLAPIURLValidator(raw string) (*url.URL, error) {
if h.validateLAPIURL != nil {
return h.validateLAPIURL(raw)
}
return validateCrowdsecLAPIBaseURLDefault(raw)
}
func (h *CrowdsecHandler) bouncerKeyPath() string {
if h != nil && strings.TrimSpace(h.DataDir) != "" {
return filepath.Join(h.DataDir, "bouncer_key")
@@ -370,14 +384,16 @@ func NewCrowdsecHandler(db *gorm.DB, executor CrowdsecExecutor, binPath, dataDir
consoleSvc = crowdsec.NewConsoleEnrollmentService(db, &crowdsec.SecureCommandExecutor{}, dataDir, consoleSecret)
}
return &CrowdsecHandler{
DB: db,
Executor: executor,
CmdExec: &RealCommandExecutor{},
BinPath: binPath,
DataDir: dataDir,
Hub: hubSvc,
Console: consoleSvc,
Security: securitySvc,
DB: db,
Executor: executor,
CmdExec: &RealCommandExecutor{},
BinPath: binPath,
DataDir: dataDir,
Hub: hubSvc,
Console: consoleSvc,
Security: securitySvc,
dashCache: newDashboardCache(),
validateLAPIURL: validateCrowdsecLAPIBaseURLDefault,
}
}
@@ -1442,18 +1458,10 @@ const (
defaultCrowdsecLAPIPort = 8085
)
// validateCrowdsecLAPIBaseURLFunc is a variable holding the LAPI URL validation function.
// This indirection allows tests to inject a permissive validator for mock servers.
var validateCrowdsecLAPIBaseURLFunc = validateCrowdsecLAPIBaseURLDefault
func validateCrowdsecLAPIBaseURLDefault(raw string) (*url.URL, error) {
return security.ValidateInternalServiceBaseURL(raw, defaultCrowdsecLAPIPort, security.InternalServiceHostAllowlist())
}
func validateCrowdsecLAPIBaseURL(raw string) (*url.URL, error) {
return validateCrowdsecLAPIBaseURLFunc(raw)
}
// GetLAPIDecisions queries CrowdSec LAPI directly for current decisions.
// This is an alternative to ListDecisions which uses cscli.
// Query params:
@@ -1471,7 +1479,7 @@ func (h *CrowdsecHandler) GetLAPIDecisions(c *gin.Context) {
}
}
baseURL, err := validateCrowdsecLAPIBaseURL(lapiURL)
baseURL, err := h.resolveLAPIURLValidator(lapiURL)
if err != nil {
logger.Log().WithError(err).WithField("lapi_url", lapiURL).Warn("Blocked CrowdSec LAPI URL by internal allowlist policy")
// Fallback to cscli-based method.
@@ -2142,7 +2150,7 @@ func (h *CrowdsecHandler) CheckLAPIHealth(c *gin.Context) {
ctx, cancel := context.WithTimeout(c.Request.Context(), 5*time.Second)
defer cancel()
baseURL, err := validateCrowdsecLAPIBaseURL(lapiURL)
baseURL, err := h.resolveLAPIURLValidator(lapiURL)
if err != nil {
c.JSON(http.StatusOK, gin.H{"healthy": false, "error": "invalid LAPI URL (blocked by SSRF policy)", "lapi_url": lapiURL})
return
@@ -2287,6 +2295,21 @@ func (h *CrowdsecHandler) BanIP(c *gin.Context) {
}
c.JSON(http.StatusOK, gin.H{"status": "banned", "ip": ip, "duration": duration})
// Log to security_decisions for dashboard aggregation
if h.Security != nil {
parsedDur, _ := time.ParseDuration(duration)
expiry := time.Now().Add(parsedDur)
_ = h.Security.LogDecision(&models.SecurityDecision{
IP: ip,
Action: "block",
Source: "crowdsec",
RuleID: reason,
Scenario: "manual",
ExpiresAt: &expiry,
})
}
h.dashCache.Invalidate("dashboard")
}
// UnbanIP removes a ban for an IP address
@@ -2313,6 +2336,7 @@ func (h *CrowdsecHandler) UnbanIP(c *gin.Context) {
}
c.JSON(http.StatusOK, gin.H{"status": "unbanned", "ip": ip})
h.dashCache.Invalidate("dashboard")
}
// RegisterBouncer registers a new bouncer or returns existing bouncer status.
@@ -2711,4 +2735,11 @@ func (h *CrowdsecHandler) RegisterRoutes(rg *gin.RouterGroup) {
// Acquisition configuration endpoints
rg.GET("/admin/crowdsec/acquisition", h.GetAcquisitionConfig)
rg.PUT("/admin/crowdsec/acquisition", h.UpdateAcquisitionConfig)
// Dashboard aggregation endpoints (PR-1)
rg.GET("/admin/crowdsec/dashboard/summary", h.DashboardSummary)
rg.GET("/admin/crowdsec/dashboard/timeline", h.DashboardTimeline)
rg.GET("/admin/crowdsec/dashboard/top-ips", h.DashboardTopIPs)
rg.GET("/admin/crowdsec/dashboard/scenarios", h.DashboardScenarios)
rg.GET("/admin/crowdsec/alerts", h.ListAlerts)
rg.GET("/admin/crowdsec/decisions/export", h.ExportDecisions)
}

View File

@@ -106,7 +106,6 @@ func TestMapCrowdsecStatus(t *testing.T) {
// TestIsConsoleEnrollmentEnabled tests the isConsoleEnrollmentEnabled helper
func TestIsConsoleEnrollmentEnabled(t *testing.T) {
gin.SetMode(gin.TestMode)
tests := []struct {
name string
@@ -191,7 +190,6 @@ func TestActorFromContext(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gin.SetMode(gin.TestMode)
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
tt.setupCtx(c)
@@ -204,7 +202,6 @@ func TestActorFromContext(t *testing.T) {
// TestHubEndpoints tests the hubEndpoints helper
func TestHubEndpoints(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -233,7 +230,6 @@ func TestHubEndpoints(t *testing.T) {
// TestGetCachedPreset tests the GetCachedPreset handler
func TestGetCachedPreset(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -264,7 +260,6 @@ func TestGetCachedPreset(t *testing.T) {
// TestGetCachedPreset_NotFound tests GetCachedPreset with non-existent preset
func TestGetCachedPreset_NotFound(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -293,7 +288,6 @@ func TestGetCachedPreset_NotFound(t *testing.T) {
// TestGetLAPIDecisions tests the GetLAPIDecisions handler
func TestGetLAPIDecisions(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -313,7 +307,6 @@ func TestGetLAPIDecisions(t *testing.T) {
// TestCheckLAPIHealth tests the CheckLAPIHealth handler
func TestCheckLAPIHealth(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -332,7 +325,6 @@ func TestCheckLAPIHealth(t *testing.T) {
// TestListDecisions tests the ListDecisions handler
func TestListDecisions(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -351,7 +343,6 @@ func TestListDecisions(t *testing.T) {
// TestBanIP tests the BanIP handler
func TestBanIP(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -373,7 +364,6 @@ func TestBanIP(t *testing.T) {
// TestUnbanIP tests the UnbanIP handler
func TestUnbanIP(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
@@ -395,7 +385,6 @@ func TestUnbanIP(t *testing.T) {
// TestGetAcquisitionConfig tests the GetAcquisitionConfig handler
func TestGetAcquisitionConfig(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
acquisPath := filepath.Join(tmpDir, "acquis.yaml")
@@ -417,7 +406,6 @@ func TestGetAcquisitionConfig(t *testing.T) {
// TestUpdateAcquisitionConfig tests the UpdateAcquisitionConfig handler
func TestUpdateAcquisitionConfig(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
tmpDir := t.TempDir()
acquisPath := filepath.Join(tmpDir, "acquis.yaml")

View File

@@ -29,7 +29,6 @@ func (f *errorExec) Status(ctx context.Context, configDir string) (running bool,
}
func TestCrowdsec_Start_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -48,7 +47,6 @@ func TestCrowdsec_Start_Error(t *testing.T) {
}
func TestCrowdsec_Stop_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -67,7 +65,6 @@ func TestCrowdsec_Stop_Error(t *testing.T) {
}
func TestCrowdsec_Status_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -87,7 +84,6 @@ func TestCrowdsec_Status_Error(t *testing.T) {
// ReadFile tests
func TestCrowdsec_ReadFile_MissingPath(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -106,7 +102,6 @@ func TestCrowdsec_ReadFile_MissingPath(t *testing.T) {
}
func TestCrowdsec_ReadFile_PathTraversal(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -126,7 +121,6 @@ func TestCrowdsec_ReadFile_PathTraversal(t *testing.T) {
}
func TestCrowdsec_ReadFile_NotFound(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -146,7 +140,6 @@ func TestCrowdsec_ReadFile_NotFound(t *testing.T) {
// WriteFile tests
func TestCrowdsec_WriteFile_InvalidPayload(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -166,7 +159,6 @@ func TestCrowdsec_WriteFile_InvalidPayload(t *testing.T) {
}
func TestCrowdsec_WriteFile_MissingPath(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -189,7 +181,6 @@ func TestCrowdsec_WriteFile_MissingPath(t *testing.T) {
}
func TestCrowdsec_WriteFile_PathTraversal(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -214,7 +205,6 @@ func TestCrowdsec_WriteFile_PathTraversal(t *testing.T) {
// ExportConfig tests
func TestCrowdsec_ExportConfig_NotFound(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
// Use a non-existent directory
nonExistentDir := "/tmp/crowdsec-nonexistent-dir-12345"
@@ -238,7 +228,6 @@ func TestCrowdsec_ExportConfig_NotFound(t *testing.T) {
// ListFiles tests
func TestCrowdsec_ListFiles_EmptyDir(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -263,7 +252,6 @@ func TestCrowdsec_ListFiles_EmptyDir(t *testing.T) {
}
func TestCrowdsec_ListFiles_NonExistent(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
nonExistentDir := "/tmp/crowdsec-nonexistent-dir-67890"
_ = os.RemoveAll(nonExistentDir)
@@ -289,7 +277,6 @@ func TestCrowdsec_ListFiles_NonExistent(t *testing.T) {
// ImportConfig error cases
func TestCrowdsec_ImportConfig_NoFile(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -310,7 +297,6 @@ func TestCrowdsec_ImportConfig_NoFile(t *testing.T) {
// Additional ReadFile test with nested path that exists
func TestCrowdsec_ReadFile_NestedPath(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -336,7 +322,6 @@ func TestCrowdsec_ReadFile_NestedPath(t *testing.T) {
// Test WriteFile when backup fails (simulate by making dir unwritable)
func TestCrowdsec_WriteFile_Success(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -364,7 +349,6 @@ func TestCrowdsec_WriteFile_Success(t *testing.T) {
}
func TestCrowdsec_ListPresets_Disabled(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
t.Setenv("FEATURE_CERBERUS_ENABLED", "false")
tmpDir := t.TempDir()
@@ -383,7 +367,6 @@ func TestCrowdsec_ListPresets_Disabled(t *testing.T) {
}
func TestCrowdsec_ListPresets_Success(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -406,7 +389,6 @@ func TestCrowdsec_ListPresets_Success(t *testing.T) {
}
func TestCrowdsec_PullPreset_Validation(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -431,7 +413,6 @@ func TestCrowdsec_PullPreset_Validation(t *testing.T) {
}
func TestCrowdsec_ApplyPreset_Validation(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()

File diff suppressed because it is too large Load Diff

View File

@@ -12,7 +12,6 @@ import (
)
func TestGetLAPIDecisions_FallbackToCscli(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
// Create handler with mock executor
@@ -40,7 +39,6 @@ func TestGetLAPIDecisions_FallbackToCscli(t *testing.T) {
}
func TestGetLAPIDecisions_EmptyResponse(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
// Create handler with mock executor that returns empty array
@@ -67,7 +65,6 @@ func TestGetLAPIDecisions_EmptyResponse(t *testing.T) {
}
func TestCheckLAPIHealth_Handler(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
handler := &CrowdsecHandler{

View File

@@ -46,7 +46,6 @@ func makePresetTar(t *testing.T, files map[string]string) []byte {
}
func TestListPresetsIncludesCacheAndIndex(t *testing.T) {
gin.SetMode(gin.TestMode)
cache, err := crowdsec.NewHubCache(t.TempDir(), time.Hour)
require.NoError(t, err)
_, err = cache.Store(context.Background(), "crowdsecurity/demo", "etag1", "hub", "preview", []byte("archive"))
@@ -92,7 +91,6 @@ func TestListPresetsIncludesCacheAndIndex(t *testing.T) {
}
func TestPullPresetHandlerSuccess(t *testing.T) {
gin.SetMode(gin.TestMode)
cache, err := crowdsec.NewHubCache(t.TempDir(), time.Hour)
require.NoError(t, err)
dataDir := filepath.Join(t.TempDir(), "crowdsec")
@@ -132,7 +130,6 @@ func TestPullPresetHandlerSuccess(t *testing.T) {
}
func TestApplyPresetHandlerAudits(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.CrowdsecPresetEvent{}))
@@ -186,7 +183,6 @@ func TestApplyPresetHandlerAudits(t *testing.T) {
}
func TestPullPresetHandlerHubError(t *testing.T) {
gin.SetMode(gin.TestMode)
cache, err := crowdsec.NewHubCache(t.TempDir(), time.Hour)
require.NoError(t, err)
@@ -213,7 +209,6 @@ func TestPullPresetHandlerHubError(t *testing.T) {
}
func TestPullPresetHandlerTimeout(t *testing.T) {
gin.SetMode(gin.TestMode)
cache, err := crowdsec.NewHubCache(t.TempDir(), time.Hour)
require.NoError(t, err)
@@ -241,7 +236,6 @@ func TestPullPresetHandlerTimeout(t *testing.T) {
}
func TestGetCachedPresetNotFound(t *testing.T) {
gin.SetMode(gin.TestMode)
cache, err := crowdsec.NewHubCache(t.TempDir(), time.Hour)
require.NoError(t, err)
@@ -260,7 +254,6 @@ func TestGetCachedPresetNotFound(t *testing.T) {
}
func TestGetCachedPresetServiceUnavailable(t *testing.T) {
gin.SetMode(gin.TestMode)
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
h.Hub = &crowdsec.HubService{}
@@ -277,7 +270,6 @@ func TestGetCachedPresetServiceUnavailable(t *testing.T) {
}
func TestApplyPresetHandlerBackupFailure(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.CrowdsecPresetEvent{}))
@@ -325,7 +317,6 @@ func TestApplyPresetHandlerBackupFailure(t *testing.T) {
}
func TestListPresetsMergesCuratedAndHub(t *testing.T) {
gin.SetMode(gin.TestMode)
hub := crowdsec.NewHubService(nil, nil, t.TempDir())
hub.HubBaseURL = "http://hub.example"
@@ -375,7 +366,6 @@ func TestListPresetsMergesCuratedAndHub(t *testing.T) {
}
func TestGetCachedPresetSuccess(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Setenv("FEATURE_CERBERUS_ENABLED", "true")
cache, err := crowdsec.NewHubCache(t.TempDir(), time.Hour)
require.NoError(t, err)
@@ -403,7 +393,6 @@ func TestGetCachedPresetSuccess(t *testing.T) {
}
func TestGetCachedPresetSlugRequired(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Setenv("FEATURE_CERBERUS_ENABLED", "true")
cache, err := crowdsec.NewHubCache(t.TempDir(), time.Hour)
require.NoError(t, err)
@@ -424,7 +413,6 @@ func TestGetCachedPresetSlugRequired(t *testing.T) {
}
func TestGetCachedPresetPreviewError(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Setenv("FEATURE_CERBERUS_ENABLED", "true")
cacheDir := t.TempDir()
cache, err := crowdsec.NewHubCache(cacheDir, time.Hour)
@@ -451,7 +439,6 @@ func TestGetCachedPresetPreviewError(t *testing.T) {
}
func TestPullCuratedPresetSkipsHub(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Setenv("FEATURE_CERBERUS_ENABLED", "true")
// Setup handler with a hub service that would fail if called
@@ -489,7 +476,6 @@ func TestPullCuratedPresetSkipsHub(t *testing.T) {
}
func TestApplyCuratedPresetSkipsHub(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Setenv("FEATURE_CERBERUS_ENABLED", "true")
db := OpenTestDB(t)

View File

@@ -24,7 +24,6 @@ import (
// TestPullThenApplyIntegration tests the complete pull→apply workflow from the user's perspective.
// This reproduces the scenario where a user pulls a preset and then tries to apply it.
func TestPullThenApplyIntegration(t *testing.T) {
gin.SetMode(gin.TestMode)
// Setup
cacheDir := t.TempDir()
@@ -111,7 +110,6 @@ func TestPullThenApplyIntegration(t *testing.T) {
// TestApplyWithoutPullReturnsProperError verifies the error message when applying without pulling first.
func TestApplyWithoutPullReturnsProperError(t *testing.T) {
gin.SetMode(gin.TestMode)
cacheDir := t.TempDir()
dataDir := t.TempDir()
@@ -155,7 +153,6 @@ func TestApplyWithoutPullReturnsProperError(t *testing.T) {
}
func TestApplyRollbackWhenCacheMissingAndRepullFails(t *testing.T) {
gin.SetMode(gin.TestMode)
cacheDir := t.TempDir()
dataRoot := t.TempDir()

View File

@@ -14,7 +14,6 @@ import (
// TestStartSyncsSettingsTable verifies that Start() updates the settings table.
func TestStartSyncsSettingsTable(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
// Migrate both SecurityConfig and Setting tables
@@ -78,7 +77,6 @@ func TestStartSyncsSettingsTable(t *testing.T) {
// TestStopSyncsSettingsTable verifies that Stop() updates the settings table.
func TestStopSyncsSettingsTable(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
// Migrate both SecurityConfig and Setting tables
@@ -147,7 +145,6 @@ func TestStopSyncsSettingsTable(t *testing.T) {
// TestStartAndStopStateConsistency verifies consistent state across Start/Stop cycles.
func TestStartAndStopStateConsistency(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -219,7 +216,6 @@ func TestStartAndStopStateConsistency(t *testing.T) {
// TestExistingSettingIsUpdated verifies that an existing setting is updated, not duplicated.
func TestExistingSettingIsUpdated(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -293,7 +289,6 @@ func (f *fakeFailingExec) Status(ctx context.Context, configDir string) (running
// TestStartFailureRevertsSettings verifies that a failed Start reverts the settings.
func TestStartFailureRevertsSettings(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -330,7 +325,6 @@ func TestStartFailureRevertsSettings(t *testing.T) {
// TestStatusResponseFormat verifies the status endpoint response format.
func TestStatusResponseFormat(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))

View File

@@ -51,7 +51,6 @@ func createTestSecurityService(t *testing.T, db *gorm.DB) *services.SecurityServ
// TestCrowdsecHandler_Stop_Success tests the Stop handler with successful execution
func TestCrowdsecHandler_Stop_Success(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -97,7 +96,6 @@ func TestCrowdsecHandler_Stop_Success(t *testing.T) {
// TestCrowdsecHandler_Stop_Error tests the Stop handler with an execution error
func TestCrowdsecHandler_Stop_Error(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -123,7 +121,6 @@ func TestCrowdsecHandler_Stop_Error(t *testing.T) {
// TestCrowdsecHandler_Stop_NoSecurityConfig tests Stop when there's no existing SecurityConfig
func TestCrowdsecHandler_Stop_NoSecurityConfig(t *testing.T) {
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
@@ -152,10 +149,6 @@ func TestCrowdsecHandler_Stop_NoSecurityConfig(t *testing.T) {
// TestGetLAPIDecisions_WithMockServer tests GetLAPIDecisions with a mock LAPI server
func TestGetLAPIDecisions_WithMockServer(t *testing.T) {
// Use permissive validator for testing with mock server on random port
orig := validateCrowdsecLAPIBaseURLFunc
validateCrowdsecLAPIBaseURLFunc = permissiveLAPIURLValidator
defer func() { validateCrowdsecLAPIBaseURLFunc = orig }()
// Create a mock LAPI server
mockLAPI := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
@@ -165,7 +158,6 @@ func TestGetLAPIDecisions_WithMockServer(t *testing.T) {
}))
defer mockLAPI.Close()
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}))
@@ -179,6 +171,7 @@ func TestGetLAPIDecisions_WithMockServer(t *testing.T) {
Security: secSvc,
CmdExec: &mockCommandExecutor{},
DataDir: t.TempDir(),
validateLAPIURL: permissiveLAPIURLValidator,
}
r := gin.New()
@@ -202,10 +195,6 @@ func TestGetLAPIDecisions_WithMockServer(t *testing.T) {
// TestGetLAPIDecisions_Unauthorized tests GetLAPIDecisions when LAPI returns 401
func TestGetLAPIDecisions_Unauthorized(t *testing.T) {
// Use permissive validator for testing with mock server on random port
orig := validateCrowdsecLAPIBaseURLFunc
validateCrowdsecLAPIBaseURLFunc = permissiveLAPIURLValidator
defer func() { validateCrowdsecLAPIBaseURLFunc = orig }()
// Create a mock LAPI server that returns 401
mockLAPI := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
@@ -213,7 +202,6 @@ func TestGetLAPIDecisions_Unauthorized(t *testing.T) {
}))
defer mockLAPI.Close()
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}))
@@ -226,6 +214,7 @@ func TestGetLAPIDecisions_Unauthorized(t *testing.T) {
Security: secSvc,
CmdExec: &mockCommandExecutor{},
DataDir: t.TempDir(),
validateLAPIURL: permissiveLAPIURLValidator,
}
r := gin.New()
@@ -240,10 +229,6 @@ func TestGetLAPIDecisions_Unauthorized(t *testing.T) {
// TestGetLAPIDecisions_NullResponse tests GetLAPIDecisions when LAPI returns null
func TestGetLAPIDecisions_NullResponse(t *testing.T) {
// Use permissive validator for testing with mock server on random port
orig := validateCrowdsecLAPIBaseURLFunc
validateCrowdsecLAPIBaseURLFunc = permissiveLAPIURLValidator
defer func() { validateCrowdsecLAPIBaseURLFunc = orig }()
mockLAPI := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
@@ -252,7 +237,6 @@ func TestGetLAPIDecisions_NullResponse(t *testing.T) {
}))
defer mockLAPI.Close()
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}))
@@ -265,6 +249,7 @@ func TestGetLAPIDecisions_NullResponse(t *testing.T) {
Security: secSvc,
CmdExec: &mockCommandExecutor{},
DataDir: t.TempDir(),
validateLAPIURL: permissiveLAPIURLValidator,
}
r := gin.New()
@@ -292,7 +277,6 @@ func TestGetLAPIDecisions_NonJSONContentType(t *testing.T) {
}))
defer mockLAPI.Close()
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}))
@@ -320,10 +304,6 @@ func TestGetLAPIDecisions_NonJSONContentType(t *testing.T) {
// TestCheckLAPIHealth_WithMockServer tests CheckLAPIHealth with a healthy LAPI
func TestCheckLAPIHealth_WithMockServer(t *testing.T) {
// Use permissive validator for testing with mock server on random port
orig := validateCrowdsecLAPIBaseURLFunc
validateCrowdsecLAPIBaseURLFunc = permissiveLAPIURLValidator
defer func() { validateCrowdsecLAPIBaseURLFunc = orig }()
mockLAPI := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path == "/health" {
@@ -335,7 +315,6 @@ func TestCheckLAPIHealth_WithMockServer(t *testing.T) {
}))
defer mockLAPI.Close()
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}))
@@ -348,6 +327,7 @@ func TestCheckLAPIHealth_WithMockServer(t *testing.T) {
Security: secSvc,
CmdExec: &mockCommandExecutor{},
DataDir: t.TempDir(),
validateLAPIURL: permissiveLAPIURLValidator,
}
r := gin.New()
@@ -368,10 +348,6 @@ func TestCheckLAPIHealth_WithMockServer(t *testing.T) {
// TestCheckLAPIHealth_FallbackToDecisions tests the fallback to /v1/decisions endpoint
// when the primary /health endpoint is unreachable
func TestCheckLAPIHealth_FallbackToDecisions(t *testing.T) {
// Use permissive validator for testing with mock server on random port
orig := validateCrowdsecLAPIBaseURLFunc
validateCrowdsecLAPIBaseURLFunc = permissiveLAPIURLValidator
defer func() { validateCrowdsecLAPIBaseURLFunc = orig }()
// Create a mock server that only responds to /v1/decisions, not /health
mockLAPI := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
@@ -385,7 +361,6 @@ func TestCheckLAPIHealth_FallbackToDecisions(t *testing.T) {
}))
defer mockLAPI.Close()
gin.SetMode(gin.TestMode)
db := OpenTestDB(t)
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}))
@@ -398,6 +373,7 @@ func TestCheckLAPIHealth_FallbackToDecisions(t *testing.T) {
Security: secSvc,
CmdExec: &mockCommandExecutor{},
DataDir: t.TempDir(),
validateLAPIURL: permissiveLAPIURLValidator,
}
r := gin.New()

View File

@@ -47,7 +47,6 @@ func TestReadAcquisitionConfig_ErrorsAndSuccess(t *testing.T) {
}
func TestCrowdsec_AcquisitionEndpoints_InvalidConfiguredPath(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Setenv("CHARON_CROWDSEC_ACQUIS_PATH", "relative/path.yaml")
h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
@@ -68,7 +67,6 @@ func TestCrowdsec_AcquisitionEndpoints_InvalidConfiguredPath(t *testing.T) {
}
func TestCrowdsec_GetBouncerKey_NotConfigured(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Setenv("CROWDSEC_API_KEY", "")
t.Setenv("CROWDSEC_BOUNCER_API_KEY", "")
t.Setenv("CERBERUS_SECURITY_CROWDSEC_API_KEY", "")

View File

@@ -27,7 +27,6 @@ func TestCrowdsecWave5_ReadAcquisitionConfig_InvalidFilenameBranch(t *testing.T)
}
func TestCrowdsecWave5_GetLAPIDecisions_Unauthorized(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -36,17 +35,11 @@ func TestCrowdsecWave5_GetLAPIDecisions_Unauthorized(t *testing.T) {
}))
t.Cleanup(server.Close)
original := validateCrowdsecLAPIBaseURLFunc
validateCrowdsecLAPIBaseURLFunc = func(raw string) (*url.URL, error) {
return url.Parse(raw)
}
t.Cleanup(func() {
validateCrowdsecLAPIBaseURLFunc = original
})
require.NoError(t, db.Create(&models.SecurityConfig{UUID: "default", CrowdSecAPIURL: server.URL}).Error)
h := newTestCrowdsecHandler(t, db, &fakeExec{}, "/bin/false", tmpDir)
h.validateLAPIURL = func(raw string) (*url.URL, error) { return url.Parse(raw) }
r := gin.New()
g := r.Group("/api/v1")
h.RegisterRoutes(g)
@@ -60,7 +53,6 @@ func TestCrowdsecWave5_GetLAPIDecisions_Unauthorized(t *testing.T) {
}
func TestCrowdsecWave5_GetLAPIDecisions_NonJSONContentTypeFallsBack(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupCrowdDB(t)
tmpDir := t.TempDir()
@@ -71,17 +63,11 @@ func TestCrowdsecWave5_GetLAPIDecisions_NonJSONContentTypeFallsBack(t *testing.T
}))
t.Cleanup(server.Close)
original := validateCrowdsecLAPIBaseURLFunc
validateCrowdsecLAPIBaseURLFunc = func(raw string) (*url.URL, error) {
return url.Parse(raw)
}
t.Cleanup(func() {
validateCrowdsecLAPIBaseURLFunc = original
})
require.NoError(t, db.Create(&models.SecurityConfig{UUID: "default", CrowdSecAPIURL: server.URL}).Error)
h := newTestCrowdsecHandler(t, db, &fakeExec{}, "/bin/false", tmpDir)
h.validateLAPIURL = func(raw string) (*url.URL, error) { return url.Parse(raw) }
h.CmdExec = &mockCmdExecutor{output: []byte("[]"), err: nil}
r := gin.New()
g := r.Group("/api/v1")
@@ -96,7 +82,6 @@ func TestCrowdsecWave5_GetLAPIDecisions_NonJSONContentTypeFallsBack(t *testing.T
}
func TestCrowdsecWave5_GetBouncerInfo_And_GetBouncerKey_FileSource(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Setenv("CROWDSEC_BOUNCER_API_KEY", "")
t.Setenv("CERBERUS_SECURITY_CROWDSEC_API_KEY", "")
t.Setenv("CHARON_SECURITY_CROWDSEC_API_KEY", "")
@@ -105,6 +90,7 @@ func TestCrowdsecWave5_GetBouncerInfo_And_GetBouncerKey_FileSource(t *testing.T)
tmpDir := t.TempDir()
h := newTestCrowdsecHandler(t, db, &fakeExec{}, "/bin/false", tmpDir)
h.validateLAPIURL = func(raw string) (*url.URL, error) { return url.Parse(raw) }
keyPath := h.bouncerKeyPath()
require.NoError(t, os.MkdirAll(filepath.Dir(keyPath), 0o750))
require.NoError(t, os.WriteFile(keyPath, []byte("abcdefghijklmnop1234567890"), 0o600))

View File

@@ -17,7 +17,6 @@ func TestCrowdsecWave6_BouncerKeyPath_UsesEnvFallback(t *testing.T) {
}
func TestCrowdsecWave6_GetBouncerInfo_NoneSource(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Setenv("CROWDSEC_API_KEY", "")
t.Setenv("CROWDSEC_BOUNCER_API_KEY", "")
t.Setenv("CERBERUS_SECURITY_CROWDSEC_API_KEY", "")
@@ -40,7 +39,6 @@ func TestCrowdsecWave6_GetBouncerInfo_NoneSource(t *testing.T) {
}
func TestCrowdsecWave6_GetKeyStatus_NoKeyConfiguredMessage(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Setenv("CROWDSEC_API_KEY", "")
t.Setenv("CROWDSEC_BOUNCER_API_KEY", "")
t.Setenv("CERBERUS_SECURITY_CROWDSEC_API_KEY", "")

View File

@@ -28,7 +28,6 @@ func TestCrowdsecWave7_ReadAcquisitionConfig_ReadErrorOnDirectory(t *testing.T)
}
func TestCrowdsecWave7_Start_CreateSecurityConfigFailsOnReadOnlyDB(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
dbPath := filepath.Join(tmpDir, "crowdsec-readonly.db")

View File

@@ -36,7 +36,6 @@ func createTestSQLiteDB(dbPath string) error {
}
func TestDBHealthHandler_Check_Healthy(t *testing.T) {
gin.SetMode(gin.TestMode)
// Create in-memory database
db, err := database.Connect("file::memory:?cache=shared")
@@ -65,7 +64,6 @@ func TestDBHealthHandler_Check_Healthy(t *testing.T) {
}
func TestDBHealthHandler_Check_WithBackupService(t *testing.T) {
gin.SetMode(gin.TestMode)
// Setup temp dirs for backup service
tmpDir := t.TempDir()
@@ -116,7 +114,6 @@ func TestDBHealthHandler_Check_WithBackupService(t *testing.T) {
}
func TestDBHealthHandler_Check_WALMode(t *testing.T) {
gin.SetMode(gin.TestMode)
// Create file-based database to test WAL mode
tmpDir := t.TempDir()
@@ -145,7 +142,6 @@ func TestDBHealthHandler_Check_WALMode(t *testing.T) {
}
func TestDBHealthHandler_ResponseJSONTags(t *testing.T) {
gin.SetMode(gin.TestMode)
db, err := database.Connect("file::memory:?cache=shared")
require.NoError(t, err)
@@ -200,7 +196,6 @@ func TestNewDBHealthHandler(t *testing.T) {
// Phase 1 & 3: Critical coverage tests
func TestDBHealthHandler_Check_CorruptedDatabase(t *testing.T) {
gin.SetMode(gin.TestMode)
// Create a file-based database and corrupt it
tmpDir := t.TempDir()
@@ -252,7 +247,6 @@ func TestDBHealthHandler_Check_CorruptedDatabase(t *testing.T) {
}
func TestDBHealthHandler_Check_BackupServiceError(t *testing.T) {
gin.SetMode(gin.TestMode)
// Create database
db, err := database.Connect("file::memory:?cache=shared")
@@ -294,7 +288,6 @@ func TestDBHealthHandler_Check_BackupServiceError(t *testing.T) {
}
func TestDBHealthHandler_Check_BackupTimeZero(t *testing.T) {
gin.SetMode(gin.TestMode)
// Create database
db, err := database.Connect("file::memory:?cache=shared")

View File

@@ -51,7 +51,6 @@ func TestNewDNSDetectionHandler(t *testing.T) {
}
func TestDetect_Success(t *testing.T) {
gin.SetMode(gin.TestMode)
mockService := new(mockDNSDetectionService)
handler := NewDNSDetectionHandler(mockService)
@@ -177,7 +176,6 @@ func TestDetect_Success(t *testing.T) {
}
func TestDetect_ValidationErrors(t *testing.T) {
gin.SetMode(gin.TestMode)
mockService := new(mockDNSDetectionService)
handler := NewDNSDetectionHandler(mockService)
@@ -216,7 +214,6 @@ func TestDetect_ValidationErrors(t *testing.T) {
}
func TestDetect_ServiceError(t *testing.T) {
gin.SetMode(gin.TestMode)
mockService := new(mockDNSDetectionService)
handler := NewDNSDetectionHandler(mockService)
@@ -246,7 +243,6 @@ func TestDetect_ServiceError(t *testing.T) {
}
func TestGetPatterns(t *testing.T) {
gin.SetMode(gin.TestMode)
mockService := new(mockDNSDetectionService)
handler := NewDNSDetectionHandler(mockService)
@@ -287,7 +283,6 @@ func TestGetPatterns(t *testing.T) {
}
func TestDetect_WildcardDomain(t *testing.T) {
gin.SetMode(gin.TestMode)
mockService := new(mockDNSDetectionService)
handler := NewDNSDetectionHandler(mockService)
@@ -327,7 +322,6 @@ func TestDetect_WildcardDomain(t *testing.T) {
}
func TestDetect_LowConfidence(t *testing.T) {
gin.SetMode(gin.TestMode)
mockService := new(mockDNSDetectionService)
handler := NewDNSDetectionHandler(mockService)
@@ -368,7 +362,6 @@ func TestDetect_LowConfidence(t *testing.T) {
}
func TestDetect_DNSLookupError(t *testing.T) {
gin.SetMode(gin.TestMode)
mockService := new(mockDNSDetectionService)
handler := NewDNSDetectionHandler(mockService)
@@ -438,7 +431,6 @@ func TestDetectRequest_Binding(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gin.SetMode(gin.TestMode)
c, _ := gin.CreateTestContext(httptest.NewRecorder())
c.Request = httptest.NewRequest(http.MethodPost, "/", bytes.NewBufferString(tt.body))
c.Request.Header.Set("Content-Type", "application/json")

View File

@@ -106,7 +106,6 @@ func (m *MockDNSProviderService) GetDecryptedCredentials(ctx context.Context, id
}
func setupDNSProviderTestRouter() (*gin.Engine, *MockDNSProviderService) {
gin.SetMode(gin.TestMode)
router := gin.New()
mockService := new(MockDNSProviderService)
handler := NewDNSProviderHandler(mockService)

View File

@@ -41,7 +41,6 @@ func (f *fakeRemoteServerService) GetByUUID(uuidStr string) (*models.RemoteServe
}
func TestDockerHandler_ListContainers_InvalidHostRejected(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
dockerSvc := &fakeDockerService{}
@@ -60,7 +59,6 @@ func TestDockerHandler_ListContainers_InvalidHostRejected(t *testing.T) {
}
func TestDockerHandler_ListContainers_DockerUnavailableMappedTo503(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
dockerSvc := &fakeDockerService{err: services.NewDockerUnavailableError(errors.New("no docker socket"), "Local Docker socket is mounted but not accessible by current process")}
@@ -82,7 +80,6 @@ func TestDockerHandler_ListContainers_DockerUnavailableMappedTo503(t *testing.T)
}
func TestDockerHandler_ListContainers_ServerIDResolvesToTCPHost(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
dockerSvc := &fakeDockerService{ret: []services.DockerContainer{}}
@@ -103,7 +100,6 @@ func TestDockerHandler_ListContainers_ServerIDResolvesToTCPHost(t *testing.T) {
}
func TestDockerHandler_ListContainers_ServerIDNotFoundReturns404(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
dockerSvc := &fakeDockerService{}
@@ -125,7 +121,6 @@ func TestDockerHandler_ListContainers_ServerIDNotFoundReturns404(t *testing.T) {
func TestDockerHandler_ListContainers_Local(t *testing.T) {
// Test local/default docker connection (empty host parameter)
gin.SetMode(gin.TestMode)
router := gin.New()
dockerSvc := &fakeDockerService{
@@ -163,7 +158,6 @@ func TestDockerHandler_ListContainers_Local(t *testing.T) {
func TestDockerHandler_ListContainers_RemoteServerSuccess(t *testing.T) {
// Test successful remote server connection via server_id
gin.SetMode(gin.TestMode)
router := gin.New()
dockerSvc := &fakeDockerService{
@@ -203,7 +197,6 @@ func TestDockerHandler_ListContainers_RemoteServerSuccess(t *testing.T) {
func TestDockerHandler_ListContainers_RemoteServerNotFound(t *testing.T) {
// Test server_id that doesn't exist in database
gin.SetMode(gin.TestMode)
router := gin.New()
dockerSvc := &fakeDockerService{}
@@ -226,7 +219,6 @@ func TestDockerHandler_ListContainers_RemoteServerNotFound(t *testing.T) {
func TestDockerHandler_ListContainers_InvalidHost(t *testing.T) {
// Test SSRF protection: reject arbitrary host values
gin.SetMode(gin.TestMode)
router := gin.New()
dockerSvc := &fakeDockerService{}
@@ -289,7 +281,6 @@ func TestDockerHandler_ListContainers_DockerUnavailable(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
dockerSvc := &fakeDockerService{err: tt.err}
@@ -340,7 +331,6 @@ func TestDockerHandler_ListContainers_GenericError(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
dockerSvc := &fakeDockerService{err: tt.err}
@@ -362,7 +352,6 @@ func TestDockerHandler_ListContainers_GenericError(t *testing.T) {
}
func TestDockerHandler_ListContainers_503FallbackDetailsWhenEmpty(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
dockerSvc := &fakeDockerService{err: services.NewDockerUnavailableError(errors.New("socket error"))}
@@ -382,7 +371,6 @@ func TestDockerHandler_ListContainers_503FallbackDetailsWhenEmpty(t *testing.T)
}
func TestDockerHandler_ListContainers_503DetailsWithGroupGuidance(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
groupDetails := `Local Docker socket is mounted but not accessible by current process (uid=1000 gid=1000). Process groups (1000) do not include socket gid 988; run container with matching supplemental group (e.g., --group-add 988 or compose group_add: ["988"]).`

View File

@@ -87,7 +87,6 @@ func setupEmergencyTestDB(t *testing.T) *gorm.DB {
}
func setupEmergencyRouter(handler *EmergencyHandler) *gin.Engine {
gin.SetMode(gin.TestMode)
router := gin.New()
_ = router.SetTrustedProxies(nil)
router.POST("/api/v1/emergency/security-reset", handler.SecurityReset)
@@ -385,7 +384,6 @@ func TestEmergencySecurityReset_MiddlewarePrevalidatedBypass(t *testing.T) {
db := setupEmergencyTestDB(t)
handler := NewEmergencyHandler(db)
gin.SetMode(gin.TestMode)
router := gin.New()
router.POST("/api/v1/emergency/security-reset", func(c *gin.Context) {
c.Set("emergency_bypass", true)
@@ -407,7 +405,6 @@ func TestEmergencySecurityReset_MiddlewareBypass_ResetFailure(t *testing.T) {
require.NoError(t, err)
require.NoError(t, stdDB.Close())
gin.SetMode(gin.TestMode)
router := gin.New()
router.POST("/api/v1/emergency/security-reset", func(c *gin.Context) {
c.Set("emergency_bypass", true)
@@ -475,7 +472,6 @@ func TestGenerateToken_Success(t *testing.T) {
handler := NewEmergencyTokenHandler(tokenService)
defer handler.Close()
gin.SetMode(gin.TestMode)
router := gin.New()
router.POST("/api/v1/emergency/token", func(c *gin.Context) {
c.Set("role", "admin")
@@ -504,7 +500,6 @@ func TestGenerateToken_AdminRequired(t *testing.T) {
handler := NewEmergencyTokenHandler(tokenService)
defer handler.Close()
gin.SetMode(gin.TestMode)
router := gin.New()
router.POST("/api/v1/emergency/token", func(c *gin.Context) {
// No role set - simulating non-admin user
@@ -527,7 +522,6 @@ func TestGenerateToken_InvalidExpirationDays(t *testing.T) {
handler := NewEmergencyTokenHandler(tokenService)
defer handler.Close()
gin.SetMode(gin.TestMode)
router := gin.New()
router.POST("/api/v1/emergency/token", func(c *gin.Context) {
c.Set("role", "admin")
@@ -554,7 +548,6 @@ func TestGetTokenStatus_Success(t *testing.T) {
// Generate a token first
_, _ = tokenService.Generate(services.GenerateRequest{ExpirationDays: 30})
gin.SetMode(gin.TestMode)
router := gin.New()
router.GET("/api/v1/emergency/token/status", func(c *gin.Context) {
c.Set("role", "admin")
@@ -581,7 +574,6 @@ func TestGetTokenStatus_AdminRequired(t *testing.T) {
handler := NewEmergencyTokenHandler(tokenService)
defer handler.Close()
gin.SetMode(gin.TestMode)
router := gin.New()
router.GET("/api/v1/emergency/token/status", handler.GetTokenStatus)
@@ -602,7 +594,6 @@ func TestRevokeToken_Success(t *testing.T) {
// Generate a token first
_, _ = tokenService.Generate(services.GenerateRequest{ExpirationDays: 30})
gin.SetMode(gin.TestMode)
router := gin.New()
router.DELETE("/api/v1/emergency/token", func(c *gin.Context) {
c.Set("role", "admin")
@@ -624,7 +615,6 @@ func TestRevokeToken_AdminRequired(t *testing.T) {
handler := NewEmergencyTokenHandler(tokenService)
defer handler.Close()
gin.SetMode(gin.TestMode)
router := gin.New()
router.DELETE("/api/v1/emergency/token", handler.RevokeToken)
@@ -645,7 +635,6 @@ func TestUpdateTokenExpiration_Success(t *testing.T) {
// Generate a token first
_, _ = tokenService.Generate(services.GenerateRequest{ExpirationDays: 30})
gin.SetMode(gin.TestMode)
router := gin.New()
router.PATCH("/api/v1/emergency/token/expiration", func(c *gin.Context) {
c.Set("role", "admin")
@@ -669,7 +658,6 @@ func TestUpdateTokenExpiration_AdminRequired(t *testing.T) {
handler := NewEmergencyTokenHandler(tokenService)
defer handler.Close()
gin.SetMode(gin.TestMode)
router := gin.New()
router.PATCH("/api/v1/emergency/token/expiration", handler.UpdateTokenExpiration)
@@ -689,7 +677,6 @@ func TestUpdateTokenExpiration_InvalidDays(t *testing.T) {
handler := NewEmergencyTokenHandler(tokenService)
defer handler.Close()
gin.SetMode(gin.TestMode)
router := gin.New()
router.PATCH("/api/v1/emergency/token/expiration", func(c *gin.Context) {
c.Set("role", "admin")

View File

@@ -40,7 +40,6 @@ func setupEncryptionTestDB(t *testing.T) *gorm.DB {
}
func setupEncryptionTestRouter(handler *EncryptionHandler, isAdmin bool) *gin.Engine {
gin.SetMode(gin.TestMode)
router := gin.New()
// Mock admin middleware - matches production auth middleware key names
@@ -558,7 +557,6 @@ func TestEncryptionHandler_IntegrationFlow(t *testing.T) {
// TestEncryptionHandler_HelperFunctions tests the isAdmin and getActorFromGinContext helpers
func TestEncryptionHandler_HelperFunctions(t *testing.T) {
gin.SetMode(gin.TestMode)
t.Run("isAdmin with invalid role type", func(t *testing.T) {
router := gin.New()
@@ -787,7 +785,6 @@ func TestEncryptionHandler_RefreshKey_InvalidOldKey(t *testing.T) {
// TestEncryptionHandler_GetActorFromGinContext_InvalidType tests getActorFromGinContext with invalid type
func TestEncryptionHandler_GetActorFromGinContext_InvalidType(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
var capturedActor string
@@ -884,7 +881,6 @@ func TestEncryptionHandler_RotateWithPartialFailures(t *testing.T) {
// TestEncryptionHandler_isAdmin_NoRoleSet tests isAdmin when no role is set
func TestEncryptionHandler_isAdmin_NoRoleSet(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
// No middleware setting user_role
@@ -905,7 +901,6 @@ func TestEncryptionHandler_isAdmin_NoRoleSet(t *testing.T) {
// TestEncryptionHandler_isAdmin_NonAdminRole tests isAdmin with non-admin role
func TestEncryptionHandler_isAdmin_NonAdminRole(t *testing.T) {
gin.SetMode(gin.TestMode)
router := gin.New()
router.Use(func(c *gin.Context) {

View File

@@ -15,7 +15,6 @@ import (
// TestBlocker3_SecurityProviderEventsFlagInResponse tests that the feature flag is included in GET response.
func TestBlocker3_SecurityProviderEventsFlagInResponse(t *testing.T) {
gin.SetMode(gin.TestMode)
// Setup test database
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
@@ -50,7 +49,6 @@ func TestBlocker3_SecurityProviderEventsFlagInResponse(t *testing.T) {
// TestBlocker3_SecurityProviderEventsFlagDefaultValue tests the default value of the flag.
func TestBlocker3_SecurityProviderEventsFlagDefaultValue(t *testing.T) {
gin.SetMode(gin.TestMode)
// Setup test database
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
@@ -85,7 +83,6 @@ func TestBlocker3_SecurityProviderEventsFlagDefaultValue(t *testing.T) {
// TestBlocker3_SecurityProviderEventsFlagCanBeEnabled tests that the flag can be enabled.
func TestBlocker3_SecurityProviderEventsFlagCanBeEnabled(t *testing.T) {
gin.SetMode(gin.TestMode)
// Setup test database
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})

View File

@@ -15,7 +15,6 @@ import (
)
func TestFeatureFlagsHandler_GetFlags_DBPrecedence(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupFlagsDB(t)
// Set a flag in DB
@@ -48,7 +47,6 @@ func TestFeatureFlagsHandler_GetFlags_DBPrecedence(t *testing.T) {
}
func TestFeatureFlagsHandler_GetFlags_EnvFallback(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupFlagsDB(t)
// Set env var (no DB value exists)
@@ -73,7 +71,6 @@ func TestFeatureFlagsHandler_GetFlags_EnvFallback(t *testing.T) {
}
func TestFeatureFlagsHandler_GetFlags_EnvShortForm(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupFlagsDB(t)
// Set short form env var (CERBERUS_ENABLED instead of FEATURE_CERBERUS_ENABLED)
@@ -98,7 +95,6 @@ func TestFeatureFlagsHandler_GetFlags_EnvShortForm(t *testing.T) {
}
func TestFeatureFlagsHandler_GetFlags_EnvNumeric(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupFlagsDB(t)
// Set numeric env var (1/0 instead of true/false)
@@ -123,7 +119,6 @@ func TestFeatureFlagsHandler_GetFlags_EnvNumeric(t *testing.T) {
}
func TestFeatureFlagsHandler_GetFlags_DefaultTrue(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupFlagsDB(t)
// No DB value, no env var - check defaults
@@ -148,7 +143,6 @@ func TestFeatureFlagsHandler_GetFlags_DefaultTrue(t *testing.T) {
}
func TestFeatureFlagsHandler_GetFlags_AllDefaultFlagsPresent(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupFlagsDB(t)
h := NewFeatureFlagsHandler(db)
@@ -173,7 +167,6 @@ func TestFeatureFlagsHandler_GetFlags_AllDefaultFlagsPresent(t *testing.T) {
}
func TestFeatureFlagsHandler_UpdateFlags_Success(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupFlagsDB(t)
h := NewFeatureFlagsHandler(db)
@@ -208,7 +201,6 @@ func TestFeatureFlagsHandler_UpdateFlags_Success(t *testing.T) {
}
func TestFeatureFlagsHandler_UpdateFlags_Upsert(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupFlagsDB(t)
// Create existing setting
@@ -249,7 +241,6 @@ func TestFeatureFlagsHandler_UpdateFlags_Upsert(t *testing.T) {
}
func TestFeatureFlagsHandler_UpdateFlags_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupFlagsDB(t)
h := NewFeatureFlagsHandler(db)
@@ -265,7 +256,6 @@ func TestFeatureFlagsHandler_UpdateFlags_InvalidJSON(t *testing.T) {
}
func TestFeatureFlagsHandler_UpdateFlags_OnlyAllowedKeys(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupFlagsDB(t)
h := NewFeatureFlagsHandler(db)
@@ -298,7 +288,6 @@ func TestFeatureFlagsHandler_UpdateFlags_OnlyAllowedKeys(t *testing.T) {
}
func TestFeatureFlagsHandler_UpdateFlags_EmptyPayload(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupFlagsDB(t)
h := NewFeatureFlagsHandler(db)
@@ -339,7 +328,6 @@ func TestFeatureFlagsHandler_GetFlags_DBValueVariants(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupFlagsDB(t)
// Set flag with test value
@@ -387,7 +375,6 @@ func TestFeatureFlagsHandler_GetFlags_EnvValueVariants(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupFlagsDB(t)
// Set env var (no DB value)
@@ -425,7 +412,6 @@ func TestFeatureFlagsHandler_UpdateFlags_BoolValues(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupFlagsDB(t)
h := NewFeatureFlagsHandler(db)
@@ -462,7 +448,6 @@ func TestFeatureFlagsHandler_NewFeatureFlagsHandler(t *testing.T) {
}
func TestFeatureFlagsHandler_GetFlags_EmailFlagDefaultFalse(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupFlagsDB(t)
h := NewFeatureFlagsHandler(db)

View File

@@ -28,7 +28,6 @@ func TestFeatureFlags_GetAndUpdate(t *testing.T) {
h := NewFeatureFlagsHandler(db)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/api/v1/feature-flags", h.GetFlags)
r.PUT("/api/v1/feature-flags", h.UpdateFlags)
@@ -81,7 +80,6 @@ func TestFeatureFlags_EnvFallback(t *testing.T) {
db := setupFlagsDB(t)
// Do not write any settings so DB lookup fails and env is used
h := NewFeatureFlagsHandler(db)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/api/v1/feature-flags", h.GetFlags)
@@ -178,7 +176,6 @@ func TestGetFlags_BatchQuery(t *testing.T) {
db.Create(&models.Setting{Key: "feature.crowdsec.console_enrollment", Value: "true", Type: "bool", Category: "feature"})
h := NewFeatureFlagsHandler(db)
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/api/v1/feature-flags", h.GetFlags)
@@ -219,7 +216,6 @@ func TestUpdateFlags_TransactionRollback(t *testing.T) {
_ = sqlDB.Close()
h := NewFeatureFlagsHandler(db)
gin.SetMode(gin.TestMode)
r := gin.New()
r.PUT("/api/v1/feature-flags", h.UpdateFlags)
@@ -244,7 +240,6 @@ func TestUpdateFlags_TransactionAtomic(t *testing.T) {
db := setupFlagsDB(t)
h := NewFeatureFlagsHandler(db)
gin.SetMode(gin.TestMode)
r := gin.New()
r.PUT("/api/v1/feature-flags", h.UpdateFlags)

View File

@@ -50,7 +50,6 @@ func addAdminMiddleware(router *gin.Engine) {
}
func TestImportHandler_GetStatus(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
// Case 1: No active session, no mount
@@ -78,7 +77,6 @@ func TestImportHandler_GetStatus(t *testing.T) {
}
func TestImportHandler_Commit(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
handler := handlers.NewImportHandler(db, "echo", "/tmp", "")
router := gin.New()
@@ -120,7 +118,6 @@ func TestImportHandler_Commit(t *testing.T) {
}
func TestImportHandler_Upload(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
// Use fake caddy script
@@ -151,7 +148,6 @@ func TestImportHandler_Upload(t *testing.T) {
}
func TestImportHandler_GetPreview_WithContent(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
tmpDir := t.TempDir()
handler := handlers.NewImportHandler(db, "echo", tmpDir, "")
@@ -188,7 +184,6 @@ func TestImportHandler_GetPreview_WithContent(t *testing.T) {
}
func TestImportHandler_Commit_Errors(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
handler := handlers.NewImportHandler(db, "echo", "/tmp", "")
router := gin.New()
@@ -233,7 +228,6 @@ func TestImportHandler_Commit_Errors(t *testing.T) {
}
func TestImportHandler_Cancel_Errors(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
handler := handlers.NewImportHandler(db, "echo", "/tmp", "")
router := gin.New()
@@ -279,7 +273,6 @@ func TestCheckMountedImport(t *testing.T) {
}
func TestImportHandler_Upload_Failure(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
// Use fake caddy script that fails
@@ -310,7 +303,6 @@ func TestImportHandler_Upload_Failure(t *testing.T) {
}
func TestImportHandler_Upload_Conflict(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
// Pre-create a host to cause conflict
@@ -359,7 +351,6 @@ func TestImportHandler_Upload_Conflict(t *testing.T) {
}
func TestImportHandler_GetPreview_BackupContent(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
tmpDir := t.TempDir()
handler := handlers.NewImportHandler(db, "echo", tmpDir, "")
@@ -410,7 +401,6 @@ func TestImportHandler_RegisterRoutes(t *testing.T) {
}
func TestImportHandler_GetPreview_TransientMount(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
tmpDir := t.TempDir()
mountPath := filepath.Join(tmpDir, "mounted.caddyfile")
@@ -455,7 +445,6 @@ func TestImportHandler_GetPreview_TransientMount(t *testing.T) {
}
func TestImportHandler_Commit_TransientUpload(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
tmpDir := t.TempDir()
@@ -515,7 +504,6 @@ func TestImportHandler_Commit_TransientUpload(t *testing.T) {
}
func TestImportHandler_Commit_TransientMount(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
tmpDir := t.TempDir()
mountPath := filepath.Join(tmpDir, "mounted.caddyfile")
@@ -562,7 +550,6 @@ func TestImportHandler_Commit_TransientMount(t *testing.T) {
}
func TestImportHandler_Cancel_TransientUpload(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
tmpDir := t.TempDir()
@@ -597,7 +584,6 @@ func TestImportHandler_Cancel_TransientUpload(t *testing.T) {
}
func TestImportHandler_DetectImports(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
handler := handlers.NewImportHandler(db, "echo", "/tmp", "")
router := gin.New()
@@ -660,7 +646,6 @@ func TestImportHandler_DetectImports(t *testing.T) {
}
func TestImportHandler_DetectImports_InvalidJSON(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
handler := handlers.NewImportHandler(db, "echo", "/tmp", "")
router := gin.New()
@@ -676,7 +661,6 @@ func TestImportHandler_DetectImports_InvalidJSON(t *testing.T) {
}
func TestImportHandler_UploadMulti(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
tmpDir := t.TempDir()
@@ -791,7 +775,6 @@ func TestImportHandler_UploadMulti(t *testing.T) {
// Additional tests for comprehensive coverage
func TestImportHandler_Cancel_MissingSessionUUID(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
handler := handlers.NewImportHandler(db, "echo", "/tmp", "")
router := gin.New()
@@ -810,7 +793,6 @@ func TestImportHandler_Cancel_MissingSessionUUID(t *testing.T) {
}
func TestImportHandler_Cancel_InvalidSessionUUID(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
handler := handlers.NewImportHandler(db, "echo", "/tmp", "")
router := gin.New()
@@ -829,7 +811,6 @@ func TestImportHandler_Cancel_InvalidSessionUUID(t *testing.T) {
}
func TestImportHandler_Commit_InvalidSessionUUID(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
handler := handlers.NewImportHandler(db, "echo", "/tmp", "")
router := gin.New()
@@ -884,7 +865,6 @@ func (m *mockProxyHostService) List() ([]models.ProxyHost, error) {
// TestImportHandler_Commit_UpdateFailure tests the error logging path when Update fails (line 676)
func TestImportHandler_Commit_UpdateFailure(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
// Create an existing host that we'll try to overwrite
@@ -959,7 +939,6 @@ func TestImportHandler_Commit_UpdateFailure(t *testing.T) {
// TestImportHandler_Commit_CreateFailure tests the error logging path when Create fails (line 682)
func TestImportHandler_Commit_CreateFailure(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
// Create an existing host to cause a duplicate error
@@ -1019,7 +998,6 @@ func TestImportHandler_Commit_CreateFailure(t *testing.T) {
// TestUpload_NormalizationSuccess tests the success path where NormalizeCaddyfile succeeds (line 271)
func TestUpload_NormalizationSuccess(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
// Use fake caddy script that handles both fmt and adapt
@@ -1065,7 +1043,6 @@ func TestUpload_NormalizationSuccess(t *testing.T) {
// TestUpload_NormalizationFallback tests the fallback path where NormalizeCaddyfile fails (line 269)
func TestUpload_NormalizationFallback(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
// Use fake caddy script that fails fmt but succeeds on adapt
@@ -1113,7 +1090,6 @@ func TestUpload_NormalizationFallback(t *testing.T) {
// TestCommit_OverwriteAction tests that overwrite preserves certificate ID
func TestCommit_OverwriteAction(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
// Create existing host with certificate association
@@ -1184,7 +1160,6 @@ func ptrToUint(v uint) *uint {
// TestCommit_RenameAction tests that rename appends suffix
func TestCommit_RenameAction(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
// Create existing host
@@ -1252,7 +1227,6 @@ func TestCommit_RenameAction(t *testing.T) {
}
func TestGetPreview_WithConflictDetails(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
tmpDir := t.TempDir()
mountPath := filepath.Join(tmpDir, "mounted.caddyfile")
@@ -1310,7 +1284,6 @@ func TestGetPreview_WithConflictDetails(t *testing.T) {
}
func TestSafeJoin_PathTraversalCases(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
tmpDir := t.TempDir()
handler := handlers.NewImportHandler(db, "echo", tmpDir, "")
@@ -1375,7 +1348,6 @@ func TestSafeJoin_PathTraversalCases(t *testing.T) {
}
func TestCommit_SkipAction(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
session := models.ImportSession{
@@ -1433,7 +1405,6 @@ func TestCommit_SkipAction(t *testing.T) {
}
func TestCommit_CustomNames(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
session := models.ImportSession{
@@ -1483,7 +1454,6 @@ func TestCommit_CustomNames(t *testing.T) {
}
func TestGetStatus_AlreadyCommittedMount(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
tmpDir := t.TempDir()
mountPath := filepath.Join(tmpDir, "mounted.caddyfile")
@@ -1519,7 +1489,6 @@ func TestGetStatus_AlreadyCommittedMount(t *testing.T) {
}
func TestImportHandler_Commit_SessionSaveWarning(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
// Create an import session with one host to create
@@ -1591,7 +1560,6 @@ func newTestImportHandler(t *testing.T, db *gorm.DB, importDir string, mountPath
// TestGetStatus_DatabaseError tests GetStatus when database query fails
func TestGetStatus_DatabaseError(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
handler := newTestImportHandler(t, db, t.TempDir(), "")
@@ -1613,7 +1581,6 @@ func TestGetStatus_DatabaseError(t *testing.T) {
// TestGetPreview_MountAlreadyCommitted tests GetPreview when mount is already committed with FUTURE timestamp
func TestGetPreview_MountAlreadyCommitted(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
// Create mount file
@@ -1648,7 +1615,6 @@ func TestGetPreview_MountAlreadyCommitted(t *testing.T) {
// TestUpload_MkdirAllFailure tests Upload when MkdirAll fails
func TestUpload_MkdirAllFailure(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportTestDB(t)
// Create a FILE where uploads directory should be (blocks MkdirAll)

View File

@@ -36,7 +36,6 @@ func setupTestDB(t *testing.T) *gorm.DB {
func TestRemoteServerHandler_List(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
db := setupTestDB(t)
// Create test server
@@ -71,7 +70,6 @@ func TestRemoteServerHandler_List(t *testing.T) {
func TestRemoteServerHandler_Create(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
db := setupTestDB(t)
ns := services.NewNotificationService(db, nil)
@@ -105,7 +103,6 @@ func TestRemoteServerHandler_Create(t *testing.T) {
func TestRemoteServerHandler_TestConnection(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
db := setupTestDB(t)
// Create test server
@@ -140,7 +137,6 @@ func TestRemoteServerHandler_TestConnection(t *testing.T) {
func TestRemoteServerHandler_Get(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
db := setupTestDB(t)
// Create test server
@@ -174,7 +170,6 @@ func TestRemoteServerHandler_Get(t *testing.T) {
func TestRemoteServerHandler_Update(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
db := setupTestDB(t)
// Create test server
@@ -220,7 +215,6 @@ func TestRemoteServerHandler_Update(t *testing.T) {
func TestRemoteServerHandler_Delete(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
db := setupTestDB(t)
// Create test server
@@ -256,7 +250,6 @@ func TestRemoteServerHandler_Delete(t *testing.T) {
func TestProxyHostHandler_List(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
db := setupTestDB(t)
// Create test proxy host
@@ -292,7 +285,6 @@ func TestProxyHostHandler_List(t *testing.T) {
func TestProxyHostHandler_Create(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
db := setupTestDB(t)
ns := services.NewNotificationService(db, nil)
@@ -328,7 +320,6 @@ func TestProxyHostHandler_Create(t *testing.T) {
func TestProxyHostHandler_PartialUpdate_DoesNotWipeFields(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
db := setupTestDB(t)
// Seed a proxy host
@@ -386,7 +377,6 @@ func TestProxyHostHandler_PartialUpdate_DoesNotWipeFields(t *testing.T) {
func TestHealthHandler(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
router := gin.New()
router.GET("/health", handlers.HealthHandler)
@@ -405,7 +395,6 @@ func TestHealthHandler(t *testing.T) {
func TestRemoteServerHandler_Errors(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
db := setupTestDB(t)
ns := services.NewNotificationService(db, nil)

View File

@@ -11,7 +11,6 @@ import (
)
func TestHealthHandler(t *testing.T) {
gin.SetMode(gin.TestMode)
r := gin.New()
r.GET("/health", HealthHandler)

View File

@@ -101,7 +101,6 @@ func (m *MockImporterService) ValidateCaddyBinary() error {
// TestUploadMulti_EmptyList covers the manual check for len(Files) == 0
func TestUploadMulti_EmptyList(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageTestDB(t)
@@ -135,7 +134,6 @@ func TestUploadMulti_EmptyList(t *testing.T) {
// TestUploadMulti_FileServerDetected covers the logic where parsable routes trigger a warning
// because they contain file_server but no valid reverse_proxy hosts
func TestUploadMulti_FileServerDetected(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageTestDB(t)
mockSvc := new(MockImporterService)
@@ -185,7 +183,6 @@ func TestUploadMulti_FileServerDetected(t *testing.T) {
// TestUploadMulti_NoSitesParsed covers successfull parsing but 0 result hosts
func TestUploadMulti_NoSitesParsed(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageTestDB(t)
mockSvc := new(MockImporterService)
@@ -227,7 +224,6 @@ func TestUploadMulti_NoSitesParsed(t *testing.T) {
}
func TestUpload_ImportsDetectedNoImportableHosts(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageTestDB(t)
mockSvc := new(MockImporterService)
@@ -263,7 +259,6 @@ func TestUpload_ImportsDetectedNoImportableHosts(t *testing.T) {
}
func TestUploadMulti_RequiresMainCaddyfile(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageTestDB(t)
h := NewImportHandler(db, "caddy", t.TempDir(), "")
@@ -291,7 +286,6 @@ func TestUploadMulti_RequiresMainCaddyfile(t *testing.T) {
}
func TestUploadMulti_RejectsEmptyFileContent(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageTestDB(t)
h := NewImportHandler(db, "caddy", t.TempDir(), "")
@@ -319,7 +313,6 @@ func TestUploadMulti_RejectsEmptyFileContent(t *testing.T) {
}
func TestCommitAndCancel_InvalidSessionUUID(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageTestDB(t)
tmpImport := t.TempDir()
@@ -352,7 +345,6 @@ func TestCommitAndCancel_InvalidSessionUUID(t *testing.T) {
}
func TestCancel_RemovesTransientUpload(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupImportCoverageTestDB(t)
tmpImport := t.TempDir()
@@ -381,7 +373,6 @@ func TestCancel_RemovesTransientUpload(t *testing.T) {
}
func TestUpload_ReadOnlyDBRespondsWithPermissionError(t *testing.T) {
gin.SetMode(gin.TestMode)
roDB := setupReadOnlyImportDB(t)
mockSvc := new(MockImporterService)
@@ -414,7 +405,6 @@ func TestUpload_ReadOnlyDBRespondsWithPermissionError(t *testing.T) {
}
func TestUploadMulti_ReadOnlyDBRespondsWithPermissionError(t *testing.T) {
gin.SetMode(gin.TestMode)
roDB := setupReadOnlyImportDB(t)
mockSvc := new(MockImporterService)
@@ -448,7 +438,6 @@ func TestUploadMulti_ReadOnlyDBRespondsWithPermissionError(t *testing.T) {
}
func TestCommit_ReadOnlyDBSaveRespondsWithPermissionError(t *testing.T) {
gin.SetMode(gin.TestMode)
roDB := setupReadOnlyImportDB(t)
mockSvc := new(MockImporterService)
@@ -483,7 +472,6 @@ func TestCommit_ReadOnlyDBSaveRespondsWithPermissionError(t *testing.T) {
}
func TestCancel_ReadOnlyDBSaveRespondsWithPermissionError(t *testing.T) {
gin.SetMode(gin.TestMode)
tmp := t.TempDir()
dbPath := filepath.Join(tmp, "cancel_ro.db")

View File

@@ -17,7 +17,6 @@ import (
)
func TestImportUploadSanitizesFilename(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
// set up in-memory DB for handler
db := OpenTestDB(t)

View File

@@ -136,7 +136,6 @@ func TestImportHandler_GetStatus_MountCommittedUnchanged(t *testing.T) {
handler, _, _ := setupTestHandler(t, tx)
handler.mountPath = mountPath
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -173,7 +172,6 @@ func TestImportHandler_GetStatus_MountModifiedAfterCommit(t *testing.T) {
handler, _, _ := setupTestHandler(t, tx)
handler.mountPath = mountPath
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -223,7 +221,6 @@ func TestUpload_NormalizationSuccess(t *testing.T) {
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -272,7 +269,6 @@ func TestUpload_NormalizationFailure(t *testing.T) {
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -313,7 +309,6 @@ func TestUpload_PathTraversalBlocked(t *testing.T) {
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -354,7 +349,6 @@ func TestUploadMulti_ArchiveExtraction(t *testing.T) {
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -400,7 +394,6 @@ func TestUploadMulti_ConflictDetection(t *testing.T) {
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -439,7 +432,6 @@ func TestCommit_TransientToImport(t *testing.T) {
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -484,7 +476,6 @@ func TestCommit_RollbackOnError(t *testing.T) {
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -517,7 +508,6 @@ func TestDetectImports_EmptyCaddyfile(t *testing.T) {
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -662,7 +652,6 @@ func TestImportHandler_Upload_NullByteInjection(t *testing.T) {
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -689,7 +678,6 @@ func TestImportHandler_DetectImports_MalformedFile(t *testing.T) {
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -835,7 +823,6 @@ func TestImportHandler_Upload_InvalidSessionPaths(t *testing.T) {
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -860,7 +847,6 @@ func TestImportHandler_Commit_InvalidSessionUUID_BranchCoverage(t *testing.T) {
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -893,7 +879,6 @@ func TestImportHandler_Upload_NoImportableHosts_WithImportsDetected(t *testing.T
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -925,7 +910,6 @@ func TestImportHandler_Upload_NoImportableHosts_NoImportsNoFileServer(t *testing
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -967,7 +951,6 @@ func TestImportHandler_Commit_OverwriteAndRenameFlows(t *testing.T) {
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -986,7 +969,6 @@ func TestImportHandler_Cancel_ValidationAndNotFound_BranchCoverage(t *testing.T)
testutil.WithTx(t, setupImportTestDB(t), func(tx *gorm.DB) {
handler, _, _ := setupTestHandler(t, tx)
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))
@@ -1021,7 +1003,6 @@ func TestImportHandler_Cancel_TransientUploadCancelled_BranchCoverage(t *testing
uploadPath := filepath.Join(uploadDir, sessionID+".caddyfile")
require.NoError(t, os.WriteFile(uploadPath, []byte("example.com { respond \"ok\" }"), 0o600))
gin.SetMode(gin.TestMode)
router := gin.New()
addAdminMiddleware(router)
handler.RegisterRoutes(router.Group("/api/v1"))

View File

@@ -40,7 +40,6 @@ func TestJSONImportHandler_RegisterRoutes(t *testing.T) {
db := setupJSONTestDB(t)
handler := NewJSONImportHandler(db)
gin.SetMode(gin.TestMode)
router := gin.New()
api := router.Group("/api/v1")
handler.RegisterRoutes(api)
@@ -60,7 +59,6 @@ func TestJSONImportHandler_Upload_CharonFormat(t *testing.T) {
db := setupJSONTestDB(t)
handler := NewJSONImportHandler(db)
gin.SetMode(gin.TestMode)
router := gin.New()
api := router.Group("/api/v1")
handler.RegisterRoutes(api)
@@ -119,7 +117,6 @@ func TestJSONImportHandler_Upload_NPMFormatFallback(t *testing.T) {
db := setupJSONTestDB(t)
handler := NewJSONImportHandler(db)
gin.SetMode(gin.TestMode)
router := gin.New()
api := router.Group("/api/v1")
handler.RegisterRoutes(api)
@@ -162,7 +159,6 @@ func TestJSONImportHandler_Upload_UnrecognizedFormat(t *testing.T) {
db := setupJSONTestDB(t)
handler := NewJSONImportHandler(db)
gin.SetMode(gin.TestMode)
router := gin.New()
api := router.Group("/api/v1")
handler.RegisterRoutes(api)
@@ -188,7 +184,6 @@ func TestJSONImportHandler_Upload_InvalidJSON(t *testing.T) {
db := setupJSONTestDB(t)
handler := NewJSONImportHandler(db)
gin.SetMode(gin.TestMode)
router := gin.New()
api := router.Group("/api/v1")
handler.RegisterRoutes(api)
@@ -208,7 +203,6 @@ func TestJSONImportHandler_Commit_CharonFormat(t *testing.T) {
db := setupJSONTestDB(t)
handler := NewJSONImportHandler(db)
gin.SetMode(gin.TestMode)
router := gin.New()
api := router.Group("/api/v1")
handler.RegisterRoutes(api)
@@ -277,7 +271,6 @@ func TestJSONImportHandler_Commit_NPMFormatFallback(t *testing.T) {
db := setupJSONTestDB(t)
handler := NewJSONImportHandler(db)
gin.SetMode(gin.TestMode)
router := gin.New()
api := router.Group("/api/v1")
handler.RegisterRoutes(api)
@@ -339,7 +332,6 @@ func TestJSONImportHandler_Commit_SessionNotFound(t *testing.T) {
db := setupJSONTestDB(t)
handler := NewJSONImportHandler(db)
gin.SetMode(gin.TestMode)
router := gin.New()
api := router.Group("/api/v1")
handler.RegisterRoutes(api)
@@ -370,7 +362,6 @@ func TestJSONImportHandler_Cancel(t *testing.T) {
db := setupJSONTestDB(t)
handler := NewJSONImportHandler(db)
gin.SetMode(gin.TestMode)
router := gin.New()
api := router.Group("/api/v1")
handler.RegisterRoutes(api)
@@ -459,7 +450,6 @@ func TestJSONImportHandler_ConflictDetection(t *testing.T) {
handler := NewJSONImportHandler(db)
gin.SetMode(gin.TestMode)
router := gin.New()
api := router.Group("/api/v1")
handler.RegisterRoutes(api)
@@ -501,7 +491,6 @@ func TestJSONImportHandler_Cancel_RequiresValidJSONBody(t *testing.T) {
db := setupJSONTestDB(t)
handler := NewJSONImportHandler(db)
gin.SetMode(gin.TestMode)
router := gin.New()
api := router.Group("/api/v1")
handler.RegisterRoutes(api)

View File

@@ -17,7 +17,6 @@ import (
)
func TestLogsHandler_Read_FilterBySearch(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
dataDir := filepath.Join(tmpDir, "data")
@@ -50,7 +49,6 @@ func TestLogsHandler_Read_FilterBySearch(t *testing.T) {
}
func TestLogsHandler_Read_FilterByHost(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
dataDir := filepath.Join(tmpDir, "data")
@@ -80,7 +78,6 @@ func TestLogsHandler_Read_FilterByHost(t *testing.T) {
}
func TestLogsHandler_Read_FilterByLevel(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
dataDir := filepath.Join(tmpDir, "data")
@@ -110,7 +107,6 @@ func TestLogsHandler_Read_FilterByLevel(t *testing.T) {
}
func TestLogsHandler_Read_FilterByStatus(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
dataDir := filepath.Join(tmpDir, "data")
@@ -140,7 +136,6 @@ func TestLogsHandler_Read_FilterByStatus(t *testing.T) {
}
func TestLogsHandler_Read_SortAsc(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
dataDir := filepath.Join(tmpDir, "data")
@@ -170,7 +165,6 @@ func TestLogsHandler_Read_SortAsc(t *testing.T) {
}
func TestLogsHandler_List_DirectoryIsFile(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
dataDir := filepath.Join(tmpDir, "data")
@@ -197,7 +191,6 @@ func TestLogsHandler_List_DirectoryIsFile(t *testing.T) {
}
func TestLogsHandler_Download_TempFileError(t *testing.T) {
gin.SetMode(gin.TestMode)
tmpDir := t.TempDir()
dataDir := filepath.Join(tmpDir, "data")

View File

@@ -71,7 +71,6 @@ func TestUpgraderCheckOrigin(t *testing.T) {
}
func TestLogsWebSocketHandler_DeprecatedWrapperUpgradeFailure(t *testing.T) {
gin.SetMode(gin.TestMode)
charonlogger.Init(false, io.Discard)
r := gin.New()
@@ -85,7 +84,6 @@ func TestLogsWebSocketHandler_DeprecatedWrapperUpgradeFailure(t *testing.T) {
}
func TestLogsWSHandler_StreamWithFiltersAndTracker(t *testing.T) {
gin.SetMode(gin.TestMode)
charonlogger.Init(false, io.Discard)
tracker := services.NewWebSocketTracker()

Some files were not shown because too many files have changed in this diff Show More