Compare commits
89 Commits
v0.26.1
...
feature/be
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3fe592926d | ||
|
|
5bcf3069c6 | ||
|
|
6546130518 | ||
|
|
07108cfa8d | ||
|
|
de945c358b | ||
|
|
e5c7b85f82 | ||
|
|
6e06cc3396 | ||
|
|
7e3b5b13b4 | ||
|
|
91ba53476c | ||
|
|
442425a4a5 | ||
|
|
71fe278e33 | ||
|
|
468af25887 | ||
|
|
d437de1ccf | ||
|
|
8c56f40131 | ||
|
|
2bf4f869ab | ||
|
|
dd698afa7e | ||
|
|
5db3f7046c | ||
|
|
b59a788101 | ||
|
|
e7460f7e50 | ||
|
|
1e1727faa1 | ||
|
|
0c87c350e5 | ||
|
|
03101012b9 | ||
|
|
5f855ea779 | ||
|
|
a74d10d138 | ||
|
|
515a95aaf1 | ||
|
|
1bcb4de6f8 | ||
|
|
07764db43e | ||
|
|
54f32c03d0 | ||
|
|
c983250327 | ||
|
|
2308f372d7 | ||
|
|
d68001b949 | ||
|
|
a599623ea9 | ||
|
|
96f0be19a4 | ||
|
|
c1470eaac0 | ||
|
|
a8cd4bf34c | ||
|
|
02911109ef | ||
|
|
2bad9fec53 | ||
|
|
54ce6f677c | ||
|
|
ad7704c1df | ||
|
|
330ccae82f | ||
|
|
0a5bb296a9 | ||
|
|
437a35bd47 | ||
|
|
612d3655fa | ||
|
|
38cdc5d9d0 | ||
|
|
816124634b | ||
|
|
2b2f3c876b | ||
|
|
20f2624653 | ||
|
|
e8724c5edc | ||
|
|
2c284bdd49 | ||
|
|
db1e77ceb3 | ||
|
|
df5e69236a | ||
|
|
a3259b042d | ||
|
|
f5e7c2bdfc | ||
|
|
0859ab31ab | ||
|
|
c02219cc92 | ||
|
|
d73b3aee5c | ||
|
|
80eb91e9a1 | ||
|
|
aa6c751007 | ||
|
|
1af786e7c8 | ||
|
|
c46c1976a2 | ||
|
|
3b3ea83ecd | ||
|
|
5980a8081c | ||
|
|
55f64f8050 | ||
|
|
983ae34147 | ||
|
|
4232c0a8ee | ||
|
|
402a8b3105 | ||
|
|
f46bb838ca | ||
|
|
3d0179a119 | ||
|
|
557b33dc73 | ||
|
|
2a1652d0b1 | ||
|
|
f0fdf9b752 | ||
|
|
973efd6412 | ||
|
|
028342c63a | ||
|
|
eb9b907ba3 | ||
|
|
aee0eeef82 | ||
|
|
c977cf6190 | ||
|
|
28bc73bb1a | ||
|
|
19719693b0 | ||
|
|
a243066691 | ||
|
|
741a59c333 | ||
|
|
5642a37c44 | ||
|
|
1726a19cb6 | ||
|
|
40090cda23 | ||
|
|
9945fac150 | ||
|
|
abf88ab4cb | ||
|
|
98c720987d | ||
|
|
1bd7eab223 | ||
|
|
080e17d85a | ||
|
|
0a3b64ba5c |
@@ -303,6 +303,19 @@ ACQUIS_EOF
|
||||
# Also handle case where it might be without trailing slash
|
||||
sed -i 's|log_dir: /var/log$|log_dir: /var/log/crowdsec|g' "$CS_CONFIG_DIR/config.yaml"
|
||||
|
||||
# Redirect CrowdSec LAPI database to persistent volume
|
||||
# Default path /var/lib/crowdsec/data/crowdsec.db is ephemeral (not volume-mounted),
|
||||
# so it is destroyed on every container rebuild. The bouncer API key (stored on the
|
||||
# persistent volume at /app/data/crowdsec/) survives rebuilds but the LAPI database
|
||||
# that validates it does not — causing perpetual key rejection.
|
||||
# Redirecting db_path to the volume-mounted CS_DATA_DIR fixes this.
|
||||
sed -i "s|db_path: /var/lib/crowdsec/data/crowdsec.db|db_path: ${CS_DATA_DIR}/crowdsec.db|g" "$CS_CONFIG_DIR/config.yaml"
|
||||
if grep -q "db_path:.*${CS_DATA_DIR}" "$CS_CONFIG_DIR/config.yaml"; then
|
||||
echo "✓ CrowdSec LAPI database redirected to persistent volume: ${CS_DATA_DIR}/crowdsec.db"
|
||||
else
|
||||
echo "⚠️ WARNING: Could not verify LAPI db_path redirect — bouncer keys may not survive rebuilds"
|
||||
fi
|
||||
|
||||
# Verify LAPI configuration was applied correctly
|
||||
if grep -q "listen_uri:.*:8085" "$CS_CONFIG_DIR/config.yaml"; then
|
||||
echo "✓ CrowdSec LAPI configured for port 8085"
|
||||
|
||||
6
.github/renovate.json
vendored
6
.github/renovate.json
vendored
@@ -324,6 +324,12 @@
|
||||
"matchDatasources": ["go"],
|
||||
"matchPackageNames": ["github.com/oschwald/geoip2-golang/v2"],
|
||||
"sourceUrl": "https://github.com/oschwald/geoip2-golang"
|
||||
},
|
||||
{
|
||||
"description": "Fix Renovate lookup for google/uuid",
|
||||
"matchDatasources": ["go"],
|
||||
"matchPackageNames": ["github.com/google/uuid"],
|
||||
"sourceUrl": "https://github.com/google/uuid"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
2
.github/workflows/benchmark.yml
vendored
2
.github/workflows/benchmark.yml
vendored
@@ -52,7 +52,7 @@ jobs:
|
||||
# This avoids gh-pages branch errors and permission issues on fork PRs
|
||||
if: github.event.workflow_run.event == 'push' && github.event.workflow_run.head_branch == 'main'
|
||||
# Security: Pinned to full SHA for supply chain security
|
||||
uses: benchmark-action/github-action-benchmark@4e0b38bc48375986542b13c0d8976b7b80c60c00 # v1
|
||||
uses: benchmark-action/github-action-benchmark@a60cea5bc7b49e15c1f58f411161f99e0df48372 # v1.22.0
|
||||
with:
|
||||
name: Go Benchmark
|
||||
tool: 'go'
|
||||
|
||||
2
.github/workflows/codecov-upload.yml
vendored
2
.github/workflows/codecov-upload.yml
vendored
@@ -166,7 +166,7 @@ jobs:
|
||||
ref: ${{ github.sha }}
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
|
||||
8
.github/workflows/docker-build.yml
vendored
8
.github/workflows/docker-build.yml
vendored
@@ -541,7 +541,7 @@ jobs:
|
||||
format: 'table'
|
||||
severity: 'CRITICAL,HIGH'
|
||||
exit-code: '0'
|
||||
version: 'v0.69.3'
|
||||
version: 'v0.70.0'
|
||||
continue-on-error: true
|
||||
|
||||
- name: Run Trivy vulnerability scanner (SARIF)
|
||||
@@ -553,7 +553,7 @@ jobs:
|
||||
format: 'sarif'
|
||||
output: 'trivy-results.sarif'
|
||||
severity: 'CRITICAL,HIGH'
|
||||
version: 'v0.69.3'
|
||||
version: 'v0.70.0'
|
||||
continue-on-error: true
|
||||
|
||||
- name: Check Trivy SARIF exists
|
||||
@@ -701,7 +701,7 @@ jobs:
|
||||
format: 'table'
|
||||
severity: 'CRITICAL,HIGH'
|
||||
exit-code: '0'
|
||||
version: 'v0.69.3'
|
||||
version: 'v0.70.0'
|
||||
|
||||
- name: Run Trivy scan on PR image (SARIF - blocking)
|
||||
id: trivy-scan
|
||||
@@ -712,7 +712,7 @@ jobs:
|
||||
output: 'trivy-pr-results.sarif'
|
||||
severity: 'CRITICAL,HIGH'
|
||||
exit-code: '1' # Intended to block, but continued on error for now
|
||||
version: 'v0.69.3'
|
||||
version: 'v0.70.0'
|
||||
continue-on-error: true
|
||||
|
||||
- name: Check Trivy PR SARIF exists
|
||||
|
||||
2
.github/workflows/docs-to-issues.yml
vendored
2
.github/workflows/docs-to-issues.yml
vendored
@@ -44,7 +44,7 @@ jobs:
|
||||
ref: ${{ github.event.workflow_run.head_sha || github.sha }}
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
|
||||
|
||||
2
.github/workflows/docs.yml
vendored
2
.github/workflows/docs.yml
vendored
@@ -38,7 +38,7 @@ jobs:
|
||||
|
||||
# Step 2: Set up Node.js (for building any JS-based doc tools)
|
||||
- name: 🔧 Set up Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
|
||||
|
||||
17
.github/workflows/e2e-tests-split.yml
vendored
17
.github/workflows/e2e-tests-split.yml
vendored
@@ -151,7 +151,7 @@ jobs:
|
||||
|
||||
- name: Set up Node.js
|
||||
if: steps.resolve-image.outputs.image_source == 'build'
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
@@ -225,7 +225,7 @@ jobs:
|
||||
ref: ${{ github.sha }}
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
@@ -427,7 +427,7 @@ jobs:
|
||||
ref: ${{ github.sha }}
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
@@ -637,7 +637,7 @@ jobs:
|
||||
ref: ${{ github.sha }}
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
@@ -859,7 +859,7 @@ jobs:
|
||||
ref: ${{ github.sha }}
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
@@ -980,6 +980,7 @@ jobs:
|
||||
--project=chromium \
|
||||
--shard=${{ matrix.shard }}/${{ matrix.total-shards }} \
|
||||
--output=playwright-output/chromium-shard-${{ matrix.shard }} \
|
||||
tests/a11y \
|
||||
tests/core \
|
||||
tests/dns-provider-crud.spec.ts \
|
||||
tests/dns-provider-types.spec.ts \
|
||||
@@ -1096,7 +1097,7 @@ jobs:
|
||||
ref: ${{ github.sha }}
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
@@ -1225,6 +1226,7 @@ jobs:
|
||||
--project=firefox \
|
||||
--shard=${{ matrix.shard }}/${{ matrix.total-shards }} \
|
||||
--output=playwright-output/firefox-shard-${{ matrix.shard }} \
|
||||
tests/a11y \
|
||||
tests/core \
|
||||
tests/dns-provider-crud.spec.ts \
|
||||
tests/dns-provider-types.spec.ts \
|
||||
@@ -1341,7 +1343,7 @@ jobs:
|
||||
ref: ${{ github.sha }}
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
@@ -1470,6 +1472,7 @@ jobs:
|
||||
--project=webkit \
|
||||
--shard=${{ matrix.shard }}/${{ matrix.total-shards }} \
|
||||
--output=playwright-output/webkit-shard-${{ matrix.shard }} \
|
||||
tests/a11y \
|
||||
tests/core \
|
||||
tests/dns-provider-crud.spec.ts \
|
||||
tests/dns-provider-types.spec.ts \
|
||||
|
||||
2
.github/workflows/nightly-build.yml
vendored
2
.github/workflows/nightly-build.yml
vendored
@@ -464,7 +464,7 @@ jobs:
|
||||
image-ref: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:nightly@${{ needs.build-and-push-nightly.outputs.digest }}
|
||||
format: 'sarif'
|
||||
output: 'trivy-nightly.sarif'
|
||||
version: 'v0.69.3'
|
||||
version: 'v0.70.0'
|
||||
trivyignores: '.trivyignore'
|
||||
|
||||
- name: Upload Trivy results
|
||||
|
||||
11
.github/workflows/propagate-changes.yml
vendored
11
.github/workflows/propagate-changes.yml
vendored
@@ -28,7 +28,7 @@ jobs:
|
||||
(github.event.workflow_run.head_branch == 'main' || github.event.workflow_run.head_branch == 'development')
|
||||
steps:
|
||||
- name: Set up Node (for github-script)
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
|
||||
@@ -37,6 +37,8 @@ jobs:
|
||||
env:
|
||||
CURRENT_BRANCH: ${{ github.event.workflow_run.head_branch || github.ref_name }}
|
||||
CURRENT_SHA: ${{ github.event.workflow_run.head_sha || github.sha }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CHARON_TOKEN: ${{ secrets.CHARON_TOKEN }}
|
||||
with:
|
||||
script: |
|
||||
const currentBranch = process.env.CURRENT_BRANCH || context.ref.replace('refs/heads/', '');
|
||||
@@ -133,7 +135,9 @@ jobs:
|
||||
|
||||
const sensitive = files.some(fn => configPaths.some(sp => fn.startsWith(sp) || fn.includes(sp)));
|
||||
if (sensitive) {
|
||||
core.info(`${src} -> ${base} contains sensitive changes (${files.join(', ')}). Skipping automatic propagation.`);
|
||||
const preview = files.slice(0, 25).join(', ');
|
||||
const suffix = files.length > 25 ? ` …(+${files.length - 25} more)` : '';
|
||||
core.info(`${src} -> ${base} contains sensitive changes (${preview}${suffix}). Skipping automatic propagation.`);
|
||||
return;
|
||||
}
|
||||
} catch (error) {
|
||||
@@ -203,6 +207,3 @@ jobs:
|
||||
await createPR('development', targetBranch);
|
||||
}
|
||||
}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CHARON_TOKEN: ${{ secrets.CHARON_TOKEN }}
|
||||
|
||||
2
.github/workflows/quality-checks.yml
vendored
2
.github/workflows/quality-checks.yml
vendored
@@ -262,7 +262,7 @@ jobs:
|
||||
bash "scripts/repo_health_check.sh"
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6.4.0
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
|
||||
6
.github/workflows/release-goreleaser.yml
vendored
6
.github/workflows/release-goreleaser.yml
vendored
@@ -52,7 +52,7 @@ jobs:
|
||||
cache-dependency-path: backend/go.sum
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
|
||||
uses: actions/setup-node@48b55a011bda9f5d6aeb4c2d9c7362e8dae4041e # v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
|
||||
@@ -67,7 +67,7 @@ jobs:
|
||||
|
||||
- name: Install Cross-Compilation Tools (Zig)
|
||||
# Security: Pinned to full SHA for supply chain security
|
||||
uses: goto-bus-stop/setup-zig@abea47f85e598557f500fa1fd2ab7464fcb39406 # v2
|
||||
uses: mlugg/setup-zig@d1434d08867e3ee9daa34448df10607b98908d29 # v2.2.1
|
||||
with:
|
||||
version: 0.13.0
|
||||
|
||||
@@ -75,7 +75,7 @@ jobs:
|
||||
|
||||
|
||||
- name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@ec59f474b9834571250b370d4735c50f8e2d1e29 # v7
|
||||
uses: goreleaser/goreleaser-action@e24998b8b67b290c2fa8b7c14fcfa7de2c5c9b8c # v7
|
||||
with:
|
||||
distribution: goreleaser
|
||||
version: '~> v2.5'
|
||||
|
||||
2
.github/workflows/renovate.yml
vendored
2
.github/workflows/renovate.yml
vendored
@@ -33,7 +33,7 @@ jobs:
|
||||
go-version: ${{ env.GO_VERSION }}
|
||||
|
||||
- name: Run Renovate
|
||||
uses: renovatebot/github-action@eb932558ad942cccfd8211cf535f17ff183a9f74 # v46.1.9
|
||||
uses: renovatebot/github-action@83ec54fee49ab67d9cd201084c1ff325b4b462e4 # v46.1.10
|
||||
with:
|
||||
configurationFile: .github/renovate.json
|
||||
token: ${{ secrets.RENOVATE_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
|
||||
@@ -102,7 +102,7 @@ jobs:
|
||||
format: 'table'
|
||||
severity: 'CRITICAL,HIGH'
|
||||
exit-code: '1' # Fail workflow if vulnerabilities found
|
||||
version: 'v0.69.3'
|
||||
version: 'v0.70.0'
|
||||
continue-on-error: true
|
||||
|
||||
- name: Run Trivy vulnerability scanner (SARIF)
|
||||
@@ -113,7 +113,7 @@ jobs:
|
||||
format: 'sarif'
|
||||
output: 'trivy-weekly-results.sarif'
|
||||
severity: 'CRITICAL,HIGH,MEDIUM'
|
||||
version: 'v0.69.3'
|
||||
version: 'v0.70.0'
|
||||
|
||||
- name: Upload Trivy results to GitHub Security
|
||||
uses: github/codeql-action/upload-sarif@95e58e9a2cdfd71adc6e0353d5c52f41a045d225 # v4.35.2
|
||||
@@ -127,7 +127,7 @@ jobs:
|
||||
format: 'json'
|
||||
output: 'trivy-weekly-results.json'
|
||||
severity: 'CRITICAL,HIGH,MEDIUM,LOW'
|
||||
version: 'v0.69.3'
|
||||
version: 'v0.70.0'
|
||||
|
||||
- name: Upload Trivy JSON results
|
||||
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7
|
||||
|
||||
8
.gitignore
vendored
8
.gitignore
vendored
@@ -316,6 +316,10 @@ docs/reports/codecove_patch_report.md
|
||||
vuln-results.json
|
||||
test_output.txt
|
||||
coverage_results.txt
|
||||
new-results.json
|
||||
.gitignore
|
||||
final-results.json
|
||||
new-results.json
|
||||
scan_output.json
|
||||
coverage_output.txt
|
||||
frontend/lint_output.txt
|
||||
lefthook_out.txt
|
||||
backend/test_out.txt
|
||||
|
||||
136
.grype.yaml
136
.grype.yaml
@@ -203,45 +203,47 @@ ignore:
|
||||
# GHSA-6g7g-w4f8-9c9x: buger/jsonparser Delete panic on malformed JSON (DoS)
|
||||
# Severity: HIGH (CVSS 7.5)
|
||||
# Package: github.com/buger/jsonparser v1.1.1 (embedded in /usr/local/bin/crowdsec and /usr/local/bin/cscli)
|
||||
# Status: NO upstream fix available — OSV marks "Last affected: v1.1.1" with no Fixed event
|
||||
# Status: UPSTREAM FIX EXISTS (v1.1.2 released 2026-03-20) — awaiting CrowdSec to update dependency
|
||||
# NOTE: As of 2026-04-20, grype v0.111.0 with fresh DB no longer flags this finding in the image.
|
||||
# This suppression is retained as a safety net in case future DB updates re-surface it.
|
||||
#
|
||||
# Vulnerability Details:
|
||||
# - The Delete function fails to validate offsets on malformed JSON input, producing a
|
||||
# negative slice index and a runtime panic — denial of service (CWE-125).
|
||||
# - CVSSv3: AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H
|
||||
#
|
||||
# Root Cause (Third-Party Binary + No Upstream Fix):
|
||||
# Root Cause (Third-Party Binary — Fix Exists Upstream, Not Yet in CrowdSec):
|
||||
# - Charon does not use buger/jsonparser directly. It is compiled into CrowdSec binaries.
|
||||
# - The buger/jsonparser repository has no released fix as of 2026-03-19 (GitHub issue #275
|
||||
# and golang/vulndb #4514 are both open).
|
||||
# - Fix path: once buger/jsonparser releases a patched version and CrowdSec updates their
|
||||
# dependency, rebuild the Docker image and remove this suppression.
|
||||
# - buger/jsonparser released v1.1.2 on 2026-03-20 fixing issue #275.
|
||||
# - CrowdSec has not yet released a version built with buger/jsonparser v1.1.2.
|
||||
# - Fix path: once CrowdSec updates their dependency and rebuilds, rebuild the Docker image
|
||||
# and remove this suppression.
|
||||
#
|
||||
# Risk Assessment: ACCEPTED (Limited exploitability + no upstream fix)
|
||||
# Risk Assessment: ACCEPTED (Limited exploitability; fix exists upstream but not yet in CrowdSec)
|
||||
# - The DoS vector requires passing malformed JSON to the vulnerable Delete function within
|
||||
# CrowdSec's internal processing pipeline; this is not a direct attack surface in Charon.
|
||||
# - CrowdSec's exposed surface is its HTTP API (not raw JSON stream parsing via this path).
|
||||
#
|
||||
# Mitigation (active while suppression is in effect):
|
||||
# - Monitor buger/jsonparser: https://github.com/buger/jsonparser/issues/275
|
||||
# - Monitor CrowdSec releases: https://github.com/crowdsecurity/crowdsec/releases
|
||||
# - Monitor CrowdSec releases for a build using buger/jsonparser >= v1.1.2.
|
||||
# - CrowdSec releases: https://github.com/crowdsecurity/crowdsec/releases
|
||||
# - Weekly CI security rebuild flags the moment a fixed image ships.
|
||||
#
|
||||
# Review:
|
||||
# - Reviewed 2026-03-19 (initial suppression): no upstream fix exists. Set 30-day review.
|
||||
# - Extended 2026-04-04: no upstream fix available. buger/jsonparser issue #275 still open.
|
||||
# - Next review: 2026-05-19. Remove suppression once buger/jsonparser ships a fix and
|
||||
# CrowdSec updates their dependency.
|
||||
# - Reviewed 2026-03-19 (initial suppression): no upstream fix. Set 30-day review.
|
||||
# - Extended 2026-04-04: no upstream fix. buger/jsonparser issue #275 still open.
|
||||
# - Updated 2026-04-20: buger/jsonparser v1.1.2 released 2026-03-20. CrowdSec not yet updated.
|
||||
# Grype v0.111.0 with fresh DB (2026-04-20) no longer flags this finding. Suppression retained
|
||||
# as a safety net. Next review: 2026-05-19 — remove if CrowdSec ships with v1.1.2+.
|
||||
#
|
||||
# Removal Criteria:
|
||||
# - buger/jsonparser releases a patched version (v1.1.2 or higher)
|
||||
# - CrowdSec releases a version built with the patched jsonparser
|
||||
# - CrowdSec releases a version built with buger/jsonparser >= v1.1.2
|
||||
# - Rebuild Docker image, run security-scan-docker-image, confirm finding is resolved
|
||||
# - Remove this entry and the corresponding .trivyignore entry simultaneously
|
||||
#
|
||||
# References:
|
||||
# - GHSA-6g7g-w4f8-9c9x: https://github.com/advisories/GHSA-6g7g-w4f8-9c9x
|
||||
# - Upstream issue: https://github.com/buger/jsonparser/issues/275
|
||||
# - Upstream fix: https://github.com/buger/jsonparser/releases/tag/v1.1.2
|
||||
# - golang/vulndb: https://github.com/golang/vulndb/issues/4514
|
||||
# - CrowdSec releases: https://github.com/crowdsecurity/crowdsec/releases
|
||||
- vulnerability: GHSA-6g7g-w4f8-9c9x
|
||||
@@ -251,21 +253,20 @@ ignore:
|
||||
type: go-module
|
||||
reason: |
|
||||
HIGH — DoS panic via malformed JSON in buger/jsonparser v1.1.1 embedded in CrowdSec binaries.
|
||||
No upstream fix: buger/jsonparser has no released patch as of 2026-03-19 (issue #275 open).
|
||||
Charon does not use this package directly; the vector requires reaching CrowdSec's internal
|
||||
JSON processing pipeline. Risk accepted; no remediation path until upstream ships a fix.
|
||||
Reviewed 2026-03-19: no patched release available.
|
||||
expiry: "2026-05-19" # Extended 2026-04-04: no upstream fix. Next review 2026-05-19.
|
||||
Upstream fix: buger/jsonparser v1.1.2 released 2026-03-20; CrowdSec has not yet updated their
|
||||
dependency. Grype no longer flags this as of 2026-04-20 (fresh DB). Suppression retained as
|
||||
safety net pending CrowdSec update. Charon does not use this package directly.
|
||||
Updated 2026-04-20: fix v1.1.2 exists upstream; awaiting CrowdSec dependency update.
|
||||
expiry: "2026-05-19" # Review 2026-05-19: remove if CrowdSec ships with buger/jsonparser >= v1.1.2.
|
||||
|
||||
# Action items when this suppression expires:
|
||||
# 1. Check buger/jsonparser releases: https://github.com/buger/jsonparser/releases
|
||||
# and issue #275: https://github.com/buger/jsonparser/issues/275
|
||||
# 2. If a fix has shipped AND CrowdSec has updated their dependency:
|
||||
# a. Rebuild Docker image and run local security-scan-docker-image
|
||||
# b. Remove this suppression entry and the corresponding .trivyignore entry
|
||||
# 3. If no fix yet: Extend expiry by 30 days and update the review comment above
|
||||
# 4. If extended 3+ times with no progress: Consider opening an issue upstream or
|
||||
# evaluating whether CrowdSec can replace buger/jsonparser with a safe alternative
|
||||
# 1. Check if CrowdSec has released a version with buger/jsonparser >= v1.1.2:
|
||||
# https://github.com/crowdsecurity/crowdsec/releases
|
||||
# 2. If CrowdSec has updated: rebuild Docker image, run security-scan-docker-image,
|
||||
# and remove this suppression entry and the corresponding .trivyignore entry
|
||||
# 3. If grype still does not flag it with fresh DB: consider removing the suppression as
|
||||
# it may no longer be necessary
|
||||
# 4. If no CrowdSec update yet: Extend expiry by 30 days
|
||||
|
||||
# GHSA-jqcq-xjh3-6g23: pgproto3/v2 DataRow.Decode panic on negative field length (DoS)
|
||||
# Severity: HIGH (CVSS 7.5)
|
||||
@@ -482,73 +483,6 @@ ignore:
|
||||
# 4. If not yet migrated: Extend expiry by 30 days and update the review comment above
|
||||
# 5. If extended 3+ times: Open an upstream issue on crowdsecurity/crowdsec requesting pgx/v5 migration
|
||||
|
||||
# GHSA-x744-4wpc-v9h2 / CVE-2026-34040: Docker AuthZ plugin bypass via oversized request body
|
||||
# Severity: HIGH (CVSS 8.8)
|
||||
# CVSS Vector: CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:C/C:H/I:H/A:H
|
||||
# CWE: CWE-863 (Incorrect Authorization)
|
||||
# Package: github.com/docker/docker v28.5.2+incompatible (go-module)
|
||||
# Status: Fixed in moby/moby v29.3.1 — NO fix available for docker/docker import path
|
||||
#
|
||||
# Vulnerability Details:
|
||||
# - Incomplete fix for Docker AuthZ plugin bypass (CVE-2024-41110). An attacker can send an
|
||||
# oversized request body to the Docker daemon, causing it to forward the request to the AuthZ
|
||||
# plugin without the body, allowing unauthorized approvals.
|
||||
#
|
||||
# Root Cause (No Fix Available for Import Path):
|
||||
# - The fix exists in moby/moby v29.3.1, but not for the docker/docker import path that Charon uses.
|
||||
# - Migration to moby/moby/v2 is not practical: currently beta with breaking changes.
|
||||
# - Fix path: once docker/docker publishes a patched version or moby/moby/v2 stabilizes,
|
||||
# update the dependency and remove this suppression.
|
||||
#
|
||||
# Risk Assessment: ACCEPTED (Not exploitable in Charon context)
|
||||
# - Charon uses the Docker client SDK only (list containers). The vulnerability is server-side
|
||||
# in the Docker daemon's AuthZ plugin handler.
|
||||
# - Charon does not run a Docker daemon or use AuthZ plugins.
|
||||
# - The attack vector requires local access to the Docker daemon socket with AuthZ plugins enabled.
|
||||
#
|
||||
# Mitigation (active while suppression is in effect):
|
||||
# - Monitor docker/docker releases: https://github.com/moby/moby/releases
|
||||
# - Monitor moby/moby/v2 stabilization: https://github.com/moby/moby
|
||||
# - Weekly CI security rebuild flags the moment a fixed version ships.
|
||||
#
|
||||
# Review:
|
||||
# - Reviewed 2026-03-30 (initial suppression): no fix for docker/docker import path. Set 30-day review.
|
||||
# - Next review: 2026-04-30. Remove suppression once a fix is available for the docker/docker import path.
|
||||
#
|
||||
# Removal Criteria:
|
||||
# - docker/docker publishes a patched version OR moby/moby/v2 stabilizes and migration is feasible
|
||||
# - Update dependency, rebuild, run security-scan-docker-image, confirm finding is resolved
|
||||
# - Remove this entry, the GHSA-pxq6-2prw-chj9 entry, and the corresponding .trivyignore entries simultaneously
|
||||
#
|
||||
# References:
|
||||
# - GHSA-x744-4wpc-v9h2: https://github.com/advisories/GHSA-x744-4wpc-v9h2
|
||||
# - CVE-2026-34040: https://nvd.nist.gov/vuln/detail/CVE-2026-34040
|
||||
# - CVE-2024-41110 (original): https://nvd.nist.gov/vuln/detail/CVE-2024-41110
|
||||
# - moby/moby releases: https://github.com/moby/moby/releases
|
||||
- vulnerability: GHSA-x744-4wpc-v9h2
|
||||
package:
|
||||
name: github.com/docker/docker
|
||||
version: "v28.5.2+incompatible"
|
||||
type: go-module
|
||||
reason: |
|
||||
HIGH — Docker AuthZ plugin bypass via oversized request body in docker/docker v28.5.2+incompatible.
|
||||
Incomplete fix for CVE-2024-41110. Fixed in moby/moby v29.3.1 but no fix for docker/docker import path.
|
||||
Charon uses Docker client SDK only (list containers); the vulnerability is server-side in the Docker
|
||||
daemon's AuthZ plugin handler. Charon does not run a Docker daemon or use AuthZ plugins.
|
||||
Risk accepted; no remediation path until docker/docker publishes a fix or moby/moby/v2 stabilizes.
|
||||
Reviewed 2026-03-30: no patched release available for docker/docker import path.
|
||||
expiry: "2026-04-30" # 30-day review: no fix for docker/docker import path. Extend in 30-day increments with documented justification.
|
||||
|
||||
# Action items when this suppression expires:
|
||||
# 1. Check docker/docker and moby/moby releases: https://github.com/moby/moby/releases
|
||||
# 2. Check if moby/moby/v2 has stabilized: https://github.com/moby/moby
|
||||
# 3. If a fix has shipped for docker/docker import path OR moby/moby/v2 is stable:
|
||||
# a. Update the dependency and rebuild Docker image
|
||||
# b. Run local security-scan-docker-image and confirm finding is resolved
|
||||
# c. Remove this entry, GHSA-pxq6-2prw-chj9 entry, and all corresponding .trivyignore entries
|
||||
# 4. If no fix yet: Extend expiry by 30 days and update the review comment above
|
||||
# 5. If extended 3+ times: Open an issue to track moby/moby/v2 migration feasibility
|
||||
|
||||
# GHSA-pxq6-2prw-chj9 / CVE-2026-33997: Moby off-by-one error in plugin privilege validation
|
||||
# Severity: MEDIUM (CVSS 6.8)
|
||||
# Package: github.com/docker/docker v28.5.2+incompatible (go-module)
|
||||
@@ -559,9 +493,9 @@ ignore:
|
||||
# via crafted plugin configurations.
|
||||
#
|
||||
# Root Cause (No Fix Available for Import Path):
|
||||
# - Same import path issue as GHSA-x744-4wpc-v9h2. The fix exists in moby/moby v29.3.1 but not
|
||||
# - Same import path issue as CVE-2026-34040. The fix exists in moby/moby v29.3.1 but not
|
||||
# for the docker/docker import path that Charon uses.
|
||||
# - Fix path: same as GHSA-x744-4wpc-v9h2 — wait for docker/docker patch or moby/moby/v2 stabilization.
|
||||
# - Fix path: same dependency migration pattern as CVE-2026-34040 (if needed) or upstream fix.
|
||||
#
|
||||
# Risk Assessment: ACCEPTED (Not exploitable in Charon context)
|
||||
# - Charon uses the Docker client SDK only (list containers). The vulnerability is in Docker's
|
||||
@@ -577,9 +511,9 @@ ignore:
|
||||
# - Next review: 2026-04-30. Remove suppression once a fix is available for the docker/docker import path.
|
||||
#
|
||||
# Removal Criteria:
|
||||
# - Same as GHSA-x744-4wpc-v9h2: docker/docker publishes a patched version OR moby/moby/v2 stabilizes
|
||||
# - docker/docker publishes a patched version OR moby/moby/v2 stabilizes
|
||||
# - Update dependency, rebuild, run security-scan-docker-image, confirm finding is resolved
|
||||
# - Remove this entry, GHSA-x744-4wpc-v9h2 entry, and all corresponding .trivyignore entries simultaneously
|
||||
# - Remove this entry and all corresponding .trivyignore entries simultaneously
|
||||
#
|
||||
# References:
|
||||
# - GHSA-pxq6-2prw-chj9: https://github.com/advisories/GHSA-pxq6-2prw-chj9
|
||||
@@ -605,7 +539,7 @@ ignore:
|
||||
# 3. If a fix has shipped for docker/docker import path OR moby/moby/v2 is stable:
|
||||
# a. Update the dependency and rebuild Docker image
|
||||
# b. Run local security-scan-docker-image and confirm finding is resolved
|
||||
# c. Remove this entry, GHSA-x744-4wpc-v9h2 entry, and all corresponding .trivyignore entries
|
||||
# c. Remove this entry and all corresponding .trivyignore entries
|
||||
# 4. If no fix yet: Extend expiry by 30 days and update the review comment above
|
||||
# 5. If extended 3+ times: Open an issue to track moby/moby/v2 migration feasibility
|
||||
|
||||
|
||||
17
.trivyignore
17
.trivyignore
@@ -87,23 +87,6 @@ GHSA-x6gf-mpr2-68h6
|
||||
# exp: 2026-07-09
|
||||
CVE-2026-32286
|
||||
|
||||
# CVE-2026-34040 / GHSA-x744-4wpc-v9h2: Docker AuthZ plugin bypass via oversized request body
|
||||
# Severity: HIGH (CVSS 8.8) — Package: github.com/docker/docker v28.5.2+incompatible
|
||||
# Incomplete fix for CVE-2024-41110. Fixed in moby/moby v29.3.1 but no fix for docker/docker import path.
|
||||
# Charon uses Docker client SDK only (list containers); the vulnerability is server-side in the Docker daemon.
|
||||
# Review by: 2026-04-30
|
||||
# See also: .grype.yaml for full justification
|
||||
# exp: 2026-04-30
|
||||
CVE-2026-34040
|
||||
|
||||
# GHSA-x744-4wpc-v9h2: Docker AuthZ plugin bypass via oversized request body (GHSA alias)
|
||||
# Severity: HIGH (CVSS 8.8) — Package: github.com/docker/docker v28.5.2+incompatible
|
||||
# GHSA alias for CVE-2026-34040. See CVE-2026-34040 entry above for full details.
|
||||
# Review by: 2026-04-30
|
||||
# See also: .grype.yaml for full justification
|
||||
# exp: 2026-04-30
|
||||
GHSA-x744-4wpc-v9h2
|
||||
|
||||
# CVE-2026-33997 / GHSA-pxq6-2prw-chj9: Moby off-by-one error in plugin privilege validation
|
||||
# Severity: MEDIUM (CVSS 6.8) — Package: github.com/docker/docker v28.5.2+incompatible
|
||||
# Fixed in moby/moby v29.3.1 but no fix for docker/docker import path.
|
||||
|
||||
@@ -577,6 +577,7 @@ graph LR
|
||||
- Global threat intelligence (crowd-sourced IP reputation)
|
||||
- Automatic IP banning with configurable duration
|
||||
- Decision management API (view, create, delete bans)
|
||||
- IP whitelist management: operators add/remove IPs and CIDRs via the management UI; entries are persisted in SQLite and regenerated into a `crowdsecurity/whitelists` parser YAML on every mutating operation and at startup
|
||||
|
||||
**Modes:**
|
||||
|
||||
|
||||
14
Dockerfile
14
Dockerfile
@@ -13,7 +13,7 @@ ARG BUILD_DEBUG=0
|
||||
ARG GO_VERSION=1.26.2
|
||||
|
||||
# renovate: datasource=docker depName=alpine versioning=docker
|
||||
ARG ALPINE_IMAGE=alpine:3.23.3@sha256:25109184c71bdad752c8312a8623239686a9a2071e8825f20acb8f2198c3f659
|
||||
ARG ALPINE_IMAGE=alpine:3.23.4@sha256:5b10f432ef3da1b8d4c7eb6c487f2f5a8f096bc91145e68878dd4a5019afde11
|
||||
|
||||
# ---- Shared CrowdSec Version ----
|
||||
# renovate: datasource=github-releases depName=crowdsecurity/crowdsec
|
||||
@@ -92,7 +92,7 @@ RUN --mount=type=cache,target=/root/.cache/go-build \
|
||||
# ---- Frontend Builder ----
|
||||
# Build the frontend using the BUILDPLATFORM to avoid arm64 musl Rollup native issues
|
||||
# renovate: datasource=docker depName=node
|
||||
FROM --platform=$BUILDPLATFORM node:24.14.1-alpine@sha256:8510330d3eb72c804231a834b1a8ebb55cb3796c3e4431297a24d246b8add4d5 AS frontend-builder
|
||||
FROM --platform=$BUILDPLATFORM node:24.15.0-alpine@sha256:d1b3b4da11eefd5941e7f0b9cf17783fc99d9c6fc34884a665f40a06dbdfc94f AS frontend-builder
|
||||
WORKDIR /app/frontend
|
||||
|
||||
# Copy frontend package files
|
||||
@@ -160,7 +160,7 @@ RUN set -eux; \
|
||||
# Note: xx-go install puts binaries in /go/bin/TARGETOS_TARGETARCH/dlv if cross-compiling.
|
||||
# We find it and move it to /go/bin/dlv so it's in a consistent location for the next stage.
|
||||
# renovate: datasource=go depName=github.com/go-delve/delve
|
||||
ARG DLV_VERSION=1.26.1
|
||||
ARG DLV_VERSION=1.26.2
|
||||
# hadolint ignore=DL3059,DL4006
|
||||
RUN CGO_ENABLED=0 xx-go install github.com/go-delve/delve/cmd/dlv@v${DLV_VERSION} && \
|
||||
DLV_PATH=$(find /go/bin -name dlv -type f | head -n 1) && \
|
||||
@@ -386,13 +386,13 @@ RUN go get github.com/expr-lang/expr@v${EXPR_LANG_VERSION} && \
|
||||
go get github.com/jackc/pgx/v4@v4.18.3 && \
|
||||
# GHSA-xmrv-pmrh-hhx2: AWS SDK v2 event stream injection
|
||||
# renovate: datasource=go depName=github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream
|
||||
go get github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream@v1.7.8 && \
|
||||
go get github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream@v1.7.9 && \
|
||||
# renovate: datasource=go depName=github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs
|
||||
go get github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs@v1.68.0 && \
|
||||
go get github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs@v1.69.1 && \
|
||||
# renovate: datasource=go depName=github.com/aws/aws-sdk-go-v2/service/kinesis
|
||||
go get github.com/aws/aws-sdk-go-v2/service/kinesis@v1.43.5 && \
|
||||
go get github.com/aws/aws-sdk-go-v2/service/kinesis@v1.43.6 && \
|
||||
# renovate: datasource=go depName=github.com/aws/aws-sdk-go-v2/service/s3
|
||||
go get github.com/aws/aws-sdk-go-v2/service/s3@v1.99.0 && \
|
||||
go get github.com/aws/aws-sdk-go-v2/service/s3@v1.99.1 && \
|
||||
go mod tidy
|
||||
|
||||
# Fix compatibility issues with expr-lang v1.17.7
|
||||
|
||||
91
SECURITY.md
91
SECURITY.md
@@ -27,7 +27,7 @@ public disclosure.
|
||||
|
||||
## Known Vulnerabilities
|
||||
|
||||
Last reviewed: 2026-04-09
|
||||
Last reviewed: 2026-04-21
|
||||
|
||||
### [HIGH] CVE-2026-31790 · OpenSSL Vulnerability in Alpine Base Image
|
||||
|
||||
@@ -71,48 +71,6 @@ Dockerfile.
|
||||
|
||||
---
|
||||
|
||||
### [HIGH] CVE-2026-34040 · Docker AuthZ Plugin Bypass via Oversized Request Body
|
||||
|
||||
| Field | Value |
|
||||
|--------------|-------|
|
||||
| **ID** | CVE-2026-34040 (GHSA-x744-4wpc-v9h2) |
|
||||
| **Severity** | High · 8.8 |
|
||||
| **Status** | Awaiting Upstream |
|
||||
|
||||
**What**
|
||||
Docker Engine AuthZ plugins can be bypassed when an API request body exceeds a
|
||||
certain size threshold. Charon uses the Docker client SDK only; this is a
|
||||
server-side vulnerability in the Docker daemon's authorization plugin handler.
|
||||
|
||||
**Who**
|
||||
|
||||
- Discovered by: Automated scan (govulncheck, Grype)
|
||||
- Reported: 2026-04-04
|
||||
- Affects: Docker Engine daemon operators; Charon application is not directly vulnerable
|
||||
|
||||
**Where**
|
||||
|
||||
- Component: `github.com/docker/docker` v28.5.2+incompatible (Docker client SDK)
|
||||
- Versions affected: Docker Engine < 29.3.1
|
||||
|
||||
**When**
|
||||
|
||||
- Discovered: 2026-04-04
|
||||
- Disclosed (if public): Public
|
||||
- Target fix: When moby/moby/v2 stabilizes or docker/docker import path is updated
|
||||
|
||||
**How**
|
||||
The vulnerability requires an attacker to send oversized API request bodies to the
|
||||
Docker daemon. Charon uses the Docker client SDK for container management operations
|
||||
only and does not expose the Docker socket externally. The attack vector is limited
|
||||
to the Docker daemon host, not the Charon application.
|
||||
|
||||
**Planned Remediation**
|
||||
Monitor moby/moby/v2 module stabilization. The `docker/docker` import path has no
|
||||
fix available. When a compatible module path exists, migrate the Docker SDK import.
|
||||
|
||||
---
|
||||
|
||||
### [HIGH] CVE-2026-2673 · OpenSSL TLS 1.3 Key Exchange Group Downgrade
|
||||
|
||||
| Field | Value |
|
||||
@@ -194,8 +152,8 @@ via the Docker client SDK. The attack requires a malicious Docker plugin to be
|
||||
installed on the host, which is outside Charon's operational scope.
|
||||
|
||||
**Planned Remediation**
|
||||
Same as CVE-2026-34040: monitor moby/moby/v2 module stabilization. No fix
|
||||
available for the current `docker/docker` import path.
|
||||
Monitor Moby advisory updates and verify scanner results against current modular
|
||||
Moby dependency paths.
|
||||
|
||||
---
|
||||
|
||||
@@ -239,6 +197,49 @@ Charon users is negligible since the vulnerable code path is not exercised.
|
||||
|
||||
## Patched Vulnerabilities
|
||||
|
||||
### ✅ [HIGH] CVE-2026-34040 · Docker AuthZ Plugin Bypass via Oversized Request Body
|
||||
|
||||
| Field | Value |
|
||||
|--------------|-------|
|
||||
| **ID** | CVE-2026-34040 (GHSA-x744-4wpc-v9h2) |
|
||||
| **Severity** | High · 8.8 |
|
||||
| **Patched** | 2026-04-21 |
|
||||
|
||||
**What**
|
||||
Docker Engine AuthZ plugins can be bypassed when an API request body exceeds a
|
||||
certain size threshold. The previous Charon backend dependency path was
|
||||
`github.com/docker/docker`.
|
||||
|
||||
**Who**
|
||||
|
||||
- Discovered by: Automated scan (govulncheck, Grype)
|
||||
- Reported: 2026-04-04
|
||||
|
||||
**Where**
|
||||
|
||||
- Previous component: `github.com/docker/docker` v28.5.2+incompatible (Docker client SDK)
|
||||
- Remediated component path: `github.com/moby/moby/client` with `github.com/moby/moby/api`
|
||||
|
||||
**When**
|
||||
|
||||
- Discovered: 2026-04-04
|
||||
- Patched: 2026-04-21
|
||||
- Time to patch: 17 days
|
||||
|
||||
**How**
|
||||
The backend Docker service imports and module dependencies were migrated away from
|
||||
the vulnerable monolith package path to modular Moby dependencies.
|
||||
|
||||
**Resolution**
|
||||
Validation evidence after remediation:
|
||||
|
||||
- Backend: `go mod tidy`, `go test ./...`, and `go build ./cmd/api` passed.
|
||||
- Trivy gate output did not include `CVE-2026-34040` or `GHSA-x744-4wpc-v9h2`.
|
||||
- Docker image scan gate reported `0 Critical` and `0 High`, and did not include
|
||||
`CVE-2026-34040` or `GHSA-x744-4wpc-v9h2`.
|
||||
|
||||
---
|
||||
|
||||
### ✅ [LOW] CVE-2026-26958 · edwards25519 MultiScalarMult Invalid Results
|
||||
|
||||
| Field | Value |
|
||||
|
||||
@@ -3,7 +3,6 @@ module github.com/Wikid82/charon/backend
|
||||
go 1.26.2
|
||||
|
||||
require (
|
||||
github.com/docker/docker v28.5.2+incompatible
|
||||
github.com/gin-contrib/gzip v1.2.6
|
||||
github.com/gin-gonic/gin v1.12.0
|
||||
github.com/glebarez/sqlite v1.11.0
|
||||
@@ -11,6 +10,7 @@ require (
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/gorilla/websocket v1.5.3
|
||||
github.com/mattn/go-sqlite3 v1.14.42
|
||||
github.com/moby/moby/client v0.4.1
|
||||
github.com/oschwald/geoip2-golang/v2 v2.1.0
|
||||
github.com/prometheus/client_golang v1.23.2
|
||||
github.com/robfig/cron/v3 v3.0.1
|
||||
@@ -36,10 +36,9 @@ require (
|
||||
github.com/cloudwego/base64x v0.1.6 // indirect
|
||||
github.com/containerd/errdefs v1.0.0 // indirect
|
||||
github.com/containerd/errdefs/pkg v0.3.0 // indirect
|
||||
github.com/containerd/log v0.1.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/distribution/reference v0.6.0 // indirect
|
||||
github.com/docker/go-connections v0.6.0 // indirect
|
||||
github.com/docker/go-connections v0.7.0 // indirect
|
||||
github.com/docker/go-units v0.5.0 // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
@@ -61,18 +60,15 @@ require (
|
||||
github.com/leodido/go-urn v1.4.0 // indirect
|
||||
github.com/mattn/go-isatty v0.0.21 // indirect
|
||||
github.com/moby/docker-image-spec v1.3.1 // indirect
|
||||
github.com/moby/sys/atomicwriter v0.1.0 // indirect
|
||||
github.com/moby/term v0.5.2 // indirect
|
||||
github.com/moby/moby/api v1.54.2 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/morikuni/aec v1.1.0 // indirect
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
|
||||
github.com/ncruces/go-strftime v1.0.0 // indirect
|
||||
github.com/opencontainers/go-digest v1.0.0 // indirect
|
||||
github.com/opencontainers/image-spec v1.1.1 // indirect
|
||||
github.com/oschwald/maxminddb-golang/v2 v2.1.1 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.3.0 // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/prometheus/client_model v0.6.2 // indirect
|
||||
github.com/prometheus/common v0.67.5 // indirect
|
||||
@@ -87,7 +83,6 @@ require (
|
||||
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.68.0 // indirect
|
||||
go.opentelemetry.io/otel v1.43.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.43.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.43.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.43.0 // indirect
|
||||
go.yaml.in/yaml/v2 v2.4.4 // indirect
|
||||
@@ -95,9 +90,8 @@ require (
|
||||
golang.org/x/sys v0.43.0 // indirect
|
||||
google.golang.org/protobuf v1.36.11 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
gotest.tools/v3 v3.5.2 // indirect
|
||||
modernc.org/libc v1.72.0 // indirect
|
||||
modernc.org/mathutil v1.7.1 // indirect
|
||||
modernc.org/memory v1.11.0 // indirect
|
||||
modernc.org/sqlite v1.48.2 // indirect
|
||||
modernc.org/sqlite v1.49.1 // indirect
|
||||
)
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c h1:udKWzYgxTojEKWjV8V+WSxDXJ4NFATAsZjh8iIbsQIg=
|
||||
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
|
||||
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
||||
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
|
||||
@@ -10,8 +8,6 @@ github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uS
|
||||
github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k=
|
||||
github.com/bytedance/sonic/loader v0.5.1 h1:Ygpfa9zwRCCKSlrp5bBP/b/Xzc3VxsAW+5NIYXrOOpI=
|
||||
github.com/bytedance/sonic/loader v0.5.1/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo=
|
||||
github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM=
|
||||
github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw=
|
||||
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
||||
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M=
|
||||
@@ -20,17 +16,13 @@ github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG
|
||||
github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M=
|
||||
github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE=
|
||||
github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk=
|
||||
github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
|
||||
github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
|
||||
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
|
||||
github.com/docker/docker v28.5.2+incompatible h1:DBX0Y0zAjZbSrm1uzOkdr1onVghKaftjlSWt4AFexzM=
|
||||
github.com/docker/docker v28.5.2+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94=
|
||||
github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE=
|
||||
github.com/docker/go-connections v0.7.0 h1:6SsRfJddP22WMrCkj19x9WKjEDTB+ahsdiGYf0mN39c=
|
||||
github.com/docker/go-connections v0.7.0/go.mod h1:no1qkHdjq7kLMGUXYAduOhYPSJxxvgWBh7ogVvptn3Q=
|
||||
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
|
||||
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
@@ -77,8 +69,6 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.28.0 h1:HWRh5R2+9EifMyIHV7ZV+MIZqgz+PMpZ14Jynv3O2Zs=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.28.0/go.mod h1:JfhWUomR1baixubs02l85lZYYOm7LV6om4ceouMv45c=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
|
||||
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
|
||||
@@ -105,19 +95,15 @@ github.com/mattn/go-sqlite3 v1.14.42 h1:MigqEP4ZmHw3aIdIT7T+9TLa90Z6smwcthx+Azv4
|
||||
github.com/mattn/go-sqlite3 v1.14.42/go.mod h1:pjEuOr8IwzLJP2MfGeTb0A35jauH+C2kbHKBr7yXKVQ=
|
||||
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
|
||||
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
|
||||
github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw=
|
||||
github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs=
|
||||
github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU=
|
||||
github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko=
|
||||
github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ=
|
||||
github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc=
|
||||
github.com/moby/moby/api v1.54.2 h1:wiat9QAhnDQjA7wk1kh/TqHz2I1uUA7M7t9SAl/JNXg=
|
||||
github.com/moby/moby/api v1.54.2/go.mod h1:+RQ6wluLwtYaTd1WnPLykIDPekkuyD/ROWQClE83pzs=
|
||||
github.com/moby/moby/client v0.4.1 h1:DMQgisVoMkmMs7fp3ROSdiBnoAu8+vo3GggFl06M/wY=
|
||||
github.com/moby/moby/client v0.4.1/go.mod h1:z52C9O2POPOsnxZAy//WtKcQ32P+jT/NGeXu/7nfjGQ=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/morikuni/aec v1.1.0 h1:vBBl0pUnvi/Je71dsRrhMBtreIqNMYErSAbEeb8jrXQ=
|
||||
github.com/morikuni/aec v1.1.0/go.mod h1:xDRgiq/iw5l+zkao76YTKzKttOp2cwPEne25HDkJnBw=
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
|
||||
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
|
||||
@@ -132,8 +118,6 @@ github.com/oschwald/maxminddb-golang/v2 v2.1.1 h1:lA8FH0oOrM4u7mLvowq8IT6a3Q/qEn
|
||||
github.com/oschwald/maxminddb-golang/v2 v2.1.1/go.mod h1:PLdx6PR+siSIoXqqy7C7r3SB3KZnhxWr1Dp6g0Hacl8=
|
||||
github.com/pelletier/go-toml/v2 v2.3.0 h1:k59bC/lIZREW0/iVaQR8nDHxVq8OVlIzYCOJf421CaM=
|
||||
github.com/pelletier/go-toml/v2 v2.3.0/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o=
|
||||
@@ -181,10 +165,6 @@ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.68.0 h1:CqXxU8V
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.68.0/go.mod h1:BuhAPThV8PBHBvg8ZzZ/Ok3idOdhWIodywz2xEcRbJo=
|
||||
go.opentelemetry.io/otel v1.43.0 h1:mYIM03dnh5zfN7HautFE4ieIig9amkNANT+xcVxAj9I=
|
||||
go.opentelemetry.io/otel v1.43.0/go.mod h1:JuG+u74mvjvcm8vj8pI5XiHy1zDeoCS2LB1spIq7Ay0=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.43.0 h1:88Y4s2C8oTui1LGM6bTWkw0ICGcOLCAI5l6zsD1j20k=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.43.0/go.mod h1:Vl1/iaggsuRlrHf/hfPJPvVag77kKyvrLeD10kpMl+A=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.43.0 h1:3iZJKlCZufyRzPzlQhUIWVmfltrXuGyfjREgGP3UUjc=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.43.0/go.mod h1:/G+nUPfhq2e+qiXMGxMwumDrP5jtzU+mWN7/sjT2rak=
|
||||
go.opentelemetry.io/otel/metric v1.43.0 h1:d7638QeInOnuwOONPp4JAOGfbCEpYb+K6DVWvdxGzgM=
|
||||
go.opentelemetry.io/otel/metric v1.43.0/go.mod h1:RDnPtIxvqlgO8GRW18W6Z/4P462ldprJtfxHxyKd2PY=
|
||||
go.opentelemetry.io/otel/sdk v1.43.0 h1:pi5mE86i5rTeLXqoF/hhiBtUNcrAGHLKQdhg4h4V9Dg=
|
||||
@@ -193,8 +173,6 @@ go.opentelemetry.io/otel/sdk/metric v1.43.0 h1:S88dyqXjJkuBNLeMcVPRFXpRw2fuwdvfC
|
||||
go.opentelemetry.io/otel/sdk/metric v1.43.0/go.mod h1:C/RJtwSEJ5hzTiUz5pXF1kILHStzb9zFlIEe85bhj6A=
|
||||
go.opentelemetry.io/otel/trace v1.43.0 h1:BkNrHpup+4k4w+ZZ86CZoHHEkohws8AY+WTX09nk+3A=
|
||||
go.opentelemetry.io/otel/trace v1.43.0/go.mod h1:/QJhyVBUUswCphDVxq+8mld+AvhXZLhe+8WVFxiFff0=
|
||||
go.opentelemetry.io/proto/otlp v1.10.0 h1:IQRWgT5srOCYfiWnpqUYz9CVmbO8bFmKcwYxpuCSL2g=
|
||||
go.opentelemetry.io/proto/otlp v1.10.0/go.mod h1:/CV4QoCR/S9yaPj8utp3lvQPoqMtxXdzn7ozvvozVqk=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y=
|
||||
@@ -219,12 +197,6 @@ golang.org/x/time v0.15.0 h1:bbrp8t3bGUeFOx08pvsMYRTCVSMk89u4tKbNOZbp88U=
|
||||
golang.org/x/time v0.15.0/go.mod h1:Y4YMaQmXwGQZoFaVFk4YpCt4FLQMYKZe9oeV/f4MSno=
|
||||
golang.org/x/tools v0.43.0 h1:12BdW9CeB3Z+J/I/wj34VMl8X+fEXBxVR90JeMX5E7s=
|
||||
golang.org/x/tools v0.43.0/go.mod h1:uHkMso649BX2cZK6+RpuIPXS3ho2hZo4FVwfoy1vIk0=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20260401024825-9d38bb4040a9 h1:VPWxll4HlMw1Vs/qXtN7BvhZqsS9cdAittCNvVENElA=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20260401024825-9d38bb4040a9/go.mod h1:7QBABkRtR8z+TEnmXTqIqwJLlzrZKVfAUm7tY3yGv0M=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20260401024825-9d38bb4040a9 h1:m8qni9SQFH0tJc1X0vmnpw/0t+AImlSvp30sEupozUg=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20260401024825-9d38bb4040a9/go.mod h1:4Hqkh8ycfw05ld/3BWL7rJOSfebL2Q+DVDeRgYgxUU8=
|
||||
google.golang.org/grpc v1.80.0 h1:Xr6m2WmWZLETvUNvIUmeD5OAagMw3FiKmMlTdViWsHM=
|
||||
google.golang.org/grpc v1.80.0/go.mod h1:ho/dLnxwi3EDJA4Zghp7k2Ec1+c2jqup0bFkw07bwF4=
|
||||
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
|
||||
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
@@ -263,11 +235,13 @@ modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
|
||||
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
|
||||
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
|
||||
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
|
||||
modernc.org/sqlite v1.48.2 h1:5CnW4uP8joZtA0LedVqLbZV5GD7F/0x91AXeSyjoh5c=
|
||||
modernc.org/sqlite v1.48.2/go.mod h1:hWjRO6Tj/5Ik8ieqxQybiEOUXy0NJFNp2tpvVpKlvig=
|
||||
modernc.org/sqlite v1.49.1 h1:dYGHTKcX1sJ+EQDnUzvz4TJ5GbuvhNJa8Fg6ElGx73U=
|
||||
modernc.org/sqlite v1.49.1/go.mod h1:m0w8xhwYUVY3H6pSDwc3gkJ/irZT/0YEXwBlhaxQEew=
|
||||
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
|
||||
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
|
||||
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
|
||||
modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
|
||||
pgregory.net/rapid v1.2.0 h1:keKAYRcjm+e1F0oAuU5F5+YPAWcyxNNRK2wud503Gnk=
|
||||
pgregory.net/rapid v1.2.0/go.mod h1:PY5XlDGj0+V1FCq0o192FdRhpKHGTRIWBgqjDBTrq04=
|
||||
software.sslmate.com/src/go-pkcs12 v0.7.1 h1:bxkUPRsvTPNRBZa4M/aSX4PyMOEbq3V8I6hbkG4F4Q8=
|
||||
software.sslmate.com/src/go-pkcs12 v0.7.1/go.mod h1:Qiz0EyvDRJjjxGyUQa2cCNZn/wMyzrRJ/qcDXOQazLI=
|
||||
|
||||
@@ -63,6 +63,7 @@ type CrowdsecHandler struct {
|
||||
Hub *crowdsec.HubService
|
||||
Console *crowdsec.ConsoleEnrollmentService
|
||||
Security *services.SecurityService
|
||||
WhitelistSvc *services.CrowdSecWhitelistService
|
||||
CaddyManager *caddy.Manager // For config reload after bouncer registration
|
||||
LAPIMaxWait time.Duration // For testing; 0 means 60s default
|
||||
LAPIPollInterval time.Duration // For testing; 0 means 500ms default
|
||||
@@ -383,7 +384,7 @@ func NewCrowdsecHandler(db *gorm.DB, executor CrowdsecExecutor, binPath, dataDir
|
||||
securitySvc = services.NewSecurityService(db)
|
||||
consoleSvc = crowdsec.NewConsoleEnrollmentService(db, &crowdsec.SecureCommandExecutor{}, dataDir, consoleSecret)
|
||||
}
|
||||
return &CrowdsecHandler{
|
||||
h := &CrowdsecHandler{
|
||||
DB: db,
|
||||
Executor: executor,
|
||||
CmdExec: &RealCommandExecutor{},
|
||||
@@ -395,6 +396,10 @@ func NewCrowdsecHandler(db *gorm.DB, executor CrowdsecExecutor, binPath, dataDir
|
||||
dashCache: newDashboardCache(),
|
||||
validateLAPIURL: validateCrowdsecLAPIBaseURLDefault,
|
||||
}
|
||||
if db != nil {
|
||||
h.WhitelistSvc = services.NewCrowdSecWhitelistService(db, dataDir)
|
||||
}
|
||||
return h
|
||||
}
|
||||
|
||||
// isCerberusEnabled returns true when Cerberus is enabled via DB or env flag.
|
||||
@@ -2700,6 +2705,75 @@ func fileExists(path string) bool {
|
||||
return err == nil
|
||||
}
|
||||
|
||||
// ListWhitelists returns all CrowdSec IP/CIDR whitelist entries.
|
||||
func (h *CrowdsecHandler) ListWhitelists(c *gin.Context) {
|
||||
entries, err := h.WhitelistSvc.List(c.Request.Context())
|
||||
if err != nil {
|
||||
logger.Log().WithError(err).Error("failed to list whitelist entries")
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to list whitelist entries"})
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, gin.H{"whitelist": entries})
|
||||
}
|
||||
|
||||
// AddWhitelist adds a new IP or CIDR to the CrowdSec whitelist.
|
||||
func (h *CrowdsecHandler) AddWhitelist(c *gin.Context) {
|
||||
var req struct {
|
||||
IPOrCIDR string `json:"ip_or_cidr" binding:"required"`
|
||||
Reason string `json:"reason"`
|
||||
}
|
||||
if err := c.ShouldBindJSON(&req); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "ip_or_cidr is required"})
|
||||
return
|
||||
}
|
||||
|
||||
entry, err := h.WhitelistSvc.Add(c.Request.Context(), req.IPOrCIDR, req.Reason)
|
||||
if err != nil {
|
||||
switch {
|
||||
case errors.Is(err, services.ErrInvalidIPOrCIDR):
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid IP address or CIDR notation"})
|
||||
case errors.Is(err, services.ErrDuplicateEntry):
|
||||
c.JSON(http.StatusConflict, gin.H{"error": "entry already exists in whitelist"})
|
||||
default:
|
||||
logger.Log().WithError(err).Error("failed to add whitelist entry")
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to add whitelist entry"})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if _, execErr := h.CmdExec.Execute(c.Request.Context(), "cscli", "hub", "reload"); execErr != nil {
|
||||
logger.Log().WithError(execErr).Warn("cscli hub reload failed after whitelist add (non-fatal)")
|
||||
}
|
||||
|
||||
c.JSON(http.StatusCreated, entry)
|
||||
}
|
||||
|
||||
// DeleteWhitelist removes a whitelist entry by UUID.
|
||||
func (h *CrowdsecHandler) DeleteWhitelist(c *gin.Context) {
|
||||
id := c.Param("uuid")
|
||||
if id == "" {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "uuid is required"})
|
||||
return
|
||||
}
|
||||
|
||||
if err := h.WhitelistSvc.Delete(c.Request.Context(), id); err != nil {
|
||||
switch {
|
||||
case errors.Is(err, services.ErrWhitelistNotFound):
|
||||
c.JSON(http.StatusNotFound, gin.H{"error": "whitelist entry not found"})
|
||||
default:
|
||||
logger.Log().WithError(err).Error("failed to delete whitelist entry")
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to delete whitelist entry"})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if _, execErr := h.CmdExec.Execute(c.Request.Context(), "cscli", "hub", "reload"); execErr != nil {
|
||||
logger.Log().WithError(execErr).Warn("cscli hub reload failed after whitelist delete (non-fatal)")
|
||||
}
|
||||
|
||||
c.Status(http.StatusNoContent)
|
||||
}
|
||||
|
||||
// RegisterRoutes registers crowdsec admin routes under protected group
|
||||
func (h *CrowdsecHandler) RegisterRoutes(rg *gin.RouterGroup) {
|
||||
rg.POST("/admin/crowdsec/start", h.Start)
|
||||
@@ -2742,4 +2816,8 @@ func (h *CrowdsecHandler) RegisterRoutes(rg *gin.RouterGroup) {
|
||||
rg.GET("/admin/crowdsec/dashboard/scenarios", h.DashboardScenarios)
|
||||
rg.GET("/admin/crowdsec/alerts", h.ListAlerts)
|
||||
rg.GET("/admin/crowdsec/decisions/export", h.ExportDecisions)
|
||||
// Whitelist management endpoints (Issue #939)
|
||||
rg.GET("/admin/crowdsec/whitelist", h.ListWhitelists)
|
||||
rg.POST("/admin/crowdsec/whitelist", h.AddWhitelist)
|
||||
rg.DELETE("/admin/crowdsec/whitelist/:uuid", h.DeleteWhitelist)
|
||||
}
|
||||
|
||||
284
backend/internal/api/handlers/crowdsec_whitelist_handler_test.go
Normal file
284
backend/internal/api/handlers/crowdsec_whitelist_handler_test.go
Normal file
@@ -0,0 +1,284 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/models"
|
||||
"github.com/Wikid82/charon/backend/internal/services"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
type mockCmdExecWhitelist struct {
|
||||
reloadCalled bool
|
||||
reloadErr error
|
||||
}
|
||||
|
||||
func (m *mockCmdExecWhitelist) Execute(_ context.Context, _ string, _ ...string) ([]byte, error) {
|
||||
m.reloadCalled = true
|
||||
return nil, m.reloadErr
|
||||
}
|
||||
|
||||
func setupWhitelistHandler(t *testing.T) (*CrowdsecHandler, *gin.Engine, *gorm.DB) {
|
||||
t.Helper()
|
||||
db := OpenTestDB(t)
|
||||
require.NoError(t, db.AutoMigrate(&models.CrowdSecWhitelist{}))
|
||||
fe := &fakeExec{}
|
||||
h := newTestCrowdsecHandler(t, db, fe, "/bin/false", "")
|
||||
h.WhitelistSvc = services.NewCrowdSecWhitelistService(db, "")
|
||||
|
||||
r := gin.New()
|
||||
g := r.Group("/api/v1")
|
||||
g.GET("/admin/crowdsec/whitelist", h.ListWhitelists)
|
||||
g.POST("/admin/crowdsec/whitelist", h.AddWhitelist)
|
||||
g.DELETE("/admin/crowdsec/whitelist/:uuid", h.DeleteWhitelist)
|
||||
|
||||
return h, r, db
|
||||
}
|
||||
|
||||
func TestListWhitelists_Empty(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, r, _ := setupWhitelistHandler(t)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/v1/admin/crowdsec/whitelist", nil)
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
var resp map[string]interface{}
|
||||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
||||
entries, ok := resp["whitelist"].([]interface{})
|
||||
assert.True(t, ok)
|
||||
assert.Empty(t, entries)
|
||||
}
|
||||
|
||||
func TestAddWhitelist_ValidIP(t *testing.T) {
|
||||
t.Parallel()
|
||||
h, r, _ := setupWhitelistHandler(t)
|
||||
mock := &mockCmdExecWhitelist{}
|
||||
h.CmdExec = mock
|
||||
|
||||
body := `{"ip_or_cidr":"1.2.3.4","reason":"test"}`
|
||||
w := httptest.NewRecorder()
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/admin/crowdsec/whitelist", bytes.NewBufferString(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusCreated, w.Code)
|
||||
assert.True(t, mock.reloadCalled)
|
||||
|
||||
var entry models.CrowdSecWhitelist
|
||||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &entry))
|
||||
assert.Equal(t, "1.2.3.4", entry.IPOrCIDR)
|
||||
assert.NotEmpty(t, entry.UUID)
|
||||
}
|
||||
|
||||
func TestAddWhitelist_InvalidIP(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, r, _ := setupWhitelistHandler(t)
|
||||
|
||||
body := `{"ip_or_cidr":"not-valid","reason":""}`
|
||||
w := httptest.NewRecorder()
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/admin/crowdsec/whitelist", bytes.NewBufferString(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusBadRequest, w.Code)
|
||||
}
|
||||
|
||||
func TestAddWhitelist_Duplicate(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, r, _ := setupWhitelistHandler(t)
|
||||
|
||||
body := `{"ip_or_cidr":"9.9.9.9","reason":""}`
|
||||
for i := 0; i < 2; i++ {
|
||||
w := httptest.NewRecorder()
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/admin/crowdsec/whitelist", bytes.NewBufferString(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
r.ServeHTTP(w, req)
|
||||
if i == 0 {
|
||||
assert.Equal(t, http.StatusCreated, w.Code)
|
||||
} else {
|
||||
assert.Equal(t, http.StatusConflict, w.Code)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDeleteWhitelist_Existing(t *testing.T) {
|
||||
t.Parallel()
|
||||
h, r, db := setupWhitelistHandler(t)
|
||||
mock := &mockCmdExecWhitelist{}
|
||||
h.CmdExec = mock
|
||||
|
||||
svc := services.NewCrowdSecWhitelistService(db, "")
|
||||
entry, err := svc.Add(t.Context(), "7.7.7.7", "to delete")
|
||||
require.NoError(t, err)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
req := httptest.NewRequest(http.MethodDelete, "/api/v1/admin/crowdsec/whitelist/"+entry.UUID, nil)
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusNoContent, w.Code)
|
||||
assert.True(t, mock.reloadCalled)
|
||||
}
|
||||
|
||||
func TestDeleteWhitelist_NotFound(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, r, _ := setupWhitelistHandler(t)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
req := httptest.NewRequest(http.MethodDelete, "/api/v1/admin/crowdsec/whitelist/00000000-0000-0000-0000-000000000000", nil)
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusNotFound, w.Code)
|
||||
}
|
||||
|
||||
func TestListWhitelists_AfterAdd(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, r, db := setupWhitelistHandler(t)
|
||||
svc := services.NewCrowdSecWhitelistService(db, "")
|
||||
_, err := svc.Add(t.Context(), "8.8.8.8", "google dns")
|
||||
require.NoError(t, err)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/v1/admin/crowdsec/whitelist", nil)
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
var resp map[string]interface{}
|
||||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
||||
entries := resp["whitelist"].([]interface{})
|
||||
assert.Len(t, entries, 1)
|
||||
}
|
||||
|
||||
func TestAddWhitelist_400_MissingField(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, r, _ := setupWhitelistHandler(t)
|
||||
|
||||
body := `{}`
|
||||
w := httptest.NewRecorder()
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/admin/crowdsec/whitelist", bytes.NewBufferString(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusBadRequest, w.Code)
|
||||
var resp map[string]interface{}
|
||||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
||||
assert.Equal(t, "ip_or_cidr is required", resp["error"])
|
||||
}
|
||||
|
||||
func TestListWhitelists_DBError(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, r, db := setupWhitelistHandler(t)
|
||||
sqlDB, err := db.DB()
|
||||
require.NoError(t, err)
|
||||
_ = sqlDB.Close()
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/v1/admin/crowdsec/whitelist", nil)
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusInternalServerError, w.Code)
|
||||
var resp map[string]interface{}
|
||||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
||||
assert.Equal(t, "failed to list whitelist entries", resp["error"])
|
||||
}
|
||||
|
||||
func TestAddWhitelist_DBError(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, r, db := setupWhitelistHandler(t)
|
||||
sqlDB, err := db.DB()
|
||||
require.NoError(t, err)
|
||||
_ = sqlDB.Close()
|
||||
|
||||
body := `{"ip_or_cidr":"1.2.3.4","reason":"test"}`
|
||||
w := httptest.NewRecorder()
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/admin/crowdsec/whitelist", bytes.NewBufferString(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusInternalServerError, w.Code)
|
||||
var resp map[string]interface{}
|
||||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
||||
assert.Equal(t, "failed to add whitelist entry", resp["error"])
|
||||
}
|
||||
|
||||
func TestAddWhitelist_ReloadFailure(t *testing.T) {
|
||||
t.Parallel()
|
||||
h, r, _ := setupWhitelistHandler(t)
|
||||
mock := &mockCmdExecWhitelist{reloadErr: errors.New("cscli failed")}
|
||||
h.CmdExec = mock
|
||||
|
||||
body := `{"ip_or_cidr":"3.3.3.3","reason":"reload test"}`
|
||||
w := httptest.NewRecorder()
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/admin/crowdsec/whitelist", bytes.NewBufferString(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusCreated, w.Code)
|
||||
assert.True(t, mock.reloadCalled)
|
||||
}
|
||||
|
||||
func TestDeleteWhitelist_DBError(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, r, db := setupWhitelistHandler(t)
|
||||
svc := services.NewCrowdSecWhitelistService(db, "")
|
||||
entry, err := svc.Add(t.Context(), "4.4.4.4", "will close db")
|
||||
require.NoError(t, err)
|
||||
|
||||
sqlDB, err := db.DB()
|
||||
require.NoError(t, err)
|
||||
_ = sqlDB.Close()
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
req := httptest.NewRequest(http.MethodDelete, "/api/v1/admin/crowdsec/whitelist/"+entry.UUID, nil)
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusInternalServerError, w.Code)
|
||||
var resp map[string]interface{}
|
||||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
||||
assert.Equal(t, "failed to delete whitelist entry", resp["error"])
|
||||
}
|
||||
|
||||
func TestDeleteWhitelist_ReloadFailure(t *testing.T) {
|
||||
t.Parallel()
|
||||
h, r, db := setupWhitelistHandler(t)
|
||||
mock := &mockCmdExecWhitelist{reloadErr: errors.New("cscli failed")}
|
||||
h.CmdExec = mock
|
||||
|
||||
svc := services.NewCrowdSecWhitelistService(db, "")
|
||||
entry, err := svc.Add(t.Context(), "5.5.5.5", "reload test")
|
||||
require.NoError(t, err)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
req := httptest.NewRequest(http.MethodDelete, "/api/v1/admin/crowdsec/whitelist/"+entry.UUID, nil)
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusNoContent, w.Code)
|
||||
assert.True(t, mock.reloadCalled)
|
||||
}
|
||||
|
||||
func TestDeleteWhitelist_EmptyUUID(t *testing.T) {
|
||||
t.Parallel()
|
||||
h, _, _ := setupWhitelistHandler(t)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request = httptest.NewRequest(http.MethodDelete, "/api/v1/admin/crowdsec/whitelist/", nil)
|
||||
c.Params = gin.Params{{Key: "uuid", Value: ""}}
|
||||
|
||||
h.DeleteWhitelist(c)
|
||||
|
||||
assert.Equal(t, http.StatusBadRequest, w.Code)
|
||||
var resp map[string]interface{}
|
||||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
||||
assert.Equal(t, "uuid is required", resp["error"])
|
||||
}
|
||||
@@ -122,6 +122,7 @@ func RegisterWithDeps(ctx context.Context, router *gin.Engine, db *gorm.DB, cfg
|
||||
&models.DNSProviderCredential{}, // Multi-credential support (Phase 3)
|
||||
&models.Plugin{}, // Phase 5: DNS provider plugins
|
||||
&models.ManualChallenge{}, // Phase 1: Manual DNS challenges
|
||||
&models.CrowdSecWhitelist{}, // Issue #939: CrowdSec IP whitelist management
|
||||
); err != nil {
|
||||
return fmt.Errorf("auto migrate: %w", err)
|
||||
}
|
||||
|
||||
13
backend/internal/models/crowdsec_whitelist.go
Normal file
13
backend/internal/models/crowdsec_whitelist.go
Normal file
@@ -0,0 +1,13 @@
|
||||
package models
|
||||
|
||||
import "time"
|
||||
|
||||
// CrowdSecWhitelist represents a single IP or CIDR block that CrowdSec should never ban.
|
||||
type CrowdSecWhitelist struct {
|
||||
ID uint `json:"-" gorm:"primaryKey"`
|
||||
UUID string `json:"uuid" gorm:"uniqueIndex;not null"`
|
||||
IPOrCIDR string `json:"ip_or_cidr" gorm:"not null;uniqueIndex"`
|
||||
Reason string `json:"reason" gorm:"not null;default:''"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
}
|
||||
@@ -197,6 +197,12 @@ func ReconcileCrowdSecOnStartup(db *gorm.DB, executor CrowdsecProcessManager, bi
|
||||
"data_dir": dataDir,
|
||||
}).Info("CrowdSec reconciliation: starting CrowdSec (mode=local, not currently running)")
|
||||
|
||||
// Regenerate whitelist YAML before starting so CrowdSec loads the current entries.
|
||||
whitelistSvc := NewCrowdSecWhitelistService(db, dataDir)
|
||||
if writeErr := whitelistSvc.WriteYAML(context.Background()); writeErr != nil {
|
||||
logger.Log().WithError(writeErr).Warn("CrowdSec reconciliation: failed to write whitelist YAML on startup (non-fatal)")
|
||||
}
|
||||
|
||||
startCtx, startCancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||
defer startCancel()
|
||||
|
||||
|
||||
190
backend/internal/services/crowdsec_whitelist_service.go
Normal file
190
backend/internal/services/crowdsec_whitelist_service.go
Normal file
@@ -0,0 +1,190 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/logger"
|
||||
"github.com/Wikid82/charon/backend/internal/models"
|
||||
"github.com/google/uuid"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
// Sentinel errors for CrowdSecWhitelistService operations.
|
||||
var (
|
||||
ErrWhitelistNotFound = errors.New("whitelist entry not found")
|
||||
ErrInvalidIPOrCIDR = errors.New("invalid IP address or CIDR notation")
|
||||
ErrDuplicateEntry = errors.New("entry already exists in whitelist")
|
||||
)
|
||||
|
||||
const whitelistYAMLHeader = `name: charon-whitelist
|
||||
description: "Charon-managed IP/CIDR whitelist"
|
||||
filter: "evt.Meta.service == 'http'"
|
||||
whitelist:
|
||||
reason: "Charon managed whitelist"
|
||||
`
|
||||
|
||||
// CrowdSecWhitelistService manages the CrowdSec IP/CIDR whitelist.
|
||||
type CrowdSecWhitelistService struct {
|
||||
db *gorm.DB
|
||||
dataDir string
|
||||
}
|
||||
|
||||
// NewCrowdSecWhitelistService creates a new CrowdSecWhitelistService.
|
||||
func NewCrowdSecWhitelistService(db *gorm.DB, dataDir string) *CrowdSecWhitelistService {
|
||||
return &CrowdSecWhitelistService{db: db, dataDir: dataDir}
|
||||
}
|
||||
|
||||
// List returns all whitelist entries ordered by creation time.
|
||||
func (s *CrowdSecWhitelistService) List(ctx context.Context) ([]models.CrowdSecWhitelist, error) {
|
||||
var entries []models.CrowdSecWhitelist
|
||||
if err := s.db.WithContext(ctx).Order("created_at ASC").Find(&entries).Error; err != nil {
|
||||
return nil, fmt.Errorf("list whitelist entries: %w", err)
|
||||
}
|
||||
return entries, nil
|
||||
}
|
||||
|
||||
// Add validates and persists a new whitelist entry, then regenerates the YAML file.
|
||||
// Returns ErrInvalidIPOrCIDR for malformed input and ErrDuplicateEntry for conflicts.
|
||||
func (s *CrowdSecWhitelistService) Add(ctx context.Context, ipOrCIDR, reason string) (*models.CrowdSecWhitelist, error) {
|
||||
normalized, err := normalizeIPOrCIDR(strings.TrimSpace(ipOrCIDR))
|
||||
if err != nil {
|
||||
return nil, ErrInvalidIPOrCIDR
|
||||
}
|
||||
|
||||
entry := models.CrowdSecWhitelist{
|
||||
UUID: uuid.New().String(),
|
||||
IPOrCIDR: normalized,
|
||||
Reason: reason,
|
||||
}
|
||||
|
||||
if err := s.db.WithContext(ctx).Create(&entry).Error; err != nil {
|
||||
if errors.Is(err, gorm.ErrDuplicatedKey) || strings.Contains(err.Error(), "UNIQUE constraint failed") {
|
||||
return nil, ErrDuplicateEntry
|
||||
}
|
||||
return nil, fmt.Errorf("add whitelist entry: %w", err)
|
||||
}
|
||||
|
||||
if err := s.WriteYAML(ctx); err != nil {
|
||||
logger.Log().WithError(err).Warn("failed to write CrowdSec whitelist YAML after add (non-fatal)")
|
||||
}
|
||||
|
||||
return &entry, nil
|
||||
}
|
||||
|
||||
// Delete removes a whitelist entry by UUID and regenerates the YAML file.
|
||||
// Returns ErrWhitelistNotFound if the UUID does not exist.
|
||||
func (s *CrowdSecWhitelistService) Delete(ctx context.Context, id string) error {
|
||||
result := s.db.WithContext(ctx).Where("uuid = ?", id).Delete(&models.CrowdSecWhitelist{})
|
||||
if result.Error != nil {
|
||||
return fmt.Errorf("delete whitelist entry: %w", result.Error)
|
||||
}
|
||||
if result.RowsAffected == 0 {
|
||||
return ErrWhitelistNotFound
|
||||
}
|
||||
|
||||
if err := s.WriteYAML(ctx); err != nil {
|
||||
logger.Log().WithError(err).Warn("failed to write CrowdSec whitelist YAML after delete (non-fatal)")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// WriteYAML renders and atomically writes the CrowdSec whitelist YAML file.
|
||||
// It is a no-op when dataDir is empty (unit-test mode).
|
||||
func (s *CrowdSecWhitelistService) WriteYAML(ctx context.Context) error {
|
||||
if s.dataDir == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
var entries []models.CrowdSecWhitelist
|
||||
if err := s.db.WithContext(ctx).Order("created_at ASC").Find(&entries).Error; err != nil {
|
||||
return fmt.Errorf("write whitelist yaml: query entries: %w", err)
|
||||
}
|
||||
|
||||
var ips, cidrs []string
|
||||
for _, e := range entries {
|
||||
if strings.Contains(e.IPOrCIDR, "/") {
|
||||
cidrs = append(cidrs, e.IPOrCIDR)
|
||||
} else {
|
||||
ips = append(ips, e.IPOrCIDR)
|
||||
}
|
||||
}
|
||||
|
||||
content := buildWhitelistYAML(ips, cidrs)
|
||||
|
||||
dir := filepath.Join(s.dataDir, "config", "parsers", "s02-enrich")
|
||||
if err := os.MkdirAll(dir, 0o750); err != nil {
|
||||
return fmt.Errorf("write whitelist yaml: create dir: %w", err)
|
||||
}
|
||||
|
||||
target := filepath.Join(dir, "charon-whitelist.yaml")
|
||||
tmp := target + ".tmp"
|
||||
|
||||
if err := os.WriteFile(tmp, content, 0o640); err != nil {
|
||||
return fmt.Errorf("write whitelist yaml: write temp: %w", err)
|
||||
}
|
||||
|
||||
if err := os.Rename(tmp, target); err != nil {
|
||||
_ = os.Remove(tmp)
|
||||
return fmt.Errorf("write whitelist yaml: rename: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// normalizeIPOrCIDR validates and normalizes an IP address or CIDR block.
|
||||
// For CIDRs, the network address is returned (e.g. "10.0.0.1/8" → "10.0.0.0/8").
|
||||
func normalizeIPOrCIDR(raw string) (string, error) {
|
||||
if strings.Contains(raw, "/") {
|
||||
ip, network, err := net.ParseCIDR(raw)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
_ = ip
|
||||
return network.String(), nil
|
||||
}
|
||||
ip := net.ParseIP(raw)
|
||||
if ip == nil {
|
||||
return "", fmt.Errorf("invalid IP: %q", raw)
|
||||
}
|
||||
|
||||
return ip.String(), nil
|
||||
}
|
||||
|
||||
// buildWhitelistYAML constructs the YAML content for the CrowdSec whitelist parser.
|
||||
func buildWhitelistYAML(ips, cidrs []string) []byte {
|
||||
var sb strings.Builder
|
||||
sb.WriteString(whitelistYAMLHeader)
|
||||
|
||||
sb.WriteString(" ip:")
|
||||
if len(ips) == 0 {
|
||||
sb.WriteString(" []\n")
|
||||
} else {
|
||||
sb.WriteString("\n")
|
||||
for _, ip := range ips {
|
||||
sb.WriteString(" - \"")
|
||||
sb.WriteString(ip)
|
||||
sb.WriteString("\"\n")
|
||||
}
|
||||
}
|
||||
|
||||
sb.WriteString(" cidr:")
|
||||
if len(cidrs) == 0 {
|
||||
sb.WriteString(" []\n")
|
||||
} else {
|
||||
sb.WriteString("\n")
|
||||
for _, cidr := range cidrs {
|
||||
sb.WriteString(" - \"")
|
||||
sb.WriteString(cidr)
|
||||
sb.WriteString("\"\n")
|
||||
}
|
||||
}
|
||||
|
||||
return []byte(sb.String())
|
||||
}
|
||||
309
backend/internal/services/crowdsec_whitelist_service_test.go
Normal file
309
backend/internal/services/crowdsec_whitelist_service_test.go
Normal file
@@ -0,0 +1,309 @@
|
||||
package services_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/models"
|
||||
"github.com/Wikid82/charon/backend/internal/services"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
gormlogger "gorm.io/gorm/logger"
|
||||
)
|
||||
|
||||
func openWhitelistTestDB(t *testing.T) *gorm.DB {
|
||||
t.Helper()
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{
|
||||
Logger: gormlogger.Default.LogMode(gormlogger.Silent),
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, db.AutoMigrate(&models.CrowdSecWhitelist{}))
|
||||
t.Cleanup(func() {
|
||||
sqlDB, err := db.DB()
|
||||
if err == nil {
|
||||
_ = sqlDB.Close()
|
||||
}
|
||||
})
|
||||
return db
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_List_Empty(t *testing.T) {
|
||||
t.Parallel()
|
||||
svc := services.NewCrowdSecWhitelistService(openWhitelistTestDB(t), "")
|
||||
entries, err := svc.List(context.Background())
|
||||
require.NoError(t, err)
|
||||
assert.Empty(t, entries)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_Add_ValidIP(t *testing.T) {
|
||||
t.Parallel()
|
||||
svc := services.NewCrowdSecWhitelistService(openWhitelistTestDB(t), "")
|
||||
entry, err := svc.Add(context.Background(), "1.2.3.4", "test reason")
|
||||
require.NoError(t, err)
|
||||
assert.NotEmpty(t, entry.UUID)
|
||||
assert.Equal(t, "1.2.3.4", entry.IPOrCIDR)
|
||||
assert.Equal(t, "test reason", entry.Reason)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_Add_ValidCIDR(t *testing.T) {
|
||||
t.Parallel()
|
||||
svc := services.NewCrowdSecWhitelistService(openWhitelistTestDB(t), "")
|
||||
entry, err := svc.Add(context.Background(), "192.168.1.0/24", "local net")
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "192.168.1.0/24", entry.IPOrCIDR)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_Add_NormalizesCIDR(t *testing.T) {
|
||||
t.Parallel()
|
||||
svc := services.NewCrowdSecWhitelistService(openWhitelistTestDB(t), "")
|
||||
entry, err := svc.Add(context.Background(), "10.0.0.1/8", "normalize test")
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "10.0.0.0/8", entry.IPOrCIDR)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_Add_InvalidIP(t *testing.T) {
|
||||
t.Parallel()
|
||||
svc := services.NewCrowdSecWhitelistService(openWhitelistTestDB(t), "")
|
||||
_, err := svc.Add(context.Background(), "not-an-ip", "")
|
||||
assert.ErrorIs(t, err, services.ErrInvalidIPOrCIDR)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_Add_Duplicate(t *testing.T) {
|
||||
t.Parallel()
|
||||
db := openWhitelistTestDB(t)
|
||||
svc := services.NewCrowdSecWhitelistService(db, "")
|
||||
_, err := svc.Add(context.Background(), "5.5.5.5", "first")
|
||||
require.NoError(t, err)
|
||||
_, err = svc.Add(context.Background(), "5.5.5.5", "second")
|
||||
assert.ErrorIs(t, err, services.ErrDuplicateEntry)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_Delete_Existing(t *testing.T) {
|
||||
t.Parallel()
|
||||
db := openWhitelistTestDB(t)
|
||||
svc := services.NewCrowdSecWhitelistService(db, "")
|
||||
entry, err := svc.Add(context.Background(), "6.6.6.6", "to delete")
|
||||
require.NoError(t, err)
|
||||
|
||||
err = svc.Delete(context.Background(), entry.UUID)
|
||||
require.NoError(t, err)
|
||||
|
||||
entries, err := svc.List(context.Background())
|
||||
require.NoError(t, err)
|
||||
assert.Empty(t, entries)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_Delete_NotFound(t *testing.T) {
|
||||
t.Parallel()
|
||||
svc := services.NewCrowdSecWhitelistService(openWhitelistTestDB(t), "")
|
||||
err := svc.Delete(context.Background(), "00000000-0000-0000-0000-000000000000")
|
||||
assert.ErrorIs(t, err, services.ErrWhitelistNotFound)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_WriteYAML_EmptyDataDir(t *testing.T) {
|
||||
t.Parallel()
|
||||
svc := services.NewCrowdSecWhitelistService(openWhitelistTestDB(t), "")
|
||||
err := svc.WriteYAML(context.Background())
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_WriteYAML_CreatesFile(t *testing.T) {
|
||||
t.Parallel()
|
||||
tmpDir := t.TempDir()
|
||||
db := openWhitelistTestDB(t)
|
||||
svc := services.NewCrowdSecWhitelistService(db, tmpDir)
|
||||
|
||||
_, err := svc.Add(context.Background(), "1.1.1.1", "dns")
|
||||
require.NoError(t, err)
|
||||
_, err = svc.Add(context.Background(), "10.0.0.0/8", "internal")
|
||||
require.NoError(t, err)
|
||||
|
||||
yamlPath := filepath.Join(tmpDir, "config", "parsers", "s02-enrich", "charon-whitelist.yaml")
|
||||
content, err := os.ReadFile(yamlPath)
|
||||
require.NoError(t, err)
|
||||
|
||||
s := string(content)
|
||||
assert.Contains(t, s, "name: charon-whitelist")
|
||||
assert.Contains(t, s, `"1.1.1.1"`)
|
||||
assert.Contains(t, s, `"10.0.0.0/8"`)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_WriteYAML_EmptyLists(t *testing.T) {
|
||||
t.Parallel()
|
||||
tmpDir := t.TempDir()
|
||||
svc := services.NewCrowdSecWhitelistService(openWhitelistTestDB(t), tmpDir)
|
||||
|
||||
err := svc.WriteYAML(context.Background())
|
||||
require.NoError(t, err)
|
||||
|
||||
yamlPath := filepath.Join(tmpDir, "config", "parsers", "s02-enrich", "charon-whitelist.yaml")
|
||||
content, err := os.ReadFile(yamlPath)
|
||||
require.NoError(t, err)
|
||||
|
||||
s := string(content)
|
||||
assert.Contains(t, s, "ip: []")
|
||||
assert.Contains(t, s, "cidr: []")
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_List_AfterAdd(t *testing.T) {
|
||||
t.Parallel()
|
||||
db := openWhitelistTestDB(t)
|
||||
svc := services.NewCrowdSecWhitelistService(db, "")
|
||||
|
||||
for i := 0; i < 3; i++ {
|
||||
_, err := svc.Add(context.Background(), fmt.Sprintf("10.0.0.%d", i+1), "")
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
entries, err := svc.List(context.Background())
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, entries, 3)
|
||||
}
|
||||
|
||||
func TestAdd_ValidIPv6_Success(t *testing.T) {
|
||||
t.Parallel()
|
||||
svc := services.NewCrowdSecWhitelistService(openWhitelistTestDB(t), "")
|
||||
entry, err := svc.Add(context.Background(), "2001:db8::1", "ipv6 test")
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "2001:db8::1", entry.IPOrCIDR)
|
||||
|
||||
entries, err := svc.List(context.Background())
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, entries, 1)
|
||||
assert.Equal(t, "2001:db8::1", entries[0].IPOrCIDR)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_List_DBError(t *testing.T) {
|
||||
t.Parallel()
|
||||
db := openWhitelistTestDB(t)
|
||||
svc := services.NewCrowdSecWhitelistService(db, "")
|
||||
sqlDB, err := db.DB()
|
||||
require.NoError(t, err)
|
||||
_ = sqlDB.Close()
|
||||
|
||||
_, err = svc.List(context.Background())
|
||||
assert.Error(t, err)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_Add_DBCreateError(t *testing.T) {
|
||||
t.Parallel()
|
||||
db := openWhitelistTestDB(t)
|
||||
svc := services.NewCrowdSecWhitelistService(db, "")
|
||||
sqlDB, err := db.DB()
|
||||
require.NoError(t, err)
|
||||
_ = sqlDB.Close()
|
||||
|
||||
_, err = svc.Add(context.Background(), "1.2.3.4", "test")
|
||||
assert.Error(t, err)
|
||||
assert.NotErrorIs(t, err, services.ErrInvalidIPOrCIDR)
|
||||
assert.NotErrorIs(t, err, services.ErrDuplicateEntry)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_Delete_DBError(t *testing.T) {
|
||||
t.Parallel()
|
||||
db := openWhitelistTestDB(t)
|
||||
svc := services.NewCrowdSecWhitelistService(db, "")
|
||||
sqlDB, err := db.DB()
|
||||
require.NoError(t, err)
|
||||
_ = sqlDB.Close()
|
||||
|
||||
err = svc.Delete(context.Background(), "some-uuid")
|
||||
assert.Error(t, err)
|
||||
assert.NotErrorIs(t, err, services.ErrWhitelistNotFound)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_WriteYAML_DBError(t *testing.T) {
|
||||
t.Parallel()
|
||||
db := openWhitelistTestDB(t)
|
||||
tmpDir := t.TempDir()
|
||||
svc := services.NewCrowdSecWhitelistService(db, tmpDir)
|
||||
sqlDB, err := db.DB()
|
||||
require.NoError(t, err)
|
||||
_ = sqlDB.Close()
|
||||
|
||||
err = svc.WriteYAML(context.Background())
|
||||
assert.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "query entries")
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_WriteYAML_MkdirError(t *testing.T) {
|
||||
t.Parallel()
|
||||
db := openWhitelistTestDB(t)
|
||||
// Use a path under /dev/null which cannot have subdirectories
|
||||
svc := services.NewCrowdSecWhitelistService(db, "/dev/null/impossible")
|
||||
|
||||
err := svc.WriteYAML(context.Background())
|
||||
assert.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "create dir")
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_WriteYAML_WriteFileError(t *testing.T) {
|
||||
t.Parallel()
|
||||
db := openWhitelistTestDB(t)
|
||||
tmpDir := t.TempDir()
|
||||
svc := services.NewCrowdSecWhitelistService(db, tmpDir)
|
||||
|
||||
// Create a directory where the .tmp file would be written, causing WriteFile to fail
|
||||
dir := filepath.Join(tmpDir, "config", "parsers", "s02-enrich")
|
||||
require.NoError(t, os.MkdirAll(dir, 0o750))
|
||||
tmpTarget := filepath.Join(dir, "charon-whitelist.yaml.tmp")
|
||||
require.NoError(t, os.MkdirAll(tmpTarget, 0o750))
|
||||
|
||||
err := svc.WriteYAML(context.Background())
|
||||
assert.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "write temp")
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_Add_WriteYAMLWarning(t *testing.T) {
|
||||
t.Parallel()
|
||||
db := openWhitelistTestDB(t)
|
||||
// dataDir that will cause MkdirAll to fail inside WriteYAML (non-fatal)
|
||||
svc := services.NewCrowdSecWhitelistService(db, "/dev/null/impossible")
|
||||
|
||||
entry, err := svc.Add(context.Background(), "2.2.2.2", "yaml warn test")
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "2.2.2.2", entry.IPOrCIDR)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_Delete_WriteYAMLWarning(t *testing.T) {
|
||||
t.Parallel()
|
||||
db := openWhitelistTestDB(t)
|
||||
// First add with empty dataDir so it succeeds
|
||||
svcAdd := services.NewCrowdSecWhitelistService(db, "")
|
||||
entry, err := svcAdd.Add(context.Background(), "3.3.3.3", "to delete")
|
||||
require.NoError(t, err)
|
||||
|
||||
// Now create a service with a broken dataDir and delete
|
||||
svcDel := services.NewCrowdSecWhitelistService(db, "/dev/null/impossible")
|
||||
err = svcDel.Delete(context.Background(), entry.UUID)
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_WriteYAML_RenameError(t *testing.T) {
|
||||
t.Parallel()
|
||||
db := openWhitelistTestDB(t)
|
||||
tmpDir := t.TempDir()
|
||||
svc := services.NewCrowdSecWhitelistService(db, tmpDir)
|
||||
|
||||
// Create target as a directory so rename (atomic replace) fails
|
||||
dir := filepath.Join(tmpDir, "config", "parsers", "s02-enrich")
|
||||
require.NoError(t, os.MkdirAll(dir, 0o750))
|
||||
target := filepath.Join(dir, "charon-whitelist.yaml")
|
||||
require.NoError(t, os.MkdirAll(target, 0o750))
|
||||
|
||||
err := svc.WriteYAML(context.Background())
|
||||
assert.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "rename")
|
||||
}
|
||||
|
||||
func TestCrowdSecWhitelistService_Add_InvalidCIDR(t *testing.T) {
|
||||
t.Parallel()
|
||||
svc := services.NewCrowdSecWhitelistService(openWhitelistTestDB(t), "")
|
||||
_, err := svc.Add(context.Background(), "not-an-ip/24", "invalid cidr with slash")
|
||||
assert.ErrorIs(t, err, services.ErrInvalidIPOrCIDR)
|
||||
}
|
||||
@@ -13,8 +13,7 @@ import (
|
||||
"syscall"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/logger"
|
||||
"github.com/docker/docker/api/types/container"
|
||||
"github.com/docker/docker/client"
|
||||
"github.com/moby/moby/client"
|
||||
)
|
||||
|
||||
type DockerUnavailableError struct {
|
||||
@@ -86,7 +85,7 @@ func NewDockerService() *DockerService {
|
||||
logger.Log().WithFields(map[string]any{"docker_host_env": envHost, "local_host": localHost}).Info("ignoring non-unix DOCKER_HOST for local docker mode")
|
||||
}
|
||||
|
||||
cli, err := client.NewClientWithOpts(client.WithHost(localHost), client.WithAPIVersionNegotiation())
|
||||
cli, err := client.New(client.WithHost(localHost))
|
||||
if err != nil {
|
||||
logger.Log().WithError(err).Warn("Failed to initialize Docker client - Docker features will be unavailable")
|
||||
unavailableErr := NewDockerUnavailableError(err, buildLocalDockerUnavailableDetails(err, localHost))
|
||||
@@ -115,7 +114,7 @@ func (s *DockerService) ListContainers(ctx context.Context, host string) ([]Dock
|
||||
if host == "" || host == "local" {
|
||||
cli = s.client
|
||||
} else {
|
||||
cli, err = client.NewClientWithOpts(client.WithHost(host), client.WithAPIVersionNegotiation())
|
||||
cli, err = client.New(client.WithHost(host))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create remote client: %w", err)
|
||||
}
|
||||
@@ -126,7 +125,7 @@ func (s *DockerService) ListContainers(ctx context.Context, host string) ([]Dock
|
||||
}()
|
||||
}
|
||||
|
||||
containers, err := cli.ContainerList(ctx, container.ListOptions{All: false})
|
||||
containers, err := cli.ContainerList(ctx, client.ContainerListOptions{All: false})
|
||||
if err != nil {
|
||||
if isDockerConnectivityError(err) {
|
||||
if host == "" || host == "local" {
|
||||
@@ -138,14 +137,16 @@ func (s *DockerService) ListContainers(ctx context.Context, host string) ([]Dock
|
||||
}
|
||||
|
||||
var result []DockerContainer
|
||||
for _, c := range containers {
|
||||
for _, c := range containers.Items {
|
||||
// Get the first network's IP address if available
|
||||
networkName := ""
|
||||
ipAddress := ""
|
||||
if c.NetworkSettings != nil && len(c.NetworkSettings.Networks) > 0 {
|
||||
for name, net := range c.NetworkSettings.Networks {
|
||||
networkName = name
|
||||
ipAddress = net.IPAddress
|
||||
if net != nil && net.IPAddress.IsValid() {
|
||||
ipAddress = net.IPAddress.String()
|
||||
}
|
||||
break // Just take the first one for now
|
||||
}
|
||||
}
|
||||
@@ -166,11 +167,16 @@ func (s *DockerService) ListContainers(ctx context.Context, host string) ([]Dock
|
||||
})
|
||||
}
|
||||
|
||||
shortID := c.ID
|
||||
if len(shortID) > 12 {
|
||||
shortID = shortID[:12]
|
||||
}
|
||||
|
||||
result = append(result, DockerContainer{
|
||||
ID: c.ID[:12], // Short ID
|
||||
ID: shortID,
|
||||
Names: names,
|
||||
Image: c.Image,
|
||||
State: c.State,
|
||||
State: string(c.State),
|
||||
Status: c.Status,
|
||||
Network: networkName,
|
||||
IP: ipAddress,
|
||||
|
||||
@@ -24,6 +24,7 @@ echo "Installing base parsers..."
|
||||
cscli parsers install crowdsecurity/http-logs --force || echo "⚠️ Failed to install crowdsecurity/http-logs"
|
||||
cscli parsers install crowdsecurity/syslog-logs --force || echo "⚠️ Failed to install crowdsecurity/syslog-logs"
|
||||
cscli parsers install crowdsecurity/geoip-enrich --force || echo "⚠️ Failed to install crowdsecurity/geoip-enrich"
|
||||
cscli parsers install crowdsecurity/whitelists --force || echo "⚠️ Failed to install crowdsecurity/whitelists"
|
||||
|
||||
# Install HTTP scenarios for attack detection
|
||||
echo "Installing HTTP scenarios..."
|
||||
|
||||
460
docs/plans/archive/patch-coverage-improvement-plan-2026-05-02.md
Normal file
460
docs/plans/archive/patch-coverage-improvement-plan-2026-05-02.md
Normal file
@@ -0,0 +1,460 @@
|
||||
# Coverage Improvement Plan — Patch Coverage ≥ 90%
|
||||
|
||||
**Date**: 2026-05-02
|
||||
**Status**: Draft — Awaiting Approval
|
||||
**Priority**: High
|
||||
**Archived Previous Plan**: Custom Certificate Upload & Management (Issue #22) → `docs/plans/archive/custom-cert-upload-management-spec-2026-05-02.md`
|
||||
|
||||
---
|
||||
|
||||
## 1. Introduction
|
||||
|
||||
This plan identifies exact uncovered branches across the six highest-gap backend source files and two frontend components, and specifies new test cases to close those gaps. The target is to raise overall patch coverage from **85.61% (206 missing lines)** to **≥ 90%**.
|
||||
|
||||
**Constraints**:
|
||||
- No source file modifications — test files only
|
||||
- Go tests placed in `*_patch_coverage_test.go` (same package as source)
|
||||
- Frontend tests extend existing `__tests__/*.test.tsx` files
|
||||
- Use testify (Go) and Vitest + React Testing Library (frontend)
|
||||
|
||||
---
|
||||
|
||||
## 2. Research Findings
|
||||
|
||||
### 2.1 Coverage Gap Summary
|
||||
|
||||
| Package | File | Missing Lines | Current Coverage |
|
||||
|---|---|---|---|
|
||||
| `handlers` | `certificate_handler.go` | ~54 | 70.28% |
|
||||
| `services` | `certificate_service.go` | ~54 | 82.85% |
|
||||
| `services` | `certificate_validator.go` | ~18 | 88.68% |
|
||||
| `handlers` | `proxy_host_handler.go` | ~12 | 55.17% |
|
||||
| `config` | `config.go` | ~8 | ~92% |
|
||||
| `caddy` | `manager.go` | ~10 | ~88% |
|
||||
| Frontend | `CertificateList.tsx` | moderate | — |
|
||||
| Frontend | `CertificateUploadDialog.tsx` | moderate | — |
|
||||
|
||||
### 2.2 Test Infrastructure (Confirmed)
|
||||
|
||||
- **In-memory DB**: `gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})`
|
||||
- **Mock auth**: `mockAuthMiddleware()` from `coverage_helpers_test.go`
|
||||
- **Mock backup service**: `&mockBackupService{createFunc: ..., availableSpaceFunc: ...}`
|
||||
- **Manager test hooks**: package-level `generateConfigFunc`, `validateConfigFunc`, `writeFileFunc` vars with `defer` restore pattern
|
||||
- **Frontend mocks**: `vi.mock('../../hooks/...', ...)` and `vi.mock('react-i18next', ...)`
|
||||
|
||||
### 2.3 Existing Patch Test Files
|
||||
|
||||
| File | Existing Tests |
|
||||
|---|---|
|
||||
| `certificate_handler_patch_coverage_test.go` | `TestDelete_UUID_WithBackup_Success`, `_NotFound`, `_InUse` |
|
||||
| `certificate_service_patch_coverage_test.go` | `TestExportCertificate_DER`, `_PFX`, `_P12`, `_UnsupportedFormat` |
|
||||
| `certificate_validator_extra_coverage_test.go` | ECDSA/Ed25519 key match, `ConvertDERToPEM` valid/invalid |
|
||||
| `manager_patch_coverage_test.go` | DNS provider encryption key paths |
|
||||
| `proxy_host_handler_test.go` | Full CRUD + BulkUpdateACL + BulkUpdateSecurityHeaders |
|
||||
| `proxy_host_handler_update_test.go` | Update edge cases, `ParseForwardPortField`, `ParseNullableUintField` |
|
||||
|
||||
---
|
||||
|
||||
## 3. Technical Specifications — Per-File Gap Analysis
|
||||
|
||||
### 3.1 `certificate_handler.go` — Export Re-Auth Path (~18 lines)
|
||||
|
||||
The `Export` handler re-authenticates the user when `include_key=true`. All six guard branches are uncovered.
|
||||
|
||||
**Gap location**: Lines ~260–320 (password empty check, `user` context key extraction, `map[string]any` cast, `id` field lookup, DB user lookup, bcrypt check)
|
||||
|
||||
**New tests** (append to `certificate_handler_patch_coverage_test.go`):
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestExport_IncludeKey_MissingPassword` | POST with `include_key=true`, no `password` field | 403 |
|
||||
| `TestExport_IncludeKey_NoUserContext` | No `"user"` key in gin context | 403 |
|
||||
| `TestExport_IncludeKey_InvalidClaimsType` | `"user"` set to a plain string | 403 |
|
||||
| `TestExport_IncludeKey_UserIDNotInClaims` | `user = map[string]any{}` with no `"id"` key | 403 |
|
||||
| `TestExport_IncludeKey_UserNotFoundInDB` | Valid claims, no matching user row | 403 |
|
||||
| `TestExport_IncludeKey_WrongPassword` | User in DB, wrong plaintext password submitted | 403 |
|
||||
|
||||
### 3.2 `certificate_handler.go` — Export Service Errors (~4 lines)
|
||||
|
||||
**Gap location**: After `ExportCertificate` call — ErrCertNotFound and generic error branches
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestExport_CertNotFound` | Unknown UUID | 404 |
|
||||
| `TestExport_ServiceError` | Service returns non-not-found error | 500 |
|
||||
|
||||
### 3.3 `certificate_handler.go` — Delete Numeric-ID Error Paths (~12 lines)
|
||||
|
||||
**Gap location**: `IsCertificateInUse` error, disk space check, backup error, `DeleteCertificateByID` returning `ErrCertInUse` or generic error
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestDelete_NumericID_UsageCheckError` | `IsCertificateInUse` returns error | 500 |
|
||||
| `TestDelete_NumericID_LowDiskSpace` | `availableSpaceFunc` returns 0 | 507 |
|
||||
| `TestDelete_NumericID_BackupError` | `createFunc` returns error | 500 |
|
||||
| `TestDelete_NumericID_CertInUse_FromService` | `DeleteCertificateByID` → `ErrCertInUse` | 409 |
|
||||
| `TestDelete_NumericID_DeleteError` | `DeleteCertificateByID` → generic error | 500 |
|
||||
|
||||
### 3.4 `certificate_handler.go` — Delete UUID Additional Error Paths (~8 lines)
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestDelete_UUID_UsageCheckInternalError` | `IsCertificateInUseByUUID` returns non-ErrCertNotFound error | 500 |
|
||||
| `TestDelete_UUID_LowDiskSpace` | `availableSpaceFunc` returns 0 | 507 |
|
||||
| `TestDelete_UUID_BackupCreationError` | `createFunc` returns error | 500 |
|
||||
| `TestDelete_UUID_CertInUse_FromService` | `DeleteCertificate` → `ErrCertInUse` | 409 |
|
||||
|
||||
### 3.5 `certificate_handler.go` — Upload/Validate File Open Errors (~8 lines)
|
||||
|
||||
**Gap location**: `file.Open()` calls on multipart key and chain form files returning errors
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestUpload_KeyFile_OpenError` | Valid cert file, malformed key multipart entry | 500 |
|
||||
| `TestUpload_ChainFile_OpenError` | Valid cert+key, malformed chain multipart entry | 500 |
|
||||
| `TestValidate_KeyFile_OpenError` | Valid cert, malformed key multipart entry | 500 |
|
||||
| `TestValidate_ChainFile_OpenError` | Valid cert+key, malformed chain multipart entry | 500 |
|
||||
|
||||
### 3.6 `certificate_handler.go` — `sendDeleteNotification` Rate-Limit (~2 lines)
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestSendDeleteNotification_RateLimit` | Call `sendDeleteNotification` twice within 10-second window | Second call is a no-op |
|
||||
|
||||
---
|
||||
|
||||
### 3.7 `certificate_service.go` — `SyncFromDisk` Branches (~14 lines)
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestSyncFromDisk_StagingToProductionUpgrade` | DB has staging cert, disk has production cert for same domain | DB cert updated to production provider |
|
||||
| `TestSyncFromDisk_ExpiryOnlyUpdate` | Disk cert content matches DB cert, only expiry changed | Only `expires_at` column updated |
|
||||
| `TestSyncFromDisk_CertRootStatPermissionError` | `os.Chmod(certRoot, 0)` before sync; add skip guard `if os.Getuid() == 0 { t.Skip("chmod permission test cannot run as root") }` | No panic; logs error; function completes |
|
||||
|
||||
### 3.8 `certificate_service.go` — `ListCertificates` Background Goroutine (~4 lines)
|
||||
|
||||
**Gap location**: `initialized=true` && TTL expired path → spawns background goroutine
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestListCertificates_StaleCache_TriggersBackgroundSync` | `initialized=true`, `lastScan` = 10 min ago | Returns cached list without blocking; background sync completes |
|
||||
|
||||
*Use `require.Eventually(t, func() bool { return svc.lastScan.After(before) }, 2*time.Second, 10*time.Millisecond, "background sync did not update lastScan")` after the call — avoids flaky fixed sleeps.*
|
||||
|
||||
### 3.9 `certificate_service.go` — `GetDecryptedPrivateKey` Nil encSvc and Decrypt Failure (~4 lines)
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestGetDecryptedPrivateKey_NoEncSvc` | Service with `nil` encSvc, cert has non-empty `PrivateKeyEncrypted` | Returns error |
|
||||
| `TestGetDecryptedPrivateKey_DecryptFails` | encSvc configured, corrupted ciphertext in DB | Returns wrapped error |
|
||||
|
||||
### 3.10 `certificate_service.go` — `MigratePrivateKeys` Branches (~6 lines)
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestMigratePrivateKeys_NoEncSvc` | `encSvc == nil` | Returns nil; logs warning |
|
||||
| `TestMigratePrivateKeys_WithRows` | DB has cert with `private_key` populated, valid encSvc | Row migrated: `private_key` cleared, `private_key_enc` set |
|
||||
|
||||
### 3.11 `certificate_service.go` — `UpdateCertificate` Errors (~4 lines)
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestUpdateCertificate_NotFound` | Non-existent UUID | Returns `ErrCertNotFound` |
|
||||
| `TestUpdateCertificate_DBSaveError` | Valid UUID, DB closed before Save | Returns wrapped error |
|
||||
|
||||
### 3.12 `certificate_service.go` — `DeleteCertificate` ACME File Cleanup (~8 lines)
|
||||
|
||||
**Gap location**: `cert.Provider == "letsencrypt"` branch → Walk certRoot and remove `.crt`/`.key`/`.json` files
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestDeleteCertificate_LetsEncryptProvider_FileCleanup` | Create temp `.crt` matching cert domain, delete cert | `.crt` removed from disk |
|
||||
| `TestDeleteCertificate_StagingProvider_FileCleanup` | Provider = `"letsencrypt-staging"` | Same cleanup behavior triggered |
|
||||
|
||||
### 3.13 `certificate_service.go` — `CheckExpiringCertificates` (~8 lines)
|
||||
|
||||
**Implementation** (lines ~966–1020): queries `provider = 'custom'` certs expiring before `threshold`, iterates them, and collects/reports certs with `daysLeft <= warningDays` (the test table below asserts on the returned slice; confirm whether a notification side effect also fires).
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestCheckExpiringCertificates_ExpiresInRange` | Custom cert `expires_at = now+5d`, warningDays=30 | Returns slice with 1 cert |
|
||||
| `TestCheckExpiringCertificates_AlreadyExpired` | Custom cert `expires_at = yesterday` | Result contains cert with negative days |
|
||||
| `TestCheckExpiringCertificates_DBError` | DB closed before query | Returns error |
|
||||
|
||||
---
|
||||
|
||||
### 3.14 `certificate_validator.go` — `DetectFormat` Password-Protected PFX (~2 lines)
|
||||
|
||||
**Gap location**: PFX where `pkcs12.DecodeAll("")` fails but first byte is `0x30` (ASN.1 SEQUENCE), DER parse also fails → returns `FormatPFX`
|
||||
|
||||
**New file**: `certificate_validator_patch_coverage_test.go`
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestDetectFormat_PasswordProtectedPFX` | Generate PFX with non-empty password, call `DetectFormat` | Returns `FormatPFX` |
|
||||
|
||||
### 3.15 `certificate_validator.go` — `parsePEMPrivateKey` Additional Block Types (~4 lines)
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestParsePEMPrivateKey_PKCS1RSA` | PEM block type `"RSA PRIVATE KEY"` (x509.MarshalPKCS1PrivateKey) | Returns RSA key |
|
||||
| `TestParsePEMPrivateKey_EC` | PEM block type `"EC PRIVATE KEY"` (x509.MarshalECPrivateKey) | Returns ECDSA key |
|
||||
|
||||
### 3.16 `certificate_validator.go` — `detectKeyType` P-384 and Unknown Curves (~4 lines)
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestDetectKeyType_ECDSAP384` | P-384 ECDSA key | Returns `"ECDSA-P384"` |
|
||||
| `TestDetectKeyType_ECDSAUnknownCurve` | ECDSA key with custom/unknown curve (e.g. P-224) | Returns `"ECDSA"` |
|
||||
|
||||
### 3.17 `certificate_validator.go` — `ConvertPEMToPFX` Empty Chain (~2 lines)
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestConvertPEMToPFX_EmptyChain` | Valid cert+key PEM, empty chain string | Returns PFX bytes without error |
|
||||
|
||||
### 3.18 `certificate_validator.go` — `ConvertPEMToDER` Non-Certificate Block (~2 lines)
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestConvertPEMToDER_NonCertBlock` | PEM block type `"PRIVATE KEY"` | Returns nil data and error |
|
||||
|
||||
### 3.19 `certificate_validator.go` — `formatSerial` Nil BigInt (~2 lines)
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestFormatSerial_Nil` | `formatSerial(nil)` | Returns `""` |
|
||||
|
||||
---
|
||||
|
||||
### 3.20 `proxy_host_handler.go` — `generateForwardHostWarnings` Private IP (~2 lines)
|
||||
|
||||
**Gap location**: `net.ParseIP(forwardHost) != nil && network.IsPrivateIP(ip)` branch (non-Docker private IP)
|
||||
|
||||
**New file**: `proxy_host_handler_patch_coverage_test.go`
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestGenerateForwardHostWarnings_PrivateIP` | forwardHost = `"192.168.1.100"` (RFC-1918, non-Docker) | Returns warning with field `"forward_host"` |
|
||||
|
||||
### 3.21 `proxy_host_handler.go` — `BulkUpdateSecurityHeaders` Edge Cases (~4 lines)
|
||||
|
||||
| Test Name | Scenario | Expected |
|
||||
|---|---|---|
|
||||
| `TestBulkUpdateSecurityHeaders_AllFail_Rollback` | All UUIDs not found → `updated == 0` at end | 400, transaction rolled back |
|
||||
| `TestBulkUpdateSecurityHeaders_ProfileDB_NonNotFoundError` | Profile lookup returns wrapped DB error | 500 |
|
||||
|
||||
---
|
||||
|
||||
### 3.22 Frontend: `CertificateList.tsx` — Untested Branches
|
||||
|
||||
**File**: `frontend/src/components/__tests__/CertificateList.test.tsx`
|
||||
|
||||
| Gap | New Test |
|
||||
|---|---|
|
||||
| `bulkDeleteMutation` success | `'calls bulkDeleteMutation.mutate with selected UUIDs on confirm'` |
|
||||
| `bulkDeleteMutation` error | `'shows error toast on bulk delete failure'` |
|
||||
| Sort direction toggle | `'toggles sort direction when same column clicked twice'` |
|
||||
| `selectedIds` reconciliation | `'reconciles selectedIds when certificate list shrinks'` |
|
||||
| Export dialog open | `'opens export dialog when export button clicked'` |
|
||||
|
||||
### 3.23 Frontend: `CertificateUploadDialog.tsx` — Untested Branches
|
||||
|
||||
**File**: `frontend/src/components/dialogs/__tests__/CertificateUploadDialog.test.tsx`
|
||||
|
||||
| Gap | New Test |
|
||||
|---|---|
|
||||
| PFX hides key/chain zones | `'hides key and chain file inputs when PFX file selected'` |
|
||||
| Upload success closes dialog | `'calls onOpenChange(false) on successful upload'` |
|
||||
| Upload error shows toast | `'shows error toast when upload mutation fails'` |
|
||||
| Validate result shown | `'displays validation result after validate clicked'` |
|
||||
|
||||
---
|
||||
|
||||
## 4. Implementation Plan
|
||||
|
||||
### Phase 1: Playwright Smoke Tests (Acceptance Gating)
|
||||
|
||||
Add smoke coverage to confirm certificate export and delete flows reach the backend.
|
||||
|
||||
**File**: `tests/certificate-coverage-smoke.spec.ts`
|
||||
|
||||
```typescript
|
||||
import { test, expect } from '@playwright/test'
|
||||
|
||||
test.describe('Certificate Coverage Smoke', () => {
|
||||
test('export dialog opens when export button clicked', async ({ page }) => {
|
||||
await page.goto('/')
|
||||
// navigate to Certificates, click export on a cert
|
||||
// assert dialog visible
|
||||
})
|
||||
|
||||
test('delete dialog opens for deletable certificate', async ({ page }) => {
|
||||
await page.goto('/')
|
||||
// assert delete confirmation dialog appears
|
||||
})
|
||||
})
|
||||
```
|
||||
|
||||
### Phase 2: Backend — Handler Tests
|
||||
|
||||
**File**: `backend/internal/api/handlers/certificate_handler_patch_coverage_test.go`
|
||||
**Action**: Append all tests from sections 3.1–3.6.
|
||||
|
||||
Setup pattern for handler tests:
|
||||
|
||||
```go
|
||||
func setupCertHandlerTest(t *testing.T) (*gin.Engine, *CertificateHandler, *gorm.DB) {
|
||||
t.Helper()
|
||||
db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.User{}, &models.ProxyHost{}))
|
||||
tmpDir := t.TempDir()
|
||||
certSvc := services.NewCertificateService(tmpDir, db, nil)
|
||||
backup := &mockBackupService{
|
||||
availableSpaceFunc: func() (int64, error) { return 1 << 30, nil },
|
||||
createFunc: func(string) (string, error) { return "/tmp/backup.db", nil },
|
||||
}
|
||||
h := NewCertificateHandler(certSvc, backup, nil)
|
||||
h.SetDB(db)
|
||||
r := gin.New()
|
||||
r.Use(mockAuthMiddleware())
|
||||
h.RegisterRoutes(r.Group("/api"))
|
||||
return r, h, db
|
||||
}
|
||||
```
|
||||
|
||||
For `TestExport_IncludeKey_*` tests: inject user into gin context directly using a custom middleware wrapper that sets `"user"` (type `map[string]any`, field `"id"`) to the desired value.
|
||||
|
||||
### Phase 3: Backend — Service Tests
|
||||
|
||||
**File**: `backend/internal/services/certificate_service_patch_coverage_test.go`
|
||||
**Action**: Append all tests from sections 3.7–3.13.
|
||||
|
||||
Setup pattern:
|
||||
|
||||
```go
|
||||
func newTestSvc(t *testing.T) (*CertificateService, *gorm.DB, string) {
|
||||
t.Helper()
|
||||
db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}))
|
||||
tmpDir := t.TempDir()
|
||||
return NewCertificateService(tmpDir, db, nil), db, tmpDir
|
||||
}
|
||||
```
|
||||
|
||||
For `TestMigratePrivateKeys_WithRows`: use raw SQL such as `db.Exec("INSERT INTO ssl_certificates (..., private_key) VALUES (...)")` to bypass GORM's `gorm:"-"` tag.
|
||||
|
||||
### Phase 4: Backend — Validator Tests
|
||||
|
||||
**File**: `backend/internal/services/certificate_validator_patch_coverage_test.go` (new)
|
||||
|
||||
Key helpers needed:
|
||||
|
||||
```go
|
||||
// generatePKCS1RSAKeyPEM returns an RSA key in PKCS#1 "RSA PRIVATE KEY" PEM format.
|
||||
func generatePKCS1RSAKeyPEM(t *testing.T) []byte {
|
||||
key, err := rsa.GenerateKey(rand.Reader, 2048)
|
||||
require.NoError(t, err)
|
||||
return pem.EncodeToMemory(&pem.Block{
|
||||
Type: "RSA PRIVATE KEY",
|
||||
Bytes: x509.MarshalPKCS1PrivateKey(key),
|
||||
})
|
||||
}
|
||||
|
||||
// generateECKeyPEM returns an EC key in "EC PRIVATE KEY" (SEC1) PEM format.
|
||||
func generateECKeyPEM(t *testing.T, curve elliptic.Curve) []byte {
|
||||
key, err := ecdsa.GenerateKey(curve, rand.Reader)
|
||||
require.NoError(t, err)
|
||||
b, err := x509.MarshalECPrivateKey(key)
|
||||
require.NoError(t, err)
|
||||
return pem.EncodeToMemory(&pem.Block{Type: "EC PRIVATE KEY", Bytes: b})
|
||||
}
|
||||
```
|
||||
|
||||
### Phase 5: Backend — Proxy Host Handler Tests
|
||||
|
||||
**File**: `backend/internal/api/handlers/proxy_host_handler_patch_coverage_test.go` (new)
|
||||
|
||||
Setup pattern mirrors existing `proxy_host_handler_test.go` — use in-memory SQLite, `mockAuthMiddleware`, and `mockCaddyManager` (already available via test hook vars).
|
||||
|
||||
### Phase 6: Frontend Tests
|
||||
|
||||
**Files**:
|
||||
- `frontend/src/components/__tests__/CertificateList.test.tsx`
|
||||
- `frontend/src/components/dialogs/__tests__/CertificateUploadDialog.test.tsx`
|
||||
|
||||
Use existing mock structure; add new `it(...)` blocks inside existing `describe` blocks.
|
||||
|
||||
Frontend bulk delete success test pattern:
|
||||
|
||||
```typescript
|
||||
it('calls bulkDeleteMutation.mutate with selected UUIDs on confirm', async () => {
|
||||
const bulkDeleteFn = vi.fn()
|
||||
mockUseBulkDeleteCertificates.mockReturnValue({
|
||||
mutate: bulkDeleteFn,
|
||||
isPending: false,
|
||||
})
|
||||
render(<CertificateList />)
|
||||
// select checkboxes, click bulk delete, confirm dialog
|
||||
expect(bulkDeleteFn).toHaveBeenCalledWith(['uuid-1', 'uuid-2'])
|
||||
})
|
||||
```
|
||||
|
||||
### Phase 7: Validation
|
||||
|
||||
1. `cd /projects/Charon && bash scripts/go-test-coverage.sh`
|
||||
2. `cd /projects/Charon && bash scripts/frontend-test-coverage.sh`
|
||||
3. `bash scripts/local-patch-report.sh` → verify `test-results/local-patch-report.md` shows ≥ 90%
|
||||
4. `bash scripts/scan-gorm-security.sh --check` → zero CRITICAL/HIGH
|
||||
|
||||
---
|
||||
|
||||
## 5. Commit Slicing Strategy
|
||||
|
||||
**Decision**: One PR with 5 ordered, independently-reviewable commits.
|
||||
|
||||
**Rationale**: Four packages touched across two build systems (Go + Node). Atomic commits allow targeted revert if a mock approach proves brittle for a specific file, without rolling back unrelated coverage gains.
|
||||
|
||||
| # | Scope | Files | Dependencies | Validation Gate |
|
||||
|---|---|---|---|---|
|
||||
| **Commit 1** | Handler re-auth + delete + file-open errors | `certificate_handler_patch_coverage_test.go` (extend) | None | `go test ./backend/internal/api/handlers/...` |
|
||||
| **Commit 2** | Service SyncFromDisk, ListCerts, GetDecryptedKey, Migrate, Update, Delete, CheckExpiring | `certificate_service_patch_coverage_test.go` (extend) | None | `go test ./backend/internal/services/...` |
|
||||
| **Commit 3** | Validator DetectFormat, parsePEMPrivateKey, detectKeyType, ConvertPEMToPFX/DER, formatSerial | `certificate_validator_patch_coverage_test.go` (new) | Commit 2 not required (separate file) | `go test ./backend/internal/services/...` |
|
||||
| **Commit 4** | Proxy host warnings + BulkUpdateSecurityHeaders edge cases | `proxy_host_handler_patch_coverage_test.go` (new) | None | `go test ./backend/internal/api/handlers/...` |
|
||||
| **Commit 5** | Frontend CertificateList + CertificateUploadDialog | `CertificateList.test.tsx`, `CertificateUploadDialog.test.tsx` (extend) | None | `npm run test` |
|
||||
|
||||
**Rollback**: Any commit is safe to revert independently — all changes are additive test-only files.
|
||||
|
||||
**Contingency**: If the `Export` handler's re-auth tests require gin context injection that the current router wiring doesn't support cleanly, use a sub-router with a custom test middleware that pre-populates `"user"` (`map[string]any{"id": uint(1)}`) with the specific value under test, bypassing `mockAuthMiddleware` for those cases only.
|
||||
|
||||
---
|
||||
|
||||
## 6. Acceptance Criteria
|
||||
|
||||
- [ ] `go test -race ./backend/...` — all tests pass, no data races
|
||||
- [ ] Backend patch coverage ≥ 90% for all modified Go files per `test-results/local-patch-report.md`
|
||||
- [ ] `npm run test` — all Vitest tests pass
|
||||
- [ ] Frontend patch coverage ≥ 90% for `CertificateList.tsx` and `CertificateUploadDialog.tsx`
|
||||
- [ ] GORM security scan: zero CRITICAL/HIGH findings
|
||||
- [ ] No new `//nolint` or `//nosec` directives introduced
|
||||
- [ ] No source file modifications — test files only
|
||||
- [ ] All new Go test names follow `TestFunctionName_Scenario` convention
|
||||
- [ ] Previous spec archived to `docs/plans/archive/`
|
||||
|
||||
---
|
||||
|
||||
## 7. Estimated Coverage Impact
|
||||
|
||||
| File | Current | Estimated After | Lines Recovered |
|
||||
|---|---|---|---|
|
||||
| `certificate_handler.go` | 70.28% | ~85% | ~42 lines |
|
||||
| `certificate_service.go` | 82.85% | ~92% | ~44 lines |
|
||||
| `certificate_validator.go` | 88.68% | ~96% | ~18 lines |
|
||||
| `proxy_host_handler.go` | 55.17% | ~60% | ~8 lines |
|
||||
| `CertificateList.tsx` | moderate | high | ~15 lines |
|
||||
| `CertificateUploadDialog.tsx` | moderate | high | ~12 lines |
|
||||
| **Overall patch** | **85.61%** | **≥ 90%** | **~139 lines** |
|
||||
|
||||
> **Note**: Proxy host handler remains below 90% after this plan because the `Create`/`Update`/`Delete` handler paths require full Caddy manager mock integration. A follow-up plan should address these with a dedicated `mockCaddyManager` interface.
|
||||
File diff suppressed because it is too large
Load Diff
897
docs/plans/current_spec.md.bak3
Normal file
897
docs/plans/current_spec.md.bak3
Normal file
@@ -0,0 +1,897 @@
|
||||
# CrowdSec IP Whitelist Management — Implementation Plan
|
||||
|
||||
**Issue**: [#939 — CrowdSec IP Whitelist Management](https://github.com/owner/Charon/issues/939)
|
||||
**Date**: 2026-05-20
|
||||
**Status**: Draft — Awaiting Approval
|
||||
**Priority**: High
|
||||
**Archived Previous Plan**: Coverage Improvement Plan (patch coverage ≥ 90%) → `docs/plans/archive/patch-coverage-improvement-plan-2026-05-02.md`
|
||||
|
||||
---
|
||||
|
||||
## 1. Introduction
|
||||
|
||||
### 1.1 Overview
|
||||
|
||||
CrowdSec enforces IP ban decisions by default. Operators need a way to permanently exempt known-good IPs (uptime monitors, internal subnets, VPN exits, partners) from ever being banned. CrowdSec handles this through its `whitelists` parser, which intercepts alert evaluation and suppresses bans for matching IPs/CIDRs before decisions are even written.
|
||||
|
||||
This feature gives Charon operators a first-class UI for managing those whitelist entries: add an IP or CIDR, give it a reason, and have Charon persist it in the database, render the required YAML parser file into the CrowdSec config tree, and signal CrowdSec to reload—all without manual file editing.
|
||||
|
||||
### 1.2 Objectives
|
||||
|
||||
- Allow operators to add, view, and remove CrowdSec whitelist entries (IPs and CIDRs) through the Charon management UI.
|
||||
- Persist entries in SQLite so they survive container restarts.
|
||||
- Generate a `crowdsecurity/whitelists`-compatible YAML parser file on every mutating operation and on startup.
|
||||
- Automatically install the `crowdsecurity/whitelists` hub parser so CrowdSec can process the file.
|
||||
- Show the Whitelist tab only when CrowdSec is in `local` mode, consistent with other CrowdSec-only tabs.
|
||||
|
||||
---
|
||||
|
||||
## 2. Research Findings
|
||||
|
||||
### 2.1 Existing CrowdSec Architecture
|
||||
|
||||
| Component | Location | Notes |
|
||||
|---|---|---|
|
||||
| Hub parser installer | `configs/crowdsec/install_hub_items.sh` | Run at container start; uses `cscli parsers install --force` |
|
||||
| CrowdSec handler | `backend/internal/api/handlers/crowdsec_handler.go` | ~2750 LOC; `RegisterRoutes` at L2704 |
|
||||
| Route registration | `backend/internal/api/routes/routes.go` | `crowdsecHandler.RegisterRoutes(management)` at ~L620 |
|
||||
| CrowdSec startup | `backend/internal/services/crowdsec_startup.go` | `ReconcileCrowdSecOnStartup()` runs before process start |
|
||||
| Security config | `backend/internal/models/security_config.go` | `CrowdSecMode`, `CrowdSecConfigDir` (via `cfg.Security.CrowdSecConfigDir`) |
|
||||
| IP/CIDR helper | `backend/internal/security/whitelist.go` | `IsIPInCIDRList()` using `net.ParseIP` / `net.ParseCIDR` |
|
||||
| AutoMigrate | `routes.go` ~L95–125 | `&models.ManualChallenge{}` is currently the last entry |
|
||||
|
||||
### 2.2 Gap Analysis
|
||||
|
||||
- `crowdsecurity/whitelists` hub parser is **not** installed by `install_hub_items.sh` — the YAML file would be ignored by CrowdSec without it.
|
||||
- No `CrowdSecWhitelist` model exists in `backend/internal/models/`.
|
||||
- No whitelist service, handler methods, or API routes exist.
|
||||
- No frontend tab, API client functions, or TanStack Query hooks exist.
|
||||
- No E2E test spec covers whitelist management.
|
||||
|
||||
### 2.3 Relevant Patterns
|
||||
|
||||
**Model pattern** (from `access_list.go` + `security_config.go`):
|
||||
```go
|
||||
type Model struct {
|
||||
ID uint `json:"-" gorm:"primaryKey"`
|
||||
UUID string `json:"uuid" gorm:"uniqueIndex;not null"`
|
||||
// domain fields
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
}
|
||||
```
|
||||
|
||||
**Service pattern** (from `access_list_service.go`):
|
||||
```go
|
||||
var ErrXxxNotFound = errors.New("xxx not found")
|
||||
|
||||
type XxxService struct { db *gorm.DB }
|
||||
|
||||
func NewXxxService(db *gorm.DB) *XxxService { return &XxxService{db: db} }
|
||||
```
|
||||
|
||||
**Handler error response pattern** (from `crowdsec_handler.go`):
|
||||
```go
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "..."})
|
||||
c.JSON(http.StatusNotFound, gin.H{"error": "..."})
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "..."})
|
||||
```
|
||||
|
||||
**Frontend API client pattern** (from `frontend/src/api/crowdsec.ts`):
|
||||
```typescript
|
||||
export const listXxx = async (): Promise<XxxEntry[]> => {
|
||||
const resp = await client.get<XxxEntry[]>('/admin/crowdsec/xxx')
|
||||
return resp.data
|
||||
}
|
||||
```
|
||||
|
||||
**Frontend mutation pattern** (from `CrowdSecConfig.tsx`):
|
||||
```typescript
|
||||
const mutation = useMutation({
|
||||
mutationFn: (data) => apiCall(data),
|
||||
onSuccess: () => {
|
||||
toast.success('...')
|
||||
queryClient.invalidateQueries({ queryKey: ['crowdsec-whitelist'] })
|
||||
},
|
||||
onError: (err) => toast.error(err instanceof Error ? err.message : '...'),
|
||||
})
|
||||
```
|
||||
|
||||
### 2.4 CrowdSec Whitelist YAML Format
|
||||
|
||||
CrowdSec's `crowdsecurity/whitelists` parser expects the following YAML structure at a path under the `parsers/s02-enrich/` directory:
|
||||
|
||||
```yaml
|
||||
name: charon-whitelist
|
||||
description: "Charon-managed IP/CIDR whitelist"
|
||||
filter: "evt.Meta.service == 'http'"
|
||||
whitelist:
|
||||
reason: "Charon managed whitelist"
|
||||
ip:
|
||||
- "1.2.3.4"
|
||||
cidr:
|
||||
- "10.0.0.0/8"
|
||||
- "192.168.0.0/16"
|
||||
```
|
||||
|
||||
For an empty whitelist, both `ip` and `cidr` must be present as empty lists (not omitted) to produce valid YAML that CrowdSec can parse without error.
|
||||
|
||||
---
|
||||
|
||||
## 3. Technical Specifications
|
||||
|
||||
### 3.1 Database Schema
|
||||
|
||||
**New model**: `backend/internal/models/crowdsec_whitelist.go`
|
||||
|
||||
```go
|
||||
package models
|
||||
|
||||
import "time"
|
||||
|
||||
// CrowdSecWhitelist represents a single IP or CIDR exempted from CrowdSec banning.
|
||||
type CrowdSecWhitelist struct {
|
||||
ID uint `json:"-" gorm:"primaryKey"`
|
||||
UUID string `json:"uuid" gorm:"uniqueIndex;not null"`
|
||||
IPOrCIDR string `json:"ip_or_cidr" gorm:"not null;uniqueIndex"`
|
||||
Reason string `json:"reason" gorm:"not null;default:''"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
}
|
||||
```
|
||||
|
||||
**AutoMigrate registration** (`backend/internal/api/routes/routes.go`, append after `&models.ManualChallenge{}`):
|
||||
```go
|
||||
&models.CrowdSecWhitelist{},
|
||||
```
|
||||
|
||||
### 3.2 API Design
|
||||
|
||||
All new endpoints live under the existing `/api/v1` prefix and are registered inside `CrowdsecHandler.RegisterRoutes(rg *gin.RouterGroup)`, following the same `rg.METHOD("/admin/crowdsec/...")` naming pattern as every other CrowdSec endpoint.
|
||||
|
||||
#### Endpoint Table
|
||||
|
||||
| Method | Path | Auth | Description |
|
||||
|---|---|---|---|
|
||||
| `GET` | `/api/v1/admin/crowdsec/whitelist` | Management | List all whitelist entries |
|
||||
| `POST` | `/api/v1/admin/crowdsec/whitelist` | Management | Add a new entry |
|
||||
| `DELETE` | `/api/v1/admin/crowdsec/whitelist/:uuid` | Management | Remove an entry by UUID |
|
||||
|
||||
#### `GET /admin/crowdsec/whitelist`
|
||||
|
||||
**Response 200**:
|
||||
```json
|
||||
{
|
||||
"whitelist": [
|
||||
{
|
||||
"uuid": "a1b2c3d4-...",
|
||||
"ip_or_cidr": "10.0.0.0/8",
|
||||
"reason": "Internal subnet",
|
||||
"created_at": "2026-05-20T12:00:00Z",
|
||||
"updated_at": "2026-05-20T12:00:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### `POST /admin/crowdsec/whitelist`
|
||||
|
||||
**Request body**:
|
||||
```json
|
||||
{ "ip_or_cidr": "10.0.0.0/8", "reason": "Internal subnet" }
|
||||
```
|
||||
|
||||
**Response 201**:
|
||||
```json
|
||||
{
|
||||
"uuid": "a1b2c3d4-...",
|
||||
"ip_or_cidr": "10.0.0.0/8",
|
||||
"reason": "Internal subnet",
|
||||
"created_at": "...",
|
||||
"updated_at": "..."
|
||||
}
|
||||
```
|
||||
|
||||
**Error responses**:
|
||||
- `400` — missing/invalid `ip_or_cidr` field, unparseable IP/CIDR
|
||||
- `409` — duplicate entry (same `ip_or_cidr` already exists)
|
||||
- `500` — database or YAML write failure
|
||||
|
||||
#### `DELETE /admin/crowdsec/whitelist/:uuid`
|
||||
|
||||
**Response 204** — no body
|
||||
|
||||
**Error responses**:
|
||||
- `404` — entry not found
|
||||
- `500` — database or YAML write failure
|
||||
|
||||
### 3.3 Service Design
|
||||
|
||||
**New file**: `backend/internal/services/crowdsec_whitelist_service.go`
|
||||
|
||||
```go
|
||||
package services
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"net"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"text/template"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"gorm.io/gorm"
|
||||
|
||||
"github.com/yourusername/charon/backend/internal/models"
|
||||
"github.com/yourusername/charon/backend/internal/logger"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrWhitelistNotFound = errors.New("whitelist entry not found")
|
||||
ErrInvalidIPOrCIDR = errors.New("invalid IP address or CIDR notation")
|
||||
ErrDuplicateEntry = errors.New("whitelist entry already exists")
|
||||
)
|
||||
|
||||
type CrowdSecWhitelistService struct {
|
||||
db *gorm.DB
|
||||
dataDir string
|
||||
}
|
||||
|
||||
func NewCrowdSecWhitelistService(db *gorm.DB, dataDir string) *CrowdSecWhitelistService {
|
||||
return &CrowdSecWhitelistService{db: db, dataDir: dataDir}
|
||||
}
|
||||
|
||||
// List returns all whitelist entries ordered by creation time.
|
||||
func (s *CrowdSecWhitelistService) List(ctx context.Context) ([]models.CrowdSecWhitelist, error) { ... }
|
||||
|
||||
// Add validates, persists, and regenerates the YAML file.
|
||||
func (s *CrowdSecWhitelistService) Add(ctx context.Context, ipOrCIDR, reason string) (*models.CrowdSecWhitelist, error) { ... }
|
||||
|
||||
// Delete removes an entry by UUID and regenerates the YAML file.
|
||||
func (s *CrowdSecWhitelistService) Delete(ctx context.Context, uuid string) error { ... }
|
||||
|
||||
// WriteYAML renders all current entries to <dataDir>/config/parsers/s02-enrich/charon-whitelist.yaml
|
||||
func (s *CrowdSecWhitelistService) WriteYAML(ctx context.Context) error { ... }
|
||||
```
|
||||
|
||||
**Validation logic** in `Add()`:
|
||||
1. Trim whitespace from `ipOrCIDR`.
|
||||
2. Attempt `net.ParseIP(ipOrCIDR)` — if non-nil, it's a bare IP ✓
|
||||
3. Attempt `net.ParseCIDR(ipOrCIDR)` — if `err == nil`, it's a valid CIDR ✓; normalize host bits immediately: `ipOrCIDR = network.String()` (e.g., `"10.0.0.1/8"` → `"10.0.0.0/8"`).
|
||||
4. If both fail → return `ErrInvalidIPOrCIDR`
|
||||
5. Attempt DB insert; if GORM unique constraint error → return `ErrDuplicateEntry`
|
||||
6. On success → call `WriteYAML(ctx)` (non-fatal on YAML error: log + return original entry)
|
||||
|
||||
> **Note**: `Add()` and `Delete()` do **not** call `cscli hub reload`. Reload is the caller's responsibility (handled in `CrowdsecHandler.AddWhitelist` and `DeleteWhitelist` via `h.CmdExec`).
|
||||
|
||||
**CIDR normalization snippet** (step 3):
|
||||
```go
|
||||
if ip, network, err := net.ParseCIDR(ipOrCIDR); err == nil {
|
||||
_ = ip
|
||||
ipOrCIDR = network.String() // normalizes "10.0.0.1/8" → "10.0.0.0/8"
|
||||
}
|
||||
```
|
||||
|
||||
**YAML generation** in `WriteYAML()`:
|
||||
|
||||
Guard: if `s.dataDir == ""`, return `nil` immediately (no-op — used in unit tests that don't need file I/O).
|
||||
|
||||
```go
|
||||
const whitelistTmpl = `name: charon-whitelist
|
||||
description: "Charon-managed IP/CIDR whitelist"
|
||||
filter: "evt.Meta.service == 'http'"
|
||||
whitelist:
|
||||
reason: "Charon managed whitelist"
|
||||
ip:
|
||||
{{- range .IPs}}
|
||||
- "{{.}}"
|
||||
{{- end}}
|
||||
{{- if not .IPs}}
|
||||
[]
|
||||
{{- end}}
|
||||
cidr:
|
||||
{{- range .CIDRs}}
|
||||
- "{{.}}"
|
||||
{{- end}}
|
||||
{{- if not .CIDRs}}
|
||||
[]
|
||||
{{- end}}
|
||||
`
|
||||
```
|
||||
|
||||
Target file path: `<dataDir>/config/parsers/s02-enrich/charon-whitelist.yaml`
|
||||
|
||||
Directory created with `os.MkdirAll(..., 0o750)` if absent.
|
||||
|
||||
File written atomically: render to `<path>.tmp` → `os.Rename(tmp, path)`.
|
||||
|
||||
### 3.4 Handler Design
|
||||
|
||||
**Additions to `CrowdsecHandler` struct**:
|
||||
```go
|
||||
type CrowdsecHandler struct {
|
||||
// ... existing fields ...
|
||||
WhitelistSvc *services.CrowdSecWhitelistService // NEW
|
||||
}
|
||||
```
|
||||
|
||||
**`NewCrowdsecHandler` constructor** — initialize `WhitelistSvc`:
|
||||
```go
|
||||
h := &CrowdsecHandler{
|
||||
// ... existing assignments ...
|
||||
}
|
||||
if db != nil {
|
||||
h.WhitelistSvc = services.NewCrowdSecWhitelistService(db, dataDir)
|
||||
}
|
||||
return h
|
||||
```
|
||||
|
||||
**Three new methods on `CrowdsecHandler`**:
|
||||
|
||||
```go
|
||||
// ListWhitelists handles GET /admin/crowdsec/whitelist
|
||||
func (h *CrowdsecHandler) ListWhitelists(c *gin.Context) {
|
||||
entries, err := h.WhitelistSvc.List(c.Request.Context())
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to list whitelist entries"})
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, gin.H{"whitelist": entries})
|
||||
}
|
||||
|
||||
// AddWhitelist handles POST /admin/crowdsec/whitelist
|
||||
func (h *CrowdsecHandler) AddWhitelist(c *gin.Context) {
|
||||
var req struct {
|
||||
IPOrCIDR string `json:"ip_or_cidr" binding:"required"`
|
||||
Reason string `json:"reason"`
|
||||
}
|
||||
if err := c.ShouldBindJSON(&req); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "ip_or_cidr is required"})
|
||||
return
|
||||
}
|
||||
entry, err := h.WhitelistSvc.Add(c.Request.Context(), req.IPOrCIDR, req.Reason)
|
||||
if errors.Is(err, services.ErrInvalidIPOrCIDR) {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
return
|
||||
}
|
||||
if errors.Is(err, services.ErrDuplicateEntry) {
|
||||
c.JSON(http.StatusConflict, gin.H{"error": err.Error()})
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to add whitelist entry"})
|
||||
return
|
||||
}
|
||||
// Reload CrowdSec so the new entry takes effect immediately (non-fatal).
|
||||
if reloadErr := h.CmdExec.Execute("cscli", "hub", "reload"); reloadErr != nil {
|
||||
logger.Log().WithError(reloadErr).Warn("failed to reload CrowdSec after whitelist add (non-fatal)")
|
||||
}
|
||||
c.JSON(http.StatusCreated, entry)
|
||||
}
|
||||
|
||||
// DeleteWhitelist handles DELETE /admin/crowdsec/whitelist/:uuid
|
||||
func (h *CrowdsecHandler) DeleteWhitelist(c *gin.Context) {
|
||||
id := strings.TrimSpace(c.Param("uuid"))
|
||||
if id == "" {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "uuid required"})
|
||||
return
|
||||
}
|
||||
err := h.WhitelistSvc.Delete(c.Request.Context(), id)
|
||||
if errors.Is(err, services.ErrWhitelistNotFound) {
|
||||
c.JSON(http.StatusNotFound, gin.H{"error": "whitelist entry not found"})
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to delete whitelist entry"})
|
||||
return
|
||||
}
|
||||
// Reload CrowdSec so the removed entry is no longer exempt (non-fatal).
|
||||
if reloadErr := h.CmdExec.Execute("cscli", "hub", "reload"); reloadErr != nil {
|
||||
logger.Log().WithError(reloadErr).Warn("failed to reload CrowdSec after whitelist delete (non-fatal)")
|
||||
}
|
||||
c.Status(http.StatusNoContent)
|
||||
}
|
||||
```
|
||||
|
||||
**Route registration** (append inside `RegisterRoutes`, after existing decision/bouncer routes):
|
||||
```go
|
||||
// Whitelist management
|
||||
rg.GET("/admin/crowdsec/whitelist", h.ListWhitelists)
|
||||
rg.POST("/admin/crowdsec/whitelist", h.AddWhitelist)
|
||||
rg.DELETE("/admin/crowdsec/whitelist/:uuid", h.DeleteWhitelist)
|
||||
```
|
||||
|
||||
### 3.5 Startup Integration
|
||||
|
||||
**File**: `backend/internal/services/crowdsec_startup.go`
|
||||
|
||||
In `ReconcileCrowdSecOnStartup()`, before the CrowdSec process is started:
|
||||
|
||||
```go
|
||||
// Regenerate whitelist YAML to ensure it reflects the current DB state.
|
||||
whitelistSvc := NewCrowdSecWhitelistService(db, dataDir)
|
||||
if err := whitelistSvc.WriteYAML(ctx); err != nil {
|
||||
logger.Log().WithError(err).Warn("failed to write CrowdSec whitelist YAML on startup (non-fatal)")
|
||||
}
|
||||
```
|
||||
|
||||
This is **non-fatal**: if the DB has no entries, WriteYAML still writes an empty whitelist file, which is valid.
|
||||
|
||||
### 3.6 Hub Parser Installation
|
||||
|
||||
**File**: `configs/crowdsec/install_hub_items.sh`
|
||||
|
||||
Add after the existing `cscli parsers install` lines:
|
||||
|
||||
```bash
|
||||
cscli parsers install crowdsecurity/whitelists --force || echo "⚠️ Failed to install crowdsecurity/whitelists"
|
||||
```
|
||||
|
||||
### 3.7 Frontend Design
|
||||
|
||||
#### API Client (`frontend/src/api/crowdsec.ts`)
|
||||
|
||||
Append the following types and functions:
|
||||
|
||||
```typescript
|
||||
export interface CrowdSecWhitelistEntry {
|
||||
uuid: string
|
||||
ip_or_cidr: string
|
||||
reason: string
|
||||
created_at: string
|
||||
updated_at: string
|
||||
}
|
||||
|
||||
export interface AddWhitelistPayload {
|
||||
ip_or_cidr: string
|
||||
reason: string
|
||||
}
|
||||
|
||||
export const listWhitelists = async (): Promise<CrowdSecWhitelistEntry[]> => {
|
||||
const resp = await client.get<{ whitelist: CrowdSecWhitelistEntry[] }>('/admin/crowdsec/whitelist')
|
||||
return resp.data.whitelist
|
||||
}
|
||||
|
||||
export const addWhitelist = async (data: AddWhitelistPayload): Promise<CrowdSecWhitelistEntry> => {
|
||||
const resp = await client.post<CrowdSecWhitelistEntry>('/admin/crowdsec/whitelist', data)
|
||||
return resp.data
|
||||
}
|
||||
|
||||
export const deleteWhitelist = async (uuid: string): Promise<void> => {
|
||||
await client.delete(`/admin/crowdsec/whitelist/${uuid}`)
|
||||
}
|
||||
```
|
||||
|
||||
#### TanStack Query Hooks (`frontend/src/hooks/useCrowdSecWhitelist.ts`)
|
||||
|
||||
```typescript
|
||||
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'
|
||||
import { listWhitelists, addWhitelist, deleteWhitelist, AddWhitelistPayload } from '../api/crowdsec'
|
||||
import { toast } from 'sonner'
|
||||
|
||||
export const useWhitelistEntries = () =>
|
||||
useQuery({
|
||||
queryKey: ['crowdsec-whitelist'],
|
||||
queryFn: listWhitelists,
|
||||
})
|
||||
|
||||
export const useAddWhitelist = () => {
|
||||
const queryClient = useQueryClient()
|
||||
return useMutation({
|
||||
mutationFn: (data: AddWhitelistPayload) => addWhitelist(data),
|
||||
onSuccess: () => {
|
||||
toast.success('Whitelist entry added')
|
||||
queryClient.invalidateQueries({ queryKey: ['crowdsec-whitelist'] })
|
||||
},
|
||||
onError: (err: unknown) => {
|
||||
toast.error(err instanceof Error ? err.message : 'Failed to add whitelist entry')
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export const useDeleteWhitelist = () => {
|
||||
const queryClient = useQueryClient()
|
||||
return useMutation({
|
||||
mutationFn: (uuid: string) => deleteWhitelist(uuid),
|
||||
onSuccess: () => {
|
||||
toast.success('Whitelist entry removed')
|
||||
queryClient.invalidateQueries({ queryKey: ['crowdsec-whitelist'] })
|
||||
},
|
||||
onError: (err: unknown) => {
|
||||
toast.error(err instanceof Error ? err.message : 'Failed to remove whitelist entry')
|
||||
},
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
#### CrowdSecConfig.tsx Changes
|
||||
|
||||
The `CrowdSecConfig.tsx` page uses a tab navigation pattern. The new "Whitelist" tab:
|
||||
|
||||
1. **Visibility**: Only render the tab when `isLocalMode === true` (same guard as Decisions tab).
|
||||
2. **Tab value**: `"whitelist"` — append to the existing tab list.
|
||||
3. **Tab panel content** (isolated component or inline JSX):
|
||||
- **Add entry form**: `ip_or_cidr` text input + `reason` text input + "Add" button (disabled while `addMutation.isPending`). Validation error shown inline when backend returns 400/409.
|
||||
- **Quick-add current IP**: A secondary "Add My IP" button that calls `GET /api/v1/system/my-ip` (existing endpoint) and pre-fills the `ip_or_cidr` field with the returned IP.
|
||||
- **Entries table**: Columns — IP/CIDR, Reason, Added, Actions. Each row has a delete button with a confirmation dialog (matching the ban/unban modal pattern used for Decisions).
|
||||
- **Empty state**: "No whitelist entries" message when the list is empty.
|
||||
- **Loading state**: Skeleton rows while `useWhitelistEntries` is fetching.
|
||||
|
||||
**Imports added to `CrowdSecConfig.tsx`**:
|
||||
```typescript
|
||||
import { useWhitelistEntries, useAddWhitelist, useDeleteWhitelist } from '../hooks/useCrowdSecWhitelist'
|
||||
```
|
||||
|
||||
### 3.8 Data Flow Diagram
|
||||
|
||||
```
|
||||
Operator adds IP in UI
|
||||
│
|
||||
▼
|
||||
POST /api/v1/admin/crowdsec/whitelist
|
||||
│
|
||||
▼
|
||||
CrowdsecHandler.AddWhitelist()
|
||||
│
|
||||
▼
|
||||
CrowdSecWhitelistService.Add()
|
||||
├── Validate IP/CIDR (net.ParseIP / net.ParseCIDR)
|
||||
├── Normalize CIDR host bits (network.String())
|
||||
├── Insert into SQLite (models.CrowdSecWhitelist)
|
||||
└── WriteYAML() → <dataDir>/config/parsers/s02-enrich/charon-whitelist.yaml
|
||||
│
|
||||
▼
|
||||
h.CmdExec.Execute("cscli", "hub", "reload") [non-fatal on error]
|
||||
│
|
||||
▼
|
||||
Return 201 to frontend
|
||||
│
|
||||
▼
|
||||
invalidateQueries(['crowdsec-whitelist'])
|
||||
│
|
||||
▼
|
||||
Table re-fetches and shows new entry
|
||||
```
|
||||
|
||||
```
|
||||
Container restart
|
||||
│
|
||||
▼
|
||||
ReconcileCrowdSecOnStartup()
|
||||
│
|
||||
▼
|
||||
CrowdSecWhitelistService.WriteYAML()
|
||||
└── Reads all DB entries → renders YAML
|
||||
│
|
||||
▼
|
||||
CrowdSec process starts
|
||||
│
|
||||
▼
|
||||
CrowdSec loads parsers/s02-enrich/charon-whitelist.yaml
|
||||
└── crowdsecurity/whitelists parser activates
|
||||
│
|
||||
▼
|
||||
IPs/CIDRs in file are exempt from all ban decisions
|
||||
```
|
||||
|
||||
### 3.9 Error Handling Matrix
|
||||
|
||||
| Scenario | Service Error | HTTP Status | Frontend Behavior |
|
||||
|---|---|---|---|
|
||||
| Blank `ip_or_cidr` | — | 400 | Inline validation (required field) |
|
||||
| Malformed IP/CIDR | `ErrInvalidIPOrCIDR` | 400 | Toast: "Invalid IP address or CIDR notation" |
|
||||
| Duplicate entry | `ErrDuplicateEntry` | 409 | Toast: "This IP/CIDR is already whitelisted" |
|
||||
| DB unavailable | generic error | 500 | Toast: "Failed to add whitelist entry" |
|
||||
| UUID not found on DELETE | `ErrWhitelistNotFound` | 404 | Toast: "Whitelist entry not found" |
|
||||
| YAML write failure | logged, non-fatal | 201 (Add still succeeds) | No user-facing error; log warning |
|
||||
| CrowdSec reload failure | logged, non-fatal | 201/204 (operation still succeeds) | No user-facing error; log warning |
|
||||
|
||||
### 3.10 Security Considerations
|
||||
|
||||
- **Input validation**: All `ip_or_cidr` values are validated server-side with `net.ParseIP` / `net.ParseCIDR` before persisting. Arbitrary strings are rejected.
|
||||
- **Path traversal**: `WriteYAML` constructs the output path via `filepath.Join(s.dataDir, "config", "parsers", "s02-enrich", "charon-whitelist.yaml")`. `dataDir` is set at startup—not user-supplied at request time.
|
||||
- **Privilege**: All three endpoints require management-level access (same as all other CrowdSec endpoints).
|
||||
- **YAML injection**: Entries are validated as IPs/CIDRs before they are ever persisted, so no YAML metacharacters can reach the generated file; rendering then goes through Go's `text/template` with each value explicitly quoted (note `text/template` itself performs no escaping — the validation is the real guard). No raw string concatenation is used.
|
||||
- **Log safety**: IPs are logged using the same structured field pattern used in existing CrowdSec handler methods (e.g., `logger.Log().WithField("ip", entry.IPOrCIDR).Info(...)`).
|
||||
|
||||
---
|
||||
|
||||
## 4. Implementation Plan
|
||||
|
||||
### Phase 1 — Hub Parser Installation (Groundwork)
|
||||
|
||||
**Files Changed**:
|
||||
- `configs/crowdsec/install_hub_items.sh`
|
||||
|
||||
**Task 1.1**: Add `cscli parsers install crowdsecurity/whitelists --force` after the last parser install line (currently `crowdsecurity/syslog-logs`).
|
||||
|
||||
**Acceptance**: File change is syntactically valid bash; `shellcheck` passes.
|
||||
|
||||
---
|
||||
|
||||
### Phase 2 — Database Model
|
||||
|
||||
**Files Changed**:
|
||||
- `backend/internal/models/crowdsec_whitelist.go` _(new file)_
|
||||
- `backend/internal/api/routes/routes.go` _(append to AutoMigrate call)_
|
||||
|
||||
**Task 2.1**: Create `crowdsec_whitelist.go` with the `CrowdSecWhitelist` struct per §3.1.
|
||||
|
||||
**Task 2.2**: Append `&models.CrowdSecWhitelist{}` to the `db.AutoMigrate(...)` call in `routes.go`.
|
||||
|
||||
**Validation Gate**: `go build ./backend/...` passes; GORM generates `crowdsec_whitelists` table on next startup.
|
||||
|
||||
---
|
||||
|
||||
### Phase 3 — Whitelist Service
|
||||
|
||||
**Files Changed**:
|
||||
- `backend/internal/services/crowdsec_whitelist_service.go` _(new file)_
|
||||
|
||||
**Task 3.1**: Implement `CrowdSecWhitelistService` with `List`, `Add`, `Delete`, `WriteYAML` per §3.3.
|
||||
|
||||
**Task 3.2**: Implement IP/CIDR validation in `Add()`:
|
||||
- `net.ParseIP(ipOrCIDR) != nil` → valid bare IP
|
||||
- `net.ParseCIDR(ipOrCIDR)` returns no error → valid CIDR
|
||||
- Both fail → `ErrInvalidIPOrCIDR`
|
||||
|
||||
**Task 3.3**: Implement `WriteYAML()`:
|
||||
- Query all entries from DB.
|
||||
- Partition into `ips` (bare IPs) and `cidrs` (CIDR notation) slices.
|
||||
- Render template per §2.4.
|
||||
- Atomic write: temp file → `os.Rename`.
|
||||
- Create directory (`os.MkdirAll`) if not present.
|
||||
|
||||
**Validation Gate**: `go test ./backend/internal/services/... -run TestCrowdSecWhitelist` passes.
|
||||
|
||||
---
|
||||
|
||||
### Phase 4 — API Endpoints
|
||||
|
||||
**Files Changed**:
|
||||
- `backend/internal/api/handlers/crowdsec_handler.go`
|
||||
|
||||
**Task 4.1**: Add `WhitelistSvc *services.CrowdSecWhitelistService` field to `CrowdsecHandler` struct.
|
||||
|
||||
**Task 4.2**: Initialize `WhitelistSvc` in `NewCrowdsecHandler()` when `db != nil`.
|
||||
|
||||
**Task 4.3**: Implement `ListWhitelists`, `AddWhitelist`, `DeleteWhitelist` methods per §3.4.
|
||||
|
||||
**Task 4.4**: Register three routes in `RegisterRoutes()` per §3.4.
|
||||
|
||||
**Task 4.5**: In `AddWhitelist` and `DeleteWhitelist`, after the service call returns without error, call `h.CmdExec.Execute("cscli", "hub", "reload")`. Log a warning on failure; do not change the HTTP response status (reload failure is non-fatal).
|
||||
|
||||
**Validation Gate**: `go test ./backend/internal/api/handlers/... -run TestWhitelist` passes; `make lint-fast` clean.
|
||||
|
||||
---
|
||||
|
||||
### Phase 5 — Startup Integration
|
||||
|
||||
**Files Changed**:
|
||||
- `backend/internal/services/crowdsec_startup.go`
|
||||
|
||||
**Task 5.1**: In `ReconcileCrowdSecOnStartup()`, after the DB and config are loaded but before calling `h.Executor.Start()`, instantiate `CrowdSecWhitelistService` and call `WriteYAML(ctx)`. Log warning on error; do not abort startup.
|
||||
|
||||
**Validation Gate**: `go test ./backend/internal/services/... -run TestReconcile` passes; existing reconcile tests still pass.
|
||||
|
||||
---
|
||||
|
||||
### Phase 6 — Frontend API + Hooks
|
||||
|
||||
**Files Changed**:
|
||||
- `frontend/src/api/crowdsec.ts`
|
||||
- `frontend/src/hooks/useCrowdSecWhitelist.ts` _(new file)_
|
||||
|
||||
**Task 6.1**: Add `CrowdSecWhitelistEntry`, `AddWhitelistPayload` types and `listWhitelists`, `addWhitelist`, `deleteWhitelist` functions to `crowdsec.ts` per §3.7.
|
||||
|
||||
**Task 6.2**: Create `useCrowdSecWhitelist.ts` with `useWhitelistEntries`, `useAddWhitelist`, `useDeleteWhitelist` hooks per §3.7.
|
||||
|
||||
**Validation Gate**: `pnpm test` (Vitest) passes; TypeScript compilation clean.
|
||||
|
||||
---
|
||||
|
||||
### Phase 7 — Frontend UI
|
||||
|
||||
**Files Changed**:
|
||||
- `frontend/src/pages/CrowdSecConfig.tsx`
|
||||
|
||||
**Task 7.1**: Import the three hooks from `useCrowdSecWhitelist.ts`.
|
||||
|
||||
**Task 7.2**: Add `"whitelist"` to the tab list (visible only when `isLocalMode === true`).
|
||||
|
||||
**Task 7.3**: Implement the Whitelist tab panel:
|
||||
- Add-entry form with IP/CIDR + Reason inputs.
|
||||
- "Add My IP" button: `GET /api/v1/system/my-ip` → pre-fill `ip_or_cidr`.
|
||||
- Entries table with UUID key, IP/CIDR, Reason, created date, delete button.
|
||||
- Delete confirmation dialog (reuse existing modal pattern).
|
||||
|
||||
**Task 7.4**: Wire mutation errors to inline form validation messages (400/409 responses).
|
||||
|
||||
**Validation Gate**: `pnpm test` passes; TypeScript clean; `make lint-fast` clean.
|
||||
|
||||
---
|
||||
|
||||
### Phase 8 — Tests
|
||||
|
||||
**Files Changed**:
|
||||
- `backend/internal/services/crowdsec_whitelist_service_test.go` _(new file)_
|
||||
- `backend/internal/api/handlers/crowdsec_whitelist_handler_test.go` _(new file)_
|
||||
- `tests/crowdsec-whitelist.spec.ts` _(new file)_
|
||||
|
||||
**Task 8.1 — Service unit tests**:
|
||||
|
||||
| Test | Scenario |
|
||||
|---|---|
|
||||
| `TestAdd_ValidIP_Success` | Bare IPv4 inserted; YAML file created |
|
||||
| `TestAdd_ValidIPv6_Success` | Bare IPv6 inserted |
|
||||
| `TestAdd_ValidCIDR_Success` | CIDR range inserted |
|
||||
| `TestAdd_CIDRNormalization` | `"10.0.0.1/8"` stored as `"10.0.0.0/8"` |
|
||||
| `TestAdd_InvalidIPOrCIDR_Error` | Returns `ErrInvalidIPOrCIDR` |
|
||||
| `TestAdd_DuplicateEntry_Error` | Second identical insert returns `ErrDuplicateEntry` |
|
||||
| `TestDelete_Success` | Entry removed; YAML regenerated |
|
||||
| `TestDelete_NotFound_Error` | Returns `ErrWhitelistNotFound` |
|
||||
| `TestList_Empty` | Returns empty slice |
|
||||
| `TestList_Populated` | Returns all entries ordered by `created_at` |
|
||||
| `TestWriteYAML_EmptyList` | Writes valid YAML with empty `ip: []` and `cidr: []` |
|
||||
| `TestWriteYAML_MixedEntries` | IPs in `ip:` block; CIDRs in `cidr:` block |
|
||||
| `TestWriteYAML_EmptyDataDir_NoOp` | `dataDir == ""` → returns `nil`, no file written |
|
||||
|
||||
**Task 8.2 — Handler unit tests** (using in-memory SQLite + `mockAuthMiddleware`):
|
||||
|
||||
| Test | Scenario |
|
||||
|---|---|
|
||||
| `TestListWhitelists_200` | Returns 200 with entries array |
|
||||
| `TestAddWhitelist_201` | Valid payload → 201 |
|
||||
| `TestAddWhitelist_400_MissingField` | Empty body → 400 |
|
||||
| `TestAddWhitelist_400_InvalidIP` | Malformed IP → 400 |
|
||||
| `TestAddWhitelist_409_Duplicate` | Duplicate → 409 |
|
||||
| `TestDeleteWhitelist_204` | Valid UUID → 204 |
|
||||
| `TestDeleteWhitelist_404` | Unknown UUID → 404 |
|
||||
|
||||
**Task 8.3 — E2E Playwright tests** (`tests/crowdsec-whitelist.spec.ts`):
|
||||
|
||||
```typescript
|
||||
import { test, expect } from '@playwright/test'
|
||||
|
||||
test.describe('CrowdSec Whitelist Management', () => {
|
||||
test.beforeEach(async ({ page }) => {
|
||||
await page.goto('http://localhost:8080')
|
||||
await page.getByRole('link', { name: 'Security' }).click()
|
||||
await page.getByRole('tab', { name: 'CrowdSec' }).click()
|
||||
await page.getByRole('tab', { name: 'Whitelist' }).click()
|
||||
})
|
||||
|
||||
test('Whitelist tab only visible in local mode', async ({ page }) => {
|
||||
await page.goto('http://localhost:8080')
|
||||
await page.getByRole('link', { name: 'Security' }).click()
|
||||
await page.getByRole('tab', { name: 'CrowdSec' }).click()
|
||||
// When CrowdSec is not in local mode, the Whitelist tab must not exist
|
||||
await expect(page.getByRole('tab', { name: 'Whitelist' })).toBeHidden()
|
||||
})
|
||||
|
||||
test('displays empty state when no entries exist', async ({ page }) => {
|
||||
await expect(page.getByText('No whitelist entries')).toBeVisible()
|
||||
})
|
||||
|
||||
test('adds a valid IP address', async ({ page }) => {
|
||||
await page.getByRole('textbox', { name: 'IP or CIDR' }).fill('203.0.113.5')
|
||||
await page.getByRole('textbox', { name: 'Reason' }).fill('Uptime monitor')
|
||||
await page.getByRole('button', { name: 'Add' }).click()
|
||||
await expect(page.getByText('Whitelist entry added')).toBeVisible()
|
||||
await expect(page.getByRole('cell', { name: '203.0.113.5' })).toBeVisible()
|
||||
})
|
||||
|
||||
test('adds a valid CIDR range', async ({ page }) => {
|
||||
await page.getByRole('textbox', { name: 'IP or CIDR' }).fill('10.0.0.0/8')
|
||||
await page.getByRole('textbox', { name: 'Reason' }).fill('Internal subnet')
|
||||
await page.getByRole('button', { name: 'Add' }).click()
|
||||
await expect(page.getByText('Whitelist entry added')).toBeVisible()
|
||||
await expect(page.getByRole('cell', { name: '10.0.0.0/8' })).toBeVisible()
|
||||
})
|
||||
|
||||
test('"Add My IP" button pre-fills the detected client IP', async ({ page }) => {
|
||||
await page.getByRole('button', { name: 'Add My IP' }).click()
|
||||
const ipField = page.getByRole('textbox', { name: 'IP or CIDR' })
|
||||
const value = await ipField.inputValue()
|
||||
// Value must be non-empty and look like an IPv4 or IPv6 address (loose shape check; exact validation is server-side)
|
||||
expect(value).toMatch(/^[\d.]+$|^[0-9a-fA-F:]+$/)
|
||||
})
|
||||
|
||||
test('shows validation error for invalid input', async ({ page }) => {
|
||||
await page.getByRole('textbox', { name: 'IP or CIDR' }).fill('not-an-ip')
|
||||
await page.getByRole('button', { name: 'Add' }).click()
|
||||
await expect(page.getByText('Invalid IP address or CIDR notation')).toBeVisible()
|
||||
})
|
||||
|
||||
test('removes an entry via delete confirmation', async ({ page }) => {
|
||||
// Seed an entry first
|
||||
await page.getByRole('textbox', { name: 'IP or CIDR' }).fill('198.51.100.1')
|
||||
await page.getByRole('button', { name: 'Add' }).click()
|
||||
await expect(page.getByRole('cell', { name: '198.51.100.1' })).toBeVisible()
|
||||
|
||||
// Delete it
|
||||
await page.getByRole('row', { name: /198\.51\.100\.1/ }).getByRole('button', { name: 'Delete' }).click()
|
||||
await page.getByRole('button', { name: 'Confirm' }).click()
|
||||
await expect(page.getByText('Whitelist entry removed')).toBeVisible()
|
||||
await expect(page.getByRole('cell', { name: '198.51.100.1' })).toBeHidden()
|
||||
})
|
||||
})
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Phase 9 — Documentation
|
||||
|
||||
**Files Changed**:
|
||||
- `ARCHITECTURE.md`
|
||||
- `docs/features/crowdsec-whitelist.md` _(new file, optional for this PR)_
|
||||
|
||||
**Task 9.1**: Update the CrowdSec row in the Cerberus security components table in `ARCHITECTURE.md` to mention whitelist management.
|
||||
|
||||
---
|
||||
|
||||
## 5. Acceptance Criteria
|
||||
|
||||
### Functional
|
||||
|
||||
- [ ] Operator can add a bare IPv4 address (e.g., `203.0.113.5`) to the whitelist.
|
||||
- [ ] Operator can add a bare IPv6 address (e.g., `2001:db8::1`) to the whitelist.
|
||||
- [ ] Operator can add a CIDR range (e.g., `10.0.0.0/8`) to the whitelist.
|
||||
- [ ] Adding an invalid IP/CIDR (e.g., `not-an-ip`) returns a 400 error with a clear message.
|
||||
- [ ] Adding a duplicate entry returns a 409 conflict error.
|
||||
- [ ] Operator can delete an entry; it disappears from the list.
|
||||
- [ ] The Whitelist tab is only visible when CrowdSec is in `local` mode.
|
||||
- [ ] After adding or deleting an entry, the whitelist YAML file is regenerated in `<dataDir>/config/parsers/s02-enrich/charon-whitelist.yaml`.
|
||||
- [ ] Adding or removing a whitelist entry triggers `cscli hub reload` via `h.CmdExec` so changes take effect immediately without a container restart.
|
||||
- [ ] On container restart, the YAML file is regenerated from DB entries before CrowdSec starts.
|
||||
- [ ] **Admin IP protection**: The "Add My IP" button pre-fills the operator's current IP in the `ip_or_cidr` field; a Playwright E2E test verifies the button correctly pre-fills the detected client IP.
|
||||
|
||||
### Technical
|
||||
|
||||
- [ ] `go test ./backend/...` passes — no regressions.
|
||||
- [ ] `pnpm test` (Vitest) passes.
|
||||
- [ ] `make lint-fast` clean — no new lint findings.
|
||||
- [ ] GORM Security Scanner returns zero CRITICAL/HIGH findings.
|
||||
- [ ] Playwright E2E suite passes (Firefox, `--project=firefox`).
|
||||
- [ ] `crowdsecurity/whitelists` parser is installed by `install_hub_items.sh`.
|
||||
|
||||
---
|
||||
|
||||
## 6. Commit Slicing Strategy
|
||||
|
||||
**Decision**: Single PR with ordered logical commits. No scope overlap between commits; each commit leaves the codebase in a compilable state.
|
||||
|
||||
**Trigger reasons**: Cross-domain change (infra script + model + service + handler + startup + frontend) benefits from ordered commits for surgical rollback and focused review.
|
||||
|
||||
| # | Type | Commit Message | Files | Depends On | Validation Gate |
|
||||
|---|---|---|---|---|---|
|
||||
| 1 | `chore` | `install crowdsecurity/whitelists parser by default` | `configs/crowdsec/install_hub_items.sh` | — | `shellcheck` |
|
||||
| 2 | `feat` | `add CrowdSecWhitelist model and automigrate registration` | `backend/internal/models/crowdsec_whitelist.go`, `backend/internal/api/routes/routes.go` | #1 | `go build ./backend/...` |
|
||||
| 3 | `feat` | `add CrowdSecWhitelistService with YAML generation` | `backend/internal/services/crowdsec_whitelist_service.go` | #2 | `go test ./backend/internal/services/...` |
|
||||
| 4 | `feat` | `add whitelist API endpoints to CrowdsecHandler` | `backend/internal/api/handlers/crowdsec_handler.go` | #3 | `go test ./backend/...` + `make lint-fast` |
|
||||
| 5 | `feat` | `regenerate whitelist YAML on CrowdSec startup reconcile` | `backend/internal/services/crowdsec_startup.go` | #3 | `go test ./backend/internal/services/...` |
|
||||
| 6 | `feat` | `add whitelist API client functions and TanStack hooks` | `frontend/src/api/crowdsec.ts`, `frontend/src/hooks/useCrowdSecWhitelist.ts` | #4 | `pnpm test` |
|
||||
| 7 | `feat` | `add Whitelist tab to CrowdSecConfig UI` | `frontend/src/pages/CrowdSecConfig.tsx` | #6 | `pnpm test` + `make lint-fast` |
|
||||
| 8 | `test` | `add whitelist service and handler unit tests` | `*_test.go` files | #4 | `go test ./backend/...` |
|
||||
| 9 | `test` | `add E2E tests for CrowdSec whitelist management` | `tests/crowdsec-whitelist.spec.ts` | #7 | Playwright Firefox |
|
||||
| 10 | `docs` | `update architecture docs for CrowdSec whitelist feature` | `ARCHITECTURE.md` | #7 | `make lint-fast` |
|
||||
|
||||
**Rollback notes**:
|
||||
- Commits 1–3 are pure additions (no existing code modified except the `AutoMigrate` list append in commit 2 and `install_hub_items.sh` in commit 1). Reverting them is safe.
|
||||
- Commit 4 modifies `crowdsec_handler.go` by adding fields and methods without altering existing ones; reverting is mechanical.
|
||||
- Commit 5 modifies `crowdsec_startup.go` — the added block is isolated in a clearly marked section; revert is a 5-line removal.
|
||||
- Commits 6–7 are frontend-only; reverting has no backend impact.
|
||||
|
||||
---
|
||||
|
||||
## 7. Open Questions / Risks
|
||||
|
||||
| Risk | Likelihood | Mitigation |
|
||||
|---|---|---|
|
||||
| CrowdSec does not hot-reload parser files — requires `cscli reload` or process restart | Resolved | `cscli hub reload` is called via `h.CmdExec.Execute(...)` in `AddWhitelist` and `DeleteWhitelist` after each successful `WriteYAML()`. Failure is non-fatal; logged as a warning. |
|
||||
| `crowdsecurity/whitelists` parser path may differ across CrowdSec versions | Low | Use `<dataDir>/config/parsers/s02-enrich/` which is the canonical path; add a note to verify on version upgrades |
|
||||
| Large whitelist files could cause CrowdSec performance issues | Very Low | Reasonable for typical use; document a soft limit recommendation (< 500 entries) in the UI |
|
||||
| `dataDir` empty string in tests | Resolved | Guard added to `WriteYAML`: `if s.dataDir == "" { return nil }` — no-op when `dataDir` is unset |
|
||||
| `CROWDSEC_TRUSTED_IPS` env var seeding | — | **Follow-up / future enhancement** (not in scope for this PR): if `CROWDSEC_TRUSTED_IPS` is set at runtime, parse comma-separated IPs and include them as read-only seed entries in the generated YAML (separate from DB-managed entries). Document in a follow-up issue. |
|
||||
|
||||
@@ -1,447 +1,131 @@
|
||||
# QA Audit Report — Nightly Build Vulnerability Remediation
|
||||
# QA/Security DoD Audit Report — Issue #929
|
||||
|
||||
**Date**: 2026-04-09
|
||||
**Scope**: Dependency-only update — no feature or UI changes
|
||||
**Image Under Test**: `charon:vuln-fix` (built 2026-04-09 14:53 UTC, 632MB)
|
||||
**Branch**: Current working tree (pre-PR)
|
||||
Date: 2026-04-21
|
||||
Repository: /projects/Charon
|
||||
Branch: feature/beta-release
|
||||
Scope assessed: DoD revalidation after recent fixes (E2E-first, frontend coverage, pre-commit/version gate, SA1019, Trivy CVE check)
|
||||
|
||||
---
|
||||
## Final Recommendation
|
||||
|
||||
## Gate Results Summary
|
||||
FAIL
|
||||
|
||||
| # | Gate | Status | Details |
|
||||
|---|------|--------|---------|
|
||||
| 1 | E2E Playwright (Firefox 4/4 shards + Chromium spot check) | PASS | 19 passed, 20 skipped (security suite), 0 failed |
|
||||
| 2 | Backend Tests + Coverage | PASS | All tests pass, 88.2% statements / 88.4% lines (gate: 87%) |
|
||||
| 3 | Frontend Tests + Coverage | PASS | 791 passed, 41 skipped, 89.38% stmts / 90.13% lines (gate: 87%) |
|
||||
| 4 | Local Patch Coverage Report | PASS | 0 changed lines (dependency-only), 100% patch coverage |
|
||||
| 5 | Frontend Type Check (tsc --noEmit) | PASS | Zero TypeScript errors |
|
||||
| 6 | Pre-commit Hooks (lefthook) | PASS | All hooks passed (shellcheck, actionlint, dockerfile-check, YAML, EOF/whitespace) |
|
||||
| 7a | Trivy Filesystem Scan (CRITICAL/HIGH) | PASS | 0 vulnerabilities in source |
|
||||
| 7b | govulncheck (backend) | INFO | 2 findings — both `docker/docker` v28.5.2 with no upstream fix (pre-existing, documented in SECURITY.md) |
|
||||
| 7c | Docker Image Scan (Grype) | PASS | 0 CRITICAL, 2 HIGH (both unfixed Alpine OpenSSL), all target CVEs resolved |
|
||||
| 8 | Linting (make lint-fast) | PASS | 0 issues |
|
||||
| 9 | GORM Security Scan (--check) | PASS | 0 CRITICAL, 0 HIGH, 2 INFO suggestions |
|
||||
Reason: Two mandatory gates are still failing in current rerun evidence:
|
||||
- Playwright E2E-first gate
|
||||
- Frontend coverage gate
|
||||
|
||||
**Overall Status: PASS**
|
||||
Pre-commit/version-check is now passing.
|
||||
|
||||
---
|
||||
## Gate Summary
|
||||
|
||||
## Vulnerability Remediation Verification
|
||||
| # | DoD Gate | Status | Notes |
|
||||
|---|---|---|---|
|
||||
| 1 | Playwright E2E first | FAIL | Healthy container path confirmed (`charon-e2e Up ... (healthy)`), auth setup passes, but accessibility suite still has 1 failing test (security headers page axe timeout) |
|
||||
| 2 | Frontend coverage | FAIL | `scripts/frontend-test-coverage.sh` still ends with unhandled `ENOENT` on `frontend/coverage/.tmp/coverage-132.json` |
|
||||
| 3 | Pre-commit hooks + version check | PASS | `lefthook run pre-commit --all-files` passes; `check-version-match` passes (`.version` matches latest tag `v0.27.0`) |
|
||||
| 4 | SA1019 reconfirmation | PASS | `golangci-lint run ./... --enable-only staticcheck` reports `0 issues`; no `SA1019` occurrences |
|
||||
| 5 | Trivy FS status (CVE-2026-34040) | PASS (not detected) | Current FS scan (`trivy fs --scanners vuln .`) exits 0 with no CVE hit; `CVE-2026-34040` not present in available Trivy artifacts |
|
||||
|
||||
### Target CVEs — All Resolved
|
||||
## Detailed Evidence
|
||||
|
||||
All CVEs identified in the spec (`docs/plans/current_spec.md`) were verified as absent from the `charon:vuln-fix` image:
|
||||
### 1) Playwright E2E-first gate (revalidated)
|
||||
|
||||
| CVE / GHSA | Package | Was | Now | Status |
|
||||
|-----------|---------|-----|-----|--------|
|
||||
| CVE-2026-39883 | otel/sdk | v1.40.0 | v1.43.0 | Resolved |
|
||||
| CVE-2026-34986 | go-jose/v3 | v3.0.4 | v3.0.5 | Resolved |
|
||||
| CVE-2026-34986 | go-jose/v4 | v4.1.3 | v4.1.4 | Resolved |
|
||||
| CVE-2026-32286 | pgproto3/v2 | v2.3.3 | Not detected | Resolved |
|
||||
| GHSA-xmrv-pmrh-hhx2 | AWS SDK v2 (multiple) | various | Patched | Resolved |
|
||||
| CVE-2026-39882 | OTel HTTP exporters | v1.40.0–v1.42.0 | v1.43.0 | Resolved |
|
||||
| CVE-2026-32281/32288/32289 | Go stdlib | 1.26.1 | 1.26.2 | Resolved (via Dockerfile ARG) |
|
||||
Execution evidence:
|
||||
- Container health:
|
||||
- `docker ps --filter name=charon-e2e --format '{{.Names}} {{.Status}}'`
|
||||
- Output: `charon-e2e Up 35 minutes (healthy)`
|
||||
- Auth setup:
|
||||
- `PLAYWRIGHT_HTML_OPEN=never npx playwright test --project=firefox tests/auth.setup.ts -g "authenticate"`
|
||||
- Result: `1 passed`
|
||||
- Evidence: `Login successful`
|
||||
- Accessibility rerun:
|
||||
- `PLAYWRIGHT_HTML_OPEN=never npx playwright test --project=firefox -g "accessibility"`
|
||||
- Result: `1 failed, 2 skipped, 64 passed`
|
||||
- Failing test:
|
||||
- `tests/a11y/security.a11y.spec.ts:21:5`
|
||||
- `Accessibility: Security › security headers page has no critical a11y violations`
|
||||
- Failure detail: `Test timeout of 90000ms exceeded` during axe analyze step.
|
||||
|
||||
### Remaining Vulnerabilities in Docker Image (Pre-existing, Unfixed Upstream)
|
||||
Gate disposition: FAIL.
|
||||
|
||||
| Severity | CVE | Package | Version | Status |
|
||||
|----------|-----|---------|---------|--------|
|
||||
| HIGH | CVE-2026-31790 | libcrypto3, libssl3 | 3.5.5-r0 | Awaiting Alpine patch |
|
||||
| Medium | CVE-2025-60876 | busybox | 1.37.0-r30 | Awaiting Alpine patch |
|
||||
| Medium | GHSA-6jwv-w5xf-7j27 | go.etcd.io/bbolt | v1.4.3 | CrowdSec transitive dep |
|
||||
| Unknown | CVE-2026-28387/28388/28389/28390/31789 | libcrypto3, libssl3 | 3.5.5-r0 | Awaiting Alpine NVD scoring + patch |
|
||||
### 2) Frontend coverage gate (revalidated)
|
||||
|
||||
**Note**: CVE-2026-31790 (HIGH, OpenSSL) is a **new finding** not previously documented in SECURITY.md. It affects the Alpine 3.23.3 base image and has no fix available. It is **not introduced by this PR** — it would be present in any image built on Alpine 3.23.3. Recommend adding it to the known-vulnerabilities section of SECURITY.md.
|
||||
Execution:
|
||||
- `bash scripts/frontend-test-coverage.sh`
|
||||
|
||||
### govulncheck Findings (Backend Source — Pre-existing)
|
||||
Result:
|
||||
- Coverage run still fails with unhandled rejection.
|
||||
- Blocking error remains present:
|
||||
- `Error: ENOENT: no such file or directory, open '/projects/Charon/frontend/coverage/.tmp/coverage-132.json'`
|
||||
- Run summary before abort:
|
||||
- `Test Files 128 passed | 5 skipped (187)`
|
||||
- `Tests 1918 passed | 90 skipped (2008)`
|
||||
|
||||
| ID | Module | Fixed In | Notes |
|
||||
|----|--------|----------|-------|
|
||||
| GO-2026-4887 (CVE-2026-34040) | docker/docker v28.5.2 | N/A | Already in SECURITY.md |
|
||||
| GO-2026-4883 (CVE-2026-33997) | docker/docker v28.5.2 | N/A | Already in SECURITY.md |
|
||||
Additional state:
|
||||
- `frontend/coverage/lcov.info` and `frontend/coverage/coverage-summary.json` can exist despite the gate failure, but the command-level DoD gate remains FAIL due to the non-zero termination path caused by the unhandled ENOENT.
|
||||
|
||||
---
|
||||
Gate disposition: FAIL.
|
||||
|
||||
## Coverage Details
|
||||
### 3) Pre-commit hooks + version-check gate (revalidated)
|
||||
|
||||
### Backend (Go)
|
||||
Execution:
|
||||
- `lefthook run pre-commit --all-files`
|
||||
- `bash ./scripts/check-version-match-tag.sh`
|
||||
|
||||
- Statement coverage: **88.2%**
|
||||
- Line coverage: **88.4%**
|
||||
- Gate threshold: 87% — **PASSED**
|
||||
Result:
|
||||
- Pre-commit summary shows all required hooks completed successfully, including:
|
||||
- `check-version-match`
|
||||
- `golangci-lint-fast`
|
||||
- `frontend-type-check`
|
||||
- `frontend-lint`
|
||||
- `semgrep`
|
||||
- Version check output:
|
||||
- `OK: .version matches latest Git tag v0.27.0`
|
||||
|
||||
### Frontend (React/TypeScript)
|
||||
Gate disposition: PASS.
|
||||
|
||||
- Statements: **89.38%**
|
||||
- Branches: **81.86%**
|
||||
- Functions: **86.71%**
|
||||
- Lines: **90.13%**
|
||||
- Gate threshold: 87% — **PASSED**
|
||||
### 4) SA1019 reconfirmation
|
||||
|
||||
### Patch Coverage
|
||||
Execution:
|
||||
- `cd backend && golangci-lint run ./... --enable-only staticcheck`
|
||||
|
||||
- Changed source lines: **0** (dependency-only update)
|
||||
- Patch coverage: **100%**
|
||||
Result:
|
||||
- Output: `0 issues.`
|
||||
- Additional grep for `SA1019`: no matches.
|
||||
|
||||
---
|
||||
Conclusion: SA1019 remains resolved.
|
||||
|
||||
## E2E Test Details
|
||||
### 5) Trivy FS reconfirmation for CVE-2026-34040
|
||||
|
||||
Tests executed against `charon:vuln-fix` container on `http://127.0.0.1:8080`:
|
||||
Execution:
|
||||
- `trivy fs --scanners vuln .`
|
||||
|
||||
| Browser | Shards | Passed | Skipped | Failed |
|
||||
|---------|--------|--------|---------|--------|
|
||||
| Firefox | 4/4 | 11 | 20 | 0 |
|
||||
| Chromium | 1/4 (spot) | 8 | 0 | 0 |
|
||||
Result:
|
||||
- Exit status: `0`
|
||||
- Output indicates scan completed with:
|
||||
- `Number of language-specific files num=0`
|
||||
- CVE lookup:
|
||||
- No `CVE-2026-34040` match found in available Trivy JSON artifacts (`vuln-results.json`, `trivy-image-report.json`).
|
||||
|
||||
Skipped tests are from the security suite (separate project configuration). No test failures observed. The full 3-browser suite will run in CI.
|
||||
Conclusion: CVE-2026-34040 not detected in current FS scan context.
|
||||
|
||||
---
|
||||
## Local Patch Report Artifact Check
|
||||
|
||||
## GORM Scanner Details
|
||||
Execution:
|
||||
- `bash /projects/Charon/scripts/local-patch-report.sh`
|
||||
|
||||
- Scanned: 43 Go files (2401 lines)
|
||||
- CRITICAL: 0
|
||||
- HIGH: 0
|
||||
- MEDIUM: 0
|
||||
- INFO: 2 (missing indexes on `UserPermittedHost` foreign keys — pre-existing, non-blocking)
|
||||
Result:
|
||||
- Generated successfully in warn mode.
|
||||
- Artifacts verified:
|
||||
- `/projects/Charon/test-results/local-patch-report.md`
|
||||
- `/projects/Charon/test-results/local-patch-report.json`
|
||||
|
||||
---
|
||||
## Blocking Issues
|
||||
|
||||
## Recommendations
|
||||
1. Playwright E2E accessibility suite has one failing security headers test (axe timeout).
|
||||
2. Frontend coverage command still fails with ENOENT under `frontend/coverage/.tmp`.
|
||||
|
||||
1. **Add CVE-2026-31790 to SECURITY.md** — New HIGH OpenSSL vulnerability in Alpine base image. No fix available. Monitor Alpine security advisories.
|
||||
2. **Monitor docker/docker module migration** — 2 govulncheck findings with no upstream fix. Track moby/moby/v2 stabilization.
|
||||
3. **Monitor bbolt GHSA-6jwv-w5xf-7j27** — Medium severity in CrowdSec transitive dependency. Track CrowdSec updates.
|
||||
4. **Full CI E2E suite** — Local validation passed on Firefox + Chromium spot check. The complete 3-browser suite should run in CI pipeline.
|
||||
## Decision
|
||||
|
||||
---
|
||||
Overall DoD decision for Issue #929: FAIL
|
||||
|
||||
## Conclusion
|
||||
|
||||
All audit gates **PASS**. The dependency-only changes successfully remediate all 5 HIGH and 3 MEDIUM vulnerability groups identified in the spec. No regressions detected in tests, type safety, linting, or security scans. The remaining HIGH finding (CVE-2026-31790) is a pre-existing Alpine base image issue unrelated to this PR.
|
||||
|
||||
**Verdict: Clear to merge.**
|
||||
# QA Security Audit Report
|
||||
|
||||
| Field | Value |
|
||||
|-------------|--------------------------------|
|
||||
| **Date** | 2026-03-24 |
|
||||
| **Image** | `charon:local` (Alpine 3.23.3) |
|
||||
| **Go** | 1.26.1 |
|
||||
| **Grype** | 0.110.0 |
|
||||
| **Trivy** | 0.69.1 |
|
||||
| **CodeQL** | Latest (SARIF v2.1.0) |
|
||||
|
||||
---
|
||||
|
||||
## Executive Summary
|
||||
|
||||
The current `charon:local` image built on 2026-03-24 shows a significantly improved
|
||||
security posture compared to the CI baseline. Three previously tracked SECURITY.md
|
||||
vulnerabilities are now **resolved** due to Go 1.26.1 compilation and Alpine package
|
||||
updates. Two new medium/low findings emerged. No CRITICAL or HIGH active
|
||||
vulnerabilities remain in the unignored scan results.
|
||||
|
||||
| Category | Critical | High | Medium | Low | Total |
|
||||
|------------------------|----------|------|--------|-----|-------|
|
||||
| **Active (unignored)** | 0 | 0 | 4 | 2 | 6 |
|
||||
| **Ignored (documented)**| 0 | 4 | 0 | 0 | 4 |
|
||||
| **Resolved since last audit** | 1 | 4 | 1 | 0 | 6 |
|
||||
|
||||
---
|
||||
|
||||
## Scans Executed
|
||||
|
||||
| # | Scan | Tool | Result |
|
||||
|---|-------------------------------|-----------|----------------------|
|
||||
| 1 | Trivy Filesystem | Trivy | 0 findings (no lang-specific files detected) |
|
||||
| 2 | Docker Image (SBOM + Grype) | Syft/Grype| 6 active, 8 ignored |
|
||||
| 3 | Trivy Image Report | Trivy | 1 HIGH (stale Feb 25 report; resolved in current build) |
|
||||
| 4 | CodeQL Go | CodeQL | 1 finding (false positive — see below) |
|
||||
| 5 | CodeQL JavaScript | CodeQL | 0 findings |
|
||||
| 6 | GORM Security Scanner | Custom | PASSED (0 issues, 2 info) |
|
||||
| 7 | Lefthook / Pre-commit | Lefthook | Configured (project uses `lefthook.yml`, not `.pre-commit-config.yaml`) |
|
||||
|
||||
---
|
||||
|
||||
## Active Findings (Unignored)
|
||||
|
||||
### CVE-2025-60876 — BusyBox wget HTTP Request Smuggling
|
||||
|
||||
| Field | Value |
|
||||
|------------------|-------|
|
||||
| **Severity** | Medium (CVSS 6.5) |
|
||||
| **Package** | `busybox` 1.37.0-r30 (Alpine APK) |
|
||||
| **Affected** | `busybox`, `busybox-binsh`, `busybox-extras`, `ssl_client` (4 matches) |
|
||||
| **Fix Available** | No |
|
||||
| **Classification** | AWAITING UPSTREAM |
|
||||
| **EPSS** | 0.00064 (0.20 percentile) |
|
||||
|
||||
**Description**: BusyBox wget through 1.37 accepts raw CR/LF and other C0 control bytes
|
||||
in the HTTP request-target, allowing request line splitting and header injection (CWE-284).
|
||||
|
||||
**Risk Assessment**: Low practical risk. Charon does not invoke `busybox wget` in its
|
||||
application logic. The vulnerable `wget` applet would need to be manually invoked inside
|
||||
the container with attacker-controlled URLs.
|
||||
|
||||
**Remediation**: Monitor Alpine 3.23 for a patched `busybox` APK. No action required
|
||||
until upstream ships a fix.
|
||||
|
||||
---
|
||||
|
||||
### CVE-2026-26958 / GHSA-fw7p-63qq-7hpr — edwards25519 MultiScalarMult Invalid Results
|
||||
|
||||
| Field | Value |
|
||||
|------------------|-------|
|
||||
| **Severity** | Low (CVSS 1.7) |
|
||||
| **Package** | `filippo.io/edwards25519` v1.1.0 |
|
||||
| **Location** | CrowdSec binaries (`/usr/local/bin/crowdsec`, `/usr/local/bin/cscli`) |
|
||||
| **Fix Available** | v1.1.1 |
|
||||
| **Classification** | AWAITING UPSTREAM |
|
||||
| **EPSS** | 0.00018 (0.04 percentile) |
|
||||
|
||||
**Description**: `MultiScalarMult` produces invalid results or undefined behavior if
|
||||
the receiver is not the identity point. This is a rarely used, advanced API.
|
||||
|
||||
**Risk Assessment**: Minimal. CrowdSec does not directly expose edwards25519
|
||||
`MultiScalarMult` to external input. The fix exists at v1.1.1 but requires CrowdSec
|
||||
to rebuild with the updated dependency.
|
||||
|
||||
**Remediation**: Awaiting CrowdSec upstream release with updated dependency. No
|
||||
action available for Charon maintainers.
|
||||
|
||||
---
|
||||
|
||||
## Ignored Findings (Documented with Justification)
|
||||
|
||||
These findings are suppressed in the Grype configuration with documented risk
|
||||
acceptance rationale. All are in third-party binaries bundled in the container;
|
||||
none are in Charon's own code.
|
||||
|
||||
### CVE-2026-2673 — OpenSSL TLS 1.3 Key Exchange Group Downgrade
|
||||
|
||||
| Field | Value |
|
||||
|------------------|-------|
|
||||
| **Severity** | High (CVSS 7.5) |
|
||||
| **Package** | `libcrypto3` / `libssl3` 3.5.5-r0 |
|
||||
| **Matches** | 2 (libcrypto3, libssl3) |
|
||||
| **Classification** | ALREADY DOCUMENTED · AWAITING UPSTREAM |
|
||||
|
||||
Charon terminates TLS at the Caddy layer; the Go backend does not act as a raw
|
||||
TLS 1.3 server. Alpine 3.23 still ships 3.5.5-r0. Risk accepted pending Alpine patch.
|
||||
|
||||
---
|
||||
|
||||
### GHSA-6g7g-w4f8-9c9x — DoS in buger/jsonparser (CrowdSec)
|
||||
|
||||
| Field | Value |
|
||||
|------------------|-------|
|
||||
| **Severity** | High (CVSS 7.5) |
|
||||
| **Package** | `github.com/buger/jsonparser` v1.1.1 |
|
||||
| **Matches** | 2 (crowdsec, cscli binaries) |
|
||||
| **Fix Available** | v1.1.2 |
|
||||
| **Classification** | ALREADY DOCUMENTED · AWAITING UPSTREAM |
|
||||
|
||||
Charon does not use this package directly. The vector requires reaching CrowdSec's
|
||||
internal JSON processing pipeline. Risk accepted pending CrowdSec upstream fix.
|
||||
|
||||
---
|
||||
|
||||
### GHSA-jqcq-xjh3-6g23 / GHSA-x6gf-mpr2-68h6 / CVE-2026-4427 — DoS in pgproto3/v2 (CrowdSec)
|
||||
|
||||
| Field | Value |
|
||||
|------------------|-------|
|
||||
| **Severity** | High (CVSS 7.5) |
|
||||
| **Package** | `github.com/jackc/pgproto3/v2` v2.3.3 |
|
||||
| **Matches** | 4 (2 GHSAs × 2 binaries) |
|
||||
| **Fix Available** | No (v2 is archived/EOL) |
|
||||
| **Classification** | ALREADY DOCUMENTED · AWAITING UPSTREAM |
|
||||
|
||||
pgproto3/v2 is archived with no fix planned. CrowdSec must migrate to pgx/v5.
|
||||
Charon uses SQLite, not PostgreSQL; this code path is unreachable in standard
|
||||
deployment.
|
||||
|
||||
---
|
||||
|
||||
## Resolved Findings (Since Last SECURITY.md Update)
|
||||
|
||||
The following vulnerabilities documented in SECURITY.md are no longer detected in the
|
||||
current image build. **SECURITY.md should be updated to move these to "Patched
|
||||
Vulnerabilities".**
|
||||
|
||||
### CVE-2025-68121 — Go Stdlib Critical in CrowdSec (RESOLVED)
|
||||
|
||||
| Field | Value |
|
||||
|------------------|-------|
|
||||
| **Previous Severity** | Critical |
|
||||
| **Resolution** | CrowdSec binaries now compiled with Go 1.26.1 (was Go 1.25.6) |
|
||||
| **Verified** | Not detected in Grype scan of current image |
|
||||
|
||||
---
|
||||
|
||||
### CHARON-2025-001 — CrowdSec Go Stdlib CVE Cluster (RESOLVED)
|
||||
|
||||
| Field | Value |
|
||||
|------------------|-------|
|
||||
| **Previous Severity** | High |
|
||||
| **Aliases** | CVE-2025-58183, CVE-2025-58186, CVE-2025-58187, CVE-2025-61729, CVE-2026-25679, CVE-2025-61732, CVE-2026-27142, CVE-2026-27139 |
|
||||
| **Resolution** | CrowdSec binaries now compiled with Go 1.26.1 |
|
||||
| **Verified** | None of the aliased CVEs detected in Grype scan |
|
||||
|
||||
---
|
||||
|
||||
### CVE-2026-27171 — zlib CPU Exhaustion (RESOLVED)
|
||||
|
||||
| Field | Value |
|
||||
|------------------|-------|
|
||||
| **Previous Severity** | Medium |
|
||||
| **Resolution** | Alpine now ships `zlib` 1.3.2-r0 (fix threshold: 1.3.2) |
|
||||
| **Verified** | Not detected in Grype scan; zlib 1.3.2-r0 confirmed in SBOM |
|
||||
|
||||
---
|
||||
|
||||
### CVE-2026-33186 — gRPC-Go Authorization Bypass (RESOLVED)
|
||||
|
||||
| Field | Value |
|
||||
|------------------|-------|
|
||||
| **Previous Severity** | Critical |
|
||||
| **Packages** | `google.golang.org/grpc` v1.74.2 (CrowdSec), v1.79.1 (Caddy) |
|
||||
| **Resolution** | Upstream releases now include patched gRPC (>= v1.79.3) |
|
||||
| **Verified** | Not detected in Grype scan; ignore rule present but no match |
|
||||
|
||||
---
|
||||
|
||||
### GHSA-69x3-g4r3-p962 / CVE-2026-25793 — Nebula ECDSA Malleability (RESOLVED)
|
||||
|
||||
| Field | Value |
|
||||
|------------------|-------|
|
||||
| **Previous Severity** | High |
|
||||
| **Package** | `github.com/slackhq/nebula` v1.9.7 in Caddy |
|
||||
| **Resolution** | Caddy now ships with nebula >= v1.10.3 |
|
||||
| **Verified** | Not detected in Grype scan; Trivy image report from Feb 25 had this but current build does not |
|
||||
|
||||
> **Note**: The stale Trivy image report (`trivy-image-report.json`, dated 2026-02-25) still
|
||||
> shows CVE-2026-25793. This report predates the current build and should be regenerated.
|
||||
|
||||
---
|
||||
|
||||
### GHSA-479m-364c-43vc — goxmldsig XML Signature Bypass (RESOLVED)
|
||||
|
||||
| Field | Value |
|
||||
|------------------|-------|
|
||||
| **Previous Severity** | High |
|
||||
| **Package** | `github.com/russellhaering/goxmldsig` v1.5.0 in Caddy |
|
||||
| **Resolution** | Caddy now ships with goxmldsig >= v1.6.0 |
|
||||
| **Verified** | Not detected in Grype scan; ignore rule present but no match |
|
||||
|
||||
---
|
||||
|
||||
## CodeQL Analysis
|
||||
|
||||
### go/cookie-secure-not-set — FALSE POSITIVE
|
||||
|
||||
| Field | Value |
|
||||
|------------------|-------|
|
||||
| **Severity** | Medium (CodeQL) |
|
||||
| **File** | `backend/internal/api/handlers/auth_handler.go:152` |
|
||||
| **Classification** | FALSE POSITIVE (stale SARIF) |
|
||||
|
||||
**Finding**: CodeQL reports "Cookie does not set Secure attribute to true" at line 152.
|
||||
|
||||
**Verification**: The `setSecureCookie` function at line 148-156 calls `c.SetCookie()`
|
||||
with `secure: true` (6th positional argument). The Secure attribute IS set correctly.
|
||||
This SARIF was generated from a previous code version and does not reflect the current
|
||||
source. **The CodeQL SARIF files should be regenerated.**
|
||||
|
||||
### JavaScript / JS
|
||||
|
||||
No findings. Both `codeql-results-javascript.sarif` and `codeql-results-js.sarif` contain
|
||||
0 results.
|
||||
|
||||
---
|
||||
|
||||
## GORM Security Scanner
|
||||
|
||||
| Metric | Value |
|
||||
|------------|-------|
|
||||
| **Result** | PASSED |
|
||||
| **Files** | 43 Go files (2,396 lines) |
|
||||
| **Critical** | 0 |
|
||||
| **High** | 0 |
|
||||
| **Medium** | 0 |
|
||||
| **Info** | 2 (missing indexes on foreign keys in `UserPermittedHost`) |
|
||||
|
||||
The 2 informational suggestions (`UserID` and `ProxyHostID` missing `gorm:"index"` in
|
||||
`backend/internal/models/user.go:130-131`) are performance recommendations, not security
|
||||
issues. They do not block this audit.
|
||||
|
||||
---
|
||||
|
||||
## CI vs Local Scan Discrepancy
|
||||
|
||||
The CI reported **3 Critical, 5 High, 1 Medium**. The local scan on the freshly built
|
||||
image reports **0 Critical, 0 High, 4 Medium, 2 Low** (active) plus **4 High** (ignored).
|
||||
|
||||
**Root causes for the discrepancy:**
|
||||
|
||||
1. **Resolved vulnerabilities**: 3 Critical and 4 High findings were resolved by Go 1.26.1
|
||||
compilation and upstream Caddy/CrowdSec dependency updates since the CI image was built.
|
||||
2. **Grype ignore rules**: The local scan applies documented risk acceptance rules that
|
||||
suppress 4 High findings in third-party binaries. CI (Trivy) does not use these rules.
|
||||
3. **Stale CI artifacts**: The `trivy-image-report.json` dates from 2026-02-25 and does
|
||||
not reflect the current image state. The `codeql-results-go.sarif` references code that
|
||||
has since been fixed.
|
||||
|
||||
---
|
||||
|
||||
## Recommended Actions
|
||||
|
||||
### Immediate (This Sprint)
|
||||
|
||||
1. **Update SECURITY.md**: Move CVE-2025-68121, CHARON-2025-001, and CVE-2026-27171 to
|
||||
a "Patched Vulnerabilities" section. Add CVE-2025-60876 and CVE-2026-26958 as new
|
||||
known vulnerabilities.
|
||||
|
||||
2. **Regenerate stale scan artifacts**: Re-run Trivy image scan and CodeQL analysis to
|
||||
produce current SARIF/JSON files. The existing files predate fixes and produce
|
||||
misleading CI results.
|
||||
|
||||
3. **Clean up Grype ignore rules**: Remove ignore entries for vulnerabilities that are
|
||||
no longer detected (CVE-2026-33186, GHSA-69x3-g4r3-p962, GHSA-479m-364c-43vc).
|
||||
Stale ignore rules obscure the actual security posture.
|
||||
|
||||
### Next Release
|
||||
|
||||
4. **Monitor Alpine APK updates**: Watch for patched `busybox` (CVE-2025-60876) and
|
||||
`openssl` (CVE-2026-2673) packages in Alpine 3.23.
|
||||
|
||||
5. **Monitor CrowdSec releases**: Watch for CrowdSec builds with updated
|
||||
`filippo.io/edwards25519` >= v1.1.1, `buger/jsonparser` >= v1.1.2, and
|
||||
`pgx/v5` migration (replacing pgproto3/v2).
|
||||
|
||||
6. **Monitor Go 1.26.2-alpine**: When available, bump `GO_VERSION` to pick up any
|
||||
remaining stdlib patches.
|
||||
|
||||
### Informational (Non-Blocking)
|
||||
|
||||
7. **GORM indexes**: Consider adding `gorm:"index"` to `UserID` and `ProxyHostID` in
|
||||
`UserPermittedHost` for query performance.
|
||||
|
||||
---
|
||||
|
||||
## Gotify Token Review
|
||||
|
||||
Verified: No Gotify application tokens appear in scan output, log artifacts, test results,
|
||||
API examples, or URL query parameters. All diagnostic output is clean.
|
||||
|
||||
---
|
||||
|
||||
## Conclusion
|
||||
|
||||
The Charon container image security posture has materially improved. Six previously known
|
||||
vulnerabilities are now resolved through Go toolchain and dependency updates. The remaining
|
||||
active findings are medium/low severity, reside in Alpine base packages and CrowdSec
|
||||
third-party binaries, and have no available fixes. No vulnerabilities exist in Charon's
|
||||
own application code. GORM and CodeQL scans confirm the backend code is clean.
|
||||
Promotion recommendation: keep blocked until both failing mandatory gates are green on rerun.
|
||||
|
||||
226
docs/reports/qa_report_crowdsec_whitelist_2026-04-16.md
Normal file
226
docs/reports/qa_report_crowdsec_whitelist_2026-04-16.md
Normal file
@@ -0,0 +1,226 @@
|
||||
# QA Audit Report — CrowdSec IP Whitelist Management
|
||||
|
||||
**Feature Branch**: `feature/beta-release`
|
||||
**Pull Request**: #952
|
||||
**Repository**: Wikid82/Charon
|
||||
**Audit Date**: 2026-04-16
|
||||
**Auditor**: QA Security Agent
|
||||
|
||||
---
|
||||
|
||||
## Overall Verdict
|
||||
|
||||
### APPROVED WITH CONDITIONS
|
||||
|
||||
The CrowdSec IP Whitelist Management feature passes all critical quality and security gates. All feature-specific E2E tests pass across three browsers. Backend and frontend coverage exceed thresholds. No security vulnerabilities were found in the feature code. Two upstream HIGH CVEs in the Docker image and below-threshold overall patch coverage require tracking but do not block the release.
|
||||
|
||||
---
|
||||
|
||||
## Gate Results Summary
|
||||
|
||||
| # | Gate | Result | Detail |
|
||||
|---|------|--------|--------|
|
||||
| 1 | Playwright E2E | **PASS** | All CrowdSec whitelist tests passed; 14 pre-existing failures (unrelated) |
|
||||
| 2 | Go Backend Coverage | **PASS** | 88.4% line coverage (threshold: 87%) |
|
||||
| 3 | Frontend Coverage | **PASS** | 90.06% line coverage (threshold: 87%) |
|
||||
| 4 | Patch Coverage | **WARN** | Overall 89.4% (threshold: 90%); Backend 88.0% PASS; Frontend 97.0% PASS |
|
||||
| 5 | TypeScript Type Check | **PASS** | `npx tsc --noEmit` — 0 errors |
|
||||
| 6 | Lefthook (Lint/Format) | **PASS** | All 6 hooks green |
|
||||
| 7 | GORM Security Scan | **PASS** | 0 CRITICAL/HIGH/MEDIUM issues |
|
||||
| 8 | Trivy Filesystem Scan | **PASS** | 0 CRITICAL/HIGH vulnerabilities |
|
||||
| 9 | Trivy Docker Image Scan | **WARN** | 2 unique HIGH CVEs (upstream dependencies) |
|
||||
| 10 | CodeQL SARIF Review | **PASS** | 1 pre-existing Go finding; 0 JS findings; 0 whitelist-related |
|
||||
|
||||
---
|
||||
|
||||
## Detailed Gate Analysis
|
||||
|
||||
### Gate 1 — Playwright E2E Tests
|
||||
|
||||
**Result**: PASS
|
||||
**Browsers**: Chromium, Firefox, WebKit (all three)
|
||||
|
||||
**CrowdSec Whitelist-Specific Tests (10 tests)**: All PASSED
|
||||
- Add whitelist entry with valid IP
|
||||
- Add whitelist entry with valid CIDR
|
||||
- Reject invalid IP/CIDR input
|
||||
- Reject duplicate entry
|
||||
- Delete whitelist entry
|
||||
- Display whitelist table with entries
|
||||
- Empty state display
|
||||
- Whitelist tab visibility (local mode only)
|
||||
- Form validation and error handling
|
||||
- Toast notification on success/failure
|
||||
|
||||
**Pre-existing Failures (14 unique, unrelated to this feature)**:
|
||||
- Certificate deletion tests (7): cert delete/bulk-delete operations
|
||||
- Caddy import tests (3): conflict details, server detection, resolution
|
||||
- Navigation test (1): main navigation item count
|
||||
- User management tests (2): invite link copy, keyboard navigation
|
||||
- Integration test (1): system health check
|
||||
|
||||
None of the pre-existing failures are related to the CrowdSec whitelist feature.
|
||||
|
||||
### Gate 2 — Go Backend Coverage
|
||||
|
||||
**Result**: PASS
|
||||
**Coverage**: 88.4% line coverage
|
||||
**Threshold**: 87%
|
||||
|
||||
### Gate 3 — Frontend Coverage
|
||||
|
||||
**Result**: PASS
|
||||
**Coverage**: 90.06% line coverage (Statements: 89.03%, Branches: 85.84%, Functions: 85.85%)
|
||||
**Threshold**: 87%
|
||||
|
||||
5 pre-existing test timeouts in `ProxyHostForm-dns.test.tsx` and `ProxyHostForm-dropdown-changes.test.tsx` — not whitelist-related.
|
||||
|
||||
### Gate 4 — Patch Coverage
|
||||
|
||||
**Result**: WARN (non-blocking)
|
||||
|
||||
| Scope | Changed Lines | Covered | Patch % | Status |
|
||||
|-------|--------------|---------|---------|--------|
|
||||
| Overall | 1689 | 1510 | 89.4% | WARN (threshold: 90%) |
|
||||
| Backend | 1426 | 1255 | 88.0% | PASS (threshold: 85%) |
|
||||
| Frontend | 263 | 255 | 97.0% | PASS (threshold: 85%) |
|
||||
|
||||
**CrowdSec-Specific Patch Coverage**:
|
||||
- `crowdsec_handler.go`: 71.2% — 17 uncovered changed lines (error-handling branches)
|
||||
- `crowdsec_whitelist_service.go`: 83.6% — 18 uncovered changed lines (YAML write failure, edge cases)
|
||||
- `CrowdSecConfig.tsx`: 93.3% — 2 uncovered changed lines
|
||||
|
||||
**Recommendation**: Add targeted unit tests for error-handling branches in `crowdsec_handler.go` (lines 2712-2772) and `crowdsec_whitelist_service.go` (lines 47-148) to bring CrowdSec-specific patch coverage above 90%. This is tracked as a follow-up improvement and does not block release.
|
||||
|
||||
### Gate 5 — TypeScript Type Check
|
||||
|
||||
**Result**: PASS
|
||||
`npx tsc --noEmit` from `frontend/` completed with 0 errors.
|
||||
|
||||
### Gate 6 — Lefthook (Lint/Format)
|
||||
|
||||
**Result**: PASS
|
||||
All 6 hooks passed:
|
||||
- `go-fmt`
|
||||
- `go-vet`
|
||||
- `go-staticcheck`
|
||||
- `eslint`
|
||||
- `prettier-check`
|
||||
- `tsc-check`
|
||||
|
||||
### Gate 7 — GORM Security Scan
|
||||
|
||||
**Result**: PASS
|
||||
`./scripts/scan-gorm-security.sh --check` — 0 CRITICAL, 0 HIGH, 0 MEDIUM issues.
|
||||
No exposed IDs, secrets, or DTO embedding violations in CrowdSec whitelist models.
|
||||
|
||||
### Gate 8 — Trivy Filesystem Scan
|
||||
|
||||
**Result**: PASS
|
||||
`trivy fs --scanners vuln --severity CRITICAL,HIGH --format table .` — 0 vulnerabilities detected in application source and dependencies.
|
||||
|
||||
### Gate 9 — Trivy Docker Image Scan
|
||||
|
||||
**Result**: WARN (non-blocking for this feature)
|
||||
Image: `charon:local` (Alpine 3.23.3)
|
||||
|
||||
| CVE | Severity | Package | Installed | Fixed | Component |
|
||||
|-----|----------|---------|-----------|-------|-----------|
|
||||
| CVE-2026-34040 | HIGH | github.com/docker/docker | v28.5.2+incompatible | 29.3.1 | Charon Go binary (Moby authorization bypass) |
|
||||
| CVE-2026-32286 | HIGH | github.com/jackc/pgproto3/v2 | v2.3.3 | No fix | CrowdSec binaries (PostgreSQL protocol DoS) |
|
||||
|
||||
**Analysis**:
|
||||
- CVE-2026-34040: Moby authorization bypass — affects Docker API access control. Charon does not expose the Docker API to untrusted networks. Low practical risk. Update `github.com/docker/docker` to v29.3.1 when available.
|
||||
- CVE-2026-32286: PostgreSQL protocol DoS — present only in CrowdSec's `crowdsec` and `cscli` binaries, not in Charon's own code. Awaiting upstream fix from CrowdSec.
|
||||
|
||||
**Recommendation**: Track both CVEs for remediation. Neither impacts CrowdSec whitelist management functionality or Charon's own security posture directly.
|
||||
|
||||
### Gate 10 — CodeQL SARIF Review
|
||||
|
||||
**Result**: PASS
|
||||
|
||||
- **Go**: 1 pre-existing finding — `cookie-secure-not-set` at `auth_handler.go:152`. Not whitelist-related. Tracked separately.
|
||||
- **JavaScript**: 0 findings.
|
||||
- **CrowdSec whitelist**: 0 findings across both Go and JavaScript.
|
||||
|
||||
---
|
||||
|
||||
## Security Review — CrowdSec IP Whitelist Feature
|
||||
|
||||
### 1. IP/CIDR Input Validation
|
||||
|
||||
**Status**: SECURE
|
||||
|
||||
The `normalizeIPOrCIDR()` function in `crowdsec_whitelist_service.go` uses Go standard library functions `net.ParseIP()` and `net.ParseCIDR()` for validation. Invalid inputs are rejected with the sentinel error `ErrInvalidIPOrCIDR`. No user input passes through without validation.
|
||||
|
||||
### 2. YAML Injection Prevention
|
||||
|
||||
**Status**: SECURE
|
||||
|
||||
`buildWhitelistYAML()` uses a `strings.Builder` to construct YAML output. Only IP addresses and CIDR ranges that have already passed `normalizeIPOrCIDR()` validation are included. The normalized output from `net.ParseIP`/`net.ParseCIDR` cannot contain YAML metacharacters.
|
||||
|
||||
### 3. Path Traversal Protection
|
||||
|
||||
**Status**: SECURE
|
||||
|
||||
`WriteYAML()` uses hardcoded file paths (no user input in path construction). Atomic write pattern: writes to `.tmp` suffix, then `os.Rename()` to final path. No directory traversal vectors.
|
||||
|
||||
### 4. SQL Injection Prevention
|
||||
|
||||
**Status**: SECURE
|
||||
|
||||
All GORM queries use parameterized operations:
|
||||
- `Where("uuid = ?", id)` for delete
|
||||
- `Where("ip_or_cidr = ?", normalized)` for duplicate check
|
||||
- Standard GORM `Create()` for inserts
|
||||
|
||||
No raw SQL or string concatenation.
|
||||
|
||||
### 5. Authentication & Authorization
|
||||
|
||||
**Status**: SECURE
|
||||
|
||||
All whitelist routes are registered under the admin route group in `routes.go`, which is protected by:
|
||||
- Cerberus middleware (authentication/authorization enforcement)
|
||||
- Emergency bypass middleware (for recovery scenarios only)
|
||||
- Security headers and gzip middleware
|
||||
|
||||
No unauthenticated access to whitelist endpoints is possible.
|
||||
|
||||
### 6. Log Safety
|
||||
|
||||
**Status**: SECURE
|
||||
|
||||
Whitelist service logs only operational error context (e.g., "failed to write CrowdSec whitelist YAML after add"). No IP addresses, user data, or PII are written to logs. Other handler code uses `util.SanitizeForLog()` for user-controlled input in log messages.
|
||||
|
||||
---
|
||||
|
||||
## Conditions for Approval
|
||||
|
||||
These items are tracked as follow-up improvements and do not block merge:
|
||||
|
||||
1. **Patch Coverage Improvement**: Add targeted unit tests for error-handling branches in:
|
||||
- `crowdsec_handler.go` (lines 2712-2772, 71.2% patch coverage)
|
||||
- `crowdsec_whitelist_service.go` (lines 47-148, 83.6% patch coverage)
|
||||
|
||||
2. **Upstream CVE Tracking**:
|
||||
- CVE-2026-34040: Update `github.com/docker/docker` to v29.3.1 when Go module is available
|
||||
- CVE-2026-32286: Monitor CrowdSec upstream for `pgproto3` fix
|
||||
|
||||
3. **Pre-existing Test Failures**: 14 pre-existing E2E test failures (certificate deletion, caddy import, navigation, user management) should be tracked in existing issues. None are regressions from this feature.
|
||||
|
||||
---
|
||||
|
||||
## Artifacts
|
||||
|
||||
| Artifact | Location |
|
||||
|----------|----------|
|
||||
| Playwright HTML Report | `playwright-report/index.html` |
|
||||
| Backend Coverage | `backend/coverage.txt` |
|
||||
| Frontend Coverage | `frontend/coverage/lcov.info`, `frontend/coverage/coverage-summary.json` |
|
||||
| Patch Coverage Report | `test-results/local-patch-report.md`, `test-results/local-patch-report.json` |
|
||||
| GORM Security Scan | Inline (0 findings) |
|
||||
| Trivy Filesystem Scan | Inline (0 findings) |
|
||||
| Trivy Image Scan | `trivy-image-report.json` |
|
||||
| CodeQL Go SARIF | `codeql-results-go.sarif` |
|
||||
| CodeQL JS SARIF | `codeql-results-javascript.sarif` |
|
||||
1193
frontend/package-lock.json
generated
1193
frontend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -33,20 +33,20 @@
|
||||
"@radix-ui/react-select": "^2.2.6",
|
||||
"@radix-ui/react-tabs": "^1.1.13",
|
||||
"@radix-ui/react-tooltip": "^1.2.8",
|
||||
"@tanstack/react-query": "^5.99.0",
|
||||
"axios": "1.15.0",
|
||||
"@tanstack/react-query": "^5.99.2",
|
||||
"axios": "1.15.2",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"date-fns": "^4.1.0",
|
||||
"i18next": "^26.0.5",
|
||||
"i18next": "^26.0.6",
|
||||
"i18next-browser-languagedetector": "^8.2.1",
|
||||
"lucide-react": "^1.8.0",
|
||||
"react": "^19.2.5",
|
||||
"react-dom": "^19.2.5",
|
||||
"react-hook-form": "^7.72.1",
|
||||
"react-hook-form": "^7.73.1",
|
||||
"react-hot-toast": "^2.6.0",
|
||||
"react-i18next": "^17.0.3",
|
||||
"react-router-dom": "^7.14.1",
|
||||
"react-i18next": "^17.0.4",
|
||||
"react-router-dom": "^7.14.2",
|
||||
"recharts": "^3.8.1",
|
||||
"tailwind-merge": "^3.5.0",
|
||||
"tldts": "^7.0.28"
|
||||
@@ -57,7 +57,7 @@
|
||||
"@eslint/json": "^1.2.0",
|
||||
"@eslint/markdown": "^8.0.1",
|
||||
"@playwright/test": "^1.59.1",
|
||||
"@tailwindcss/postcss": "^4.2.2",
|
||||
"@tailwindcss/postcss": "^4.2.4",
|
||||
"@testing-library/jest-dom": "^6.9.1",
|
||||
"@testing-library/react": "^16.3.2",
|
||||
"@testing-library/user-event": "^14.6.1",
|
||||
@@ -65,52 +65,52 @@
|
||||
"@types/node": "^25.6.0",
|
||||
"@types/react": "^19.2.14",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"@typescript-eslint/eslint-plugin": "^8.58.2",
|
||||
"@typescript-eslint/parser": "^8.58.2",
|
||||
"@typescript-eslint/utils": "^8.58.2",
|
||||
"@typescript-eslint/eslint-plugin": "^8.59.0",
|
||||
"@typescript-eslint/parser": "^8.59.0",
|
||||
"@typescript-eslint/utils": "^8.59.0",
|
||||
"@vitejs/plugin-react": "^6.0.1",
|
||||
"@vitest/coverage-istanbul": "^4.1.4",
|
||||
"@vitest/coverage-v8": "^4.1.4",
|
||||
"@vitest/coverage-istanbul": "^4.1.5",
|
||||
"@vitest/coverage-v8": "^4.1.5",
|
||||
"@vitest/eslint-plugin": "^1.6.16",
|
||||
"@vitest/ui": "^4.1.4",
|
||||
"@vitest/ui": "^4.1.5",
|
||||
"autoprefixer": "^10.5.0",
|
||||
"eslint": "^10.2.0",
|
||||
"eslint": "^10.2.1",
|
||||
"eslint-import-resolver-typescript": "^4.4.4",
|
||||
"eslint-plugin-import-x": "^4.16.2",
|
||||
"eslint-plugin-jsx-a11y": "^6.10.2",
|
||||
"eslint-plugin-no-unsanitized": "^4.1.5",
|
||||
"eslint-plugin-promise": "^7.2.1",
|
||||
"eslint-plugin-react-compiler": "^19.1.0-rc.2",
|
||||
"eslint-plugin-react-hooks": "^7.0.1",
|
||||
"eslint-plugin-react-hooks": "^7.1.1",
|
||||
"eslint-plugin-react-refresh": "^0.5.2",
|
||||
"eslint-plugin-security": "^4.0.0",
|
||||
"eslint-plugin-sonarjs": "^4.0.2",
|
||||
"eslint-plugin-sonarjs": "^4.0.3",
|
||||
"eslint-plugin-testing-library": "^7.16.2",
|
||||
"eslint-plugin-unicorn": "^64.0.0",
|
||||
"eslint-plugin-unused-imports": "^4.4.1",
|
||||
"jsdom": "29.0.2",
|
||||
"knip": "^6.4.1",
|
||||
"knip": "^6.6.0",
|
||||
"postcss": "^8.5.10",
|
||||
"tailwindcss": "^4.2.2",
|
||||
"typescript": "^6.0.2",
|
||||
"typescript-eslint": "^8.58.2",
|
||||
"vite": "^8.0.8",
|
||||
"vitest": "^4.1.4",
|
||||
"tailwindcss": "^4.2.4",
|
||||
"typescript": "^6.0.3",
|
||||
"typescript-eslint": "^8.59.0",
|
||||
"vite": "^8.0.9",
|
||||
"vitest": "^4.1.5",
|
||||
"zod-validation-error": "^5.0.0"
|
||||
},
|
||||
"overrides": {
|
||||
"typescript": "^6.0.2",
|
||||
"typescript": "^6.0.3",
|
||||
"eslint-plugin-react-hooks": {
|
||||
"eslint": "^10.2.0"
|
||||
"eslint": "^10.2.1"
|
||||
},
|
||||
"eslint-plugin-jsx-a11y": {
|
||||
"eslint": "^10.2.0"
|
||||
"eslint": "^10.2.1"
|
||||
},
|
||||
"eslint-plugin-promise": {
|
||||
"eslint": "^10.2.0"
|
||||
"eslint": "^10.2.1"
|
||||
},
|
||||
"@vitejs/plugin-react": {
|
||||
"vite": "8.0.8"
|
||||
"vite": "8.0.9"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -116,6 +116,120 @@ describe('crowdsec API', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('listCrowdsecDecisions', () => {
|
||||
it('should call GET /admin/crowdsec/decisions and return data', async () => {
|
||||
const mockData = {
|
||||
decisions: [
|
||||
{ id: '1', ip: '1.2.3.4', reason: 'bot', duration: '24h', created_at: '2024-01-01T00:00:00Z', source: 'crowdsec' },
|
||||
],
|
||||
}
|
||||
vi.mocked(client.get).mockResolvedValue({ data: mockData })
|
||||
|
||||
const result = await crowdsec.listCrowdsecDecisions()
|
||||
|
||||
expect(client.get).toHaveBeenCalledWith('/admin/crowdsec/decisions')
|
||||
expect(result).toEqual(mockData)
|
||||
})
|
||||
})
|
||||
|
||||
describe('banIP', () => {
|
||||
it('should call POST /admin/crowdsec/ban with ip, duration, and reason', async () => {
|
||||
vi.mocked(client.post).mockResolvedValue({})
|
||||
|
||||
await crowdsec.banIP('1.2.3.4', '24h', 'manual ban')
|
||||
|
||||
expect(client.post).toHaveBeenCalledWith('/admin/crowdsec/ban', {
|
||||
ip: '1.2.3.4',
|
||||
duration: '24h',
|
||||
reason: 'manual ban',
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('unbanIP', () => {
|
||||
it('should call DELETE /admin/crowdsec/ban/{encoded ip}', async () => {
|
||||
vi.mocked(client.delete).mockResolvedValue({})
|
||||
|
||||
await crowdsec.unbanIP('1.2.3.4')
|
||||
|
||||
expect(client.delete).toHaveBeenCalledWith('/admin/crowdsec/ban/1.2.3.4')
|
||||
})
|
||||
|
||||
it('should URL-encode special characters in the IP', async () => {
|
||||
vi.mocked(client.delete).mockResolvedValue({})
|
||||
|
||||
await crowdsec.unbanIP('::1')
|
||||
|
||||
expect(client.delete).toHaveBeenCalledWith('/admin/crowdsec/ban/%3A%3A1')
|
||||
})
|
||||
})
|
||||
|
||||
describe('getCrowdsecKeyStatus', () => {
|
||||
it('should call GET /admin/crowdsec/key-status and return data', async () => {
|
||||
const mockData = {
|
||||
key_source: 'file' as const,
|
||||
env_key_rejected: false,
|
||||
current_key_preview: 'abc***xyz',
|
||||
message: 'Key loaded from file',
|
||||
}
|
||||
vi.mocked(client.get).mockResolvedValue({ data: mockData })
|
||||
|
||||
const result = await crowdsec.getCrowdsecKeyStatus()
|
||||
|
||||
expect(client.get).toHaveBeenCalledWith('/admin/crowdsec/key-status')
|
||||
expect(result).toEqual(mockData)
|
||||
})
|
||||
})
|
||||
|
||||
describe('listWhitelists', () => {
|
||||
it('should call GET /admin/crowdsec/whitelist and return the whitelist array', async () => {
|
||||
const mockWhitelist = [
|
||||
{
|
||||
uuid: 'uuid-1',
|
||||
ip_or_cidr: '192.168.1.1',
|
||||
reason: 'Home',
|
||||
created_at: '2024-01-01T00:00:00Z',
|
||||
updated_at: '2024-01-01T00:00:00Z',
|
||||
},
|
||||
]
|
||||
vi.mocked(client.get).mockResolvedValue({ data: { whitelist: mockWhitelist } })
|
||||
|
||||
const result = await crowdsec.listWhitelists()
|
||||
|
||||
expect(client.get).toHaveBeenCalledWith('/admin/crowdsec/whitelist')
|
||||
expect(result).toEqual(mockWhitelist)
|
||||
})
|
||||
})
|
||||
|
||||
describe('addWhitelist', () => {
|
||||
it('should call POST /admin/crowdsec/whitelist and return the created entry', async () => {
|
||||
const payload = { ip_or_cidr: '192.168.1.1', reason: 'Home' }
|
||||
const mockEntry = {
|
||||
uuid: 'uuid-1',
|
||||
ip_or_cidr: '192.168.1.1',
|
||||
reason: 'Home',
|
||||
created_at: '2024-01-01T00:00:00Z',
|
||||
updated_at: '2024-01-01T00:00:00Z',
|
||||
}
|
||||
vi.mocked(client.post).mockResolvedValue({ data: mockEntry })
|
||||
|
||||
const result = await crowdsec.addWhitelist(payload)
|
||||
|
||||
expect(client.post).toHaveBeenCalledWith('/admin/crowdsec/whitelist', payload)
|
||||
expect(result).toEqual(mockEntry)
|
||||
})
|
||||
})
|
||||
|
||||
describe('deleteWhitelist', () => {
|
||||
it('should call DELETE /admin/crowdsec/whitelist/{uuid}', async () => {
|
||||
vi.mocked(client.delete).mockResolvedValue({})
|
||||
|
||||
await crowdsec.deleteWhitelist('uuid-1')
|
||||
|
||||
expect(client.delete).toHaveBeenCalledWith('/admin/crowdsec/whitelist/uuid-1')
|
||||
})
|
||||
})
|
||||
|
||||
describe('default export', () => {
|
||||
it('should export all functions', () => {
|
||||
expect(crowdsec.default).toHaveProperty('startCrowdsec')
|
||||
@@ -126,6 +240,14 @@ describe('crowdsec API', () => {
|
||||
expect(crowdsec.default).toHaveProperty('listCrowdsecFiles')
|
||||
expect(crowdsec.default).toHaveProperty('readCrowdsecFile')
|
||||
expect(crowdsec.default).toHaveProperty('writeCrowdsecFile')
|
||||
expect(crowdsec.default).toHaveProperty('listCrowdsecDecisions')
|
||||
expect(crowdsec.default).toHaveProperty('banIP')
|
||||
expect(crowdsec.default).toHaveProperty('unbanIP')
|
||||
expect(crowdsec.default).toHaveProperty('getCrowdsecKeyStatus')
|
||||
expect(crowdsec.default).toHaveProperty('listWhitelists')
|
||||
expect(crowdsec.default).toHaveProperty('addWhitelist')
|
||||
expect(crowdsec.default).toHaveProperty('deleteWhitelist')
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
@@ -156,4 +156,31 @@ export async function getCrowdsecKeyStatus(): Promise<CrowdSecKeyStatus> {
|
||||
return resp.data
|
||||
}
|
||||
|
||||
export default { startCrowdsec, stopCrowdsec, statusCrowdsec, importCrowdsecConfig, exportCrowdsecConfig, listCrowdsecFiles, readCrowdsecFile, writeCrowdsecFile, listCrowdsecDecisions, banIP, unbanIP, getCrowdsecKeyStatus }
|
||||
export interface CrowdSecWhitelistEntry {
|
||||
uuid: string
|
||||
ip_or_cidr: string
|
||||
reason: string
|
||||
created_at: string
|
||||
updated_at: string
|
||||
}
|
||||
|
||||
export interface AddWhitelistPayload {
|
||||
ip_or_cidr: string
|
||||
reason: string
|
||||
}
|
||||
|
||||
export const listWhitelists = async (): Promise<CrowdSecWhitelistEntry[]> => {
|
||||
const resp = await client.get<{ whitelist: CrowdSecWhitelistEntry[] }>('/admin/crowdsec/whitelist')
|
||||
return resp.data.whitelist
|
||||
}
|
||||
|
||||
export const addWhitelist = async (data: AddWhitelistPayload): Promise<CrowdSecWhitelistEntry> => {
|
||||
const resp = await client.post<CrowdSecWhitelistEntry>('/admin/crowdsec/whitelist', data)
|
||||
return resp.data
|
||||
}
|
||||
|
||||
export const deleteWhitelist = async (uuid: string): Promise<void> => {
|
||||
await client.delete(`/admin/crowdsec/whitelist/${uuid}`)
|
||||
}
|
||||
|
||||
// Aggregate default export mirroring the named exports above.
export default {
  startCrowdsec,
  stopCrowdsec,
  statusCrowdsec,
  importCrowdsecConfig,
  exportCrowdsecConfig,
  listCrowdsecFiles,
  readCrowdsecFile,
  writeCrowdsecFile,
  listCrowdsecDecisions,
  banIP,
  unbanIP,
  getCrowdsecKeyStatus,
  listWhitelists,
  addWhitelist,
  deleteWhitelist,
}
|
||||
|
||||
155
frontend/src/hooks/__tests__/useCrowdSecWhitelist.test.ts
Normal file
155
frontend/src/hooks/__tests__/useCrowdSecWhitelist.test.ts
Normal file
@@ -0,0 +1,155 @@
|
||||
import { QueryClientProvider } from '@tanstack/react-query'
|
||||
import { renderHook, act, waitFor } from '@testing-library/react'
|
||||
import React from 'react'
|
||||
import { vi, describe, it, expect, beforeEach } from 'vitest'
|
||||
|
||||
import * as crowdsecApi from '../../api/crowdsec'
|
||||
import * as toastUtil from '../../utils/toast'
|
||||
import { createTestQueryClient } from '../../test/createTestQueryClient'
|
||||
import { useWhitelistEntries, useAddWhitelist, useDeleteWhitelist } from '../useCrowdSecWhitelist'
|
||||
|
||||
import type { CrowdSecWhitelistEntry } from '../../api/crowdsec'
|
||||
|
||||
vi.mock('../../api/crowdsec', () => ({
|
||||
listWhitelists: vi.fn(),
|
||||
addWhitelist: vi.fn(),
|
||||
deleteWhitelist: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('../../utils/toast', () => ({
|
||||
toast: {
|
||||
success: vi.fn(),
|
||||
error: vi.fn(),
|
||||
},
|
||||
}))
|
||||
|
||||
const wrapper = ({ children }: { children: React.ReactNode }) => {
|
||||
const qc = createTestQueryClient()
|
||||
return React.createElement(QueryClientProvider, { client: qc }, children)
|
||||
}
|
||||
|
||||
const mockEntry: CrowdSecWhitelistEntry = {
|
||||
uuid: 'abc-123',
|
||||
ip_or_cidr: '192.168.1.1',
|
||||
reason: 'trusted device',
|
||||
created_at: '2025-01-01T00:00:00Z',
|
||||
updated_at: '2025-01-01T00:00:00Z',
|
||||
}
|
||||
|
||||
describe('useWhitelistEntries', () => {
|
||||
beforeEach(() => vi.clearAllMocks())
|
||||
|
||||
it('returns whitelist entries on success', async () => {
|
||||
vi.mocked(crowdsecApi.listWhitelists).mockResolvedValue([mockEntry])
|
||||
|
||||
const { result } = renderHook(() => useWhitelistEntries(), { wrapper })
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true))
|
||||
|
||||
expect(result.current.data).toEqual([mockEntry])
|
||||
})
|
||||
|
||||
it('returns empty array when no entries', async () => {
|
||||
vi.mocked(crowdsecApi.listWhitelists).mockResolvedValue([])
|
||||
|
||||
const { result } = renderHook(() => useWhitelistEntries(), { wrapper })
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true))
|
||||
|
||||
expect(result.current.data).toEqual([])
|
||||
})
|
||||
})
|
||||
|
||||
describe('useAddWhitelist', () => {
|
||||
beforeEach(() => vi.clearAllMocks())
|
||||
|
||||
it('calls addWhitelist and shows success toast on success', async () => {
|
||||
vi.mocked(crowdsecApi.addWhitelist).mockResolvedValue(mockEntry)
|
||||
|
||||
const { result } = renderHook(() => useAddWhitelist(), { wrapper })
|
||||
|
||||
await act(async () => {
|
||||
result.current.mutate({ ip_or_cidr: '192.168.1.1', reason: 'test' })
|
||||
})
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true))
|
||||
|
||||
expect(crowdsecApi.addWhitelist).toHaveBeenCalledWith({ ip_or_cidr: '192.168.1.1', reason: 'test' })
|
||||
expect(toastUtil.toast.success).toHaveBeenCalledWith('Whitelist entry added')
|
||||
})
|
||||
|
||||
it('shows error toast with server message on failure', async () => {
|
||||
vi.mocked(crowdsecApi.addWhitelist).mockRejectedValue(new Error('IP already whitelisted'))
|
||||
|
||||
const { result } = renderHook(() => useAddWhitelist(), { wrapper })
|
||||
|
||||
await act(async () => {
|
||||
result.current.mutate({ ip_or_cidr: '10.0.0.0/8', reason: '' })
|
||||
})
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true))
|
||||
|
||||
expect(toastUtil.toast.error).toHaveBeenCalledWith('IP already whitelisted')
|
||||
})
|
||||
|
||||
it('shows generic error toast for non-Error failures', async () => {
|
||||
vi.mocked(crowdsecApi.addWhitelist).mockRejectedValue('unexpected')
|
||||
|
||||
const { result } = renderHook(() => useAddWhitelist(), { wrapper })
|
||||
|
||||
await act(async () => {
|
||||
result.current.mutate({ ip_or_cidr: '10.0.0.1', reason: '' })
|
||||
})
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true))
|
||||
|
||||
expect(toastUtil.toast.error).toHaveBeenCalledWith('Failed to add whitelist entry')
|
||||
})
|
||||
})
|
||||
|
||||
describe('useDeleteWhitelist', () => {
|
||||
beforeEach(() => vi.clearAllMocks())
|
||||
|
||||
it('calls deleteWhitelist and shows success toast on success', async () => {
|
||||
vi.mocked(crowdsecApi.deleteWhitelist).mockResolvedValue(undefined)
|
||||
|
||||
const { result } = renderHook(() => useDeleteWhitelist(), { wrapper })
|
||||
|
||||
await act(async () => {
|
||||
result.current.mutate('abc-123')
|
||||
})
|
||||
|
||||
await waitFor(() => expect(result.current.isSuccess).toBe(true))
|
||||
|
||||
expect(crowdsecApi.deleteWhitelist).toHaveBeenCalledWith('abc-123')
|
||||
expect(toastUtil.toast.success).toHaveBeenCalledWith('Whitelist entry removed')
|
||||
})
|
||||
|
||||
it('shows error toast with server message on failure', async () => {
|
||||
vi.mocked(crowdsecApi.deleteWhitelist).mockRejectedValue(new Error('Entry not found'))
|
||||
|
||||
const { result } = renderHook(() => useDeleteWhitelist(), { wrapper })
|
||||
|
||||
await act(async () => {
|
||||
result.current.mutate('bad-uuid')
|
||||
})
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true))
|
||||
|
||||
expect(toastUtil.toast.error).toHaveBeenCalledWith('Entry not found')
|
||||
})
|
||||
|
||||
it('shows generic error toast for non-Error failures', async () => {
|
||||
vi.mocked(crowdsecApi.deleteWhitelist).mockRejectedValue(null)
|
||||
|
||||
const { result } = renderHook(() => useDeleteWhitelist(), { wrapper })
|
||||
|
||||
await act(async () => {
|
||||
result.current.mutate('some-uuid')
|
||||
})
|
||||
|
||||
await waitFor(() => expect(result.current.isError).toBe(true))
|
||||
|
||||
expect(toastUtil.toast.error).toHaveBeenCalledWith('Failed to remove whitelist entry')
|
||||
})
|
||||
})
|
||||
38
frontend/src/hooks/useCrowdSecWhitelist.ts
Normal file
38
frontend/src/hooks/useCrowdSecWhitelist.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'
|
||||
|
||||
import { listWhitelists, addWhitelist, deleteWhitelist, type AddWhitelistPayload } from '../api/crowdsec'
|
||||
import { toast } from '../utils/toast'
|
||||
|
||||
export const useWhitelistEntries = () =>
|
||||
useQuery({
|
||||
queryKey: ['crowdsec-whitelist'],
|
||||
queryFn: listWhitelists,
|
||||
})
|
||||
|
||||
export const useAddWhitelist = () => {
|
||||
const queryClient = useQueryClient()
|
||||
return useMutation({
|
||||
mutationFn: (data: AddWhitelistPayload) => addWhitelist(data),
|
||||
onSuccess: () => {
|
||||
toast.success('Whitelist entry added')
|
||||
queryClient.invalidateQueries({ queryKey: ['crowdsec-whitelist'] })
|
||||
},
|
||||
onError: (err: unknown) => {
|
||||
toast.error(err instanceof Error ? err.message : 'Failed to add whitelist entry')
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export const useDeleteWhitelist = () => {
|
||||
const queryClient = useQueryClient()
|
||||
return useMutation({
|
||||
mutationFn: (uuid: string) => deleteWhitelist(uuid),
|
||||
onSuccess: () => {
|
||||
toast.success('Whitelist entry removed')
|
||||
queryClient.invalidateQueries({ queryKey: ['crowdsec-whitelist'] })
|
||||
},
|
||||
onError: (err: unknown) => {
|
||||
toast.error(err instanceof Error ? err.message : 'Failed to remove whitelist entry')
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -6,10 +6,11 @@ import { useTranslation } from 'react-i18next'
|
||||
import { useNavigate, Link } from 'react-router-dom'
|
||||
|
||||
import { createBackup } from '../api/backups'
|
||||
import { exportCrowdsecConfig, importCrowdsecConfig, listCrowdsecFiles, readCrowdsecFile, writeCrowdsecFile, listCrowdsecDecisions, banIP, unbanIP, type CrowdSecDecision, statusCrowdsec, type CrowdSecStatus, startCrowdsec } from '../api/crowdsec'
|
||||
import { exportCrowdsecConfig, importCrowdsecConfig, listCrowdsecFiles, readCrowdsecFile, writeCrowdsecFile, listCrowdsecDecisions, banIP, unbanIP, type CrowdSecDecision, type CrowdSecWhitelistEntry, statusCrowdsec, type CrowdSecStatus, startCrowdsec } from '../api/crowdsec'
|
||||
import { getFeatureFlags } from '../api/featureFlags'
|
||||
import { listCrowdsecPresets, pullCrowdsecPreset, applyCrowdsecPreset, getCrowdsecPresetCache } from '../api/presets'
|
||||
import { getSecurityStatus } from '../api/security'
|
||||
import { getMyIP } from '../api/system'
|
||||
import { CrowdSecBouncerKeyDisplay } from '../components/CrowdSecBouncerKeyDisplay'
|
||||
import { ConfigReloadOverlay } from '../components/LoadingStates'
|
||||
import { Button } from '../components/ui/Button'
|
||||
@@ -19,6 +20,7 @@ import { Skeleton } from '../components/ui/Skeleton'
|
||||
import { Tabs, TabsContent, TabsList, TabsTrigger } from '../components/ui/Tabs'
|
||||
import { CROWDSEC_PRESETS, type CrowdsecPreset } from '../data/crowdsecPresets'
|
||||
import { useConsoleStatus, useEnrollConsole, useClearConsoleEnrollment } from '../hooks/useConsoleEnrollment'
|
||||
import { useWhitelistEntries, useAddWhitelist, useDeleteWhitelist } from '../hooks/useCrowdSecWhitelist'
|
||||
import { buildCrowdsecExportFilename, downloadCrowdsecExport, promptCrowdsecFilename } from '../utils/crowdsecExport'
|
||||
import { toast } from '../utils/toast'
|
||||
|
||||
@@ -36,6 +38,8 @@ export default function CrowdSecConfig() {
|
||||
const [showBanModal, setShowBanModal] = useState(false)
|
||||
const [banForm, setBanForm] = useState({ ip: '', duration: '24h', reason: '' })
|
||||
const [confirmUnban, setConfirmUnban] = useState<CrowdSecDecision | null>(null)
|
||||
const [whitelistForm, setWhitelistForm] = useState<{ ip_or_cidr: string; reason: string }>({ ip_or_cidr: '', reason: '' })
|
||||
const [confirmDeleteWhitelist, setConfirmDeleteWhitelist] = useState<CrowdSecWhitelistEntry | null>(null)
|
||||
const [isApplyingPreset, setIsApplyingPreset] = useState(false)
|
||||
const [presetPreview, setPresetPreview] = useState<string>('')
|
||||
const [presetMeta, setPresetMeta] = useState<{ cacheKey?: string; etag?: string; retrievedAt?: string; source?: string } | null>(null)
|
||||
@@ -361,6 +365,25 @@ export default function CrowdSecConfig() {
|
||||
},
|
||||
})
|
||||
|
||||
const whitelistQuery = useWhitelistEntries()
|
||||
const addWhitelistMutation = useAddWhitelist()
|
||||
const deleteWhitelistMutation = useDeleteWhitelist()
|
||||
|
||||
const whitelistInlineError = addWhitelistMutation.error instanceof Error
|
||||
? addWhitelistMutation.error.message
|
||||
: addWhitelistMutation.error != null
|
||||
? 'Failed to add entry'
|
||||
: null
|
||||
|
||||
const handleAddMyIP = async () => {
|
||||
try {
|
||||
const result = await getMyIP()
|
||||
setWhitelistForm((prev) => ({ ...prev, ip_or_cidr: result.ip }))
|
||||
} catch {
|
||||
toast.error('Failed to detect your IP address')
|
||||
}
|
||||
}
|
||||
|
||||
const handleExport = async () => {
|
||||
const defaultName = buildCrowdsecExportFilename()
|
||||
const filename = promptCrowdsecFilename(defaultName)
|
||||
@@ -517,7 +540,9 @@ export default function CrowdSecConfig() {
|
||||
pullPresetMutation.isPending ||
|
||||
isApplyingPreset ||
|
||||
banMutation.isPending ||
|
||||
unbanMutation.isPending
|
||||
unbanMutation.isPending ||
|
||||
addWhitelistMutation.isPending ||
|
||||
deleteWhitelistMutation.isPending
|
||||
|
||||
// Determine contextual message
|
||||
const getMessage = () => {
|
||||
@@ -539,6 +564,12 @@ export default function CrowdSecConfig() {
|
||||
if (unbanMutation.isPending) {
|
||||
return { message: 'Guardian lowers shield...', submessage: 'Unbanning IP address' }
|
||||
}
|
||||
if (addWhitelistMutation.isPending) {
|
||||
return { message: 'Guardian updates list...', submessage: 'Adding IP to whitelist' }
|
||||
}
|
||||
if (deleteWhitelistMutation.isPending) {
|
||||
return { message: 'Guardian updates list...', submessage: 'Removing from whitelist' }
|
||||
}
|
||||
return { message: 'Strengthening the guard...', submessage: 'Configuration in progress' }
|
||||
}
|
||||
|
||||
@@ -565,6 +596,7 @@ export default function CrowdSecConfig() {
|
||||
<TabsList>
|
||||
<TabsTrigger value="config">{t('security.crowdsec.tabs.config', 'Configuration')}</TabsTrigger>
|
||||
<TabsTrigger value="dashboard">{t('security.crowdsec.tabs.dashboard', 'Dashboard')}</TabsTrigger>
|
||||
{isLocalMode && <TabsTrigger value="whitelist">{t('crowdsecConfig.whitelist.tabLabel', 'Whitelist')}</TabsTrigger>}
|
||||
</TabsList>
|
||||
|
||||
<TabsContent value="dashboard" className="mt-4">
|
||||
@@ -1241,6 +1273,135 @@ export default function CrowdSecConfig() {
|
||||
</Card>
|
||||
|
||||
</TabsContent>
|
||||
|
||||
{isLocalMode && (
|
||||
<TabsContent value="whitelist" className="mt-4">
|
||||
<Card>
|
||||
<div className="space-y-4">
|
||||
<div className="flex items-center gap-2">
|
||||
<Shield className="h-5 w-5 text-green-400" />
|
||||
<h3 className="text-md font-semibold">{t('crowdsecConfig.whitelist.title', 'IP Whitelist')}</h3>
|
||||
</div>
|
||||
<p className="text-sm text-gray-400">
|
||||
{t('crowdsecConfig.whitelist.description', 'Whitelisted IPs and CIDRs are never blocked by CrowdSec, even if they trigger alerts.')}
|
||||
</p>
|
||||
|
||||
{/* Add entry form */}
|
||||
<div className="flex flex-wrap gap-3 items-end">
|
||||
<div className="flex-1 min-w-[180px]">
|
||||
<Input
|
||||
id="whitelist-ip"
|
||||
label={t('crowdsecConfig.whitelist.ipLabel', 'IP or CIDR')}
|
||||
placeholder="192.168.1.1 or 10.0.0.0/8"
|
||||
value={whitelistForm.ip_or_cidr}
|
||||
onChange={(e) => {
|
||||
setWhitelistForm((prev) => ({ ...prev, ip_or_cidr: e.target.value }))
|
||||
if (addWhitelistMutation.error) addWhitelistMutation.reset()
|
||||
}}
|
||||
error={whitelistInlineError ?? undefined}
|
||||
errorTestId="whitelist-ip-error"
|
||||
aria-required={true}
|
||||
data-testid="whitelist-ip-input"
|
||||
/>
|
||||
</div>
|
||||
<div className="flex-1 min-w-[180px]">
|
||||
<Input
|
||||
id="whitelist-reason"
|
||||
label={t('crowdsecConfig.whitelist.reasonLabel', 'Reason')}
|
||||
placeholder={t('crowdsecConfig.whitelist.reasonPlaceholder', 'Optional reason')}
|
||||
value={whitelistForm.reason}
|
||||
onChange={(e) => setWhitelistForm((prev) => ({ ...prev, reason: e.target.value }))}
|
||||
data-testid="whitelist-reason-input"
|
||||
/>
|
||||
</div>
|
||||
<div className="flex gap-2 pb-0.5">
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="sm"
|
||||
type="button"
|
||||
onClick={handleAddMyIP}
|
||||
data-testid="whitelist-add-my-ip-btn"
|
||||
>
|
||||
{t('crowdsecConfig.whitelist.addMyIp', 'Add My IP')}
|
||||
</Button>
|
||||
<Button
|
||||
size="sm"
|
||||
type="button"
|
||||
onClick={() => {
|
||||
addWhitelistMutation.mutate(whitelistForm, {
|
||||
onSuccess: () => setWhitelistForm({ ip_or_cidr: '', reason: '' }),
|
||||
})
|
||||
}}
|
||||
disabled={!whitelistForm.ip_or_cidr.trim() || addWhitelistMutation.isPending}
|
||||
isLoading={addWhitelistMutation.isPending}
|
||||
data-testid="whitelist-add-btn"
|
||||
>
|
||||
{t('common.add', 'Add')}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Entries table */}
|
||||
{whitelistQuery.isLoading ? (
|
||||
<div className="space-y-2">
|
||||
<Skeleton className="h-8 w-full" />
|
||||
<Skeleton className="h-8 w-full" />
|
||||
<Skeleton className="h-8 w-full" />
|
||||
</div>
|
||||
) : !whitelistQuery.data?.length ? (
|
||||
<p className="text-sm text-gray-500" data-testid="whitelist-empty">
|
||||
{t('crowdsecConfig.whitelist.none', 'No whitelist entries')}
|
||||
</p>
|
||||
) : (
|
||||
<div className="overflow-x-auto">
|
||||
<table className="w-full text-sm">
|
||||
<thead>
|
||||
<tr className="border-b border-gray-700">
|
||||
<th className="text-left py-2 px-3 text-gray-400 font-medium" scope="col">
|
||||
{t('crowdsecConfig.whitelist.columnIp', 'IP / CIDR')}
|
||||
</th>
|
||||
<th className="text-left py-2 px-3 text-gray-400 font-medium" scope="col">
|
||||
{t('crowdsecConfig.whitelist.columnReason', 'Reason')}
|
||||
</th>
|
||||
<th className="text-left py-2 px-3 text-gray-400 font-medium" scope="col">
|
||||
{t('crowdsecConfig.whitelist.columnAdded', 'Added')}
|
||||
</th>
|
||||
<th className="text-right py-2 px-3 text-gray-400 font-medium" scope="col">
|
||||
{t('crowdsecConfig.bannedIps.actions')}
|
||||
</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{whitelistQuery.data.map((entry) => (
|
||||
<tr key={entry.uuid} className="border-b border-gray-800 hover:bg-gray-800/50">
|
||||
<td className="py-2 px-3 font-mono text-white">{entry.ip_or_cidr}</td>
|
||||
<td className="py-2 px-3 text-gray-300">{entry.reason || '-'}</td>
|
||||
<td className="py-2 px-3 text-gray-300">
|
||||
{entry.created_at ? new Date(entry.created_at).toLocaleString() : '-'}
|
||||
</td>
|
||||
<td className="py-2 px-3 text-right">
|
||||
<Button
|
||||
variant="danger"
|
||||
size="sm"
|
||||
onClick={() => setConfirmDeleteWhitelist(entry)}
|
||||
aria-label={`${t('crowdsecConfig.whitelist.deleteAriaLabel', 'Remove whitelist entry for')} ${entry.ip_or_cidr}`}
|
||||
data-testid="whitelist-delete-btn"
|
||||
>
|
||||
<Trash2 className="h-3 w-3 mr-1" />
|
||||
{t('crowdsecConfig.whitelist.delete', 'Delete')}
|
||||
</Button>
|
||||
</td>
|
||||
</tr>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</Card>
|
||||
</TabsContent>
|
||||
)}
|
||||
|
||||
</Tabs>
|
||||
</div>
|
||||
|
||||
@@ -1386,6 +1547,54 @@ export default function CrowdSecConfig() {
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Delete Whitelist Entry Modal */}
|
||||
{confirmDeleteWhitelist && (
|
||||
<div
|
||||
className="fixed inset-0 z-50 flex items-center justify-center"
|
||||
role="dialog"
|
||||
aria-modal="true"
|
||||
aria-labelledby="whitelist-delete-modal-title"
|
||||
>
|
||||
<button
|
||||
type="button"
|
||||
className="absolute inset-0 bg-black/60 focus:outline-none focus-visible:ring-2 focus-visible:ring-inset focus-visible:ring-white"
|
||||
onClick={() => setConfirmDeleteWhitelist(null)}
|
||||
aria-label={t('common.close')}
|
||||
/>
|
||||
<div
|
||||
className="relative z-10 bg-gray-900 rounded-lg border border-gray-700 p-6 max-w-md w-full mx-4 shadow-xl"
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Escape') setConfirmDeleteWhitelist(null)
|
||||
}}
|
||||
>
|
||||
<h2 id="whitelist-delete-modal-title" className="text-lg font-semibold text-white mb-2">
|
||||
{t('crowdsecConfig.whitelist.deleteModal.title', 'Remove Whitelist Entry')}
|
||||
</h2>
|
||||
<p className="text-sm text-gray-300 mb-4">
|
||||
{t('crowdsecConfig.whitelist.deleteModal.body', 'Remove {{ip}} from the whitelist? CrowdSec may then block this IP if it triggers alerts.', { ip: confirmDeleteWhitelist.ip_or_cidr })}
|
||||
</p>
|
||||
<div className="flex justify-end gap-3">
|
||||
<Button
|
||||
variant="secondary"
|
||||
onClick={() => setConfirmDeleteWhitelist(null)}
|
||||
autoFocus
|
||||
>
|
||||
{t('common.cancel', 'Cancel')}
|
||||
</Button>
|
||||
<Button
|
||||
variant="danger"
|
||||
onClick={() => deleteWhitelistMutation.mutate(confirmDeleteWhitelist.uuid, {
|
||||
onSuccess: () => setConfirmDeleteWhitelist(null),
|
||||
})}
|
||||
isLoading={deleteWhitelistMutation.isPending}
|
||||
>
|
||||
{t('crowdsecConfig.whitelist.deleteModal.submit', 'Remove')}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -435,7 +435,7 @@ export default function Security() {
|
||||
<ShieldAlert className={`w-5 h-5 ${crowdsecChecked ? 'text-success' : 'text-content-muted'}`} />
|
||||
</div>
|
||||
<div>
|
||||
<CardTitle className="text-base">{t('security.crowdsec')}</CardTitle>
|
||||
<CardTitle className="text-base">{t('security.crowdsec.title')}</CardTitle>
|
||||
<CardDescription>{t('security.crowdsecDescription')}</CardDescription>
|
||||
</div>
|
||||
</div>
|
||||
@@ -485,7 +485,7 @@ export default function Security() {
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center gap-2">
|
||||
<Badge variant="outline" size="sm">{t('security.layer2')}</Badge>
|
||||
<Badge variant="primary" size="sm">{t('security.acl')}</Badge>
|
||||
<Badge variant="primary" size="sm">{t('security.acl.badge')}</Badge>
|
||||
</div>
|
||||
<Badge variant={status.acl.enabled ? 'success' : 'default'}>
|
||||
{status.acl.enabled ? t('common.enabled') : t('common.disabled')}
|
||||
@@ -538,7 +538,7 @@ export default function Security() {
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center gap-2">
|
||||
<Badge variant="outline" size="sm">{t('security.layer3')}</Badge>
|
||||
<Badge variant="primary" size="sm">{t('security.waf')}</Badge>
|
||||
<Badge variant="primary" size="sm">{t('security.waf.badge')}</Badge>
|
||||
</div>
|
||||
<Badge variant={status.waf.enabled ? 'success' : 'default'}>
|
||||
{status.waf.enabled ? t('common.enabled') : t('common.disabled')}
|
||||
|
||||
321
frontend/src/pages/__tests__/CrowdSecConfig.whitelist.test.tsx
Normal file
321
frontend/src/pages/__tests__/CrowdSecConfig.whitelist.test.tsx
Normal file
@@ -0,0 +1,321 @@
|
||||
import { screen, waitFor } from '@testing-library/react'
|
||||
import userEvent from '@testing-library/user-event'
|
||||
import { AxiosError } from 'axios'
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest'
|
||||
|
||||
import * as backupsApi from '../../api/backups'
|
||||
import * as crowdsecApi from '../../api/crowdsec'
|
||||
import * as featureFlagsApi from '../../api/featureFlags'
|
||||
import * as presetsApi from '../../api/presets'
|
||||
import * as securityApi from '../../api/security'
|
||||
import * as settingsApi from '../../api/settings'
|
||||
import * as systemApi from '../../api/system'
|
||||
import { renderWithQueryClient } from '../../test-utils/renderWithQueryClient'
|
||||
import { toast } from '../../utils/toast'
|
||||
import CrowdSecConfig from '../CrowdSecConfig'
|
||||
|
||||
vi.mock('../../api/security')
|
||||
vi.mock('../../api/crowdsec')
|
||||
vi.mock('../../api/presets')
|
||||
vi.mock('../../api/backups')
|
||||
vi.mock('../../api/settings')
|
||||
vi.mock('../../api/featureFlags')
|
||||
vi.mock('../../api/system')
|
||||
vi.mock('../../hooks/useConsoleEnrollment', () => ({
|
||||
useConsoleStatus: vi.fn(() => ({
|
||||
data: {
|
||||
status: 'not_enrolled',
|
||||
key_present: false,
|
||||
last_error: null,
|
||||
last_attempt_at: null,
|
||||
enrolled_at: null,
|
||||
last_heartbeat_at: null,
|
||||
correlation_id: 'corr-1',
|
||||
tenant: 'default',
|
||||
agent_name: 'charon-agent',
|
||||
},
|
||||
isLoading: false,
|
||||
isRefetching: false,
|
||||
})),
|
||||
useEnrollConsole: vi.fn(() => ({
|
||||
mutateAsync: vi.fn().mockResolvedValue({ status: 'enrolling', key_present: false }),
|
||||
isPending: false,
|
||||
})),
|
||||
useClearConsoleEnrollment: vi.fn(() => ({ mutate: vi.fn(), isPending: false })),
|
||||
}))
|
||||
vi.mock('../../components/CrowdSecBouncerKeyDisplay', () => ({
|
||||
CrowdSecBouncerKeyDisplay: () => null,
|
||||
}))
|
||||
vi.mock('../../utils/crowdsecExport', () => ({
|
||||
buildCrowdsecExportFilename: vi.fn(() => 'crowdsec-default.tar.gz'),
|
||||
promptCrowdsecFilename: vi.fn(() => 'crowdsec.tar.gz'),
|
||||
downloadCrowdsecExport: vi.fn(),
|
||||
}))
|
||||
vi.mock('../../utils/toast', () => ({
|
||||
toast: { success: vi.fn(), error: vi.fn(), info: vi.fn() },
|
||||
}))
|
||||
|
||||
// The i18n mock in test setup returns the translation key when no translation is found.
|
||||
// These constants keep assertions in sync with what the component actually renders.
|
||||
const TAB_WHITELIST = 'crowdsecConfig.whitelist.tabLabel'
|
||||
const MODAL_TITLE = 'crowdsecConfig.whitelist.deleteModal.title'
|
||||
const BTN_REMOVE = 'crowdsecConfig.whitelist.deleteModal.submit'
|
||||
|
||||
const baseStatus = {
|
||||
cerberus: { enabled: true },
|
||||
crowdsec: { enabled: true, mode: 'local' as const, api_url: '' },
|
||||
waf: { enabled: true, mode: 'enabled' as const },
|
||||
rate_limit: { enabled: true },
|
||||
acl: { enabled: true },
|
||||
}
|
||||
|
||||
const axiosError = (status: number, message: string, data?: Record<string, unknown>) =>
|
||||
new AxiosError(message, undefined, undefined, undefined, {
|
||||
status,
|
||||
statusText: String(status),
|
||||
headers: {},
|
||||
config: {},
|
||||
data: data ?? { error: message },
|
||||
} as never)
|
||||
|
||||
const mockWhitelistEntries = [
|
||||
{
|
||||
uuid: 'uuid-1',
|
||||
ip_or_cidr: '192.168.1.1',
|
||||
reason: 'Home IP',
|
||||
created_at: '2024-01-01T00:00:00Z',
|
||||
updated_at: '2024-01-01T00:00:00Z',
|
||||
},
|
||||
{
|
||||
uuid: 'uuid-2',
|
||||
ip_or_cidr: '10.0.0.0/8',
|
||||
reason: 'LAN',
|
||||
created_at: '2024-01-02T00:00:00Z',
|
||||
updated_at: '2024-01-02T00:00:00Z',
|
||||
},
|
||||
]
|
||||
|
||||
const renderPage = async () => {
|
||||
renderWithQueryClient(<CrowdSecConfig />)
|
||||
await waitFor(() => screen.getByText('CrowdSec Configuration'))
|
||||
}
|
||||
|
||||
const goToWhitelistTab = async () => {
|
||||
await userEvent.click(screen.getByRole('tab', { name: TAB_WHITELIST }))
|
||||
await waitFor(() => screen.getByTestId('whitelist-ip-input'))
|
||||
}
|
||||
|
||||
describe('CrowdSecConfig – whitelist tab', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
vi.mocked(securityApi.getSecurityStatus).mockResolvedValue(baseStatus)
|
||||
vi.mocked(crowdsecApi.statusCrowdsec).mockResolvedValue({ running: true, pid: 123, lapi_ready: true })
|
||||
vi.mocked(crowdsecApi.listCrowdsecFiles).mockResolvedValue({ files: ['acquis.yaml'] })
|
||||
vi.mocked(crowdsecApi.readCrowdsecFile).mockResolvedValue({ content: '' })
|
||||
vi.mocked(crowdsecApi.writeCrowdsecFile).mockResolvedValue(undefined)
|
||||
vi.mocked(crowdsecApi.listCrowdsecDecisions).mockResolvedValue({ decisions: [] })
|
||||
vi.mocked(crowdsecApi.banIP).mockResolvedValue(undefined)
|
||||
vi.mocked(crowdsecApi.unbanIP).mockResolvedValue(undefined)
|
||||
vi.mocked(crowdsecApi.exportCrowdsecConfig).mockResolvedValue(new Blob(['data']))
|
||||
vi.mocked(crowdsecApi.importCrowdsecConfig).mockResolvedValue(undefined)
|
||||
vi.mocked(crowdsecApi.listWhitelists).mockResolvedValue([])
|
||||
vi.mocked(crowdsecApi.addWhitelist).mockResolvedValue({
|
||||
uuid: 'uuid-new',
|
||||
ip_or_cidr: '1.2.3.4',
|
||||
reason: '',
|
||||
created_at: '',
|
||||
updated_at: '',
|
||||
})
|
||||
vi.mocked(crowdsecApi.deleteWhitelist).mockResolvedValue(undefined)
|
||||
vi.mocked(presetsApi.listCrowdsecPresets).mockResolvedValue({ presets: [] })
|
||||
vi.mocked(presetsApi.pullCrowdsecPreset).mockResolvedValue({
|
||||
status: 'pulled',
|
||||
slug: '',
|
||||
preview: '',
|
||||
cache_key: '',
|
||||
etag: '',
|
||||
retrieved_at: '',
|
||||
source: 'hub',
|
||||
})
|
||||
vi.mocked(presetsApi.applyCrowdsecPreset).mockResolvedValue({
|
||||
status: 'applied',
|
||||
backup: '',
|
||||
reload_hint: false,
|
||||
used_cscli: false,
|
||||
cache_key: '',
|
||||
slug: '',
|
||||
})
|
||||
vi.mocked(presetsApi.getCrowdsecPresetCache).mockResolvedValue({
|
||||
preview: '',
|
||||
cache_key: '',
|
||||
etag: '',
|
||||
})
|
||||
vi.mocked(backupsApi.createBackup).mockResolvedValue({ filename: 'backup.tar.gz' })
|
||||
vi.mocked(settingsApi.updateSetting).mockResolvedValue()
|
||||
vi.mocked(featureFlagsApi.getFeatureFlags).mockResolvedValue({
|
||||
'feature.crowdsec.console_enrollment': false,
|
||||
})
|
||||
vi.mocked(systemApi.getMyIP).mockResolvedValue({ ip: '203.0.113.1', source: 'cloudflare' })
|
||||
})
|
||||
|
||||
it('shows whitelist tab trigger in local mode', async () => {
|
||||
await renderPage()
|
||||
expect(screen.getByRole('tab', { name: TAB_WHITELIST })).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('does not show whitelist tab in disabled mode', async () => {
|
||||
vi.mocked(securityApi.getSecurityStatus).mockResolvedValue({
|
||||
...baseStatus,
|
||||
crowdsec: { enabled: true, mode: 'disabled' as const, api_url: '' },
|
||||
})
|
||||
await renderPage()
|
||||
expect(screen.queryByRole('tab', { name: TAB_WHITELIST })).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('shows empty state when there are no whitelist entries', async () => {
|
||||
await renderPage()
|
||||
await goToWhitelistTab()
|
||||
expect(screen.getByTestId('whitelist-empty')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('renders whitelist entries in the table', async () => {
|
||||
vi.mocked(crowdsecApi.listWhitelists).mockResolvedValue(mockWhitelistEntries)
|
||||
await renderPage()
|
||||
await goToWhitelistTab()
|
||||
expect(await screen.findByText('192.168.1.1')).toBeInTheDocument()
|
||||
expect(screen.getByText('10.0.0.0/8')).toBeInTheDocument()
|
||||
expect(screen.getByText('Home IP')).toBeInTheDocument()
|
||||
expect(screen.getByText('LAN')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('submits a new whitelist entry', async () => {
|
||||
await renderPage()
|
||||
await goToWhitelistTab()
|
||||
await userEvent.type(screen.getByTestId('whitelist-ip-input'), '1.2.3.4')
|
||||
await userEvent.type(screen.getByTestId('whitelist-reason-input'), 'Test reason')
|
||||
await userEvent.click(screen.getByTestId('whitelist-add-btn'))
|
||||
await waitFor(() =>
|
||||
expect(crowdsecApi.addWhitelist).toHaveBeenCalledWith({
|
||||
ip_or_cidr: '1.2.3.4',
|
||||
reason: 'Test reason',
|
||||
}),
|
||||
)
|
||||
})
|
||||
|
||||
it('shows add-whitelist loading overlay while mutation is pending', async () => {
|
||||
let resolveAdd!: (v: (typeof mockWhitelistEntries)[0]) => void
|
||||
vi.mocked(crowdsecApi.addWhitelist).mockImplementationOnce(
|
||||
() =>
|
||||
new Promise((resolve) => {
|
||||
resolveAdd = resolve
|
||||
}),
|
||||
)
|
||||
await renderPage()
|
||||
await goToWhitelistTab()
|
||||
await userEvent.type(screen.getByTestId('whitelist-ip-input'), '1.2.3.4')
|
||||
await userEvent.click(screen.getByTestId('whitelist-add-btn'))
|
||||
await waitFor(() => expect(screen.getByText('Adding IP to whitelist')).toBeInTheDocument())
|
||||
resolveAdd(mockWhitelistEntries[0])
|
||||
})
|
||||
|
||||
it('displays inline error when adding a whitelist entry fails', async () => {
|
||||
vi.mocked(crowdsecApi.addWhitelist).mockRejectedValueOnce(
|
||||
axiosError(400, 'Invalid IP', { error: 'bad ip format' }),
|
||||
)
|
||||
await renderPage()
|
||||
await goToWhitelistTab()
|
||||
await userEvent.type(screen.getByTestId('whitelist-ip-input'), 'bad-ip')
|
||||
await userEvent.click(screen.getByTestId('whitelist-add-btn'))
|
||||
await waitFor(() => expect(screen.getByTestId('whitelist-ip-error')).toBeInTheDocument())
|
||||
})
|
||||
|
||||
it('opens delete confirmation dialog', async () => {
|
||||
vi.mocked(crowdsecApi.listWhitelists).mockResolvedValue(mockWhitelistEntries)
|
||||
await renderPage()
|
||||
await goToWhitelistTab()
|
||||
await userEvent.click((await screen.findAllByTestId('whitelist-delete-btn'))[0])
|
||||
expect(await screen.findByRole('dialog', { name: MODAL_TITLE })).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('cancels whitelist deletion via Cancel button', async () => {
|
||||
vi.mocked(crowdsecApi.listWhitelists).mockResolvedValue(mockWhitelistEntries)
|
||||
await renderPage()
|
||||
await goToWhitelistTab()
|
||||
await userEvent.click((await screen.findAllByTestId('whitelist-delete-btn'))[0])
|
||||
await userEvent.click(await screen.findByRole('button', { name: 'Cancel' }))
|
||||
await waitFor(() =>
|
||||
expect(screen.queryByRole('dialog', { name: MODAL_TITLE })).not.toBeInTheDocument(),
|
||||
)
|
||||
expect(crowdsecApi.deleteWhitelist).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('confirms whitelist entry deletion via Remove button', async () => {
|
||||
vi.mocked(crowdsecApi.listWhitelists).mockResolvedValue(mockWhitelistEntries)
|
||||
await renderPage()
|
||||
await goToWhitelistTab()
|
||||
await userEvent.click((await screen.findAllByTestId('whitelist-delete-btn'))[0])
|
||||
await userEvent.click(await screen.findByRole('button', { name: BTN_REMOVE }))
|
||||
await waitFor(() => expect(crowdsecApi.deleteWhitelist).toHaveBeenCalledWith('uuid-1'))
|
||||
})
|
||||
|
||||
it('shows delete-whitelist loading overlay while mutation is pending', async () => {
|
||||
vi.mocked(crowdsecApi.listWhitelists).mockResolvedValue(mockWhitelistEntries)
|
||||
let resolveDelete!: () => void
|
||||
vi.mocked(crowdsecApi.deleteWhitelist).mockImplementationOnce(
|
||||
() =>
|
||||
new Promise<void>((resolve) => {
|
||||
resolveDelete = resolve
|
||||
}),
|
||||
)
|
||||
await renderPage()
|
||||
await goToWhitelistTab()
|
||||
await userEvent.click((await screen.findAllByTestId('whitelist-delete-btn'))[0])
|
||||
await userEvent.click(await screen.findByRole('button', { name: BTN_REMOVE }))
|
||||
await waitFor(() => expect(screen.getByText('Removing from whitelist')).toBeInTheDocument())
|
||||
resolveDelete()
|
||||
})
|
||||
|
||||
it('closes delete dialog on Escape key', async () => {
|
||||
vi.mocked(crowdsecApi.listWhitelists).mockResolvedValue(mockWhitelistEntries)
|
||||
await renderPage()
|
||||
await goToWhitelistTab()
|
||||
await userEvent.click((await screen.findAllByTestId('whitelist-delete-btn'))[0])
|
||||
expect(await screen.findByRole('dialog', { name: MODAL_TITLE })).toBeInTheDocument()
|
||||
await userEvent.keyboard('{Escape}')
|
||||
await waitFor(() =>
|
||||
expect(screen.queryByRole('dialog', { name: MODAL_TITLE })).not.toBeInTheDocument(),
|
||||
)
|
||||
})
|
||||
|
||||
it('closes delete dialog when backdrop is clicked', async () => {
|
||||
vi.mocked(crowdsecApi.listWhitelists).mockResolvedValue(mockWhitelistEntries)
|
||||
await renderPage()
|
||||
await goToWhitelistTab()
|
||||
await userEvent.click((await screen.findAllByTestId('whitelist-delete-btn'))[0])
|
||||
expect(await screen.findByRole('dialog', { name: MODAL_TITLE })).toBeInTheDocument()
|
||||
await userEvent.click(screen.getByRole('button', { name: /close/i }))
|
||||
await waitFor(() =>
|
||||
expect(screen.queryByRole('dialog', { name: MODAL_TITLE })).not.toBeInTheDocument(),
|
||||
)
|
||||
})
|
||||
|
||||
it('fills IP input when Add My IP is clicked', async () => {
|
||||
await renderPage()
|
||||
await goToWhitelistTab()
|
||||
await userEvent.click(screen.getByTestId('whitelist-add-my-ip-btn'))
|
||||
await waitFor(() => {
|
||||
const input = screen.getByTestId('whitelist-ip-input') as HTMLInputElement
|
||||
expect(input.value).toBe('203.0.113.1')
|
||||
})
|
||||
})
|
||||
|
||||
it('shows error toast when Add My IP request fails', async () => {
|
||||
vi.mocked(systemApi.getMyIP).mockRejectedValueOnce(new Error('network error'))
|
||||
await renderPage()
|
||||
await goToWhitelistTab()
|
||||
await userEvent.click(screen.getByTestId('whitelist-add-my-ip-btn'))
|
||||
await waitFor(() =>
|
||||
expect(toast.error).toHaveBeenCalledWith('Failed to detect your IP address'),
|
||||
)
|
||||
})
|
||||
})
|
||||
@@ -27,7 +27,7 @@ vi.mock('../../hooks/useRemoteServers', () => ({
|
||||
vi.mock('../../hooks/useCertificates', () => ({
|
||||
useCertificates: () => ({
|
||||
certificates: [
|
||||
{ id: 1, status: 'valid', domain: 'test.com' },
|
||||
{ id: 1, status: 'valid', domain: 'test.com', domains: 'test.com,www.test.com' },
|
||||
{ id: 2, status: 'expired', domain: 'expired.com' },
|
||||
],
|
||||
isLoading: false,
|
||||
@@ -84,4 +84,5 @@ describe('Dashboard page', () => {
|
||||
// "1 valid" still renders even though cert.domains is undefined
|
||||
expect(screen.getByText('1 valid')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
})
|
||||
|
||||
@@ -73,6 +73,7 @@ const securityTranslations: Record<string, string> = {
|
||||
'security.waf': 'WAF',
|
||||
'security.rate': 'Rate',
|
||||
'security.crowdsec': 'CrowdSec',
|
||||
'security.crowdsec.title': 'CrowdSec',
|
||||
'security.crowdsecDescription': 'IP Reputation',
|
||||
'security.crowdsecProtects': 'Blocks known attackers, botnets, and malicious IPs',
|
||||
'security.crowdsecDisabledDescription': 'Enable to block known malicious IPs',
|
||||
|
||||
@@ -447,18 +447,23 @@ describe('UsersPage', () => {
|
||||
const user = userEvent.setup()
|
||||
expect(await screen.findByText('Invite User')).toBeInTheDocument()
|
||||
await user.click(screen.getByRole('button', { name: /Invite User/i }))
|
||||
expect(await screen.findByPlaceholderText('user@example.com')).toBeInTheDocument()
|
||||
|
||||
const emailInput = screen.getByPlaceholderText('user@example.com')
|
||||
await user.type(emailInput, 'test@example.com')
|
||||
vi.useFakeTimers()
|
||||
|
||||
try {
|
||||
const emailInput = screen.getByPlaceholderText('user@example.com')
|
||||
fireEvent.change(emailInput, { target: { value: 'test@example.com' } })
|
||||
|
||||
await act(async () => {
|
||||
await vi.advanceTimersByTimeAsync(550)
|
||||
})
|
||||
|
||||
await waitFor(() => {
|
||||
expect(client.post).toHaveBeenCalledWith('/users/preview-invite-url', { email: 'test@example.com' })
|
||||
}, { timeout: 2000 })
|
||||
|
||||
// Look for the preview URL content with ellipsis replacing the token
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('https://charon.example.com/accept-invite?token=...')).toBeInTheDocument()
|
||||
}, { timeout: 2000 })
|
||||
} finally {
|
||||
vi.useRealTimers()
|
||||
}
|
||||
})
|
||||
|
||||
it('debounces URL preview for 500ms', async () => {
|
||||
|
||||
@@ -6,6 +6,7 @@ const coverageThresholdValue =
|
||||
process.env.CHARON_MIN_COVERAGE ?? process.env.CPM_MIN_COVERAGE ?? '87.0'
|
||||
const coverageThreshold = Number.parseFloat(coverageThresholdValue)
|
||||
const resolvedCoverageThreshold = Number.isNaN(coverageThreshold) ? 87.0 : coverageThreshold
|
||||
const coverageReportsDirectory = process.env.VITEST_COVERAGE_REPORTS_DIR ?? './coverage'
|
||||
|
||||
export default defineConfig({
|
||||
plugins: [react()],
|
||||
@@ -34,6 +35,7 @@ export default defineConfig({
|
||||
coverage: {
|
||||
provider: 'v8',
|
||||
clean: false,
|
||||
reportsDirectory: coverageReportsDirectory,
|
||||
reporter: ['text', 'json', 'html', 'lcov', 'json-summary'],
|
||||
exclude: [
|
||||
'node_modules/',
|
||||
|
||||
@@ -4,6 +4,7 @@ cloud.google.com/go/compute v1.14.0/go.mod h1:YfLtxrj9sU4Yxv+sXzZkyPjEyPBZfXHUvj
|
||||
cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY=
|
||||
cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA=
|
||||
cloud.google.com/go/compute/metadata v0.9.0/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10=
|
||||
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0/go.mod h1:P4WPRUkOhJC13W//jWpyfJNDAIpvRbAUIYLX/4jtlE0=
|
||||
github.com/alecthomas/kingpin/v2 v2.4.0 h1:f48lwail6p8zpO1bC4TxtqACaGqHYA22qkHjHpqDjYY=
|
||||
github.com/alecthomas/kingpin/v2 v2.4.0/go.mod h1:0gyi0zQnjuFk8xrkNKamJoyUo382HRL7ATRpFZCw6tE=
|
||||
@@ -18,6 +19,7 @@ github.com/containerd/typeurl/v2 v2.2.0/go.mod h1:8XOOxnyatxSWuG8OfsZXVnAF4iZfed
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY=
|
||||
github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
|
||||
github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE=
|
||||
github.com/envoyproxy/go-control-plane v0.14.0/go.mod h1:NcS5X47pLl/hfqxU70yPwL9ZMkUlwlKxtAohpi2wBEU=
|
||||
github.com/envoyproxy/go-control-plane/envoy v1.36.0/go.mod h1:ty89S1YCCVruQAm9OtKeEkQLTb+Lkz0k8v9W0Oxsv98=
|
||||
github.com/envoyproxy/go-control-plane/ratelimit v0.1.0/go.mod h1:Wk+tMFAFbCXaJPzVVHnPgRKdUdwW/KdbRt94AzgRee4=
|
||||
@@ -53,6 +55,7 @@ github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0V
|
||||
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
|
||||
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
|
||||
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||
github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc=
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU=
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||
github.com/oschwald/geoip2-golang/v2 v2.0.1 h1:YcYoG/L+gmSfk7AlToTmoL0JvblNyhGC8NyVhwDzzi8=
|
||||
@@ -134,6 +137,8 @@ golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6f
|
||||
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
|
||||
google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=
|
||||
google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU=
|
||||
google.golang.org/grpc v1.67.0/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA=
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
|
||||
|
||||
310
package-lock.json
generated
310
package-lock.json
generated
@@ -10,6 +10,7 @@
|
||||
"type-check": "^0.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@axe-core/playwright": "^4.11.2",
|
||||
"@bgotink/playwright-coverage": "^0.3.2",
|
||||
"@playwright/test": "^1.59.1",
|
||||
"@types/eslint-plugin-jsx-a11y": "^6.10.1",
|
||||
@@ -19,9 +20,22 @@
|
||||
"prettier": "^3.8.3",
|
||||
"prettier-plugin-tailwindcss": "^0.7.2",
|
||||
"tar": "^7.5.13",
|
||||
"typescript": "^6.0.2",
|
||||
"vite": "^8.0.8",
|
||||
"vitest": "^4.1.4"
|
||||
"typescript": "^6.0.3",
|
||||
"vite": "^8.0.9",
|
||||
"vitest": "^4.1.5"
|
||||
}
|
||||
},
|
||||
"node_modules/@axe-core/playwright": {
|
||||
"version": "4.11.2",
|
||||
"resolved": "https://registry.npmjs.org/@axe-core/playwright/-/playwright-4.11.2.tgz",
|
||||
"integrity": "sha512-iP6hfNl9G0j/SEUSo8M7D80RbcDo9KRAAfDP4IT5OHB+Wm6zUHIrm8Y51BKI+Oyqduvipf9u1hcRy57zCBKzWQ==",
|
||||
"dev": true,
|
||||
"license": "MPL-2.0",
|
||||
"dependencies": {
|
||||
"axe-core": "~4.11.3"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"playwright-core": ">= 1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@bcoe/v8-coverage": {
|
||||
@@ -231,29 +245,43 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@humanfs/core": {
|
||||
"version": "0.19.1",
|
||||
"resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz",
|
||||
"integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==",
|
||||
"version": "0.19.2",
|
||||
"resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.2.tgz",
|
||||
"integrity": "sha512-UhXNm+CFMWcbChXywFwkmhqjs3PRCmcSa/hfBgLIb7oQ5HNb1wS0icWsGtSAUNgefHeI+eBrA8I1fxmbHsGdvA==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@humanfs/types": "^0.15.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.18.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@humanfs/node": {
|
||||
"version": "0.16.7",
|
||||
"resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.7.tgz",
|
||||
"integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==",
|
||||
"version": "0.16.8",
|
||||
"resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.8.tgz",
|
||||
"integrity": "sha512-gE1eQNZ3R++kTzFUpdGlpmy8kDZD/MLyHqDwqjkVQI0JMdI1D51sy1H958PNXYkM2rAac7e5/CnIKZrHtPh3BQ==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@humanfs/core": "^0.19.1",
|
||||
"@humanfs/core": "^0.19.2",
|
||||
"@humanfs/types": "^0.15.0",
|
||||
"@humanwhocodes/retry": "^0.4.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.18.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@humanfs/types": {
|
||||
"version": "0.15.0",
|
||||
"resolved": "https://registry.npmjs.org/@humanfs/types/-/types-0.15.0.tgz",
|
||||
"integrity": "sha512-ZZ1w0aoQkwuUuC7Yf+7sdeaNfqQiiLcSRbfI08oAxqLtpXQr9AIVX7Ay7HLDuiLYAaFPu8oBYNq/QIi9URHJ3Q==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=18.18.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@humanwhocodes/module-importer": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
|
||||
@@ -381,9 +409,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@oxc-project/types": {
|
||||
"version": "0.124.0",
|
||||
"resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.124.0.tgz",
|
||||
"integrity": "sha512-VBFWMTBvHxS11Z5Lvlr3IWgrwhMTXV+Md+EQF0Xf60+wAdsGFTBx7X7K/hP4pi8N7dcm1RvcHwDxZ16Qx8keUg==",
|
||||
"version": "0.126.0",
|
||||
"resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.126.0.tgz",
|
||||
"integrity": "sha512-oGfVtjAgwQVVpfBrbtk4e1XDyWHRFta6BS3GWVzrF8xYBT2VGQAk39yJS/wFSMrZqoiCU4oghT3Ch0HaHGIHcQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"funding": {
|
||||
@@ -407,9 +435,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/binding-android-arm64": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-android-arm64/-/binding-android-arm64-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-YYe6aWruPZDtHNpwu7+qAHEMbQ/yRl6atqb/AhznLTnD3UY99Q1jE7ihLSahNWkF4EqRPVC4SiR4O0UkLK02tA==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-android-arm64/-/binding-android-arm64-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-rhY3k7Bsae9qQfOtph2Pm2jZEA+s8Gmjoz4hhmx70K9iMQ/ddeae+xhRQcM5IuVx5ry1+bGfkvMn7D6MJggVSA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
@@ -424,9 +452,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/binding-darwin-arm64": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-arm64/-/binding-darwin-arm64-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-oArR/ig8wNTPYsXL+Mzhs0oxhxfuHRfG7Ikw7jXsw8mYOtk71W0OkF2VEVh699pdmzjPQsTjlD1JIOoHkLP1Fg==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-arm64/-/binding-darwin-arm64-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-rNz0yK078yrNn3DrdgN+PKiMOW8HfQ92jQiXxwX8yW899ayV00MLVdaCNeVBhG/TbH3ouYVObo8/yrkiectkcQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
@@ -441,9 +469,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/binding-darwin-x64": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-x64/-/binding-darwin-x64-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-YzeVqOqjPYvUbJSWJ4EDL8ahbmsIXQpgL3JVipmN+MX0XnXMeWomLN3Fb+nwCmP/jfyqte5I3XRSm7OfQrbyxw==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-x64/-/binding-darwin-x64-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-r/OmdR00HmD4i79Z//xO06uEPOq5hRXdhw7nzkxQxwSavs3PSHa1ijntdpOiZ2mzOQ3fVVu8C1M19FoNM+dMUQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
@@ -458,9 +486,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/binding-freebsd-x64": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-freebsd-x64/-/binding-freebsd-x64-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-9Erhx956jeQ0nNTyif1+QWAXDRD38ZNjr//bSHrt6wDwB+QkAfl2q6Mn1k6OBPerznjRmbM10lgRb1Pli4xZPw==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-freebsd-x64/-/binding-freebsd-x64-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-KcRE5w8h0OnjUatG8pldyD14/CQ5Phs1oxfR+3pKDjboHRo9+MkqQaiIZlZRpsxC15paeXme/I127tUa9TXJ6g==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
@@ -475,9 +503,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/binding-linux-arm-gnueabihf": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-cVwk0w8QbZJGTnP/AHQBs5yNwmpgGYStL88t4UIaqcvYJWBfS0s3oqVLZPwsPU6M0zlW4GqjP0Zq5MnAGwFeGA==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-bT0guA1bpxEJ/ZhTRniQf7rNF8ybvXOuWbNIeLABaV5NGjx4EtOWBTSRGWFU9ZWVkPOZ+HNFP8RMcBokBiZ0Kg==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
@@ -492,9 +520,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/binding-linux-arm64-gnu": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-eBZ/u8iAK9SoHGanqe/jrPnY0JvBN6iXbVOsbO38mbz+ZJsaobExAm1Iu+rxa4S1l2FjG0qEZn4Rc6X8n+9M+w==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-+tHktCHWV8BDQSjemUqm/Jl/TPk3QObCTIjmdDy/nlupcujZghmKK2962LYrqFpWu+ai01AN/REOH3NEpqvYQg==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
@@ -509,9 +537,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/binding-linux-arm64-musl": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-ZvRYMGrAklV9PEkgt4LQM6MjQX2P58HPAuecwYObY2DhS2t35R0I810bKi0wmaYORt6m/2Sm+Z+nFgb0WhXNcQ==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-3fPzdREH806oRLxpTWW1Gt4tQHs0TitZFOECB2xzCFLPKnSOy90gwA7P29cksYilFO6XVRY1kzga0cL2nRjKPg==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
@@ -526,9 +554,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/binding-linux-ppc64-gnu": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-VDpgGBzgfg5hLg+uBpCLoFG5kVvEyafmfxGUV0UHLcL5irxAK7PKNeC2MwClgk6ZAiNhmo9FLhRYgvMmedLtnQ==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-EKwI1tSrLs7YVw+JPJT/G2dJQ1jl9qlTTTEG0V2Ok/RdOenRfBw2PQdLPyjhIu58ocdBfP7vIRN/pvMsPxs/AQ==",
|
||||
"cpu": [
|
||||
"ppc64"
|
||||
],
|
||||
@@ -543,9 +571,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/binding-linux-s390x-gnu": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-y1uXY3qQWCzcPgRJATPSOUP4tCemh4uBdY7e3EZbVwCJTY3gLJWnQABgeUetvED+bt1FQ01OeZwvhLS2bpNrAQ==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-Uknladnb3Sxqu6SEcqBldQyJUpk8NleooZEc0MbRBJ4inEhRYWZX0NJu12vNf2mqAq7gsofAxHrGghiUYjhaLQ==",
|
||||
"cpu": [
|
||||
"s390x"
|
||||
],
|
||||
@@ -560,9 +588,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/binding-linux-x64-gnu": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-023bTPBod7J3Y/4fzAN6QtpkSABR0rigtrwaP+qSEabUh5zf6ELr9Nc7GujaROuPY3uwdSIXWrvhn1KxOvurWA==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-FIb8+uG49sZBtLTn+zt1AJ20TqVcqWeSIyoVt0or7uAWesgKaHbiBh6OpA/k9v0LTt+PTrb1Lao133kP4uVxkg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
@@ -577,9 +605,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/binding-linux-x64-musl": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-musl/-/binding-linux-x64-musl-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-witB2O0/hU4CgfOOKUoeFgQ4GktPi1eEbAhaLAIpgD6+ZnhcPkUtPsoKKHRzmOoWPZue46IThdSgdo4XneOLYw==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-musl/-/binding-linux-x64-musl-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-RuERhF9/EgWxZEXYWCOaViUWHIboceK4/ivdtQ3R0T44NjLkIIlGIAVAuCddFxsZ7vnRHtNQUrt2vR2n2slB2w==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
@@ -594,9 +622,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/binding-openharmony-arm64": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-openharmony-arm64/-/binding-openharmony-arm64-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-UCL68NJ0Ud5zRipXZE9dF5PmirzJE4E4BCIOOssEnM7wLDsxjc6Qb0sGDxTNRTP53I6MZpygyCpY8Aa8sPfKPg==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-openharmony-arm64/-/binding-openharmony-arm64-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-mXcXnvd9GpazCxeUCCnZ2+YF7nut+ZOEbE4GtaiPtyY6AkhZWbK70y1KK3j+RDhjVq5+U8FySkKRb/+w0EeUwA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
@@ -611,9 +639,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/binding-wasm32-wasi": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-wasm32-wasi/-/binding-wasm32-wasi-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-ApLruZq/ig+nhaE7OJm4lDjayUnOHVUa77zGeqnqZ9pn0ovdVbbNPerVibLXDmWeUZXjIYIT8V3xkT58Rm9u5Q==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-wasm32-wasi/-/binding-wasm32-wasi-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-3Q2KQxnC8IJOLqXmUMoYwyIPZU9hzRbnHaoV3Euz+VVnjZKcY8ktnNP8T9R4/GGQtb27C/UYKABxesKWb8lsvQ==",
|
||||
"cpu": [
|
||||
"wasm32"
|
||||
],
|
||||
@@ -623,16 +651,16 @@
|
||||
"dependencies": {
|
||||
"@emnapi/core": "1.9.2",
|
||||
"@emnapi/runtime": "1.9.2",
|
||||
"@napi-rs/wasm-runtime": "^1.1.3"
|
||||
"@napi-rs/wasm-runtime": "^1.1.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.0.0"
|
||||
"node": "^20.19.0 || >=22.12.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/binding-win32-arm64-msvc": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-KmoUoU7HnN+Si5YWJigfTws1jz1bKBYDQKdbLspz0UaqjjFkddHsqorgiW1mxcAj88lYUE6NC/zJNwT+SloqtA==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-tj7XRemQcOcFwv7qhpUxMTBbI5mWMlE4c1Omhg5+h8GuLXzyj8HviYgR+bB2DMDgRqUE+jiDleqSCRjx4aYk/Q==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
@@ -647,9 +675,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/binding-win32-x64-msvc": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-3P2A8L+x75qavWLe/Dll3EYBJLQmtkJN8rfh+U/eR3MqMgL/h98PhYI+JFfXuDPgPeCB7iZAKiqii5vqOvnA0g==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-PH5DRZT+F4f2PTXRXR8uJxnBq2po/xFtddyabTJVJs/ZYVHqXPEgNIr35IHTEa6bpa0Q8Awg+ymkTaGnKITw4g==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
@@ -664,9 +692,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@rolldown/pluginutils": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-UromN0peaE53IaBRe9W7CjrZgXl90fqGpK+mIZbA3qSTeYqg3pqpROBdIPvOG3F5ereDHNwoHBI2e50n1BDr1g==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-45+YtqxLYKDWQouLKCrpIZhke+nXxhsw+qAHVzHDVwttyBlHNBVs2K25rDXrZzhpTp9w1FlAlvweV1H++fdZoA==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
@@ -813,16 +841,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@vitest/expect": {
|
||||
"version": "4.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.1.4.tgz",
|
||||
"integrity": "sha512-iPBpra+VDuXmBFI3FMKHSFXp3Gx5HfmSCE8X67Dn+bwephCnQCaB7qWK2ldHa+8ncN8hJU8VTMcxjPpyMkUjww==",
|
||||
"version": "4.1.5",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.1.5.tgz",
|
||||
"integrity": "sha512-PWBaRY5JoKuRnHlUHfpV/KohFylaDZTupcXN1H9vYryNLOnitSw60Mw9IAE2r67NbwwzBw/Cc/8q9BK3kIX8Kw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@standard-schema/spec": "^1.1.0",
|
||||
"@types/chai": "^5.2.2",
|
||||
"@vitest/spy": "4.1.4",
|
||||
"@vitest/utils": "4.1.4",
|
||||
"@vitest/spy": "4.1.5",
|
||||
"@vitest/utils": "4.1.5",
|
||||
"chai": "^6.2.2",
|
||||
"tinyrainbow": "^3.1.0"
|
||||
},
|
||||
@@ -831,13 +859,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@vitest/mocker": {
|
||||
"version": "4.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.1.4.tgz",
|
||||
"integrity": "sha512-R9HTZBhW6yCSGbGQnDnH3QHfJxokKN4KB+Yvk9Q1le7eQNYwiCyKxmLmurSpFy6BzJanSLuEUDrD+j97Q+ZLPg==",
|
||||
"version": "4.1.5",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.1.5.tgz",
|
||||
"integrity": "sha512-/x2EmFC4mT4NNzqvC3fmesuV97w5FC903KPmey4gsnJiMQ3Be1IlDKVaDaG8iqaLFHqJ2FVEkxZk5VmeLjIItw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@vitest/spy": "4.1.4",
|
||||
"@vitest/spy": "4.1.5",
|
||||
"estree-walker": "^3.0.3",
|
||||
"magic-string": "^0.30.21"
|
||||
},
|
||||
@@ -858,9 +886,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@vitest/pretty-format": {
|
||||
"version": "4.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.1.4.tgz",
|
||||
"integrity": "sha512-ddmDHU0gjEUyEVLxtZa7xamrpIefdEETu3nZjWtHeZX4QxqJ7tRxSteHVXJOcr8jhiLoGAhkK4WJ3WqBpjx42A==",
|
||||
"version": "4.1.5",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.1.5.tgz",
|
||||
"integrity": "sha512-7I3q6l5qr03dVfMX2wCo9FxwSJbPdwKjy2uu/YPpU3wfHvIL4QHwVRp57OfGrDFeUJ8/8QdfBKIV12FTtLn00g==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@@ -871,13 +899,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@vitest/runner": {
|
||||
"version": "4.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.1.4.tgz",
|
||||
"integrity": "sha512-xTp7VZ5aXP5ZJrn15UtJUWlx6qXLnGtF6jNxHepdPHpMfz/aVPx+htHtgcAL2mDXJgKhpoo2e9/hVJsIeFbytQ==",
|
||||
"version": "4.1.5",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.1.5.tgz",
|
||||
"integrity": "sha512-2D+o7Pr82IEO46YPpoA/YU0neeyr6FTerQb5Ro7BUnBuv6NQtT/kmVnczngiMEBhzgqz2UZYl5gArejsyERDSQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@vitest/utils": "4.1.4",
|
||||
"@vitest/utils": "4.1.5",
|
||||
"pathe": "^2.0.3"
|
||||
},
|
||||
"funding": {
|
||||
@@ -885,14 +913,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@vitest/snapshot": {
|
||||
"version": "4.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.1.4.tgz",
|
||||
"integrity": "sha512-MCjCFgaS8aZz+m5nTcEcgk/xhWv0rEH4Yl53PPlMXOZ1/Ka2VcZU6CJ+MgYCZbcJvzGhQRjVrGQNZqkGPttIKw==",
|
||||
"version": "4.1.5",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.1.5.tgz",
|
||||
"integrity": "sha512-zypXEt4KH/XgKGPUz4eC2AvErYx0My5hfL8oDb1HzGFpEk1P62bxSohdyOmvz+d9UJwanI68MKwr2EquOaOgMQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@vitest/pretty-format": "4.1.4",
|
||||
"@vitest/utils": "4.1.4",
|
||||
"@vitest/pretty-format": "4.1.5",
|
||||
"@vitest/utils": "4.1.5",
|
||||
"magic-string": "^0.30.21",
|
||||
"pathe": "^2.0.3"
|
||||
},
|
||||
@@ -901,9 +929,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@vitest/spy": {
|
||||
"version": "4.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.1.4.tgz",
|
||||
"integrity": "sha512-XxNdAsKW7C+FLydqFJLb5KhJtl3PGCMmYwFRfhvIgxJvLSXhhVI1zM8f1qD3Zg7RCjTSzDVyct6sghs9UEgBEQ==",
|
||||
"version": "4.1.5",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.1.5.tgz",
|
||||
"integrity": "sha512-2lNOsh6+R2Idnf1TCZqSwYlKN2E/iDlD8sgU59kYVl+OMDmvldO1VDk39smRfpUNwYpNRVn3w4YfuC7KfbBnkQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"funding": {
|
||||
@@ -911,13 +939,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@vitest/utils": {
|
||||
"version": "4.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.1.4.tgz",
|
||||
"integrity": "sha512-13QMT+eysM5uVGa1rG4kegGYNp6cnQcsTc67ELFbhNLQO+vgsygtYJx2khvdt4gVQqSSpC/KT5FZZxUpP3Oatw==",
|
||||
"version": "4.1.5",
|
||||
"resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.1.5.tgz",
|
||||
"integrity": "sha512-76wdkrmfXfqGjueGgnb45ITPyUi1ycZ4IHgC2bhPDUfWHklY/q3MdLOAB+TF1e6xfl8NxNY0ZYaPCFNWSsw3Ug==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@vitest/pretty-format": "4.1.4",
|
||||
"@vitest/pretty-format": "4.1.5",
|
||||
"convert-source-map": "^2.0.0",
|
||||
"tinyrainbow": "^3.1.0"
|
||||
},
|
||||
@@ -1010,6 +1038,16 @@
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/axe-core": {
|
||||
"version": "4.11.3",
|
||||
"resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.11.3.tgz",
|
||||
"integrity": "sha512-zBQouZixDTbo3jMGqHKyePxYxr1e5W8UdTmBQ7sNtaA9M2bE32daxxPLS/jojhKOHxQ7LWwPjfiwf/fhaJWzlg==",
|
||||
"dev": true,
|
||||
"license": "MPL-2.0",
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/balanced-match": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
|
||||
@@ -3606,14 +3644,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/rolldown": {
|
||||
"version": "1.0.0-rc.15",
|
||||
"resolved": "https://registry.npmjs.org/rolldown/-/rolldown-1.0.0-rc.15.tgz",
|
||||
"integrity": "sha512-Ff31guA5zT6WjnGp0SXw76X6hzGRk/OQq2hE+1lcDe+lJdHSgnSX6nK3erbONHyCbpSj9a9E+uX/OvytZoWp2g==",
|
||||
"version": "1.0.0-rc.16",
|
||||
"resolved": "https://registry.npmjs.org/rolldown/-/rolldown-1.0.0-rc.16.tgz",
|
||||
"integrity": "sha512-rzi5WqKzEZw3SooTt7cgm4eqIoujPIyGcJNGFL7iPEuajQw7vxMHUkXylu4/vhCkJGXsgRmxqMKXUpT6FEgl0g==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@oxc-project/types": "=0.124.0",
|
||||
"@rolldown/pluginutils": "1.0.0-rc.15"
|
||||
"@oxc-project/types": "=0.126.0",
|
||||
"@rolldown/pluginutils": "1.0.0-rc.16"
|
||||
},
|
||||
"bin": {
|
||||
"rolldown": "bin/cli.mjs"
|
||||
@@ -3622,21 +3660,21 @@
|
||||
"node": "^20.19.0 || >=22.12.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@rolldown/binding-android-arm64": "1.0.0-rc.15",
|
||||
"@rolldown/binding-darwin-arm64": "1.0.0-rc.15",
|
||||
"@rolldown/binding-darwin-x64": "1.0.0-rc.15",
|
||||
"@rolldown/binding-freebsd-x64": "1.0.0-rc.15",
|
||||
"@rolldown/binding-linux-arm-gnueabihf": "1.0.0-rc.15",
|
||||
"@rolldown/binding-linux-arm64-gnu": "1.0.0-rc.15",
|
||||
"@rolldown/binding-linux-arm64-musl": "1.0.0-rc.15",
|
||||
"@rolldown/binding-linux-ppc64-gnu": "1.0.0-rc.15",
|
||||
"@rolldown/binding-linux-s390x-gnu": "1.0.0-rc.15",
|
||||
"@rolldown/binding-linux-x64-gnu": "1.0.0-rc.15",
|
||||
"@rolldown/binding-linux-x64-musl": "1.0.0-rc.15",
|
||||
"@rolldown/binding-openharmony-arm64": "1.0.0-rc.15",
|
||||
"@rolldown/binding-wasm32-wasi": "1.0.0-rc.15",
|
||||
"@rolldown/binding-win32-arm64-msvc": "1.0.0-rc.15",
|
||||
"@rolldown/binding-win32-x64-msvc": "1.0.0-rc.15"
|
||||
"@rolldown/binding-android-arm64": "1.0.0-rc.16",
|
||||
"@rolldown/binding-darwin-arm64": "1.0.0-rc.16",
|
||||
"@rolldown/binding-darwin-x64": "1.0.0-rc.16",
|
||||
"@rolldown/binding-freebsd-x64": "1.0.0-rc.16",
|
||||
"@rolldown/binding-linux-arm-gnueabihf": "1.0.0-rc.16",
|
||||
"@rolldown/binding-linux-arm64-gnu": "1.0.0-rc.16",
|
||||
"@rolldown/binding-linux-arm64-musl": "1.0.0-rc.16",
|
||||
"@rolldown/binding-linux-ppc64-gnu": "1.0.0-rc.16",
|
||||
"@rolldown/binding-linux-s390x-gnu": "1.0.0-rc.16",
|
||||
"@rolldown/binding-linux-x64-gnu": "1.0.0-rc.16",
|
||||
"@rolldown/binding-linux-x64-musl": "1.0.0-rc.16",
|
||||
"@rolldown/binding-openharmony-arm64": "1.0.0-rc.16",
|
||||
"@rolldown/binding-wasm32-wasi": "1.0.0-rc.16",
|
||||
"@rolldown/binding-win32-arm64-msvc": "1.0.0-rc.16",
|
||||
"@rolldown/binding-win32-x64-msvc": "1.0.0-rc.16"
|
||||
}
|
||||
},
|
||||
"node_modules/run-parallel": {
|
||||
@@ -4014,9 +4052,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
"version": "6.0.2",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-6.0.2.tgz",
|
||||
"integrity": "sha512-bGdAIrZ0wiGDo5l8c++HWtbaNCWTS4UTv7RaTH/ThVIgjkveJt83m74bBHMJkuCbslY8ixgLBVZJIOiQlQTjfQ==",
|
||||
"version": "6.0.3",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-6.0.3.tgz",
|
||||
"integrity": "sha512-y2TvuxSZPDyQakkFRPZHKFm+KKVqIisdg9/CZwm9ftvKXLP8NRWj38/ODjNbr43SsoXqNuAisEf1GdCxqWcdBw==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
@@ -4086,17 +4124,17 @@
|
||||
}
|
||||
},
|
||||
"node_modules/vite": {
|
||||
"version": "8.0.8",
|
||||
"resolved": "https://registry.npmjs.org/vite/-/vite-8.0.8.tgz",
|
||||
"integrity": "sha512-dbU7/iLVa8KZALJyLOBOQ88nOXtNG8vxKuOT4I2mD+Ya70KPceF4IAmDsmU0h1Qsn5bPrvsY9HJstCRh3hG6Uw==",
|
||||
"version": "8.0.9",
|
||||
"resolved": "https://registry.npmjs.org/vite/-/vite-8.0.9.tgz",
|
||||
"integrity": "sha512-t7g7GVRpMXjNpa67HaVWI/8BWtdVIQPCL2WoozXXA7LBGEFK4AkkKkHx2hAQf5x1GZSlcmEDPkVLSGahxnEEZw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"lightningcss": "^1.32.0",
|
||||
"picomatch": "^4.0.4",
|
||||
"postcss": "^8.5.8",
|
||||
"rolldown": "1.0.0-rc.15",
|
||||
"tinyglobby": "^0.2.15"
|
||||
"postcss": "^8.5.10",
|
||||
"rolldown": "1.0.0-rc.16",
|
||||
"tinyglobby": "^0.2.16"
|
||||
},
|
||||
"bin": {
|
||||
"vite": "bin/vite.js"
|
||||
@@ -4192,19 +4230,19 @@
|
||||
}
|
||||
},
|
||||
"node_modules/vitest": {
|
||||
"version": "4.1.4",
|
||||
"resolved": "https://registry.npmjs.org/vitest/-/vitest-4.1.4.tgz",
|
||||
"integrity": "sha512-tFuJqTxKb8AvfyqMfnavXdzfy3h3sWZRWwfluGbkeR7n0HUev+FmNgZ8SDrRBTVrVCjgH5cA21qGbCffMNtWvg==",
|
||||
"version": "4.1.5",
|
||||
"resolved": "https://registry.npmjs.org/vitest/-/vitest-4.1.5.tgz",
|
||||
"integrity": "sha512-9Xx1v3/ih3m9hN+SbfkUyy0JAs72ap3r7joc87XL6jwF0jGg6mFBvQ1SrwaX+h8BlkX6Hz9shdd1uo6AF+ZGpg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@vitest/expect": "4.1.4",
|
||||
"@vitest/mocker": "4.1.4",
|
||||
"@vitest/pretty-format": "4.1.4",
|
||||
"@vitest/runner": "4.1.4",
|
||||
"@vitest/snapshot": "4.1.4",
|
||||
"@vitest/spy": "4.1.4",
|
||||
"@vitest/utils": "4.1.4",
|
||||
"@vitest/expect": "4.1.5",
|
||||
"@vitest/mocker": "4.1.5",
|
||||
"@vitest/pretty-format": "4.1.5",
|
||||
"@vitest/runner": "4.1.5",
|
||||
"@vitest/snapshot": "4.1.5",
|
||||
"@vitest/spy": "4.1.5",
|
||||
"@vitest/utils": "4.1.5",
|
||||
"es-module-lexer": "^2.0.0",
|
||||
"expect-type": "^1.3.0",
|
||||
"magic-string": "^0.30.21",
|
||||
@@ -4232,12 +4270,12 @@
|
||||
"@edge-runtime/vm": "*",
|
||||
"@opentelemetry/api": "^1.9.0",
|
||||
"@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0",
|
||||
"@vitest/browser-playwright": "4.1.4",
|
||||
"@vitest/browser-preview": "4.1.4",
|
||||
"@vitest/browser-webdriverio": "4.1.4",
|
||||
"@vitest/coverage-istanbul": "4.1.4",
|
||||
"@vitest/coverage-v8": "4.1.4",
|
||||
"@vitest/ui": "4.1.4",
|
||||
"@vitest/browser-playwright": "4.1.5",
|
||||
"@vitest/browser-preview": "4.1.5",
|
||||
"@vitest/browser-webdriverio": "4.1.5",
|
||||
"@vitest/coverage-istanbul": "4.1.5",
|
||||
"@vitest/coverage-v8": "4.1.5",
|
||||
"@vitest/ui": "4.1.5",
|
||||
"happy-dom": "*",
|
||||
"jsdom": "*",
|
||||
"vite": "^6.0.0 || ^7.0.0 || ^8.0.0"
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
"smol-toml": "^1.6.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@axe-core/playwright": "^4.11.2",
|
||||
"@bgotink/playwright-coverage": "^0.3.2",
|
||||
"@playwright/test": "^1.59.1",
|
||||
"@types/eslint-plugin-jsx-a11y": "^6.10.1",
|
||||
@@ -27,8 +28,8 @@
|
||||
"prettier": "^3.8.3",
|
||||
"prettier-plugin-tailwindcss": "^0.7.2",
|
||||
"tar": "^7.5.13",
|
||||
"typescript": "^6.0.2",
|
||||
"vite": "^8.0.8",
|
||||
"vitest": "^4.1.4"
|
||||
"typescript": "^6.0.3",
|
||||
"vite": "^8.0.9",
|
||||
"vitest": "^4.1.5"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -240,7 +240,7 @@ export default defineConfig({
|
||||
testDir: './tests',
|
||||
testMatch: [
|
||||
/security-enforcement\/.*\.spec\.(ts|js)/,
|
||||
/security\/.*\.spec\.(ts|js)/,
|
||||
/^tests\/security\/.*\.spec\.(ts|js)/,
|
||||
],
|
||||
dependencies: ['setup', 'security-shard-setup'],
|
||||
teardown: 'security-teardown',
|
||||
@@ -275,7 +275,7 @@ export default defineConfig({
|
||||
'**/node_modules/**',
|
||||
'**/backend/**',
|
||||
'**/security-enforcement/**',
|
||||
'**/security/**',
|
||||
'**/tests/security/**',
|
||||
],
|
||||
},
|
||||
|
||||
@@ -292,7 +292,7 @@ export default defineConfig({
|
||||
'**/node_modules/**',
|
||||
'**/backend/**',
|
||||
'**/security-enforcement/**',
|
||||
'**/security/**',
|
||||
'**/tests/security/**',
|
||||
],
|
||||
},
|
||||
|
||||
@@ -309,7 +309,7 @@ export default defineConfig({
|
||||
'**/node_modules/**',
|
||||
'**/backend/**',
|
||||
'**/security-enforcement/**',
|
||||
'**/security/**',
|
||||
'**/tests/security/**',
|
||||
],
|
||||
},
|
||||
|
||||
|
||||
@@ -13,22 +13,34 @@ sleep 1
|
||||
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
|
||||
FRONTEND_DIR="$ROOT_DIR/frontend"
|
||||
MIN_COVERAGE="${CHARON_MIN_COVERAGE:-${CPM_MIN_COVERAGE:-87}}"
|
||||
CANONICAL_COVERAGE_DIR="coverage"
|
||||
RUN_COVERAGE_DIR="coverage/.run-${PPID}-$$-$(date +%s)"
|
||||
|
||||
cd "$FRONTEND_DIR"
|
||||
|
||||
# Ensure dependencies are installed for CI runs
|
||||
npm ci --silent
|
||||
|
||||
# Ensure coverage output directories exist to avoid intermittent ENOENT errors
|
||||
mkdir -p coverage/.tmp
|
||||
# Ensure coverage output directories exist
|
||||
mkdir -p "$CANONICAL_COVERAGE_DIR"
|
||||
mkdir -p "$RUN_COVERAGE_DIR"
|
||||
|
||||
# Run tests with coverage and json-summary reporter (force istanbul provider)
|
||||
# Using istanbul ensures json-summary and coverage-summary artifacts are produced
|
||||
# so that downstream checks can parse them reliably.
|
||||
npm run test:coverage -- --run
|
||||
cleanup() {
|
||||
rm -rf "$RUN_COVERAGE_DIR"
|
||||
}
|
||||
|
||||
SUMMARY_FILE="coverage/coverage-summary.json"
|
||||
LCOV_FILE="coverage/lcov.info"
|
||||
trap cleanup EXIT
|
||||
|
||||
# Run tests with coverage in an isolated per-run reports directory to avoid
|
||||
# collisions when multiple coverage processes execute against the same workspace.
|
||||
VITEST_COVERAGE_REPORTS_DIR="$RUN_COVERAGE_DIR" npm run test:coverage -- --run
|
||||
|
||||
# Publish stable artifacts to the canonical coverage directory used by DoD checks.
|
||||
cp "$RUN_COVERAGE_DIR/coverage-summary.json" "$CANONICAL_COVERAGE_DIR/coverage-summary.json"
|
||||
cp "$RUN_COVERAGE_DIR/lcov.info" "$CANONICAL_COVERAGE_DIR/lcov.info"
|
||||
|
||||
SUMMARY_FILE="$CANONICAL_COVERAGE_DIR/coverage-summary.json"
|
||||
LCOV_FILE="$CANONICAL_COVERAGE_DIR/lcov.info"
|
||||
|
||||
if [ ! -f "$SUMMARY_FILE" ]; then
|
||||
echo "Error: Coverage summary file not found at $SUMMARY_FILE"
|
||||
|
||||
99
tests/a11y/README.md
Normal file
99
tests/a11y/README.md
Normal file
@@ -0,0 +1,99 @@
|
||||
## Accessibility Test Suite (`tests/a11y`)
|
||||
|
||||
### Purpose and Scope
|
||||
|
||||
This suite checks key Charon pages for accessibility issues using Playwright and axe.
|
||||
It is focused on page-level smoke coverage so we can catch major accessibility regressions early.
|
||||
|
||||
### Run Locally
|
||||
|
||||
Run a quick single-browser check:
|
||||
|
||||
```bash
|
||||
npx playwright test tests/a11y/ --project=firefox
|
||||
```
|
||||
|
||||
Run the full cross-browser matrix:
|
||||
|
||||
```bash
|
||||
npx playwright test tests/a11y/ --project=chromium --project=firefox --project=webkit
|
||||
```
|
||||
|
||||
### CI Execution
|
||||
|
||||
In CI, this suite runs in the non-security shard jobs of the E2E split workflow:
|
||||
|
||||
- Workflow: `.github/workflows/e2e-tests-split.yml`
|
||||
- Jobs: non-security shard jobs for Chromium, Firefox, and WebKit
|
||||
- Behavior: `tests/a11y` is included in the Playwright test paths and distributed by `--shard`
|
||||
|
||||
### Add a New Page Accessibility Test
|
||||
|
||||
1. Create or update a spec in `tests/a11y/`.
|
||||
2. Import the accessibility fixture from `../fixtures/a11y`.
|
||||
3. Use wait helpers (for example from `../utils/wait-helpers`) before running axe so page state is stable.
|
||||
4. Attach scan results with `test.info().attach(...)` for report debugging.
|
||||
5. Filter known accepted baseline items using `getBaselinedRuleIds('<page-path>')`.
|
||||
6. Assert with `expectNoA11yViolations`.
|
||||
|
||||
Minimal pattern:
|
||||
|
||||
```ts
|
||||
import { test } from '../fixtures/a11y';
|
||||
import { waitForLoadingComplete } from '../utils/wait-helpers';
|
||||
import { expectNoA11yViolations } from '../utils/a11y-helpers';
|
||||
import { getBaselinedRuleIds } from './a11y-baseline';
|
||||
|
||||
test('example page has no critical a11y violations', async ({ page, makeAxeBuilder }) => {
|
||||
await page.goto('/example');
|
||||
await waitForLoadingComplete(page);
|
||||
|
||||
const results = await makeAxeBuilder().analyze();
|
||||
|
||||
test.info().attach('a11y-results', {
|
||||
body: JSON.stringify(results.violations, null, 2),
|
||||
contentType: 'application/json',
|
||||
});
|
||||
|
||||
expectNoA11yViolations(results, {
|
||||
knownViolations: getBaselinedRuleIds('/example'),
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
### Baseline Policy
|
||||
|
||||
Baseline entries are allowed only for known and accepted issues with clear rationale and a tracking ticket.
|
||||
|
||||
- Add a clear `reason` and a `ticket` reference.
|
||||
- Add `expiresAt` so each baseline is reviewed periodically.
|
||||
- Remove the baseline entry as soon as the underlying issue is fixed.
|
||||
|
||||
### Failure Semantics
|
||||
|
||||
- `critical` and `serious` violations fail the test.
|
||||
- `moderate` and `minor` violations are reported in attached output and do not fail by default.
|
||||
|
||||
### Troubleshooting Timeout Flakes
|
||||
|
||||
Intermittent timeout flakes can happen, especially on Firefox.
|
||||
|
||||
Recommended rerun strategy:
|
||||
|
||||
1. Rerun the same failed spec once in Firefox.
|
||||
2. If it passes on rerun, treat it as a transient flake and continue.
|
||||
3. If it fails again, run the full a11y suite in Firefox.
|
||||
4. If still failing, run all three browsers and inspect `a11y-results` attachments.
|
||||
|
||||
Useful commands:
|
||||
|
||||
```bash
|
||||
# Rerun one spec in Firefox
|
||||
npx playwright test tests/a11y/<spec-file>.spec.ts --project=firefox
|
||||
|
||||
# Rerun full a11y suite in Firefox
|
||||
npx playwright test tests/a11y/ --project=firefox
|
||||
|
||||
# Rerun full a11y suite in all browsers
|
||||
npx playwright test tests/a11y/ --project=chromium --project=firefox --project=webkit
|
||||
```
|
||||
51
tests/a11y/a11y-baseline.ts
Normal file
51
tests/a11y/a11y-baseline.ts
Normal file
@@ -0,0 +1,51 @@
|
||||
export interface BaselineEntry {
|
||||
ruleId: string;
|
||||
pages: string[];
|
||||
reason: string;
|
||||
ticket?: string;
|
||||
expiresAt?: string;
|
||||
}
|
||||
|
||||
export const A11Y_BASELINE: BaselineEntry[] = [
|
||||
{
|
||||
ruleId: 'color-contrast',
|
||||
pages: ['/'],
|
||||
reason: 'Tailwind blue-500 buttons (#3b82f6) have 3.67:1 contrast with white text; requires design system update',
|
||||
ticket: '#929',
|
||||
expiresAt: '2026-07-31',
|
||||
},
|
||||
{
|
||||
ruleId: 'label',
|
||||
pages: ['/settings/users', '/security', '/tasks/backups', '/tasks/import/caddyfile', '/tasks/import/crowdsec'],
|
||||
reason: 'Form inputs missing associated labels; requires frontend component fixes',
|
||||
ticket: '#929',
|
||||
expiresAt: '2026-07-31',
|
||||
},
|
||||
{
|
||||
ruleId: 'button-name',
|
||||
pages: ['/settings', '/security/headers'],
|
||||
reason: 'Icon-only buttons missing accessible names; requires aria-label additions',
|
||||
ticket: '#929',
|
||||
expiresAt: '2026-07-31',
|
||||
},
|
||||
{
|
||||
ruleId: 'select-name',
|
||||
pages: ['/tasks/logs'],
|
||||
reason: 'Select element missing associated label',
|
||||
ticket: '#929',
|
||||
expiresAt: '2026-07-31',
|
||||
},
|
||||
{
|
||||
ruleId: 'scrollable-region-focusable',
|
||||
pages: ['/tasks/logs'],
|
||||
reason: 'Log output container is scrollable but not keyboard-focusable',
|
||||
ticket: '#929',
|
||||
expiresAt: '2026-07-31',
|
||||
},
|
||||
];
|
||||
|
||||
export function getBaselinedRuleIds(currentPage: string): string[] {
|
||||
return A11Y_BASELINE
|
||||
.filter((entry) => entry.pages.some((p) => currentPage.startsWith(p)))
|
||||
.map((entry) => entry.ruleId);
|
||||
}
|
||||
29
tests/a11y/certificates.a11y.spec.ts
Normal file
29
tests/a11y/certificates.a11y.spec.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { test } from '../fixtures/a11y';
|
||||
import { waitForLoadingComplete, waitForTableLoad } from '../utils/wait-helpers';
|
||||
import { expectNoA11yViolations } from '../utils/a11y-helpers';
|
||||
import { getBaselinedRuleIds } from './a11y-baseline';
|
||||
|
||||
test.describe('Accessibility: Certificates', () => {
|
||||
test.describe.configure({ mode: 'parallel' });
|
||||
|
||||
test('certificates page has no critical a11y violations', async ({ page, makeAxeBuilder }) => {
|
||||
await test.step('Navigate to certificates', async () => {
|
||||
await page.goto('/certificates');
|
||||
await waitForLoadingComplete(page);
|
||||
await waitForTableLoad(page);
|
||||
});
|
||||
|
||||
await test.step('Run axe accessibility scan', async () => {
|
||||
const results = await makeAxeBuilder().analyze();
|
||||
|
||||
test.info().attach('a11y-results', {
|
||||
body: JSON.stringify(results.violations, null, 2),
|
||||
contentType: 'application/json',
|
||||
});
|
||||
|
||||
expectNoA11yViolations(results, {
|
||||
knownViolations: getBaselinedRuleIds('/certificates'),
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
28
tests/a11y/dashboard.a11y.spec.ts
Normal file
28
tests/a11y/dashboard.a11y.spec.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { test } from '../fixtures/a11y';
|
||||
import { waitForLoadingComplete } from '../utils/wait-helpers';
|
||||
import { expectNoA11yViolations } from '../utils/a11y-helpers';
|
||||
import { getBaselinedRuleIds } from './a11y-baseline';
|
||||
|
||||
test.describe('Accessibility: Dashboard', () => {
|
||||
test.describe.configure({ mode: 'parallel' });
|
||||
|
||||
test('dashboard has no critical a11y violations', async ({ page, makeAxeBuilder }) => {
|
||||
await test.step('Navigate to dashboard', async () => {
|
||||
await page.goto('/');
|
||||
await waitForLoadingComplete(page);
|
||||
});
|
||||
|
||||
await test.step('Run axe accessibility scan', async () => {
|
||||
const results = await makeAxeBuilder().analyze();
|
||||
|
||||
test.info().attach('a11y-results', {
|
||||
body: JSON.stringify(results.violations, null, 2),
|
||||
contentType: 'application/json',
|
||||
});
|
||||
|
||||
expectNoA11yViolations(results, {
|
||||
knownViolations: getBaselinedRuleIds('/'),
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
36
tests/a11y/dns-providers.a11y.spec.ts
Normal file
36
tests/a11y/dns-providers.a11y.spec.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
import { test } from '../fixtures/a11y';
|
||||
import { waitForLoadingComplete } from '../utils/wait-helpers';
|
||||
import { expectNoA11yViolations } from '../utils/a11y-helpers';
|
||||
import { getBaselinedRuleIds } from './a11y-baseline';
|
||||
|
||||
test.describe('Accessibility: DNS Providers', () => {
|
||||
test.describe.configure({ mode: 'parallel' });
|
||||
|
||||
test('DNS providers page has no critical a11y violations', async ({ page, makeAxeBuilder }) => {
|
||||
await test.step('Navigate to DNS providers', async () => {
|
||||
await page.goto('/dns/providers');
|
||||
await waitForLoadingComplete(page);
|
||||
await page.getByRole('heading', { name: 'DNS Management', level: 1 }).waitFor({
|
||||
state: 'visible',
|
||||
timeout: 10000,
|
||||
});
|
||||
await page.getByRole('button', { name: 'Add DNS Provider' }).waitFor({
|
||||
state: 'visible',
|
||||
timeout: 10000,
|
||||
});
|
||||
});
|
||||
|
||||
await test.step('Run axe accessibility scan', async () => {
|
||||
const results = await makeAxeBuilder().analyze();
|
||||
|
||||
test.info().attach('a11y-results', {
|
||||
body: JSON.stringify(results.violations, null, 2),
|
||||
contentType: 'application/json',
|
||||
});
|
||||
|
||||
expectNoA11yViolations(results, {
|
||||
knownViolations: getBaselinedRuleIds('/dns/providers'),
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
35
tests/a11y/domains.a11y.spec.ts
Normal file
35
tests/a11y/domains.a11y.spec.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import { test } from '../fixtures/a11y';
|
||||
import { waitForLoadingComplete } from '../utils/wait-helpers';
|
||||
import { expectNoA11yViolations } from '../utils/a11y-helpers';
|
||||
import { getBaselinedRuleIds } from './a11y-baseline';
|
||||
|
||||
const domainRoutes = [
|
||||
{ route: '/domains', name: 'domains' },
|
||||
{ route: '/remote-servers', name: 'remote servers' },
|
||||
] as const;
|
||||
|
||||
test.describe('Accessibility: Domains & Remote Servers', () => {
|
||||
test.describe.configure({ mode: 'parallel' });
|
||||
|
||||
for (const { route, name } of domainRoutes) {
|
||||
test(`${name} page has no critical a11y violations`, async ({ page, makeAxeBuilder }) => {
|
||||
await test.step(`Navigate to ${name}`, async () => {
|
||||
await page.goto(route);
|
||||
await waitForLoadingComplete(page);
|
||||
});
|
||||
|
||||
await test.step('Run axe accessibility scan', async () => {
|
||||
const results = await makeAxeBuilder().analyze();
|
||||
|
||||
test.info().attach('a11y-results', {
|
||||
body: JSON.stringify(results.violations, null, 2),
|
||||
contentType: 'application/json',
|
||||
});
|
||||
|
||||
expectNoA11yViolations(results, {
|
||||
knownViolations: getBaselinedRuleIds(route),
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
30
tests/a11y/login.a11y.spec.ts
Normal file
30
tests/a11y/login.a11y.spec.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import { test } from '../fixtures/a11y';
|
||||
import { waitForLoadingComplete } from '../utils/wait-helpers';
|
||||
import { expectNoA11yViolations } from '../utils/a11y-helpers';
|
||||
import { getBaselinedRuleIds } from './a11y-baseline';
|
||||
|
||||
test.use({ storageState: { cookies: [], origins: [] } });
|
||||
|
||||
test.describe('Accessibility: Login', () => {
|
||||
test.describe.configure({ mode: 'parallel' });
|
||||
|
||||
test('login page has no critical a11y violations', async ({ page, makeAxeBuilder }) => {
|
||||
await test.step('Navigate to login page', async () => {
|
||||
await page.goto('/login');
|
||||
await waitForLoadingComplete(page);
|
||||
});
|
||||
|
||||
await test.step('Run axe accessibility scan', async () => {
|
||||
const results = await makeAxeBuilder().analyze();
|
||||
|
||||
test.info().attach('a11y-results', {
|
||||
body: JSON.stringify(results.violations, null, 2),
|
||||
contentType: 'application/json',
|
||||
});
|
||||
|
||||
expectNoA11yViolations(results, {
|
||||
knownViolations: getBaselinedRuleIds('/login'),
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
35
tests/a11y/notifications.a11y.spec.ts
Normal file
35
tests/a11y/notifications.a11y.spec.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import { test } from '../fixtures/a11y';
|
||||
import { waitForLoadingComplete } from '../utils/wait-helpers';
|
||||
import { expectNoA11yViolations } from '../utils/a11y-helpers';
|
||||
import { getBaselinedRuleIds } from './a11y-baseline';
|
||||
|
||||
const notificationRoutes = [
|
||||
{ route: '/settings/notifications', name: 'notifications' },
|
||||
{ route: '/settings/smtp', name: 'SMTP settings' },
|
||||
] as const;
|
||||
|
||||
test.describe('Accessibility: Notifications', () => {
|
||||
test.describe.configure({ mode: 'parallel' });
|
||||
|
||||
for (const { route, name } of notificationRoutes) {
|
||||
test(`${name} page has no critical a11y violations`, async ({ page, makeAxeBuilder }) => {
|
||||
await test.step(`Navigate to ${name}`, async () => {
|
||||
await page.goto(route);
|
||||
await waitForLoadingComplete(page);
|
||||
});
|
||||
|
||||
await test.step('Run axe accessibility scan', async () => {
|
||||
const results = await makeAxeBuilder().analyze();
|
||||
|
||||
test.info().attach('a11y-results', {
|
||||
body: JSON.stringify(results.violations, null, 2),
|
||||
contentType: 'application/json',
|
||||
});
|
||||
|
||||
expectNoA11yViolations(results, {
|
||||
knownViolations: getBaselinedRuleIds(route),
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
29
tests/a11y/proxy-hosts.a11y.spec.ts
Normal file
29
tests/a11y/proxy-hosts.a11y.spec.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { test } from '../fixtures/a11y';
|
||||
import { waitForLoadingComplete, waitForTableLoad } from '../utils/wait-helpers';
|
||||
import { expectNoA11yViolations } from '../utils/a11y-helpers';
|
||||
import { getBaselinedRuleIds } from './a11y-baseline';
|
||||
|
||||
test.describe('Accessibility: Proxy Hosts', () => {
|
||||
test.describe.configure({ mode: 'parallel' });
|
||||
|
||||
test('proxy hosts page has no critical a11y violations', async ({ page, makeAxeBuilder }) => {
|
||||
await test.step('Navigate to proxy hosts', async () => {
|
||||
await page.goto('/proxy-hosts');
|
||||
await waitForLoadingComplete(page);
|
||||
await waitForTableLoad(page);
|
||||
});
|
||||
|
||||
await test.step('Run axe accessibility scan', async () => {
|
||||
const results = await makeAxeBuilder().analyze();
|
||||
|
||||
test.info().attach('a11y-results', {
|
||||
body: JSON.stringify(results.violations, null, 2),
|
||||
contentType: 'application/json',
|
||||
});
|
||||
|
||||
expectNoA11yViolations(results, {
|
||||
knownViolations: getBaselinedRuleIds('/proxy-hosts'),
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
81
tests/a11y/security.a11y.spec.ts
Normal file
81
tests/a11y/security.a11y.spec.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
import { test, expect } from '../fixtures/a11y';
|
||||
import { waitForLoadingComplete } from '../utils/wait-helpers';
|
||||
import { expectNoA11yViolations } from '../utils/a11y-helpers';
|
||||
import { getBaselinedRuleIds } from './a11y-baseline';
|
||||
|
||||
const securityRoutes = [
|
||||
{ route: '/security', name: 'security dashboard' },
|
||||
{ route: '/security/access-lists', name: 'access lists' },
|
||||
{ route: '/security/crowdsec', name: 'CrowdSec' },
|
||||
{ route: '/security/waf', name: 'WAF' },
|
||||
{ route: '/security/rate-limiting', name: 'rate limiting' },
|
||||
{ route: '/security/headers', name: 'security headers' },
|
||||
{ route: '/security/encryption', name: 'encryption' },
|
||||
{ route: '/security/audit-logs', name: 'audit logs' },
|
||||
] as const;
|
||||
|
||||
/**
|
||||
* Wait for route-specific content to be visible before axe analysis
|
||||
* Ensures all key page elements have been rendered
|
||||
*/
|
||||
async function waitForRouteReady(page: any, route: string): Promise<void> {
|
||||
// Wait for main content area if it exists (most pages have one)
|
||||
const main = page.locator('main');
|
||||
try {
|
||||
await expect(main).toBeVisible({ timeout: 5000 });
|
||||
} catch {
|
||||
// If no main element, just continue (some pages may not have it)
|
||||
}
|
||||
|
||||
// Route-specific readiness conditions - all optional
|
||||
switch (route) {
|
||||
case '/security/headers':
|
||||
// Security headers page has a button to create profiles
|
||||
try {
|
||||
await expect(page.getByRole('button', { name: /create|add|new/i }).first())
|
||||
.toBeVisible({ timeout: 5000 });
|
||||
} catch {
|
||||
// Button not found, continue anyway
|
||||
}
|
||||
break;
|
||||
case '/security/audit-logs':
|
||||
// Audit logs page may have a heading or table
|
||||
try {
|
||||
await expect(page.locator('h1, h2, table, [role="grid"]').first())
|
||||
.toBeVisible({ timeout: 5000 });
|
||||
} catch {
|
||||
// No expected content elements, continue anyway
|
||||
}
|
||||
break;
|
||||
default:
|
||||
// For other routes, just ensure main content is visible (already checked above)
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
test.describe('Accessibility: Security', () => {
|
||||
test.describe.configure({ mode: 'parallel' });
|
||||
|
||||
for (const { route, name } of securityRoutes) {
|
||||
test(`${name} page has no critical a11y violations`, async ({ page, makeAxeBuilder }) => {
|
||||
await test.step(`Navigate to ${name}`, async () => {
|
||||
await page.goto(route);
|
||||
await waitForLoadingComplete(page);
|
||||
await waitForRouteReady(page, route);
|
||||
});
|
||||
|
||||
await test.step('Run axe accessibility scan', async () => {
|
||||
const results = await makeAxeBuilder().analyze();
|
||||
|
||||
test.info().attach('a11y-results', {
|
||||
body: JSON.stringify(results.violations, null, 2),
|
||||
contentType: 'application/json',
|
||||
});
|
||||
|
||||
expectNoA11yViolations(results, {
|
||||
knownViolations: getBaselinedRuleIds(route),
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
49
tests/a11y/settings.a11y.spec.ts
Normal file
49
tests/a11y/settings.a11y.spec.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import { test } from '../fixtures/a11y';
|
||||
import { waitForLoadingComplete, waitForTableLoad } from '../utils/wait-helpers';
|
||||
import { expectNoA11yViolations } from '../utils/a11y-helpers';
|
||||
import { getBaselinedRuleIds } from './a11y-baseline';
|
||||
|
||||
test.describe('Accessibility: Settings', () => {
|
||||
test.describe.configure({ mode: 'parallel' });
|
||||
|
||||
test('settings page has no critical a11y violations', async ({ page, makeAxeBuilder }) => {
|
||||
await test.step('Navigate to settings', async () => {
|
||||
await page.goto('/settings');
|
||||
await waitForLoadingComplete(page);
|
||||
});
|
||||
|
||||
await test.step('Run axe accessibility scan', async () => {
|
||||
const results = await makeAxeBuilder().analyze();
|
||||
|
||||
test.info().attach('a11y-results', {
|
||||
body: JSON.stringify(results.violations, null, 2),
|
||||
contentType: 'application/json',
|
||||
});
|
||||
|
||||
expectNoA11yViolations(results, {
|
||||
knownViolations: getBaselinedRuleIds('/settings'),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('users page has no critical a11y violations', async ({ page, makeAxeBuilder }) => {
|
||||
await test.step('Navigate to users', async () => {
|
||||
await page.goto('/settings/users');
|
||||
await waitForLoadingComplete(page);
|
||||
await waitForTableLoad(page);
|
||||
});
|
||||
|
||||
await test.step('Run axe accessibility scan', async () => {
|
||||
const results = await makeAxeBuilder().analyze();
|
||||
|
||||
test.info().attach('a11y-results', {
|
||||
body: JSON.stringify(results.violations, null, 2),
|
||||
contentType: 'application/json',
|
||||
});
|
||||
|
||||
expectNoA11yViolations(results, {
|
||||
knownViolations: getBaselinedRuleIds('/settings/users'),
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
34
tests/a11y/setup.a11y.spec.ts
Normal file
34
tests/a11y/setup.a11y.spec.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
import { test } from '../fixtures/a11y';
|
||||
import { waitForLoadingComplete } from '../utils/wait-helpers';
|
||||
import { expectNoA11yViolations } from '../utils/a11y-helpers';
|
||||
import { getBaselinedRuleIds } from './a11y-baseline';
|
||||
|
||||
test.use({ storageState: { cookies: [], origins: [] } });
|
||||
|
||||
test.describe('Accessibility: Setup', () => {
|
||||
test.describe.configure({ mode: 'parallel' });
|
||||
|
||||
test('setup page has no critical a11y violations', async ({ page, makeAxeBuilder }) => {
|
||||
await test.step('Navigate to setup page', async () => {
|
||||
await page.goto('/setup');
|
||||
|
||||
const url = page.url();
|
||||
test.skip(!url.includes('/setup'), 'Setup already complete — page redirected');
|
||||
|
||||
await waitForLoadingComplete(page);
|
||||
});
|
||||
|
||||
await test.step('Run axe accessibility scan', async () => {
|
||||
const results = await makeAxeBuilder().analyze();
|
||||
|
||||
test.info().attach('a11y-results', {
|
||||
body: JSON.stringify(results.violations, null, 2),
|
||||
contentType: 'application/json',
|
||||
});
|
||||
|
||||
expectNoA11yViolations(results, {
|
||||
knownViolations: getBaselinedRuleIds('/setup'),
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
39
tests/a11y/tasks.a11y.spec.ts
Normal file
39
tests/a11y/tasks.a11y.spec.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import { test } from '../fixtures/a11y';
|
||||
import { waitForLoadingComplete } from '../utils/wait-helpers';
|
||||
import { expectNoA11yViolations } from '../utils/a11y-helpers';
|
||||
import { getBaselinedRuleIds } from './a11y-baseline';
|
||||
|
||||
const taskRoutes = [
|
||||
{ route: '/tasks/backups', name: 'backups' },
|
||||
{ route: '/tasks/logs', name: 'logs' },
|
||||
{ route: '/tasks/import/caddyfile', name: 'Caddyfile import' },
|
||||
{ route: '/tasks/import/crowdsec', name: 'CrowdSec import' },
|
||||
{ route: '/tasks/import/npm', name: 'NPM import' },
|
||||
{ route: '/tasks/import/json', name: 'JSON import' },
|
||||
] as const;
|
||||
|
||||
test.describe('Accessibility: Tasks', () => {
|
||||
test.describe.configure({ mode: 'parallel' });
|
||||
|
||||
for (const { route, name } of taskRoutes) {
|
||||
test(`${name} page has no critical a11y violations`, async ({ page, makeAxeBuilder }) => {
|
||||
await test.step(`Navigate to ${name}`, async () => {
|
||||
await page.goto(route);
|
||||
await waitForLoadingComplete(page);
|
||||
});
|
||||
|
||||
await test.step('Run axe accessibility scan', async () => {
|
||||
const results = await makeAxeBuilder().analyze();
|
||||
|
||||
test.info().attach('a11y-results', {
|
||||
body: JSON.stringify(results.violations, null, 2),
|
||||
contentType: 'application/json',
|
||||
});
|
||||
|
||||
expectNoA11yViolations(results, {
|
||||
knownViolations: getBaselinedRuleIds(route),
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
28
tests/a11y/uptime.a11y.spec.ts
Normal file
28
tests/a11y/uptime.a11y.spec.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { test } from '../fixtures/a11y';
|
||||
import { waitForLoadingComplete } from '../utils/wait-helpers';
|
||||
import { expectNoA11yViolations } from '../utils/a11y-helpers';
|
||||
import { getBaselinedRuleIds } from './a11y-baseline';
|
||||
|
||||
test.describe('Accessibility: Uptime', () => {
|
||||
test.describe.configure({ mode: 'parallel' });
|
||||
|
||||
test('uptime page has no critical a11y violations', async ({ page, makeAxeBuilder }) => {
|
||||
await test.step('Navigate to uptime', async () => {
|
||||
await page.goto('/uptime');
|
||||
await waitForLoadingComplete(page);
|
||||
});
|
||||
|
||||
await test.step('Run axe accessibility scan', async () => {
|
||||
const results = await makeAxeBuilder().analyze();
|
||||
|
||||
test.info().attach('a11y-results', {
|
||||
body: JSON.stringify(results.violations, null, 2),
|
||||
contentType: 'application/json',
|
||||
});
|
||||
|
||||
expectNoA11yViolations(results, {
|
||||
knownViolations: getBaselinedRuleIds('/uptime'),
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
import { test, expect, request as playwrightRequest } from '@playwright/test';
import type { APIRequestContext } from '@playwright/test';
import {
  withSecurityEnabled,
  captureSecurityState,
  setSecurityModuleEnabled,
} from './utils/security-helpers';
import { getStorageStateAuthHeaders } from './utils/api-helpers';
import { STORAGE_STATE } from './constants';

/**
 * CrowdSec IP Whitelist Management E2E Tests
 *
 * Tests the whitelist tab on the CrowdSec configuration page (/security/crowdsec).
 * The tab is conditionally rendered: it only appears when CrowdSec mode is not 'disabled'.
 *
 * Uses IPs in the 10.99.x.x range to avoid conflicts with real network addresses.
 *
 * NOTE: Uses request.newContext({ storageState }) instead of the `request` fixture because
 * the auth cookie has `secure: true` which the fixture won't send over HTTP, but
 * Playwright's APIRequestContext does send it.
 */

const BASE_URL = process.env.PLAYWRIGHT_BASE_URL ?? 'http://127.0.0.1:8080';
// All test entries use this prefix so afterAll can sweep leftovers safely.
const TEST_IP_PREFIX = '10.99';

// Build an authenticated API context (see NOTE above for why the `request`
// fixture is not used here).
function createRequestContext(): Promise<APIRequestContext> {
  return playwrightRequest.newContext({
    baseURL: BASE_URL,
    storageState: STORAGE_STATE,
    extraHTTPHeaders: getStorageStateAuthHeaders(),
  });
}

test.describe('CrowdSec IP Whitelist Management', () => {
  // Serial mode prevents the tab-visibility test (which disables CrowdSec) from
  // racing with the local-mode tests (which require CrowdSec enabled).
  test.describe.configure({ mode: 'serial' });

  test.describe('tab visibility', () => {
    test('whitelist tab is hidden when CrowdSec is disabled', async ({ page }) => {
      const rc = await createRequestContext();
      // Capture current module state so it can be restored in the finally block.
      const originalState = await captureSecurityState(rc);
      if (originalState.crowdsec) {
        await setSecurityModuleEnabled(rc, 'crowdsec', false);
      }

      try {
        await test.step('Navigate to CrowdSec config page', async () => {
          await page.goto('/security/crowdsec');
          await page.waitForLoadState('networkidle');
        });

        await test.step('Verify whitelist tab is not present', async () => {
          await expect(page.getByRole('tab', { name: 'Whitelist' })).not.toBeVisible();
        });
      } finally {
        // Restore CrowdSec only if it was enabled before this test touched it.
        if (originalState.crowdsec) {
          await setSecurityModuleEnabled(rc, 'crowdsec', true);
        }
        await rc.dispose();
      }
    });
  });

  test.describe('with CrowdSec in local mode', () => {
    // Shared across all tests in this describe (serial mode makes this safe).
    let rc: APIRequestContext;
    let cleanupSecurity: () => Promise<void>;

    test.beforeAll(async () => {
      rc = await createRequestContext();
      cleanupSecurity = await withSecurityEnabled(rc, { crowdsec: true, cerberus: true });

      // Wait for CrowdSec to enter local mode (may take a few seconds after enabling)
      for (let attempt = 0; attempt < 15; attempt++) {
        const statusResp = await rc.get('/api/v1/security/status');
        if (statusResp.ok()) {
          const status = await statusResp.json();
          if (status.crowdsec?.mode !== 'disabled') break;
        }
        await new Promise((resolve) => setTimeout(resolve, 2000));
      }
    });

    test.afterAll(async () => {
      // Remove any leftover test entries before restoring security state
      const resp = await rc.get('/api/v1/admin/crowdsec/whitelist');
      if (resp.ok()) {
        const data = await resp.json();
        for (const entry of (data.whitelist ?? []) as Array<{ uuid: string; ip_or_cidr: string }>) {
          // Only sweep entries created by this suite (10.99.x.x prefix).
          if (entry.ip_or_cidr.startsWith(TEST_IP_PREFIX)) {
            await rc.delete(`/api/v1/admin/crowdsec/whitelist/${entry.uuid}`);
          }
        }
      }
      await cleanupSecurity?.();
      await rc.dispose();
    });

    test.beforeEach(async ({ page }) => {
      await test.step('Open CrowdSec Whitelist tab', async () => {
        // CrowdSec may take time to enter local mode after being enabled.
        // Retry navigation until the Whitelist tab is visible.
        const maxAttempts = 15;
        let tabFound = false;
        for (let attempt = 0; attempt < maxAttempts; attempt++) {
          await page.goto('/security/crowdsec');
          // Wait for network to settle so React Query status fetch completes
          await page.waitForLoadState('networkidle', { timeout: 8000 }).catch(() => {});
          const whitelistTab = page.getByRole('tab', { name: 'Whitelist' });
          const visible = await whitelistTab.isVisible().catch(() => false);
          if (visible) {
            await whitelistTab.click();
            await page.waitForLoadState('networkidle', { timeout: 8000 }).catch(() => {});
            tabFound = true;
            break;
          }
          if (attempt < maxAttempts - 1) {
            await new Promise((resolve) => setTimeout(resolve, 2000));
          }
        }
        if (!tabFound) {
          // Fail with a clear error message if tab never appeared
          await expect(page.getByRole('tab', { name: 'Whitelist' })).toBeVisible({
            timeout: 1000,
          });
        }
      });
    });

    test('displays empty state when no whitelist entries exist', async ({ page }) => {
      await test.step('Verify empty state message and snapshot', async () => {
        const emptyEl = page.getByTestId('whitelist-empty');
        await expect(emptyEl).toBeVisible();
        await expect(emptyEl).toHaveText('No whitelist entries');

        await expect(emptyEl).toMatchAriaSnapshot(`
          - paragraph: No whitelist entries
        `);
      });
    });

    test('adds a valid IPv4 address to the whitelist', async ({ page }) => {
      const testIP = `${TEST_IP_PREFIX}.1.10`;
      // UUID of the entry created through the UI; deleted in the finally block.
      let addedUUID: string | null = null;

      try {
        await test.step('Fill IP address and reason fields', async () => {
          await page.getByTestId('whitelist-ip-input').fill(testIP);
          await page.getByTestId('whitelist-reason-input').fill('IPv4 E2E test entry');
        });

        await test.step('Submit the form and capture response', async () => {
          // Register the response listener before clicking to avoid a race.
          const responsePromise = page.waitForResponse(
            (resp) =>
              resp.url().includes('/api/v1/admin/crowdsec/whitelist') &&
              resp.request().method() === 'POST'
          );
          await page.getByTestId('whitelist-add-btn').click();
          const response = await responsePromise;
          expect(response.status()).toBe(201);
          const body = await response.json();
          addedUUID = body.uuid as string;
        });

        await test.step('Verify the entry appears in the table', async () => {
          await expect(page.getByRole('cell', { name: testIP, exact: true })).toBeVisible({ timeout: 10_000 });
          await expect(page.getByRole('cell', { name: 'IPv4 E2E test entry' })).toBeVisible({ timeout: 10_000 });
        });
      } finally {
        if (addedUUID) {
          await rc.delete(`/api/v1/admin/crowdsec/whitelist/${addedUUID}`);
        }
      }
    });

    test('adds a valid CIDR range to the whitelist', async ({ page }) => {
      const testCIDR = `${TEST_IP_PREFIX}.2.0/24`;
      let addedUUID: string | null = null;

      try {
        await test.step('Fill CIDR notation and reason', async () => {
          await page.getByTestId('whitelist-ip-input').fill(testCIDR);
          await page.getByTestId('whitelist-reason-input').fill('CIDR E2E test range');
        });

        await test.step('Submit the form and capture response', async () => {
          const responsePromise = page.waitForResponse(
            (resp) =>
              resp.url().includes('/api/v1/admin/crowdsec/whitelist') &&
              resp.request().method() === 'POST'
          );
          await page.getByTestId('whitelist-add-btn').click();
          const response = await responsePromise;
          expect(response.status()).toBe(201);
          const body = await response.json();
          addedUUID = body.uuid as string;
        });

        await test.step('Verify CIDR entry appears in the table', async () => {
          await expect(page.getByRole('cell', { name: testCIDR, exact: true })).toBeVisible({ timeout: 10_000 });
          await expect(page.getByRole('cell', { name: 'CIDR E2E test range' })).toBeVisible({ timeout: 10_000 });
        });
      } finally {
        if (addedUUID) {
          await rc.delete(`/api/v1/admin/crowdsec/whitelist/${addedUUID}`);
        }
      }
    });

    test('"Add My IP" button pre-fills the detected client IP', async ({ page }) => {
      // Ask the backend which client IP it sees, then compare with the UI.
      const ipResp = await rc.get('/api/v1/system/my-ip');
      expect(ipResp.ok()).toBeTruthy();
      const { ip: detectedIP } = await ipResp.json() as { ip: string };

      await test.step('Click the "Add My IP" button', async () => {
        await page.getByTestId('whitelist-add-my-ip-btn').click();
      });

      await test.step('Verify the IP input is pre-filled with the detected IP', async () => {
        await expect(page.getByTestId('whitelist-ip-input')).toHaveValue(detectedIP);
      });
    });

    test('shows an inline validation error for an invalid IP address', async ({ page }) => {
      await test.step('Fill the IP field with an invalid value', async () => {
        await page.getByTestId('whitelist-ip-input').fill('not-an-ip');
      });

      await test.step('Submit the form', async () => {
        await page.getByTestId('whitelist-add-btn').click();
      });

      await test.step('Verify the inline error element is visible with an error message', async () => {
        const errorEl = page.getByTestId('whitelist-ip-error');
        await expect(errorEl).toBeVisible();
        await expect(errorEl).toContainText(/invalid/i);
      });
    });

    test('shows a conflict error when adding a duplicate whitelist entry', async ({ page }) => {
      const testIP = `${TEST_IP_PREFIX}.3.10`;
      let addedUUID: string | null = null;

      try {
        await test.step('Pre-seed the whitelist entry via API', async () => {
          const addResp = await rc.post('/api/v1/admin/crowdsec/whitelist', {
            data: { ip_or_cidr: testIP, reason: 'duplicate seed' },
          });
          expect(addResp.status()).toBe(201);
          const body = await addResp.json();
          addedUUID = body.uuid as string;
        });

        await test.step('Reload the whitelist tab to see the seeded entry', async () => {
          await page.goto('/security/crowdsec');
          const whitelistTab = page.getByRole('tab', { name: 'Whitelist' });
          await expect(whitelistTab).toBeVisible({ timeout: 15_000 });
          await whitelistTab.click();
          await expect(page.getByRole('cell', { name: testIP, exact: true })).toBeVisible({ timeout: 10_000 });
        });

        await test.step('Attempt to add the same IP again', async () => {
          await page.getByTestId('whitelist-ip-input').fill(testIP);
          await page.getByTestId('whitelist-add-btn').click();
        });

        await test.step('Verify the conflict error is shown inline', async () => {
          const errorEl = page.getByTestId('whitelist-ip-error');
          await expect(errorEl).toBeVisible();
          await expect(errorEl).toContainText(/already exists/i);
        });
      } finally {
        if (addedUUID) {
          await rc.delete(`/api/v1/admin/crowdsec/whitelist/${addedUUID}`);
        }
      }
    });

    test('removes a whitelist entry via the delete confirmation modal', async ({ page }) => {
      const testIP = `${TEST_IP_PREFIX}.4.10`;
      let addedUUID: string | null = null;

      try {
        await test.step('Pre-seed a whitelist entry via API', async () => {
          const addResp = await rc.post('/api/v1/admin/crowdsec/whitelist', {
            data: { ip_or_cidr: testIP, reason: 'delete modal test' },
          });
          expect(addResp.status()).toBe(201);
          const body = await addResp.json();
          addedUUID = body.uuid as string;
        });

        await test.step('Reload the whitelist tab to see the seeded entry', async () => {
          await page.goto('/security/crowdsec');
          const whitelistTab = page.getByRole('tab', { name: 'Whitelist' });
          await expect(whitelistTab).toBeVisible({ timeout: 15_000 });
          await whitelistTab.click();
          await expect(page.getByRole('cell', { name: testIP, exact: true })).toBeVisible({ timeout: 10_000 });
        });

        await test.step('Click the delete button for the entry', async () => {
          // The accessible name embeds the IP; escape regex metacharacters
          // (the dots) before building the matcher.
          const deleteBtn = page.getByRole('button', {
            name: new RegExp(`Remove whitelist entry for ${testIP.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`, 'i'),
          });
          await expect(deleteBtn).toBeVisible();
          await deleteBtn.click();
        });

        await test.step('Verify the confirmation modal appears', async () => {
          const modal = page.getByRole('dialog');
          await expect(modal).toBeVisible();
          await expect(modal.locator('#whitelist-delete-modal-title')).toHaveText(
            'Remove Whitelist Entry'
          );
          await expect(modal).toMatchAriaSnapshot(`
            - dialog:
              - heading "Remove Whitelist Entry" [level=2]
          `);
        });

        await test.step('Confirm deletion and verify the entry is removed', async () => {
          const deleteResponsePromise = page.waitForResponse(
            (resp) =>
              resp.url().includes('/api/v1/admin/crowdsec/whitelist/') &&
              resp.request().method() === 'DELETE'
          );
          await page.getByRole('button', { name: 'Remove', exact: true }).click();
          const deleteResponse = await deleteResponsePromise;
          expect(deleteResponse.ok()).toBeTruthy();
          addedUUID = null; // cleaned up by the UI action

          await expect(page.getByRole('cell', { name: testIP, exact: true })).not.toBeVisible();
          await expect(page.getByTestId('whitelist-empty')).toBeVisible();
        });
      } finally {
        // Fallback cleanup if the UI delete failed
        if (addedUUID) {
          await rc.delete(`/api/v1/admin/crowdsec/whitelist/${addedUUID}`);
        }
      }
    });

    test('delete confirmation modal is dismissed by the Cancel button', async ({ page }) => {
      const testIP = `${TEST_IP_PREFIX}.5.10`;
      let addedUUID: string | null = null;

      try {
        await test.step('Pre-seed a whitelist entry via API', async () => {
          const addResp = await rc.post('/api/v1/admin/crowdsec/whitelist', {
            data: { ip_or_cidr: testIP, reason: 'cancel modal test' },
          });
          expect(addResp.status()).toBe(201);
          const body = await addResp.json();
          addedUUID = body.uuid as string;
        });

        await test.step('Reload the whitelist tab', async () => {
          await page.goto('/security/crowdsec');
          const whitelistTab = page.getByRole('tab', { name: 'Whitelist' });
          await expect(whitelistTab).toBeVisible({ timeout: 15_000 });
          await whitelistTab.click();
          await expect(page.getByRole('cell', { name: testIP, exact: true })).toBeVisible({ timeout: 10_000 });
        });

        await test.step('Open the delete modal', async () => {
          const deleteBtn = page.getByRole('button', {
            name: new RegExp(`Remove whitelist entry for ${testIP.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}`, 'i'),
          });
          await deleteBtn.click();
          await expect(page.getByRole('dialog')).toBeVisible();
        });

        await test.step('Cancel and verify the entry is still present', async () => {
          await page.getByRole('button', { name: 'Cancel' }).click();
          await expect(page.getByRole('dialog')).not.toBeVisible();
          await expect(page.getByRole('cell', { name: testIP, exact: true })).toBeVisible();
        });
      } finally {
        if (addedUUID) {
          await rc.delete(`/api/v1/admin/crowdsec/whitelist/${addedUUID}`);
        }
      }
    });

    test('add button is disabled when the IP field is empty', async ({ page }) => {
      await test.step('Verify add button is disabled with empty IP field', async () => {
        const ipInput = page.getByTestId('whitelist-ip-input');
        const addBtn = page.getByTestId('whitelist-add-btn');

        await expect(ipInput).toHaveValue('');
        await expect(addBtn).toBeDisabled();
      });

      await test.step('Button becomes enabled when IP is entered', async () => {
        await page.getByTestId('whitelist-ip-input').fill('192.168.1.1');
        await expect(page.getByTestId('whitelist-add-btn')).toBeEnabled();
      });

      await test.step('Button returns to disabled state when IP is cleared', async () => {
        await page.getByTestId('whitelist-ip-input').clear();
        await expect(page.getByTestId('whitelist-add-btn')).toBeDisabled();
      });
    });
  });
});
18
tests/fixtures/a11y.ts
vendored
Normal file
18
tests/fixtures/a11y.ts
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
import { test as base } from './auth-fixtures';
|
||||
import AxeBuilder from '@axe-core/playwright';
|
||||
|
||||
interface A11yFixtures {
|
||||
makeAxeBuilder: () => AxeBuilder;
|
||||
}
|
||||
|
||||
export const test = base.extend<A11yFixtures>({
|
||||
makeAxeBuilder: async ({ page }, use) => {
|
||||
const makeAxeBuilder = () =>
|
||||
new AxeBuilder({ page })
|
||||
.withTags(['wcag2a', 'wcag2aa', 'wcag22aa'])
|
||||
.exclude('.chart-container canvas');
|
||||
await use(makeAxeBuilder);
|
||||
},
|
||||
});
|
||||
|
||||
export { expect } from './auth-fixtures';
|
||||
58
tests/utils/a11y-helpers.ts
Normal file
58
tests/utils/a11y-helpers.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import { expect } from '../fixtures/test';
|
||||
import type { AxeResults, Result } from 'axe-core';
|
||||
|
||||
/** Axe violation severity levels, from most to least severe. */
export type ViolationImpact = 'critical' | 'serious' | 'moderate' | 'minor';

/** Options controlling which axe violations are treated as test failures. */
export interface A11yAssertionOptions {
  // Severities that fail the assertion; defaults to DEFAULT_FAIL_ON when omitted.
  failOn?: ViolationImpact[];
  // Axe rule IDs to skip — presumably already-triaged known issues; confirm with callers.
  knownViolations?: string[];
}

// Severities treated as failures when options.failOn is not provided.
const DEFAULT_FAIL_ON: ViolationImpact[] = ['critical', 'serious'];
|
||||
|
||||
export function getFailingViolations(
|
||||
results: AxeResults,
|
||||
options: A11yAssertionOptions = {},
|
||||
): Result[] {
|
||||
const failOn = options.failOn ?? DEFAULT_FAIL_ON;
|
||||
const knownViolations = new Set(options.knownViolations ?? []);
|
||||
|
||||
return results.violations.filter(
|
||||
(v) =>
|
||||
failOn.includes(v.impact as ViolationImpact) &&
|
||||
!knownViolations.has(v.id),
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Renders a single axe violation as a human-readable multi-line string:
 * a severity/id/description header, the help URL, and one entry per
 * affected DOM node (selector path, truncated HTML snippet, fix summary).
 */
export function formatViolation(violation: Result): string {
  const nodes = violation.nodes
    .map((node, i) => {
      // CSS selector path identifying the offending element.
      const selector = node.target.join(' ');
      // Cap the HTML snippet at 200 chars to keep failure output readable.
      const html = node.html.length > 200
        ? `${node.html.slice(0, 200)}…`
        : node.html;
      // failureSummary can be absent per axe typings; fall back to empty.
      const fix = node.failureSummary ?? '';
      return ` Node ${i + 1}: ${selector}\n HTML: ${html}\n Fix: ${fix}`;
    })
    .join('\n');

  return [
    // impact may be undefined per axe typings, hence the optional chaining.
    `[${violation.impact?.toUpperCase()}] ${violation.id}: ${violation.description}`,
    ` Help: ${violation.helpUrl}`,
    ` Affected nodes (${violation.nodes.length}):`,
    nodes,
  ].join('\n');
}
|
||||
|
||||
export function expectNoA11yViolations(
|
||||
results: AxeResults,
|
||||
options: A11yAssertionOptions = {},
|
||||
): void {
|
||||
const failing = getFailingViolations(results, options);
|
||||
|
||||
const message = failing.length > 0
|
||||
? `Found ${failing.length} accessibility violation(s):\n\n${failing.map(formatViolation).join('\n\n')}`
|
||||
: '';
|
||||
|
||||
expect(failing, message).toEqual([]);
|
||||
}
|
||||
6199
trivy-report.json
6199
trivy-report.json
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user