Merge branch 'feature/beta-release' into renovate/feature/beta-release-actions-attest-sbom-4.x

This commit is contained in:
Jeremy
2026-02-25 22:33:07 -05:00
committed by GitHub
24 changed files with 1508 additions and 345 deletions

View File

@@ -6,10 +6,6 @@ on:
- cron: '0 3 * * 0' # Weekly: Sundays at 03:00 UTC
workflow_dispatch:
inputs:
registries:
description: 'Comma-separated registries to prune (ghcr,dockerhub)'
required: false
default: 'ghcr,dockerhub'
keep_days:
description: 'Number of days to retain images (unprotected)'
required: false
@@ -28,47 +24,38 @@ permissions:
contents: read
jobs:
prune:
prune-ghcr:
runs-on: ubuntu-latest
env:
OWNER: ${{ github.repository_owner }}
IMAGE_NAME: charon
REGISTRIES: ${{ github.event.inputs.registries || 'ghcr,dockerhub' }}
KEEP_DAYS: ${{ github.event.inputs.keep_days || '30' }}
KEEP_LAST_N: ${{ github.event.inputs.keep_last_n || '30' }}
DRY_RUN: ${{ github.event.inputs.dry_run || 'false' }}
DRY_RUN: ${{ github.event_name == 'pull_request' && 'true' || github.event.inputs.dry_run || 'false' }}
PROTECTED_REGEX: '["^v?[0-9]+\\.[0-9]+\\.[0-9]+$","^latest$","^main$","^develop$"]'
PRUNE_UNTAGGED: 'true'
PRUNE_SBOM_TAGS: 'true'
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Install tools
run: |
sudo apt-get update && sudo apt-get install -y jq curl gh
sudo apt-get update && sudo apt-get install -y jq curl
- name: Show prune script being executed
run: |
echo "===== SCRIPT PATH ====="
pwd
ls -la scripts
echo "===== FIRST 20 LINES ====="
head -n 20 scripts/prune-container-images.sh
- name: Run container prune
- name: Run GHCR prune
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
run: |
chmod +x scripts/prune-container-images.sh
./scripts/prune-container-images.sh 2>&1 | tee prune-${{ github.run_id }}.log
chmod +x scripts/prune-ghcr.sh
./scripts/prune-ghcr.sh 2>&1 | tee prune-ghcr-${{ github.run_id }}.log
- name: Summarize prune results (space reclaimed)
if: ${{ always() }}
- name: Summarize GHCR results
if: always()
run: |
set -euo pipefail
SUMMARY_FILE=prune-summary.env
LOG_FILE=prune-${{ github.run_id }}.log
SUMMARY_FILE=prune-summary-ghcr.env
LOG_FILE=prune-ghcr-${{ github.run_id }}.log
human() {
local bytes=${1:-0}
@@ -76,7 +63,7 @@ jobs:
echo "0 B"
return
fi
awk -v b="$bytes" 'function human(x){ split("B KiB MiB GiB TiB",u," "); i=0; while(x>1024){x/=1024;i++} printf "%0.2f %s", x, u[i+1]} END{human(b)}'
awk -v b="$bytes" 'BEGIN { split("B KiB MiB GiB TiB",u," "); i=0; x=b; while(x>1024){x/=1024;i++} printf "%0.2f %s", x, u[i+1] }'
}
if [ -f "$SUMMARY_FILE" ]; then
@@ -86,34 +73,155 @@ jobs:
TOTAL_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
{
echo "## Container prune summary"
echo "## GHCR prune summary"
echo "- candidates: ${TOTAL_CANDIDATES} (≈ $(human "${TOTAL_CANDIDATES_BYTES}"))"
echo "- deleted: ${TOTAL_DELETED} (≈ $(human "${TOTAL_DELETED_BYTES}"))"
} >> "$GITHUB_STEP_SUMMARY"
printf 'PRUNE_SUMMARY: candidates=%s candidates_bytes=%s deleted=%s deleted_bytes=%s\n' \
"${TOTAL_CANDIDATES}" "${TOTAL_CANDIDATES_BYTES}" "${TOTAL_DELETED}" "${TOTAL_DELETED_BYTES}"
echo "Deleted approximately: $(human "${TOTAL_DELETED_BYTES}")"
echo "space_saved=$(human "${TOTAL_DELETED_BYTES}")" >> "$GITHUB_OUTPUT"
else
deleted_bytes=$(grep -oE '\( *approx +[0-9]+ bytes\)' "$LOG_FILE" | sed -E 's/.*approx +([0-9]+) bytes.*/\1/' | awk '{s+=$1} END {print s+0}' || true)
deleted_count=$(grep -cE 'deleting |DRY RUN: would delete' "$LOG_FILE" || true)
{
echo "## Container prune summary"
echo "## GHCR prune summary"
echo "- deleted (approx): ${deleted_count} (≈ $(human "${deleted_bytes}"))"
} >> "$GITHUB_STEP_SUMMARY"
printf 'PRUNE_SUMMARY: deleted_approx=%s deleted_bytes=%s\n' "${deleted_count}" "${deleted_bytes}"
echo "Deleted approximately: $(human "${deleted_bytes}")"
echo "space_saved=$(human "${deleted_bytes}")" >> "$GITHUB_OUTPUT"
fi
- name: Upload prune artifacts
if: ${{ always() }}
- name: Upload GHCR prune artifacts
if: always()
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
with:
name: prune-log-${{ github.run_id }}
name: prune-ghcr-log-${{ github.run_id }}
path: |
prune-${{ github.run_id }}.log
prune-summary.env
prune-ghcr-${{ github.run_id }}.log
prune-summary-ghcr.env
prune-dockerhub:
runs-on: ubuntu-latest
env:
OWNER: ${{ github.repository_owner }}
IMAGE_NAME: charon
KEEP_DAYS: ${{ github.event.inputs.keep_days || '30' }}
KEEP_LAST_N: ${{ github.event.inputs.keep_last_n || '30' }}
DRY_RUN: ${{ github.event_name == 'pull_request' && 'true' || github.event.inputs.dry_run || 'false' }}
PROTECTED_REGEX: '["^v?[0-9]+\\.[0-9]+\\.[0-9]+$","^latest$","^main$","^develop$"]'
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Install tools
run: |
sudo apt-get update && sudo apt-get install -y jq curl
- name: Run Docker Hub prune
env:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
run: |
chmod +x scripts/prune-dockerhub.sh
./scripts/prune-dockerhub.sh 2>&1 | tee prune-dockerhub-${{ github.run_id }}.log
- name: Summarize Docker Hub results
if: always()
run: |
set -euo pipefail
SUMMARY_FILE=prune-summary-dockerhub.env
LOG_FILE=prune-dockerhub-${{ github.run_id }}.log
human() {
local bytes=${1:-0}
if [ -z "$bytes" ] || [ "$bytes" -eq 0 ]; then
echo "0 B"
return
fi
awk -v b="$bytes" 'BEGIN { split("B KiB MiB GiB TiB",u," "); i=0; x=b; while(x>1024){x/=1024;i++} printf "%0.2f %s", x, u[i+1] }'
}
if [ -f "$SUMMARY_FILE" ]; then
TOTAL_CANDIDATES=$(grep -E '^TOTAL_CANDIDATES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
TOTAL_CANDIDATES_BYTES=$(grep -E '^TOTAL_CANDIDATES_BYTES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
TOTAL_DELETED=$(grep -E '^TOTAL_DELETED=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
TOTAL_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
{
echo "## Docker Hub prune summary"
echo "- candidates: ${TOTAL_CANDIDATES} (≈ $(human "${TOTAL_CANDIDATES_BYTES}"))"
echo "- deleted: ${TOTAL_DELETED} (≈ $(human "${TOTAL_DELETED_BYTES}"))"
} >> "$GITHUB_STEP_SUMMARY"
else
deleted_bytes=$(grep -oE '\( *approx +[0-9]+ bytes\)' "$LOG_FILE" | sed -E 's/.*approx +([0-9]+) bytes.*/\1/' | awk '{s+=$1} END {print s+0}' || true)
deleted_count=$(grep -cE 'deleting |DRY RUN: would delete' "$LOG_FILE" || true)
{
echo "## Docker Hub prune summary"
echo "- deleted (approx): ${deleted_count} (≈ $(human "${deleted_bytes}"))"
} >> "$GITHUB_STEP_SUMMARY"
fi
- name: Upload Docker Hub prune artifacts
if: always()
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
with:
name: prune-dockerhub-log-${{ github.run_id }}
path: |
prune-dockerhub-${{ github.run_id }}.log
prune-summary-dockerhub.env
summarize:
runs-on: ubuntu-latest
needs: [prune-ghcr, prune-dockerhub]
if: always()
steps:
- name: Download all artifacts
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
with:
pattern: prune-*-log-${{ github.run_id }}
merge-multiple: true
- name: Combined summary
run: |
set -euo pipefail
human() {
local bytes=${1:-0}
if [ -z "$bytes" ] || [ "$bytes" -eq 0 ]; then
echo "0 B"
return
fi
awk -v b="$bytes" 'BEGIN { split("B KiB MiB GiB TiB",u," "); i=0; x=b; while(x>1024){x/=1024;i++} printf "%0.2f %s", x, u[i+1] }'
}
GHCR_CANDIDATES=0 GHCR_CANDIDATES_BYTES=0 GHCR_DELETED=0 GHCR_DELETED_BYTES=0
if [ -f prune-summary-ghcr.env ]; then
GHCR_CANDIDATES=$(grep -E '^TOTAL_CANDIDATES=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
GHCR_CANDIDATES_BYTES=$(grep -E '^TOTAL_CANDIDATES_BYTES=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
GHCR_DELETED=$(grep -E '^TOTAL_DELETED=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
GHCR_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
fi
HUB_CANDIDATES=0 HUB_CANDIDATES_BYTES=0 HUB_DELETED=0 HUB_DELETED_BYTES=0
if [ -f prune-summary-dockerhub.env ]; then
HUB_CANDIDATES=$(grep -E '^TOTAL_CANDIDATES=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
HUB_CANDIDATES_BYTES=$(grep -E '^TOTAL_CANDIDATES_BYTES=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
HUB_DELETED=$(grep -E '^TOTAL_DELETED=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
HUB_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
fi
TOTAL_CANDIDATES=$((GHCR_CANDIDATES + HUB_CANDIDATES))
TOTAL_CANDIDATES_BYTES=$((GHCR_CANDIDATES_BYTES + HUB_CANDIDATES_BYTES))
TOTAL_DELETED=$((GHCR_DELETED + HUB_DELETED))
TOTAL_DELETED_BYTES=$((GHCR_DELETED_BYTES + HUB_DELETED_BYTES))
{
echo "## Combined container prune summary"
echo ""
echo "| Registry | Candidates | Deleted | Space Reclaimed |"
echo "|----------|------------|---------|-----------------|"
echo "| GHCR | ${GHCR_CANDIDATES} | ${GHCR_DELETED} | $(human "${GHCR_DELETED_BYTES}") |"
echo "| Docker Hub | ${HUB_CANDIDATES} | ${HUB_DELETED} | $(human "${HUB_DELETED_BYTES}") |"
echo "| **Total** | **${TOTAL_CANDIDATES}** | **${TOTAL_DELETED}** | **$(human "${TOTAL_DELETED_BYTES}")** |"
} >> "$GITHUB_STEP_SUMMARY"
printf 'PRUNE_SUMMARY: candidates=%s candidates_bytes=%s deleted=%s deleted_bytes=%s\n' \
"${TOTAL_CANDIDATES}" "${TOTAL_CANDIDATES_BYTES}" "${TOTAL_DELETED}" "${TOTAL_DELETED_BYTES}"
echo "Total space reclaimed: $(human "${TOTAL_DELETED_BYTES}")"

View File

@@ -570,7 +570,7 @@ jobs:
# Generate SBOM (Software Bill of Materials) for supply chain security
# Only for production builds (main/development) - feature branches use downstream supply-chain-pr.yml
- name: Generate SBOM
uses: anchore/sbom-action@28d71544de8eaf1b958d335707167c5f783590ad # v0.22.2
uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
with:
image: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}

View File

@@ -220,7 +220,7 @@ jobs:
echo "- ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:nightly@${{ steps.build.outputs.digest }}" >> "$GITHUB_STEP_SUMMARY"
- name: Generate SBOM
uses: anchore/sbom-action@28d71544de8eaf1b958d335707167c5f783590ad # v0.22.2
uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
with:
image: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:nightly@${{ steps.build.outputs.digest }}
format: cyclonedx-json

View File

@@ -28,7 +28,7 @@ jobs:
ref: ${{ github.sha }}
- name: Set up Go
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
with:
go-version: ${{ env.GO_VERSION }}
cache-dependency-path: backend/go.sum
@@ -134,7 +134,7 @@ jobs:
} >> "$GITHUB_ENV"
- name: Set up Go
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
with:
go-version: ${{ env.GO_VERSION }}
cache-dependency-path: backend/go.sum

View File

@@ -306,7 +306,7 @@ jobs:
- name: Upload scan artifacts
if: always() && steps.trivy-sarif-check.outputs.exists == 'true'
# actions/upload-artifact v4.4.3
uses: actions/upload-artifact@47309c993abb98030a35d55ef7ff34b7fa1074b5
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
with:
name: ${{ steps.pr-info.outputs.is_push == 'true' && format('security-scan-{0}', github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref_name) || format('security-scan-pr-{0}', steps.pr-info.outputs.pr_number) }}
path: |

View File

@@ -264,7 +264,7 @@ jobs:
# Generate SBOM using official Anchore action (auto-updated by Renovate)
- name: Generate SBOM
if: steps.set-target.outputs.image_name != ''
uses: anchore/sbom-action@28d71544de8eaf1b958d335707167c5f783590ad # v0.22.2
uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
id: sbom
with:
image: ${{ steps.set-target.outputs.image_name }}
@@ -369,7 +369,7 @@ jobs:
- name: Upload supply chain artifacts
if: steps.set-target.outputs.image_name != ''
# actions/upload-artifact v4.6.0
uses: actions/upload-artifact@47309c993abb98030a35d55ef7ff34b7fa1074b5
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
with:
name: ${{ steps.pr-number.outputs.is_push == 'true' && format('supply-chain-{0}', steps.sanitize.outputs.branch) || format('supply-chain-pr-{0}', steps.pr-number.outputs.pr_number) }}
path: |

View File

@@ -119,7 +119,7 @@ jobs:
# Generate SBOM using official Anchore action (auto-updated by Renovate)
- name: Generate and Verify SBOM
if: steps.image-check.outputs.exists == 'true'
uses: anchore/sbom-action@28d71544de8eaf1b958d335707167c5f783590ad # v0.22.2
uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
with:
image: ghcr.io/${{ github.repository_owner }}/charon:${{ steps.tag.outputs.tag }}
format: cyclonedx-json

View File

@@ -17,7 +17,7 @@ require (
github.com/sirupsen/logrus v1.9.4
github.com/stretchr/testify v1.11.1
golang.org/x/crypto v0.48.0
golang.org/x/net v0.50.0
golang.org/x/net v0.51.0
golang.org/x/text v0.34.0
golang.org/x/time v0.14.0
gopkg.in/natefinch/lumberjack.v2 v2.2.1

View File

@@ -200,6 +200,8 @@ golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
golang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo=
golang.org/x/net v0.51.0/go.mod h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=

View File

@@ -378,6 +378,38 @@ func TestNotificationProviderHandler_Test_RejectsGotifyTokenWithWhitespace(t *te
assert.NotContains(t, w.Body.String(), "secret-with-space")
}
// A nil error falls back to the generic PROVIDER_TEST_FAILED code,
// "dispatch" category, and the fixed "Provider test failed" message.
func TestClassifyProviderTestFailure_NilError(t *testing.T) {
code, category, message := classifyProviderTestFailure(nil)
assert.Equal(t, "PROVIDER_TEST_FAILED", code)
assert.Equal(t, "dispatch", category)
assert.Equal(t, "Provider test failed", message)
}
// An error mentioning "provider returned status 500" is classified as a
// remote rejection and the HTTP status is surfaced in the message.
func TestClassifyProviderTestFailure_DefaultStatusCode(t *testing.T) {
code, category, message := classifyProviderTestFailure(errors.New("provider returned status 500"))
assert.Equal(t, "PROVIDER_TEST_REMOTE_REJECTED", code)
assert.Equal(t, "dispatch", category)
assert.Contains(t, message, "HTTP 500")
}
// An unrecognized error string gets the same generic fallback as nil.
func TestClassifyProviderTestFailure_GenericError(t *testing.T) {
code, category, message := classifyProviderTestFailure(errors.New("something completely unexpected"))
assert.Equal(t, "PROVIDER_TEST_FAILED", code)
assert.Equal(t, "dispatch", category)
assert.Equal(t, "Provider test failed", message)
}
// An "invalid discord webhook url" error is classified as a URL validation
// failure ("validation" category) rather than a dispatch failure.
func TestClassifyProviderTestFailure_InvalidDiscordWebhookURL(t *testing.T) {
code, category, message := classifyProviderTestFailure(errors.New("invalid discord webhook url"))
assert.Equal(t, "PROVIDER_TEST_URL_INVALID", code)
assert.Equal(t, "validation", category)
assert.Contains(t, message, "Provider URL")
}
func TestClassifyProviderTestFailure_URLValidation(t *testing.T) {
code, category, message := classifyProviderTestFailure(errors.New("destination URL validation failed"))
@@ -748,3 +780,258 @@ func TestNotificationTemplateHandler_Preview_InvalidTemplate(t *testing.T) {
assert.Equal(t, 400, w.Code)
}
// Sending a "token" field to the preview endpoint must be rejected with 400
// and the TOKEN_WRITE_ONLY error code (tokens are write-only secrets).
func TestNotificationProviderHandler_Preview_TokenWriteOnly(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupNotificationCoverageDB(t)
svc := services.NewNotificationService(db)
h := NewNotificationProviderHandler(svc)
payload := map[string]any{
"template": "minimal",
"token": "secret-token-value",
}
body, _ := json.Marshal(payload)
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
setAdminContext(c)
c.Request = httptest.NewRequest("POST", "/providers/preview", bytes.NewBuffer(body))
c.Request.Header.Set("Content-Type", "application/json")
h.Preview(c)
assert.Equal(t, 400, w.Code)
assert.Contains(t, w.Body.String(), "TOKEN_WRITE_ONLY")
}
// Updating an existing provider with a different "type" (discord -> gotify)
// must be rejected with 400 and PROVIDER_TYPE_IMMUTABLE.
func TestNotificationProviderHandler_Update_TypeChangeRejected(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupNotificationCoverageDB(t)
svc := services.NewNotificationService(db)
h := NewNotificationProviderHandler(svc)
// Seed a discord provider that the update will try to re-type.
existing := models.NotificationProvider{
ID: "update-type-test",
Name: "Discord Provider",
Type: "discord",
URL: "https://discord.com/api/webhooks/123/abc",
}
require.NoError(t, db.Create(&existing).Error)
payload := map[string]any{
"name": "Changed Type Provider",
"type": "gotify",
"url": "https://gotify.example.com",
}
body, _ := json.Marshal(payload)
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
setAdminContext(c)
c.Params = gin.Params{{Key: "id", Value: "update-type-test"}}
c.Request = httptest.NewRequest("PUT", "/providers/update-type-test", bytes.NewBuffer(body))
c.Request.Header.Set("Content-Type", "application/json")
h.Update(c)
assert.Equal(t, 400, w.Code)
assert.Contains(t, w.Body.String(), "PROVIDER_TYPE_IMMUTABLE")
}
// A test request without a provider "id" must fail with 400 and
// MISSING_PROVIDER_ID.
func TestNotificationProviderHandler_Test_MissingProviderID(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupNotificationCoverageDB(t)
svc := services.NewNotificationService(db)
h := NewNotificationProviderHandler(svc)
payload := map[string]any{
"type": "discord",
}
body, _ := json.Marshal(payload)
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
setAdminContext(c)
c.Request = httptest.NewRequest("POST", "/providers/test", bytes.NewBuffer(body))
c.Request.Header.Set("Content-Type", "application/json")
h.Test(c)
assert.Equal(t, 400, w.Code)
assert.Contains(t, w.Body.String(), "MISSING_PROVIDER_ID")
}
// A test request referencing an id that is not in the database must fail
// with 404 and PROVIDER_NOT_FOUND.
func TestNotificationProviderHandler_Test_ProviderNotFound(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupNotificationCoverageDB(t)
svc := services.NewNotificationService(db)
h := NewNotificationProviderHandler(svc)
payload := map[string]any{
"type": "discord",
"id": "nonexistent-provider",
}
body, _ := json.Marshal(payload)
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
setAdminContext(c)
c.Request = httptest.NewRequest("POST", "/providers/test", bytes.NewBuffer(body))
c.Request.Header.Set("Content-Type", "application/json")
h.Test(c)
assert.Equal(t, 404, w.Code)
assert.Contains(t, w.Body.String(), "PROVIDER_NOT_FOUND")
}
// A stored provider with an empty URL cannot be tested: the handler must
// return 400 with PROVIDER_CONFIG_MISSING.
func TestNotificationProviderHandler_Test_EmptyProviderURL(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupNotificationCoverageDB(t)
svc := services.NewNotificationService(db)
h := NewNotificationProviderHandler(svc)
// Seed a provider whose URL is intentionally blank.
existing := models.NotificationProvider{
ID: "empty-url-test",
Name: "Empty URL Provider",
Type: "discord",
URL: "",
}
require.NoError(t, db.Create(&existing).Error)
payload := map[string]any{
"type": "discord",
"id": "empty-url-test",
}
body, _ := json.Marshal(payload)
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
setAdminContext(c)
c.Request = httptest.NewRequest("POST", "/providers/test", bytes.NewBuffer(body))
c.Request.Header.Set("Content-Type", "application/json")
h.Test(c)
assert.Equal(t, 400, w.Code)
assert.Contains(t, w.Body.String(), "PROVIDER_CONFIG_MISSING")
}
// Table-driven check of isProviderValidationError: template/parse/render and
// Discord-webhook errors count as validation errors; nil and unrelated
// errors do not.
func TestIsProviderValidationError_Comprehensive(t *testing.T) {
cases := []struct {
name string
err error
expect bool
}{
{"nil", nil, false},
{"invalid_custom_template", errors.New("invalid custom template: missing field"), true},
{"rendered_template", errors.New("rendered template exceeds maximum"), true},
{"failed_to_parse", errors.New("failed to parse template: unexpected end"), true},
{"failed_to_render", errors.New("failed to render template: missing key"), true},
{"invalid_discord_webhook", errors.New("invalid Discord webhook URL"), true},
{"unrelated_error", errors.New("database connection failed"), false},
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
assert.Equal(t, tc.expect, isProviderValidationError(tc.err))
})
}
}
// Updating a provider whose stored type ("slack") is not supported must fail
// with 400 and UNSUPPORTED_PROVIDER_TYPE.
func TestNotificationProviderHandler_Update_UnsupportedType(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupNotificationCoverageDB(t)
svc := services.NewNotificationService(db)
h := NewNotificationProviderHandler(svc)
existing := models.NotificationProvider{
ID: "unsupported-type",
Name: "Custom Provider",
Type: "slack",
URL: "https://hooks.slack.com/test",
}
require.NoError(t, db.Create(&existing).Error)
payload := map[string]any{
"name": "Updated Slack Provider",
"url": "https://hooks.slack.com/updated",
}
body, _ := json.Marshal(payload)
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
setAdminContext(c)
c.Params = gin.Params{{Key: "id", Value: "unsupported-type"}}
c.Request = httptest.NewRequest("PUT", "/providers/unsupported-type", bytes.NewBuffer(body))
c.Request.Header.Set("Content-Type", "application/json")
h.Update(c)
assert.Equal(t, 400, w.Code)
assert.Contains(t, w.Body.String(), "UNSUPPORTED_PROVIDER_TYPE")
}
// An update payload that omits "token" must not wipe the stored secret:
// after a successful (200) update the existing token is preserved in the DB.
func TestNotificationProviderHandler_Update_GotifyKeepsExistingToken(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupNotificationCoverageDB(t)
svc := services.NewNotificationService(db)
h := NewNotificationProviderHandler(svc)
// Seed a gotify provider that already holds a secret token.
existing := models.NotificationProvider{
ID: "gotify-keep-token",
Name: "Gotify Provider",
Type: "gotify",
URL: "https://gotify.example.com",
Token: "existing-secret-token",
}
require.NoError(t, db.Create(&existing).Error)
// Note: no "token" key in the update payload.
payload := map[string]any{
"name": "Updated Gotify",
"url": "https://gotify.example.com/new",
"template": "minimal",
}
body, _ := json.Marshal(payload)
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
setAdminContext(c)
c.Params = gin.Params{{Key: "id", Value: "gotify-keep-token"}}
c.Request = httptest.NewRequest("PUT", "/providers/gotify-keep-token", bytes.NewBuffer(body))
c.Request.Header.Set("Content-Type", "application/json")
h.Update(c)
assert.Equal(t, 200, w.Code)
// Re-read the row and confirm the original token survived the update.
var updated models.NotificationProvider
require.NoError(t, db.Where("id = ?", "gotify-keep-token").First(&updated).Error)
assert.Equal(t, "existing-secret-token", updated.Token)
}
// Simulates a database read failure by dropping the providers table; the
// handler must respond 500 with PROVIDER_READ_FAILED.
func TestNotificationProviderHandler_Test_ReadDBError(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupNotificationCoverageDB(t)
svc := services.NewNotificationService(db)
h := NewNotificationProviderHandler(svc)
// Drop the table so the subsequent lookup errors out.
_ = db.Migrator().DropTable(&models.NotificationProvider{})
payload := map[string]any{
"type": "discord",
"id": "some-provider",
}
body, _ := json.Marshal(payload)
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
setAdminContext(c)
c.Request = httptest.NewRequest("POST", "/providers/test", bytes.NewBuffer(body))
c.Request.Header.Set("Content-Type", "application/json")
h.Test(c)
assert.Equal(t, 500, w.Code)
assert.Contains(t, w.Body.String(), "PROVIDER_READ_FAILED")
}

View File

@@ -168,3 +168,34 @@ func TestLogPermissionAudit_ActorFallback(t *testing.T) {
assert.Equal(t, "permissions", audit.EventCategory)
assert.Contains(t, audit.Details, fmt.Sprintf("\"admin\":%v", false))
}
// No userID in the context: requireAuthenticatedAdmin must return false and
// write a 401 whose body mentions the missing Authorization header.
func TestRequireAuthenticatedAdmin_NoUserID(t *testing.T) {
t.Parallel()
ctx, rec := newTestContextWithRequest()
result := requireAuthenticatedAdmin(ctx)
assert.False(t, result)
assert.Equal(t, http.StatusUnauthorized, rec.Code)
assert.Contains(t, rec.Body.String(), "Authorization header required")
}
// userID present with role "admin": the guard passes.
func TestRequireAuthenticatedAdmin_UserIDPresentAndAdmin(t *testing.T) {
t.Parallel()
ctx, _ := newTestContextWithRequest()
ctx.Set("userID", uint(1))
ctx.Set("role", "admin")
result := requireAuthenticatedAdmin(ctx)
assert.True(t, result)
}
// userID present but role "user": the guard fails with 403 Forbidden.
func TestRequireAuthenticatedAdmin_UserIDPresentButNotAdmin(t *testing.T) {
t.Parallel()
ctx, rec := newTestContextWithRequest()
ctx.Set("userID", uint(1))
ctx.Set("role", "user")
result := requireAuthenticatedAdmin(ctx)
assert.False(t, result)
assert.Equal(t, http.StatusForbidden, rec.Code)
}

View File

@@ -3,6 +3,7 @@ package handlers
import (
"bytes"
"encoding/json"
"errors"
"net/http"
"net/http/httptest"
"strconv"
@@ -2639,3 +2640,68 @@ func TestResendInvite_WithExpiredInvite(t *testing.T) {
db.First(&updatedUser, user.ID)
assert.True(t, updatedUser.InviteExpires.After(time.Now()))
}
// ===== Additional coverage for uncovered utility functions =====
// Table-driven check of isSetupConflictError: unique-constraint violations
// and DB-lock messages (matched case-insensitively) count as conflicts;
// nil, empty, and unrelated errors do not.
func TestIsSetupConflictError(t *testing.T) {
tests := []struct {
name string
err error
expected bool
}{
{"nil error", nil, false},
{"unique constraint failed", errors.New("UNIQUE constraint failed: users.email"), true},
{"duplicate key", errors.New("duplicate key value violates unique constraint"), true},
{"database is locked", errors.New("database is locked"), true},
{"database table is locked", errors.New("database table is locked"), true},
{"case insensitive", errors.New("UNIQUE CONSTRAINT FAILED"), true},
{"unrelated error", errors.New("connection refused"), false},
{"empty error", errors.New(""), false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := isSetupConflictError(tt.err)
assert.Equal(t, tt.expected, result)
})
}
}
// maskSecretForResponse: any non-blank secret becomes "********";
// empty or whitespace-only input yields the empty string.
func TestMaskSecretForResponse(t *testing.T) {
tests := []struct {
name string
input string
expected string
}{
{"non-empty secret", "my-secret-key", "********"},
{"empty string", "", ""},
{"whitespace only", " ", ""},
{"single char", "x", "********"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := maskSecretForResponse(tt.input)
assert.Equal(t, tt.expected, result)
})
}
}
// redactInviteURL: any non-blank URL becomes "[REDACTED]";
// empty or whitespace-only input yields the empty string.
func TestRedactInviteURL(t *testing.T) {
tests := []struct {
name string
input string
expected string
}{
{"non-empty url", "https://example.com/invite/abc123", "[REDACTED]"},
{"empty string", "", ""},
{"whitespace only", " ", ""},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := redactInviteURL(tt.input)
assert.Equal(t, tt.expected, result)
})
}
}

View File

@@ -5,6 +5,7 @@ import (
"errors"
"fmt"
"io"
"net"
"net/http"
"net/http/httptest"
neturl "net/url"
@@ -497,3 +498,426 @@ func TestBuildSafeRequestURLWithTLSServer(t *testing.T) {
t.Fatalf("expected host header %q, got %q", serverURL.Host, hostHeader)
}
}
// ===== Additional coverage for uncovered paths =====
// errReader is an io.Reader stub whose Read always fails, used to exercise
// read-error paths.
type errReader struct{}
func (errReader) Read([]byte) (int, error) {
return 0, errors.New("simulated read error")
}
// roundTripFunc adapts a plain function into an http.RoundTripper.
type roundTripFunc func(*http.Request) (*http.Response, error)
func (f roundTripFunc) RoundTrip(req *http.Request) (*http.Response, error) {
return f(req)
}
// applyRedirectGuard must tolerate a nil client without panicking.
func TestApplyRedirectGuardNilClient(t *testing.T) {
wrapper := NewNotifyHTTPWrapper()
wrapper.applyRedirectGuard(nil)
}
// A nil URL fails destination validation.
func TestGuardDestinationNilURL(t *testing.T) {
wrapper := NewNotifyHTTPWrapper()
err := wrapper.guardDestination(nil)
if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
t.Fatalf("expected validation failure for nil URL, got: %v", err)
}
}
// A URL with an empty hostname fails destination validation.
func TestGuardDestinationEmptyHostname(t *testing.T) {
wrapper := NewNotifyHTTPWrapper()
err := wrapper.guardDestination(&neturl.URL{Scheme: "https", Host: ""})
if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
t.Fatalf("expected validation failure for empty hostname, got: %v", err)
}
}
// Embedded userinfo (user@host) is rejected by destination validation.
func TestGuardDestinationUserInfoRejection(t *testing.T) {
wrapper := NewNotifyHTTPWrapper()
u := &neturl.URL{Scheme: "https", Host: "example.com", User: neturl.User("admin")}
err := wrapper.guardDestination(u)
if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
t.Fatalf("expected userinfo rejection, got: %v", err)
}
}
// A URL fragment is rejected by destination validation.
func TestGuardDestinationFragmentRejection(t *testing.T) {
wrapper := NewNotifyHTTPWrapper()
u := &neturl.URL{Scheme: "https", Host: "example.com", Fragment: "section"}
err := wrapper.guardDestination(u)
if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
t.Fatalf("expected fragment rejection, got: %v", err)
}
}
// A private-range IP host is rejected when allowHTTP is off.
func TestGuardDestinationPrivateIPRejection(t *testing.T) {
wrapper := NewNotifyHTTPWrapper()
wrapper.allowHTTP = false
err := wrapper.guardDestination(&neturl.URL{Scheme: "https", Host: "192.168.1.1"})
if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
t.Fatalf("expected private IP rejection, got: %v", err)
}
}
// With allowHTTP off, isAllowedDestinationIP must reject nil, unspecified,
// multicast, link-local, loopback, and all RFC 1918 private addresses.
func TestIsAllowedDestinationIPEdgeCases(t *testing.T) {
wrapper := NewNotifyHTTPWrapper()
wrapper.allowHTTP = false
tests := []struct {
name string
hostname string
ip net.IP
expected bool
}{
{"nil IP", "", nil, false},
{"unspecified", "0.0.0.0", net.IPv4zero, false},
{"multicast", "224.0.0.1", net.ParseIP("224.0.0.1"), false},
{"link-local unicast", "169.254.1.1", net.ParseIP("169.254.1.1"), false},
{"loopback without allowHTTP", "127.0.0.1", net.ParseIP("127.0.0.1"), false},
{"private 10.x", "10.0.0.1", net.ParseIP("10.0.0.1"), false},
{"private 172.16.x", "172.16.0.1", net.ParseIP("172.16.0.1"), false},
{"private 192.168.x", "192.168.1.1", net.ParseIP("192.168.1.1"), false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := wrapper.isAllowedDestinationIP(tt.hostname, tt.ip)
if result != tt.expected {
t.Fatalf("isAllowedDestinationIP(%q, %v) = %v, want %v", tt.hostname, tt.ip, result, tt.expected)
}
})
}
}
// With allowHTTP on, loopback is allowed only when the hostname itself is
// local ("localhost"); a non-local hostname resolving to 127.0.0.1 is still
// rejected.
func TestIsAllowedDestinationIPLoopbackAllowHTTP(t *testing.T) {
wrapper := NewNotifyHTTPWrapper()
wrapper.allowHTTP = true
if !wrapper.isAllowedDestinationIP("localhost", net.ParseIP("127.0.0.1")) {
t.Fatal("expected loopback allowed for localhost with allowHTTP")
}
if wrapper.isAllowedDestinationIP("not-localhost", net.ParseIP("127.0.0.1")) {
t.Fatal("expected loopback rejected for non-localhost hostname")
}
}
// Exercises isLocalDestinationHost over local spellings (case-insensitive
// "localhost", IPv4/IPv6 loopback literals) and non-local inputs, including
// the empty string.
func TestIsLocalDestinationHost(t *testing.T) {
	cases := map[string]bool{
		"localhost":   true,
		"LOCALHOST":   true,
		"127.0.0.1":   true,
		"::1":         true,
		"example.com": false,
		"":            false,
	}
	for host, want := range cases {
		host, want := host, want
		t.Run(host, func(t *testing.T) {
			if got := isLocalDestinationHost(host); got != want {
				t.Fatalf("isLocalDestinationHost(%q) = %v, want %v", host, got, want)
			}
		})
	}
}
func TestShouldRetryComprehensive(t *testing.T) {
tests := []struct {
name string
resp *http.Response
err error
expected bool
}{
{"nil resp nil err", nil, nil, false},
{"timeout error string", nil, errors.New("operation timeout"), true},
{"connection error string", nil, errors.New("connection reset"), true},
{"unrelated error", nil, errors.New("json parse error"), false},
{"500 response", &http.Response{StatusCode: 500}, nil, true},
{"502 response", &http.Response{StatusCode: 502}, nil, true},
{"503 response", &http.Response{StatusCode: 503}, nil, true},
{"429 response", &http.Response{StatusCode: 429}, nil, true},
{"200 response", &http.Response{StatusCode: 200}, nil, false},
{"400 response", &http.Response{StatusCode: 400}, nil, false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := shouldRetry(tt.resp, tt.err); got != tt.expected {
t.Fatalf("shouldRetry = %v, want %v", got, tt.expected)
}
})
}
}
func TestShouldRetryNetError(t *testing.T) {
netErr := &net.DNSError{Err: "no such host", Name: "example.invalid"}
if !shouldRetry(nil, netErr) {
t.Fatal("expected net.Error to trigger retry via errors.As fallback")
}
}
func TestReadCappedResponseBodyReadError(t *testing.T) {
_, err := readCappedResponseBody(errReader{})
if err == nil || !strings.Contains(err.Error(), "read response body") {
t.Fatalf("expected read body error, got: %v", err)
}
}
func TestReadCappedResponseBodyOversize(t *testing.T) {
oversized := strings.NewReader(strings.Repeat("x", MaxNotifyResponseBodyBytes+10))
_, err := readCappedResponseBody(oversized)
if err == nil || !strings.Contains(err.Error(), "response payload exceeds") {
t.Fatalf("expected oversize error, got: %v", err)
}
}
// TestReadCappedResponseBodySuccess checks a small body is returned verbatim.
func TestReadCappedResponseBodySuccess(t *testing.T) {
	body, err := readCappedResponseBody(strings.NewReader("hello"))
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if got := string(body); got != "hello" {
		t.Fatalf("expected 'hello', got %q", got)
	}
}
// TestHasDisallowedQueryAuthKeyAllVariants table-tests detection of
// credential-style query keys, including case-insensitive variants.
func TestHasDisallowedQueryAuthKeyAllVariants(t *testing.T) {
	cases := []struct {
		name     string
		key      string
		expected bool
	}{
		{"token", "token", true},
		{"auth", "auth", true},
		{"apikey", "apikey", true},
		{"api_key", "api_key", true},
		{"TOKEN uppercase", "TOKEN", true},
		{"Api_Key mixed", "Api_Key", true},
		{"safe key", "callback", false},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			params := neturl.Values{tc.key: []string{"secret"}}
			if got := hasDisallowedQueryAuthKey(params); got != tc.expected {
				t.Fatalf("hasDisallowedQueryAuthKey with key %q = %v, want %v", tc.key, got, tc.expected)
			}
		})
	}
}
// TestHasDisallowedQueryAuthKeyEmptyQuery checks an empty query is accepted.
func TestHasDisallowedQueryAuthKeyEmptyQuery(t *testing.T) {
	empty := neturl.Values{}
	if got := hasDisallowedQueryAuthKey(empty); got {
		t.Fatal("expected empty query to be safe")
	}
}
// TestNotifyMaxRedirects table-tests parsing of CHARON_NOTIFY_MAX_REDIRECTS:
// empty/invalid/negative values yield 0, values above 5 are clamped to 5,
// and surrounding whitespace is tolerated.
func TestNotifyMaxRedirects(t *testing.T) {
	cases := []struct {
		name     string
		envValue string
		expected int
	}{
		{"empty", "", 0},
		{"valid 3", "3", 3},
		{"zero", "0", 0},
		{"negative", "-1", 0},
		{"above max", "10", 5},
		{"exactly 5", "5", 5},
		{"invalid", "abc", 0},
		{"whitespace", " 2 ", 2},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			t.Setenv("CHARON_NOTIFY_MAX_REDIRECTS", tc.envValue)
			if got := notifyMaxRedirects(); got != tc.expected {
				t.Fatalf("notifyMaxRedirects() = %d, want %d", got, tc.expected)
			}
		})
	}
}
// TestResolveAllowedDestinationIPRejectsPrivateIP verifies an RFC1918 address
// fails destination validation when plain HTTP is not allowed.
func TestResolveAllowedDestinationIPRejectsPrivateIP(t *testing.T) {
	w := NewNotifyHTTPWrapper()
	w.allowHTTP = false
	_, err := w.resolveAllowedDestinationIP("192.168.1.1")
	if err == nil {
		t.Fatalf("expected private IP rejection, got: %v", err)
	}
	if !strings.Contains(err.Error(), "destination URL validation failed") {
		t.Fatalf("expected private IP rejection, got: %v", err)
	}
}
// TestResolveAllowedDestinationIPRejectsLoopback verifies 127.0.0.1 fails
// destination validation when plain HTTP is not allowed.
func TestResolveAllowedDestinationIPRejectsLoopback(t *testing.T) {
	w := NewNotifyHTTPWrapper()
	w.allowHTTP = false
	_, err := w.resolveAllowedDestinationIP("127.0.0.1")
	if err == nil {
		t.Fatalf("expected loopback rejection, got: %v", err)
	}
	if !strings.Contains(err.Error(), "destination URL validation failed") {
		t.Fatalf("expected loopback rejection, got: %v", err)
	}
}
// TestResolveAllowedDestinationIPAllowsPublic verifies a public address
// (1.1.1.1) passes validation and resolves to itself.
func TestResolveAllowedDestinationIPAllowsPublic(t *testing.T) {
	w := NewNotifyHTTPWrapper()
	resolved, err := w.resolveAllowedDestinationIP("1.1.1.1")
	if err != nil {
		t.Fatalf("expected public IP to be allowed, got: %v", err)
	}
	want := net.ParseIP("1.1.1.1")
	if !resolved.Equal(want) {
		t.Fatalf("expected 1.1.1.1, got %v", resolved)
	}
}
// TestBuildSafeRequestURLRejectsPrivateHostname verifies buildSafeRequestURL
// refuses a URL whose host is a private address.
func TestBuildSafeRequestURLRejectsPrivateHostname(t *testing.T) {
	w := NewNotifyHTTPWrapper()
	w.allowHTTP = false
	target := neturl.URL{Scheme: "https", Host: "192.168.1.1", Path: "/hook"}
	_, _, err := w.buildSafeRequestURL(&target)
	if err == nil || !strings.Contains(err.Error(), "destination URL validation failed") {
		t.Fatalf("expected private host rejection, got: %v", err)
	}
}
// TestWaitBeforeRetryBasic checks exponential backoff with jitter zeroed out:
// attempt 1 sleeps BaseDelay, attempt 2 sleeps 2*BaseDelay. The two
// waitBeforeRetry calls are order-sensitive (both write sleptDuration).
func TestWaitBeforeRetryBasic(t *testing.T) {
	wrapper := NewNotifyHTTPWrapper()
	var sleptDuration time.Duration
	// Capture the requested delay instead of actually sleeping.
	wrapper.sleep = func(d time.Duration) { sleptDuration = d }
	// Deterministic: disable jitter entirely.
	wrapper.jitterNanos = func(int64) int64 { return 0 }
	wrapper.retryPolicy.BaseDelay = 100 * time.Millisecond
	wrapper.retryPolicy.MaxDelay = 1 * time.Second
	wrapper.waitBeforeRetry(1)
	if sleptDuration != 100*time.Millisecond {
		t.Fatalf("expected 100ms delay for attempt 1, got %v", sleptDuration)
	}
	wrapper.waitBeforeRetry(2)
	if sleptDuration != 200*time.Millisecond {
		t.Fatalf("expected 200ms delay for attempt 2, got %v", sleptDuration)
	}
}
// TestWaitBeforeRetryClampedToMax verifies the backoff delay never exceeds
// the policy's MaxDelay, even for a late attempt number.
func TestWaitBeforeRetryClampedToMax(t *testing.T) {
	w := NewNotifyHTTPWrapper()
	var observed time.Duration
	w.sleep = func(d time.Duration) { observed = d }
	w.jitterNanos = func(int64) int64 { return 0 }
	w.retryPolicy.BaseDelay = 1 * time.Second
	w.retryPolicy.MaxDelay = 2 * time.Second
	w.waitBeforeRetry(5)
	if want := 2 * time.Second; observed != want {
		t.Fatalf("expected clamped delay of 2s, got %v", observed)
	}
}
// TestWaitBeforeRetryDefaultJitter exercises the nil-jitterNanos path so the
// wrapper falls back to its default jitter source without panicking.
func TestWaitBeforeRetryDefaultJitter(t *testing.T) {
	w := NewNotifyHTTPWrapper()
	w.jitterNanos = nil
	w.sleep = func(time.Duration) {}
	w.retryPolicy.BaseDelay = 100 * time.Millisecond
	w.retryPolicy.MaxDelay = 1 * time.Second
	w.waitBeforeRetry(1)
}
// TestHTTPWrapperSendExhaustsRetriesOnTransportError verifies Send retries a
// retryable-looking transport error until its attempt budget is spent
// (3 attempts observed) and then reports "outbound request failed".
func TestHTTPWrapperSendExhaustsRetriesOnTransportError(t *testing.T) {
	var calls int32
	wrapper := NewNotifyHTTPWrapper()
	wrapper.allowHTTP = true
	// Zero out sleeping/jitter so the retry loop runs instantly.
	wrapper.sleep = func(time.Duration) {}
	wrapper.jitterNanos = func(int64) int64 { return 0 }
	// Stub the HTTP client so every attempt fails with the same transport error.
	wrapper.httpClientFactory = func(bool, int) *http.Client {
		return &http.Client{
			Transport: roundTripFunc(func(req *http.Request) (*http.Response, error) {
				atomic.AddInt32(&calls, 1) // count attempts across retries
				return nil, errors.New("connection timeout failure")
			}),
		}
	}
	_, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
		URL:  "http://localhost:19999/hook",
		Body: []byte(`{"msg":"test"}`),
	})
	if err == nil {
		t.Fatal("expected error after transport failures")
	}
	if !strings.Contains(err.Error(), "outbound request failed") {
		t.Fatalf("expected outbound request failed message, got: %v", err)
	}
	if got := atomic.LoadInt32(&calls); got != 3 {
		t.Fatalf("expected 3 attempts, got %d", got)
	}
}
// TestHTTPWrapperSendExhaustsRetriesOn500 verifies a server that always
// answers 500 is retried until the attempt budget (3) is exhausted and the
// final error includes the status.
func TestHTTPWrapperSendExhaustsRetriesOn500(t *testing.T) {
	var calls int32
	// Real local server counting requests; every response is 500.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		atomic.AddInt32(&calls, 1)
		w.WriteHeader(http.StatusInternalServerError)
	}))
	defer server.Close()
	wrapper := NewNotifyHTTPWrapper()
	wrapper.allowHTTP = true
	// Zero out sleeping/jitter so retries run instantly.
	wrapper.sleep = func(time.Duration) {}
	wrapper.jitterNanos = func(int64) int64 { return 0 }
	_, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
		URL:  server.URL,
		Body: []byte(`{"msg":"test"}`),
	})
	if err == nil || !strings.Contains(err.Error(), "status 500") {
		t.Fatalf("expected 500 status error, got: %v", err)
	}
	if got := atomic.LoadInt32(&calls); got != 3 {
		t.Fatalf("expected 3 attempts for 500 retries, got %d", got)
	}
}
// TestHTTPWrapperSendTransportErrorNoRetry verifies that with MaxAttempts=1 a
// transport failure is surfaced immediately as "outbound request failed".
func TestHTTPWrapperSendTransportErrorNoRetry(t *testing.T) {
	wrapper := NewNotifyHTTPWrapper()
	wrapper.allowHTTP = true
	// Single attempt only: the first failure must be terminal.
	wrapper.retryPolicy.MaxAttempts = 1
	wrapper.httpClientFactory = func(bool, int) *http.Client {
		return &http.Client{
			Transport: roundTripFunc(func(req *http.Request) (*http.Response, error) {
				return nil, errors.New("some unretryable error")
			}),
		}
	}
	_, err := wrapper.Send(context.Background(), HTTPWrapperRequest{
		URL:  "http://localhost:19999/hook",
		Body: []byte(`{"msg":"test"}`),
	})
	if err == nil || !strings.Contains(err.Error(), "outbound request failed") {
		t.Fatalf("expected outbound request failed, got: %v", err)
	}
}
// TestSanitizeTransportErrorReasonNetworkUnreachable checks the unreachable-
// network error string is mapped to the sanitized reason.
func TestSanitizeTransportErrorReasonNetworkUnreachable(t *testing.T) {
	got := sanitizeTransportErrorReason(errors.New("connect: network is unreachable"))
	if want := "network unreachable"; got != want {
		t.Fatalf("expected 'network unreachable', got %q", got)
	}
}
// TestSanitizeTransportErrorReasonCertificate checks an x509 certificate
// error string is mapped to the sanitized TLS-handshake reason.
func TestSanitizeTransportErrorReasonCertificate(t *testing.T) {
	got := sanitizeTransportErrorReason(errors.New("x509: certificate signed by unknown authority"))
	if want := "tls handshake failed"; got != want {
		t.Fatalf("expected 'tls handshake failed', got %q", got)
	}
}
// TestAllowNotifyHTTPOverride checks the HTTP override is enabled when
// running inside the test binary.
func TestAllowNotifyHTTPOverride(t *testing.T) {
	if !allowNotifyHTTPOverride() {
		t.Fatal("expected allowHTTP to be true in test binary")
	}
}

View File

@@ -3,6 +3,7 @@ package services
import (
"context"
"errors"
"fmt"
"net"
"net/url"
"os"
@@ -263,3 +264,94 @@ func TestBuildLocalDockerUnavailableDetails_GenericError(t *testing.T) {
assert.Contains(t, details, "uid=")
assert.Contains(t, details, "gid=")
}
// ===== Additional coverage for uncovered paths =====
// TestDockerUnavailableError_NilDetails ensures Details() on a nil receiver
// returns an empty string rather than panicking.
func TestDockerUnavailableError_NilDetails(t *testing.T) {
	var absent *DockerUnavailableError
	assert.Equal(t, "", absent.Details())
}
// TestExtractErrno_UrlErrorWrapping checks an errno wrapped in url.Error is
// unwrapped and reported.
func TestExtractErrno_UrlErrorWrapping(t *testing.T) {
	wrapped := &url.Error{Op: "dial", URL: "unix:///var/run/docker.sock", Err: syscall.EACCES}
	errno, found := extractErrno(wrapped)
	assert.True(t, found)
	assert.Equal(t, syscall.EACCES, errno)
}
// TestExtractErrno_SyscallError checks an errno inside os.SyscallError is
// extracted.
func TestExtractErrno_SyscallError(t *testing.T) {
	wrapped := &os.SyscallError{Syscall: "connect", Err: syscall.ECONNREFUSED}
	errno, found := extractErrno(wrapped)
	assert.True(t, found)
	assert.Equal(t, syscall.ECONNREFUSED, errno)
}
// TestExtractErrno_NilError checks nil input yields no errno.
func TestExtractErrno_NilError(t *testing.T) {
	_, found := extractErrno(nil)
	assert.False(t, found)
}
// TestExtractErrno_NonSyscallError checks a plain error carries no errno.
func TestExtractErrno_NonSyscallError(t *testing.T) {
	_, found := extractErrno(errors.New("some generic error"))
	assert.False(t, found)
}
// TestExtractErrno_OpErrorWrapping checks an errno wrapped in net.OpError is
// extracted.
func TestExtractErrno_OpErrorWrapping(t *testing.T) {
	wrapped := &net.OpError{Op: "dial", Net: "unix", Err: syscall.EPERM}
	errno, found := extractErrno(wrapped)
	assert.True(t, found)
	assert.Equal(t, syscall.EPERM, errno)
}
// TestExtractErrno_NestedUrlSyscallOpError drills through the full wrapping
// chain url.Error -> net.OpError -> os.SyscallError to the underlying errno.
func TestExtractErrno_NestedUrlSyscallOpError(t *testing.T) {
	opErr := &net.OpError{
		Op:  "dial",
		Net: "unix",
		Err: &os.SyscallError{Syscall: "connect", Err: syscall.EACCES},
	}
	outer := &url.Error{Op: "Get", URL: "unix:///var/run/docker.sock", Err: opErr}
	errno, found := extractErrno(outer)
	assert.True(t, found)
	assert.Equal(t, syscall.EACCES, errno)
}
func TestSocketPathFromDockerHost(t *testing.T) {
tests := []struct {
name string
host string
expected string
}{
{"unix socket", "unix:///var/run/docker.sock", "/var/run/docker.sock"},
{"tcp host", "tcp://192.168.1.1:2375", ""},
{"empty", "", ""},
{"whitespace unix", " unix:///tmp/docker.sock ", "/tmp/docker.sock"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := socketPathFromDockerHost(tt.host)
assert.Equal(t, tt.expected, result)
})
}
}
// TestBuildLocalDockerUnavailableDetails_OsErrNotExist checks a wrapped
// os.ErrNotExist produces a not-found message mentioning the socket path.
func TestBuildLocalDockerUnavailableDetails_OsErrNotExist(t *testing.T) {
	wrapped := fmt.Errorf("wrapped: %w", os.ErrNotExist)
	details := buildLocalDockerUnavailableDetails(wrapped, "unix:///var/run/docker.sock")
	assert.Contains(t, details, "not found")
	assert.Contains(t, details, "/var/run/docker.sock")
}
// TestBuildLocalDockerUnavailableDetails_NonUnixHost checks a TCP host yields
// a connection message that echoes the host string.
func TestBuildLocalDockerUnavailableDetails_NonUnixHost(t *testing.T) {
	connErr := errors.New("cannot connect")
	details := buildLocalDockerUnavailableDetails(connErr, "tcp://192.168.1.1:2375")
	assert.Contains(t, details, "Cannot connect")
	assert.Contains(t, details, "tcp://192.168.1.1:2375")
}
// TestBuildLocalDockerUnavailableDetails_EPERMWithStatFail checks an EPERM on
// a nonexistent socket reports both inaccessibility and the failed stat.
func TestBuildLocalDockerUnavailableDetails_EPERMWithStatFail(t *testing.T) {
	permErr := &net.OpError{Op: "dial", Net: "unix", Err: syscall.EPERM}
	details := buildLocalDockerUnavailableDetails(permErr, "unix:///tmp/nonexistent-eperm.sock")
	assert.Contains(t, details, "not accessible")
	assert.Contains(t, details, "could not be stat")
}

View File

@@ -2538,3 +2538,79 @@ func TestTestProvider_WebhookWorksWhenFlagExplicitlyFalse(t *testing.T) {
err := svc.TestProvider(provider)
assert.NoError(t, err)
}
// TestUpdateProvider_TypeMutationBlocked verifies UpdateProvider rejects an
// attempt to change an existing provider's type (webhook -> discord) with a
// "cannot change provider type" error.
func TestUpdateProvider_TypeMutationBlocked(t *testing.T) {
	db := setupNotificationTestDB(t)
	svc := NewNotificationService(db)
	// Seed an existing webhook provider.
	existing := models.NotificationProvider{
		ID:   "prov-type-mut",
		Type: "webhook",
		Name: "Original",
		URL:  "https://example.com/hook",
	}
	require.NoError(t, db.Create(&existing).Error)
	// Same ID, different Type — the update must be refused.
	update := models.NotificationProvider{
		ID:   "prov-type-mut",
		Type: "discord",
		Name: "Changed",
		URL:  "https://discord.com/api/webhooks/123/abc",
	}
	err := svc.UpdateProvider(&update)
	require.Error(t, err)
	assert.Contains(t, err.Error(), "cannot change provider type")
}
// TestUpdateProvider_GotifyKeepsExistingToken verifies that updating a Gotify
// provider with an empty Token keeps the stored token (observed via the
// update struct being back-filled with the original value).
func TestUpdateProvider_GotifyKeepsExistingToken(t *testing.T) {
	db := setupNotificationTestDB(t)
	svc := NewNotificationService(db)
	// Seed a Gotify provider that already has a token.
	existing := models.NotificationProvider{
		ID:    "prov-gotify-token",
		Type:  "gotify",
		Name:  "My Gotify",
		URL:   "https://gotify.example.com",
		Token: "original-secret-token",
	}
	require.NoError(t, db.Create(&existing).Error)
	// Empty Token on update means "keep the existing token".
	update := models.NotificationProvider{
		ID:    "prov-gotify-token",
		Type:  "gotify",
		Name:  "My Gotify Updated",
		URL:   "https://gotify.example.com",
		Token: "",
	}
	err := svc.UpdateProvider(&update)
	require.NoError(t, err)
	assert.Equal(t, "original-secret-token", update.Token)
}
// TestGetFeatureFlagValue_FoundSetting table-tests truthy parsing of a stored
// Setting value: "true"/"yes"/"1" (case- and whitespace-insensitive) read as
// true; "false"/"no"/"0" read as false.
func TestGetFeatureFlagValue_FoundSetting(t *testing.T) {
	db := setupNotificationTestDB(t)
	require.NoError(t, db.AutoMigrate(&models.Setting{}))
	svc := NewNotificationService(db)
	tests := []struct {
		name     string
		value    string
		expected bool
	}{
		{"true_string", "true", true},
		{"yes_string", "yes", true},
		{"one_string", "1", true},
		{"false_string", "false", false},
		{"no_string", "no", false},
		{"zero_string", "0", false},
		{"whitespace_true", " True ", true},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Reset the row so each subtest only sees its own value.
			db.Where("key = ?", "test.flag").Delete(&models.Setting{})
			db.Create(&models.Setting{Key: "test.flag", Value: tt.value})
			result := svc.getFeatureFlagValue("test.flag", false)
			assert.Equal(t, tt.expected, result, "value=%q", tt.value)
		})
	}
}

View File

@@ -159,7 +159,8 @@ A new scheduled workflow and helper script were added to safely prune old contai
- **Files added**:
- `.github/workflows/container-prune.yml` (weekly schedule, manual dispatch)
- `scripts/prune-container-images.sh` (dry-run by default; supports GHCR and Docker Hub)
- `scripts/prune-ghcr.sh` (GHCR cleanup)
- `scripts/prune-dockerhub.sh` (Docker Hub cleanup)
- **Behavior**:
- Default: **dry-run=true** (no destructive changes).

View File

@@ -0,0 +1,138 @@
# QA Report — PR #754: Enable and Test Gotify and Custom Webhook Notifications
**Branch:** `feature/beta-release`
**Date:** 2026-02-25
**Auditor:** QA Security Agent
---
## Summary
| # | Check | Result | Details |
|---|-------|--------|---------|
| 1 | Local Patch Coverage Preflight | **WARN** | 79.5% overall (threshold 90%), 78.3% backend (threshold 85%) — advisory only |
| 2 | Backend Coverage ≥ 85% | **PASS** | 87.0% statement / 87.3% line (threshold 87%) |
| 3 | Frontend Coverage ≥ 85% | **PASS** | 88.21% statement / 88.97% line (threshold 85%) |
| 4 | TypeScript Type Check | **PASS** | Zero errors |
| 5 | Pre-commit Hooks | **PASS** | All 15 hooks passed |
| 6a | Trivy Filesystem Scan | **PASS** | 0 CRITICAL/HIGH in project code (findings only in Go module cache) |
| 6b | Docker Image Scan | **WARN** | 1 HIGH in Caddy transitive dep (CVE-2026-25793, nebula v1.9.7 → fixed 1.10.3) |
| 6c | CodeQL (Go + JavaScript) | **PASS** | 0 errors, 0 warnings across both languages |
| 7 | GORM Security Scan | **PASS** | 0 CRITICAL/HIGH (2 INFO suggestions: missing indexes on UserPermittedHost) |
| 8 | Go Vulnerability Check | **PASS** | No vulnerabilities found |
---
## Detailed Findings
### 1. Local Patch Coverage Preflight
- **Status:** WARN (advisory, not blocking per policy)
- Overall patch coverage: **79.5%** (threshold: 90%)
- Backend patch coverage: **78.3%** (threshold: 85%)
- Artifacts generated but `test-results/` directory was not persisted at repo root
- **Action:** Consider adding targeted tests for uncovered changed lines in notification service/handler
### 2. Backend Unit Test Coverage
- **Status:** PASS
- Statement coverage: **87.0%**
- Line coverage: **87.3%**
- All tests passed (0 failures)
### 3. Frontend Unit Test Coverage
- **Status:** PASS
- Statement coverage: **88.21%**
- Branch coverage: **80.58%**
- Function coverage: **85.20%**
- Line coverage: **88.97%**
- All tests passed (0 failures)
- Coverage files generated: `lcov.info`, `coverage-summary.json`, `coverage-final.json`
### 4. TypeScript Type Check
- **Status:** PASS
- `tsc --noEmit` completed with zero errors
### 5. Pre-commit Hooks
- **Status:** PASS
- All hooks passed:
- fix end of files
- trim trailing whitespace
- check yaml
- check for added large files
- shellcheck
- actionlint (GitHub Actions)
- dockerfile validation
- Go Vet
- golangci-lint (Fast Linters - BLOCKING)
- Check .version matches latest Git tag
- Prevent large files not tracked by LFS
- Prevent committing CodeQL DB artifacts
- Prevent committing data/backups files
- Frontend TypeScript Check
- Frontend Lint (Fix)
### 6a. Trivy Filesystem Scan
- **Status:** PASS
- Scanned `backend/` and `frontend/` directories: **0 CRITICAL, 0 HIGH**
- Full workspace scan found 3 CRITICAL + 14 HIGH across Go module cache dependencies (not project code)
- Trivy misconfig scanner crashed (known Trivy bug in ansible parser — nil pointer dereference in `discovery.go:82`). Vuln scanner completed successfully.
### 6b. Docker Image Scan
- **Status:** WARN (not blocking — upstream dependency)
- Image: `charon:local`
- **1 HIGH finding:**
- **CVE-2026-25793** — `github.com/slackhq/nebula` v1.9.7 (in `usr/bin/caddy` binary)
- Description: Blocklist evasion via ECDSA Signature Malleability
- Fixed in: v1.10.3
- Impact: Caddy transitive dependency, not Charon code
- **Remediation:** Upgrade Caddy to a version that pulls nebula ≥ 1.10.3 when available
### 6c. CodeQL Scans
- **Status:** PASS
- **Go:** 0 errors, 0 warnings
- **JavaScript:** 0 errors, 0 warnings (347/347 files scanned)
- SARIF outputs: `codeql-results-go.sarif`, `codeql-results-javascript.sarif`
### 7. GORM Security Scan
- **Status:** PASS
- Scanned: 41 Go files (2207 lines), 2 seconds
- **0 CRITICAL, 0 HIGH, 0 MEDIUM**
- 2 INFO suggestions:
- `backend/internal/models/user.go:109``UserPermittedHost.UserID` missing index
- `backend/internal/models/user.go:110``UserPermittedHost.ProxyHostID` missing index
### 8. Go Vulnerability Check
- **Status:** PASS
- `govulncheck ./...` — No vulnerabilities found
---
## Gotify Token Security Review
- No Gotify tokens found in logs, test artifacts, or API examples
- No tokenized URL query parameters exposed in diagnostics or output
- Token handling follows `json:"-"` pattern (verified via `HasToken` computed field approach in PR)
---
## Recommendation
### GO / NO-GO: **GO** (conditional)
All blocking gates pass. Two advisory warnings exist:
1. **Patch coverage** (79.5% overall, 78.3% backend) is below advisory thresholds but not a blocking gate per current policy
2. **Docker image** has 1 HIGH CVE in Caddy's transitive dependency (nebula) — upstream fix required, not actionable in Charon code
**Conditions:**
- Track nebula CVE-2026-25793 remediation as a follow-up issue when a Caddy update incorporates the fix
- Consider adding targeted tests for uncovered changed lines in notification service/handler to improve patch coverage

View File

@@ -41,7 +41,7 @@
"@testing-library/jest-dom": "^6.9.1",
"@testing-library/react": "^16.3.2",
"@testing-library/user-event": "^14.6.1",
"@types/node": "^25.3.0",
"@types/node": "^25.3.1",
"@types/react": "^19.2.14",
"@types/react-dom": "^19.2.3",
"@typescript-eslint/eslint-plugin": "^8.56.1",
@@ -50,7 +50,7 @@
"@vitest/coverage-istanbul": "^4.0.18",
"@vitest/coverage-v8": "^4.0.18",
"@vitest/ui": "^4.0.18",
"autoprefixer": "^10.4.24",
"autoprefixer": "^10.4.27",
"eslint": "^9.39.3 <10.0.0",
"eslint-plugin-react-hooks": "^7.0.1",
"eslint-plugin-react-refresh": "^0.5.2",
@@ -3565,9 +3565,9 @@
"license": "MIT"
},
"node_modules/@types/node": {
"version": "25.3.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-25.3.0.tgz",
"integrity": "sha512-4K3bqJpXpqfg2XKGK9bpDTc6xO/xoUP/RBWS7AtRMug6zZFaRekiLzjVtAoZMquxoAbzBvy5nxQ7veS5eYzf8A==",
"version": "25.3.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-25.3.1.tgz",
"integrity": "sha512-hj9YIJimBCipHVfHKRMnvmHg+wfhKc0o4mTtXh9pKBjC8TLJzz0nzGmLi5UJsYAUgSvXFHgb0V2oY10DUFtImw==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -4186,9 +4186,9 @@
"license": "MIT"
},
"node_modules/autoprefixer": {
"version": "10.4.24",
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.24.tgz",
"integrity": "sha512-uHZg7N9ULTVbutaIsDRoUkoS8/h3bdsmVJYZ5l3wv8Cp/6UIIoRDm90hZ+BwxUj/hGBEzLxdHNSKuFpn8WOyZw==",
"version": "10.4.27",
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.27.tgz",
"integrity": "sha512-NP9APE+tO+LuJGn7/9+cohklunJsXWiaWEfV3si4Gi/XHDwVNgkwr1J3RQYFIvPy76GmJ9/bW8vyoU1LcxwKHA==",
"dev": true,
"funding": [
{
@@ -4207,7 +4207,7 @@
"license": "MIT",
"dependencies": {
"browserslist": "^4.28.1",
"caniuse-lite": "^1.0.30001766",
"caniuse-lite": "^1.0.30001774",
"fraction.js": "^5.3.4",
"picocolors": "^1.1.1",
"postcss-value-parser": "^4.2.0"

View File

@@ -60,7 +60,7 @@
"@testing-library/jest-dom": "^6.9.1",
"@testing-library/react": "^16.3.2",
"@testing-library/user-event": "^14.6.1",
"@types/node": "^25.3.0",
"@types/node": "^25.3.1",
"@types/react": "^19.2.14",
"@types/react-dom": "^19.2.3",
"@typescript-eslint/eslint-plugin": "^8.56.1",
@@ -69,7 +69,7 @@
"@vitest/coverage-istanbul": "^4.0.18",
"@vitest/coverage-v8": "^4.0.18",
"@vitest/ui": "^4.0.18",
"autoprefixer": "^10.4.24",
"autoprefixer": "^10.4.27",
"eslint": "^9.39.3 <10.0.0",
"eslint-plugin-react-hooks": "^7.0.1",
"eslint-plugin-react-refresh": "^0.5.2",

8
package-lock.json generated
View File

@@ -14,7 +14,7 @@
"devDependencies": {
"@bgotink/playwright-coverage": "^0.3.2",
"@playwright/test": "^1.58.2",
"@types/node": "^25.3.0",
"@types/node": "^25.3.1",
"dotenv": "^17.3.1",
"markdownlint-cli2": "^0.21.0",
"prettier": "^3.8.1",
@@ -937,9 +937,9 @@
"license": "MIT"
},
"node_modules/@types/node": {
"version": "25.3.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-25.3.0.tgz",
"integrity": "sha512-4K3bqJpXpqfg2XKGK9bpDTc6xO/xoUP/RBWS7AtRMug6zZFaRekiLzjVtAoZMquxoAbzBvy5nxQ7veS5eYzf8A==",
"version": "25.3.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-25.3.1.tgz",
"integrity": "sha512-hj9YIJimBCipHVfHKRMnvmHg+wfhKc0o4mTtXh9pKBjC8TLJzz0nzGmLi5UJsYAUgSvXFHgb0V2oY10DUFtImw==",
"devOptional": true,
"license": "MIT",
"dependencies": {

View File

@@ -19,7 +19,7 @@
"devDependencies": {
"@bgotink/playwright-coverage": "^0.3.2",
"@playwright/test": "^1.58.2",
"@types/node": "^25.3.0",
"@types/node": "^25.3.1",
"dotenv": "^17.3.1",
"markdownlint-cli2": "^0.21.0",
"prettier": "^3.8.1",

174
scripts/prune-dockerhub.sh Executable file
View File

@@ -0,0 +1,174 @@
#!/usr/bin/env bash
set -euo pipefail
# prune-dockerhub.sh
# Deletes old container images from Docker Hub according to retention and protection rules.
# Configuration — every knob is overridable via environment variables.
OWNER=${OWNER:-${GITHUB_REPOSITORY_OWNER:-Wikid82}}
IMAGE_NAME=${IMAGE_NAME:-charon}
KEEP_DAYS=${KEEP_DAYS:-30}  # tags updated within this many days are kept
KEEP_LAST_N=${KEEP_LAST_N:-30}  # always keep the N most recently updated tags
DRY_RUN=${DRY_RUN:-false}
# JSON array of regexes; tags matching any of them are never deleted.
PROTECTED_REGEX=${PROTECTED_REGEX:-'["^v","^latest$","^main$","^develop$"]'}
DOCKERHUB_USERNAME=${DOCKERHUB_USERNAME:-}
DOCKERHUB_TOKEN=${DOCKERHUB_TOKEN:-}
LOG_PREFIX="[prune-dockerhub]"
# Epoch cutoff for KEEP_DAYS; the second date form is a fallback for date
# variants that reject the first phrasing.
cutoff_ts=$(date -d "$KEEP_DAYS days ago" +%s 2>/dev/null || date -d "-$KEEP_DAYS days" +%s)
# Normalize DRY_RUN to a literal true/false.
dry_run=false
case "${DRY_RUN,,}" in
true|1|yes|y|on) dry_run=true ;;
*) dry_run=false ;;
esac
# Running totals, updated by action_delete_dockerhub and reported at the end.
TOTAL_CANDIDATES=0
TOTAL_CANDIDATES_BYTES=0
TOTAL_DELETED=0
TOTAL_DELETED_BYTES=0
echo "$LOG_PREFIX starting with OWNER=$OWNER IMAGE_NAME=$IMAGE_NAME KEEP_DAYS=$KEEP_DAYS KEEP_LAST_N=$KEEP_LAST_N DRY_RUN=$dry_run"
echo "$LOG_PREFIX PROTECTED_REGEX=$PROTECTED_REGEX"
# require NAME: abort the whole script if a needed external command is missing.
require() {
command -v "$1" >/dev/null 2>&1 || { echo "$LOG_PREFIX missing required command: $1" >&2; exit 1; }
}
require curl
require jq
# is_protected_tag TAG: return 0 (protected) when TAG matches any regex in the
# $PROTECTED_REGEX JSON array, 1 otherwise. Empty array entries are skipped.
is_protected_tag() {
  local candidate="$1"
  local pattern
  while IFS= read -r pattern; do
    [[ -z "$pattern" ]] && continue
    [[ "$candidate" =~ $pattern ]] && return 0
  done < <(jq -r '.[]' <<<"$PROTECTED_REGEX")
  return 1
}
# human_readable BYTES: format a byte count as "N B|KiB|MiB|GiB|TiB" using
# integer (truncating) division. Empty or non-positive input prints "0 B".
human_readable() {
  local bytes=${1:-0}
  if [[ -z "$bytes" ]] || (( bytes <= 0 )); then
    echo "0 B"
    return
  fi
  local unit=(B KiB MiB GiB TiB)
  local i=0
  local value=$bytes
  # >= so exact multiples roll over: 1024 bytes -> "1 KiB", not "1024 B".
  while (( value >= 1024 )) && (( i < 4 )); do
    value=$((value / 1024))
    i=$((i + 1))
  done
  printf "%s %s" "${value}" "${unit[$i]}"
}
# action_delete_dockerhub: authenticate against the Docker Hub v2 API, page
# through every tag of $DOCKERHUB_USERNAME/$IMAGE_NAME, and delete tags that
# fail all keep rules (newest KEEP_LAST_N, protected regex, newer than the
# retention cutoff). Honors $dry_run and updates the TOTAL_* globals.
action_delete_dockerhub() {
  echo "$LOG_PREFIX -> Docker Hub cleanup for ${DOCKERHUB_USERNAME:-<unset>}/$IMAGE_NAME (dry-run=$dry_run)"
  # Missing credentials: skip quietly rather than fail the whole run.
  if [[ -z "${DOCKERHUB_USERNAME:-}" || -z "${DOCKERHUB_TOKEN:-}" ]]; then
    echo "$LOG_PREFIX Docker Hub credentials not set; skipping Docker Hub cleanup"
    return
  fi
  local hub_token page page_size all resp results_count total
  local keep_tags tag tag_name last_updated last_ts protected bytes
  # Exchange username/token for a JWT used on subsequent API calls.
  hub_token=$(printf '{"username":"%s","password":"%s"}' "$DOCKERHUB_USERNAME" "$DOCKERHUB_TOKEN" | \
    curl -sS -X POST -H "Content-Type: application/json" --data-binary @- \
    https://hub.docker.com/v2/users/login/ | jq -r '.token')
  if [[ -z "$hub_token" || "$hub_token" == "null" ]]; then
    echo "$LOG_PREFIX Failed to obtain Docker Hub token; aborting Docker Hub cleanup"
    return
  fi
  # Page through the tags endpoint, accumulating all results into one JSON array.
  page=1
  page_size=100
  all='[]'
  while :; do
    resp=$(curl -sS -H "Authorization: JWT $hub_token" \
      "https://hub.docker.com/v2/repositories/${DOCKERHUB_USERNAME}/${IMAGE_NAME}/tags?page_size=$page_size&page=$page")
    results_count=$(echo "$resp" | jq -r '.results | length')
    # An empty page (or a response with no .results) ends pagination.
    if [[ -z "$results_count" || "$results_count" == "0" ]]; then
      break
    fi
    all=$(jq -s '.[0] + .[1].results' <(echo "$all") <(echo "$resp"))
    ((page++))
  done
  total=$(echo "$all" | jq -r 'length')
  if [[ -z "$total" || "$total" == "0" ]]; then
    echo "$LOG_PREFIX Docker Hub: no tags found"
    return
  fi
  echo "$LOG_PREFIX Docker Hub: fetched $total tags total"
  # Space-separated names of the KEEP_LAST_N most recently updated tags.
  keep_tags=$(echo "$all" | jq -r --argjson n "${KEEP_LAST_N:-0}" '
    (sort_by(.last_updated) | reverse) as $s
    | ($s[0:$n] | map(.name)) | join(" ")
  ')
  # Walk tags oldest-first so deletions are predictable.
  while IFS= read -r tag; do
    tag_name=$(echo "$tag" | jq -r '.name')
    last_updated=$(echo "$tag" | jq -r '.last_updated')
    # NOTE(review): on a parse failure last_ts falls back to 0, making the tag
    # look infinitely old and thus deletable — confirm date formats first.
    last_ts=$(date -d "$last_updated" +%s 2>/dev/null || echo 0)
    # Rule 1: the globally newest KEEP_LAST_N tags are always kept.
    if [[ -n "$keep_tags" && " $keep_tags " == *" $tag_name "* ]]; then
      echo "$LOG_PREFIX keep (last_n): tag=$tag_name last_updated=$last_updated"
      continue
    fi
    # Rule 2: tags matching the protected regexes are never deleted.
    protected=false
    if is_protected_tag "$tag_name"; then
      protected=true
    fi
    if $protected; then
      echo "$LOG_PREFIX keep (protected): tag=$tag_name last_updated=$last_updated"
      continue
    fi
    # Rule 3: tags updated after the retention cutoff are kept.
    if (( last_ts >= cutoff_ts )); then
      echo "$LOG_PREFIX keep (recent): tag=$tag_name last_updated=$last_updated"
      continue
    fi
    echo "$LOG_PREFIX candidate: tag=$tag_name last_updated=$last_updated"
    # Best-effort size estimate: sum of per-architecture image sizes.
    bytes=$(echo "$tag" | jq -r '.images | map(.size) | add // 0' 2>/dev/null || echo 0)
    TOTAL_CANDIDATES=$((TOTAL_CANDIDATES + 1))
    TOTAL_CANDIDATES_BYTES=$((TOTAL_CANDIDATES_BYTES + bytes))
    if $dry_run; then
      echo "$LOG_PREFIX DRY RUN: would delete Docker Hub tag=$tag_name (approx ${bytes} bytes)"
    else
      echo "$LOG_PREFIX deleting Docker Hub tag=$tag_name (approx ${bytes} bytes)"
      # Delete failures are tolerated (|| true) so one bad tag cannot abort the run.
      curl -sS -X DELETE -H "Authorization: JWT $hub_token" \
        "https://hub.docker.com/v2/repositories/${DOCKERHUB_USERNAME}/${IMAGE_NAME}/tags/${tag_name}/" >/dev/null || true
      TOTAL_DELETED=$((TOTAL_DELETED + 1))
      TOTAL_DELETED_BYTES=$((TOTAL_DELETED_BYTES + bytes))
    fi
  done < <(echo "$all" | jq -c 'sort_by(.last_updated) | .[]')
}
# Main entry point: run the cleanup, then emit summaries.
action_delete_dockerhub
# Machine-readable totals line followed by a human-readable variant.
echo "$LOG_PREFIX SUMMARY: total_candidates=${TOTAL_CANDIDATES} total_candidates_bytes=${TOTAL_CANDIDATES_BYTES} total_deleted=${TOTAL_DELETED} total_deleted_bytes=${TOTAL_DELETED_BYTES}"
echo "$LOG_PREFIX SUMMARY_HUMAN: candidates=${TOTAL_CANDIDATES} candidates_size=$(human_readable "${TOTAL_CANDIDATES_BYTES}") deleted=${TOTAL_DELETED} deleted_size=$(human_readable "${TOTAL_DELETED_BYTES}")"
# Persist totals as KEY=VALUE pairs for downstream workflow steps.
: > prune-summary-dockerhub.env
echo "TOTAL_CANDIDATES=${TOTAL_CANDIDATES}" >> prune-summary-dockerhub.env
echo "TOTAL_CANDIDATES_BYTES=${TOTAL_CANDIDATES_BYTES}" >> prune-summary-dockerhub.env
echo "TOTAL_DELETED=${TOTAL_DELETED}" >> prune-summary-dockerhub.env
echo "TOTAL_DELETED_BYTES=${TOTAL_DELETED_BYTES}" >> prune-summary-dockerhub.env
echo "$LOG_PREFIX done"

View File

@@ -1,10 +1,9 @@
#!/usr/bin/env bash
set -euo pipefail
echo "[prune] SCRIPT VERSION: GH_API_VARIANT"
# prune-container-images.sh
# Deletes old images from GHCR and Docker Hub according to retention and protection rules.
# prune-ghcr.sh
# Deletes old container images from GitHub Container Registry (GHCR)
# according to retention and protection rules.
REGISTRIES=${REGISTRIES:-ghcr}
OWNER=${OWNER:-${GITHUB_REPOSITORY_OWNER:-Wikid82}}
IMAGE_NAME=${IMAGE_NAME:-charon}
@@ -14,33 +13,29 @@ KEEP_LAST_N=${KEEP_LAST_N:-30}
DRY_RUN=${DRY_RUN:-false}
PROTECTED_REGEX=${PROTECTED_REGEX:-'["^v","^latest$","^main$","^develop$"]'}
# Extra knobs (optional)
PRUNE_UNTAGGED=${PRUNE_UNTAGGED:-true}
PRUNE_SBOM_TAGS=${PRUNE_SBOM_TAGS:-true}
LOG_PREFIX="[prune]"
LOG_PREFIX="[prune-ghcr]"
now_ts=$(date +%s)
cutoff_ts=$(date -d "$KEEP_DAYS days ago" +%s 2>/dev/null || date -d "-$KEEP_DAYS days" +%s)
# Normalize DRY_RUN to true/false reliably
dry_run=false
case "${DRY_RUN,,}" in
true|1|yes|y|on) dry_run=true ;;
*) dry_run=false ;;
esac
# Totals
TOTAL_CANDIDATES=0
TOTAL_CANDIDATES_BYTES=0
TOTAL_DELETED=0
TOTAL_DELETED_BYTES=0
echo "$LOG_PREFIX starting with REGISTRIES=$REGISTRIES OWNER=$OWNER IMAGE_NAME=$IMAGE_NAME KEEP_DAYS=$KEEP_DAYS KEEP_LAST_N=$KEEP_LAST_N DRY_RUN=$dry_run"
echo "$LOG_PREFIX starting with OWNER=$OWNER IMAGE_NAME=$IMAGE_NAME KEEP_DAYS=$KEEP_DAYS KEEP_LAST_N=$KEEP_LAST_N DRY_RUN=$dry_run"
echo "$LOG_PREFIX PROTECTED_REGEX=$PROTECTED_REGEX PRUNE_UNTAGGED=$PRUNE_UNTAGGED PRUNE_SBOM_TAGS=$PRUNE_SBOM_TAGS"
require() {
command -v "$1" >/dev/null 2>&1 || { echo "$LOG_PREFIX missing required command: $1"; exit 1; }
command -v "$1" >/dev/null 2>&1 || { echo "$LOG_PREFIX missing required command: $1" >&2; exit 1; }
}
require curl
require jq
@@ -57,8 +52,6 @@ is_protected_tag() {
return 1
}
# Some repos generate tons of tags like sha-xxxx, pr-123-xxxx, *.sbom.
# We treat SBOM-only tags as deletable (optional).
tag_is_sbom() {
local tag="$1"
[[ "$tag" == *.sbom ]]
@@ -80,9 +73,9 @@ human_readable() {
printf "%s %s" "${value}" "${unit[$i]}"
}
# --- GHCR ---
# All echo/log statements go to stderr so stdout remains pure JSON
ghcr_list_all_versions_json() {
local namespace_type="$1" # orgs or users
local namespace_type="$1"
local page=1
local per_page=100
local all='[]'
@@ -90,7 +83,6 @@ ghcr_list_all_versions_json() {
while :; do
local url="https://api.github.com/${namespace_type}/${OWNER}/packages/container/${IMAGE_NAME}/versions?per_page=$per_page&page=$page"
# Use GitHubs recommended headers
local resp
resp=$(curl -sS \
-H "Authorization: Bearer $GITHUB_TOKEN" \
@@ -98,29 +90,26 @@ ghcr_list_all_versions_json() {
-H "X-GitHub-Api-Version: 2022-11-28" \
"$url" || true)
# ✅ NEW: ensure we got JSON
if ! echo "$resp" | jq -e . >/dev/null 2>&1; then
echo "$LOG_PREFIX GHCR returned non-JSON for url=$url"
echo "$LOG_PREFIX GHCR response (first 200 chars): $(echo "$resp" | head -c 200 | tr '\n' ' ')"
echo "$LOG_PREFIX GHCR returned non-JSON for url=$url" >&2
echo "$LOG_PREFIX GHCR response (first 200 chars): $(echo "$resp" | head -c 200 | tr '\n' ' ')" >&2
echo "[]"
return 0
fi
# Handle JSON error messages
if echo "$resp" | jq -e 'has("message")' >/dev/null 2>&1; then
local msg
msg=$(echo "$resp" | jq -r '.message')
if [[ "$msg" == "Not Found" ]]; then
echo "$LOG_PREFIX GHCR ${namespace_type} endpoint returned Not Found"
echo "$LOG_PREFIX GHCR ${namespace_type} endpoint returned Not Found" >&2
echo "[]"
return 0
fi
echo "$LOG_PREFIX GHCR API error: $msg"
# also print documentation_url if present (helpful)
echo "$LOG_PREFIX GHCR API error: $msg" >&2
doc=$(echo "$resp" | jq -r '.documentation_url // empty')
[[ -n "$doc" ]] && echo "$LOG_PREFIX GHCR docs: $doc"
[[ -n "$doc" ]] && echo "$LOG_PREFIX GHCR docs: $doc" >&2
echo "[]"
return 0
fi
@@ -146,7 +135,6 @@ action_delete_ghcr() {
return
fi
# Try orgs first, then users
local all
local namespace_type="orgs"
all=$(ghcr_list_all_versions_json "$namespace_type")
@@ -164,12 +152,6 @@ action_delete_ghcr() {
echo "$LOG_PREFIX GHCR: fetched $total versions total"
# Normalize a working list:
# - id
# - created_at
# - created_ts
# - tags array
# - tags_csv
local normalized
normalized=$(echo "$all" | jq -c '
map({
@@ -181,8 +163,6 @@ action_delete_ghcr() {
})
')
# Compute the globally newest KEEP_LAST_N ids to always keep
# (If KEEP_LAST_N is 0 or empty, keep none by this rule)
local keep_ids
keep_ids=$(echo "$normalized" | jq -r --argjson n "${KEEP_LAST_N:-0}" '
(sort_by(.created_ts) | reverse) as $s
@@ -193,21 +173,20 @@ action_delete_ghcr() {
echo "$LOG_PREFIX GHCR: keeping newest KEEP_LAST_N ids: $KEEP_LAST_N"
fi
# Iterate versions sorted oldest->newest so deletions are predictable
local ver protected all_sbom candidate_bytes
while IFS= read -r ver; do
local id created created_ts tags_csv
all_sbom=false
id=$(echo "$ver" | jq -r '.id')
created=$(echo "$ver" | jq -r '.created_at')
created_ts=$(echo "$ver" | jq -r '.created_ts')
tags_csv=$(echo "$ver" | jq -r '.tags_csv')
# KEEP_LAST_N rule (global)
if [[ -n "$keep_ids" && " $keep_ids " == *" $id "* ]]; then
echo "$LOG_PREFIX keep (last_n): id=$id tags=$tags_csv created=$created"
continue
fi
# Protected tags rule
protected=false
if [[ -n "$tags_csv" ]]; then
while IFS= read -r t; do
@@ -223,8 +202,6 @@ action_delete_ghcr() {
continue
fi
# Optional: treat SBOM-only versions/tags as deletable
# If every tag is *.sbom and PRUNE_SBOM_TAGS=true, we allow pruning regardless of “tag protected” rules.
if [[ "${PRUNE_SBOM_TAGS,,}" == "true" && -n "$tags_csv" ]]; then
all_sbom=true
while IFS= read -r t; do
@@ -234,46 +211,40 @@ action_delete_ghcr() {
break
fi
done < <(echo "$tags_csv" | tr ',' '\n')
if $all_sbom; then
# allow fallthrough; do not "keep" just because tags are recent
:
fi
fi
# Age rule
if (( created_ts >= cutoff_ts )); then
echo "$LOG_PREFIX keep (recent): id=$id tags=$tags_csv created=$created"
continue
fi
# Optional: prune untagged versions (common GHCR bloat)
if [[ "${PRUNE_UNTAGGED,,}" == "true" ]]; then
# tags_csv can be empty for untagged
if [[ -z "$tags_csv" ]]; then
echo "$LOG_PREFIX candidate (untagged): id=$id tags=<none> created=$created"
else
echo "$LOG_PREFIX candidate: id=$id tags=$tags_csv created=$created"
fi
# If all tags are SBOM tags and PRUNE_SBOM_TAGS is enabled, skip the age check
if [[ "${all_sbom:-false}" == "true" ]]; then
echo "$LOG_PREFIX candidate (sbom-only): id=$id tags=$tags_csv created=$created"
else
# If not pruning untagged, skip them
if [[ -z "$tags_csv" ]]; then
echo "$LOG_PREFIX keep (untagged disabled): id=$id created=$created"
if (( created_ts >= cutoff_ts )); then
echo "$LOG_PREFIX keep (recent): id=$id tags=$tags_csv created=$created"
continue
fi
echo "$LOG_PREFIX candidate: id=$id tags=$tags_csv created=$created"
if [[ "${PRUNE_UNTAGGED,,}" == "true" ]]; then
if [[ -z "$tags_csv" ]]; then
echo "$LOG_PREFIX candidate (untagged): id=$id tags=<none> created=$created"
else
echo "$LOG_PREFIX candidate: id=$id tags=$tags_csv created=$created"
fi
else
if [[ -z "$tags_csv" ]]; then
echo "$LOG_PREFIX keep (untagged disabled): id=$id created=$created"
continue
fi
echo "$LOG_PREFIX candidate: id=$id tags=$tags_csv created=$created"
fi
fi
# Candidate bookkeeping
TOTAL_CANDIDATES=$((TOTAL_CANDIDATES + 1))
# Best-effort size estimation: GHCR registry auth is messy; dont block prune on it.
candidate_bytes=0
if $dry_run; then
echo "$LOG_PREFIX DRY RUN: would delete GHCR version id=$id (approx ${candidate_bytes} bytes)"
else
echo "$LOG_PREFIX deleting GHCR version id=$id"
# Use GitHub API delete
curl -sS -X DELETE -H "Authorization: Bearer $GITHUB_TOKEN" \
"https://api.github.com/${namespace_type}/${OWNER}/packages/container/${IMAGE_NAME}/versions/$id" >/dev/null || true
TOTAL_DELETED=$((TOTAL_DELETED + 1))
@@ -282,116 +253,16 @@ action_delete_ghcr() {
done < <(echo "$normalized" | jq -c 'sort_by(.created_ts) | .[]')
}
# --- Docker Hub ---
# Prune old tags from Docker Hub for ${DOCKERHUB_USERNAME}/${IMAGE_NAME}.
#
# Retention rules (mirrors the GHCR path):
#   - the KEEP_LAST_N most recently updated tags are always kept
#   - tags matching the protected set (is_protected_tag) are always kept
#   - remaining tags older than cutoff_ts (computed by the caller from
#     KEEP_DAYS) become deletion candidates
# Honors $dry_run and updates the TOTAL_* counters. Returns early (without
# failing the run) when Docker Hub credentials are not configured.
#
# NOTE(review): removed two stray lines ("# Main" / "action_delete_ghcr")
# that a bad merge spliced into this function body — they re-ran the entire
# GHCR prune from inside the Docker Hub cleanup on every invocation.
action_delete_dockerhub() {
echo "$LOG_PREFIX -> Docker Hub cleanup for ${DOCKERHUB_USERNAME:-<unset>}/$IMAGE_NAME (dry-run=$dry_run)"
if [[ -z "${DOCKERHUB_USERNAME:-}" || -z "${DOCKERHUB_TOKEN:-}" ]]; then
echo "$LOG_PREFIX Docker Hub credentials not set; skipping Docker Hub cleanup"
return
fi
# Exchange username/token for a short-lived JWT accepted by the Hub v2 API.
local hub_token
hub_token=$(curl -sS -X POST -H "Content-Type: application/json" \
-d "{\"username\":\"${DOCKERHUB_USERNAME}\",\"password\":\"${DOCKERHUB_TOKEN}\"}" \
https://hub.docker.com/v2/users/login/ | jq -r '.token')
if [[ -z "$hub_token" || "$hub_token" == "null" ]]; then
echo "$LOG_PREFIX Failed to obtain Docker Hub token; aborting Docker Hub cleanup"
return
fi
# Fetch all pages first so KEEP_LAST_N can be applied globally.
local page=1 page_size=100 all='[]' resp results_count
while :; do
resp=$(curl -sS -H "Authorization: JWT $hub_token" \
"https://hub.docker.com/v2/repositories/${DOCKERHUB_USERNAME}/${IMAGE_NAME}/tags?page_size=$page_size&page=$page")
results_count=$(echo "$resp" | jq -r '.results | length')
if [[ -z "$results_count" || "$results_count" == "0" ]]; then
break
fi
all=$(jq -s '.[0] + .[1].results' <(echo "$all") <(echo "$resp"))
((page++))
done
local total
total=$(echo "$all" | jq -r 'length')
if [[ -z "$total" || "$total" == "0" ]]; then
echo "$LOG_PREFIX Docker Hub: no tags found"
return
fi
echo "$LOG_PREFIX Docker Hub: fetched $total tags total"
# Space-separated names of the KEEP_LAST_N most recently updated tags;
# these are retained regardless of age or protection.
local keep_tags
keep_tags=$(echo "$all" | jq -r --argjson n "${KEEP_LAST_N:-0}" '
(sort_by(.last_updated) | reverse) as $s
| ($s[0:$n] | map(.name)) | join(" ")
')
# Walk tags oldest-first so deletions are predictable.
local tag tag_name last_updated last_ts protected bytes
while IFS= read -r tag; do
tag_name=$(echo "$tag" | jq -r '.name')
last_updated=$(echo "$tag" | jq -r '.last_updated')
# BUGFIX: was `|| 0`, which tried to execute the command `0` and left
# last_ts empty on a parse failure; `echo 0` gives an epoch fallback so
# tags with unparseable timestamps become age-rule candidates instead of
# breaking the arithmetic comparison below.
last_ts=$(date -d "$last_updated" +%s 2>/dev/null || echo 0)
if [[ -n "$keep_tags" && " $keep_tags " == *" $tag_name "* ]]; then
echo "$LOG_PREFIX keep (last_n): tag=$tag_name last_updated=$last_updated"
continue
fi
protected=false
if is_protected_tag "$tag_name"; then
protected=true
fi
if $protected; then
echo "$LOG_PREFIX keep (protected): tag=$tag_name last_updated=$last_updated"
continue
fi
if (( last_ts >= cutoff_ts )); then
echo "$LOG_PREFIX keep (recent): tag=$tag_name last_updated=$last_updated"
continue
fi
echo "$LOG_PREFIX candidate: tag=$tag_name last_updated=$last_updated"
# Best-effort size estimate: sum of per-architecture image sizes.
bytes=$(echo "$tag" | jq -r '.images | map(.size) | add // 0' 2>/dev/null || echo 0)
TOTAL_CANDIDATES=$((TOTAL_CANDIDATES + 1))
TOTAL_CANDIDATES_BYTES=$((TOTAL_CANDIDATES_BYTES + bytes))
if $dry_run; then
echo "$LOG_PREFIX DRY RUN: would delete Docker Hub tag=$tag_name (approx ${bytes} bytes)"
else
echo "$LOG_PREFIX deleting Docker Hub tag=$tag_name (approx ${bytes} bytes)"
curl -sS -X DELETE -H "Authorization: JWT $hub_token" \
"https://hub.docker.com/v2/repositories/${DOCKERHUB_USERNAME}/${IMAGE_NAME}/tags/${tag_name}/" >/dev/null || true
TOTAL_DELETED=$((TOTAL_DELETED + 1))
TOTAL_DELETED_BYTES=$((TOTAL_DELETED_BYTES + bytes))
fi
done < <(echo "$all" | jq -c 'sort_by(.last_updated) | .[]')
}
# Main: iterate requested registries
# $REGISTRIES is a comma-separated list (e.g. "ghcr,dockerhub"); unknown
# entries are logged and skipped. If REGISTRIES is empty/unset, the array is
# empty and nothing is pruned.
IFS=',' read -ra regs <<< "$REGISTRIES"
for r in "${regs[@]}"; do
case "$r" in
ghcr) action_delete_ghcr ;;
dockerhub) action_delete_dockerhub ;;
*) echo "$LOG_PREFIX unknown registry: $r" ;;
esac
done
# Summary
# Human-readable and machine-readable totals accumulated by the actions above.
echo "$LOG_PREFIX SUMMARY: total_candidates=${TOTAL_CANDIDATES} total_candidates_bytes=${TOTAL_CANDIDATES_BYTES} total_deleted=${TOTAL_DELETED} total_deleted_bytes=${TOTAL_DELETED_BYTES}"
echo "$LOG_PREFIX SUMMARY_HUMAN: candidates=${TOTAL_CANDIDATES} candidates_size=$(human_readable "${TOTAL_CANDIDATES_BYTES}") deleted=${TOTAL_DELETED} deleted_size=$(human_readable "${TOTAL_DELETED_BYTES}")"
# Export summary for workflow parsing
# NOTE(review): the same four values are written to both prune-summary.env
# and prune-summary-ghcr.env below — this looks like merge residue keeping
# both the old and the new filename. Confirm which file the workflow actually
# parses and drop the other.
: > prune-summary.env
echo "TOTAL_CANDIDATES=${TOTAL_CANDIDATES}" >> prune-summary.env
echo "TOTAL_CANDIDATES_BYTES=${TOTAL_CANDIDATES_BYTES}" >> prune-summary.env
echo "TOTAL_DELETED=${TOTAL_DELETED}" >> prune-summary.env
echo "TOTAL_DELETED_BYTES=${TOTAL_DELETED_BYTES}" >> prune-summary.env
: > prune-summary-ghcr.env
echo "TOTAL_CANDIDATES=${TOTAL_CANDIDATES}" >> prune-summary-ghcr.env
echo "TOTAL_CANDIDATES_BYTES=${TOTAL_CANDIDATES_BYTES}" >> prune-summary-ghcr.env
echo "TOTAL_DELETED=${TOTAL_DELETED}" >> prune-summary-ghcr.env
echo "TOTAL_DELETED_BYTES=${TOTAL_DELETED_BYTES}" >> prune-summary-ghcr.env
echo "$LOG_PREFIX done"

View File

@@ -1,107 +0,0 @@
// Where the Shields.io endpoint JSON is written unless BADGE_OUTPUT overrides it.
const DEFAULT_OUTPUT = ".github/badges/ghcr-downloads.json";
const GH_API_BASE = "https://api.github.com";
// Package coordinates; owner falls back to the Actions-provided repository owner.
const owner = process.env.GHCR_OWNER || process.env.GITHUB_REPOSITORY_OWNER;
const packageName = process.env.GHCR_PACKAGE || "charon";
const outputPath = process.env.BADGE_OUTPUT || DEFAULT_OUTPUT;
// Token is optional: the script proceeds without an Authorization header
// when it is absent (empty string).
const token = process.env.GITHUB_TOKEN || "";
if (!owner) {
throw new Error("GHCR owner is required. Set GHCR_OWNER or GITHUB_REPOSITORY_OWNER.");
}
// Request headers shared by every API call below.
const headers = {
Accept: "application/vnd.github+json",
};
if (token) {
headers.Authorization = `Bearer ${token}`;
}
// Render a count with a metric-style suffix (k / M / B), keeping one decimal
// place and trimming a trailing ".0" (e.g. 1500 -> "1.5k", 3e9 -> "3B").
// Values below 1000 are returned verbatim as a decimal string.
const formatCount = (value) => {
const scales = [
[1_000_000_000, "B"],
[1_000_000, "M"],
[1_000, "k"],
];
for (const [divisor, suffix] of scales) {
if (value >= divisor) {
const scaled = (value / divisor).toFixed(1).replace(/\.0$/, "");
return `${scaled}${suffix}`;
}
}
return String(value);
};
// Extract the URL tagged rel="next" from an RFC 5988 Link header.
// Returns null when the header is missing/empty or has no "next" relation.
const getNextLink = (linkHeader) => {
if (!linkHeader) return null;
const found = /<([^>]+)>;\s*rel="next"/.exec(linkHeader);
return found ? found[1] : null;
};
// GET one API page with the shared module-level `headers`.
// Resolves to { data, next } where `next` is the rel="next" URL from the
// Link header, or null on the last page. On a non-2xx response, throws an
// Error carrying `status` (HTTP code) and `detail` (response body text).
const fetchPage = async (url) => {
const response = await fetch(url, { headers });
if (!response.ok) {
const body = await response.text();
const err = new Error(`Request failed: ${response.status} ${response.statusText}`);
err.status = response.status;
err.detail = body;
throw err;
}
const data = await response.json();
return { data, next: getNextLink(response.headers.get("link")) };
};
// Collect every package version by following Link-header pagination from
// `baseUrl` (100 items per page) until no rel="next" URL remains.
const fetchAllVersions = async (baseUrl) => {
const collected = [];
let pageUrl = `${baseUrl}?per_page=100`;
do {
const { data, next } = await fetchPage(pageUrl);
collected.push(...data);
pageUrl = next;
} while (pageUrl);
return collected;
};
// List package versions, trying the /users/ endpoint first and falling back
// to /orgs/ only when the user endpoint answers 404 (owner is an org).
// Any other error is re-thrown unchanged.
const fetchVersionsWithFallback = async () => {
const userEndpoint = `${GH_API_BASE}/users/${owner}/packages/container/${packageName}/versions`;
try {
return await fetchAllVersions(userEndpoint);
} catch (error) {
if (error.status !== 404) {
throw error;
}
}
const orgEndpoint = `${GH_API_BASE}/orgs/${owner}/packages/container/${packageName}/versions`;
return fetchAllVersions(orgEndpoint);
};
// Sum download_count across all package versions and write a Shields.io
// endpoint-badge JSON file to `outputPath`. Missing download_count fields
// count as 0. Logs the total and target path on success.
const run = async () => {
const versions = await fetchVersionsWithFallback();
let totalDownloads = 0;
for (const version of versions) {
totalDownloads += version.download_count || 0;
}
const badge = {
schemaVersion: 1,
label: "GHCR pulls",
message: formatCount(totalDownloads),
color: "blue",
cacheSeconds: 3600,
};
const fs = await import("node:fs/promises");
await fs.writeFile(outputPath, `${JSON.stringify(badge, null, 2)}\n`);
console.log(`GHCR downloads: ${totalDownloads} -> ${outputPath}`);
};
// Entry point: any failure is printed and converted to a non-zero exit code.
run().catch((error) => {
console.error(error);
process.exit(1);
});