chore: git cache cleanup

Author: GitHub Actions
Date: 2026-03-04 18:34:39 +00:00
Parent commit: 7a8b0343e4
This commit: c32cce2a88
2001 changed files with 0 additions and 683185 deletions

View File

@@ -1,34 +0,0 @@
# Adds newly opened (or reopened) issues and pull requests to a GitHub
# Project board, when the optional PROJECT_URL secret is configured.
name: Auto-add issues and PRs to Project

# FIX: the workflow name, the "Add issue or PR" step, and the concurrency
# group all reference pull requests, but the original only triggered on
# `issues` — add the missing pull_request trigger so PRs are handled too.
on:
  issues:
    types: [opened, reopened]
  pull_request:
    types: [opened, reopened]

concurrency:
  group: ${{ github.workflow }}-${{ github.event.issue.number || github.event.pull_request.number }}
  cancel-in-progress: false

jobs:
  add-to-project:
    runs-on: ubuntu-latest
    steps:
      # Probe for the optional PROJECT_URL secret. FIX: pass the secret via
      # `env` instead of interpolating ${{ secrets.PROJECT_URL }} directly
      # into the shell script, which would break (or allow injection) if the
      # value contained quotes or other shell metacharacters.
      - name: Determine project URL presence
        id: project_check
        env:
          PROJECT_URL: ${{ secrets.PROJECT_URL }}
        run: |
          if [ -n "$PROJECT_URL" ]; then
            echo "has_project=true" >> "$GITHUB_OUTPUT"
          else
            echo "has_project=false" >> "$GITHUB_OUTPUT"
          fi
      - name: Add issue or PR to project
        if: steps.project_check.outputs.has_project == 'true'
        uses: actions/add-to-project@244f685bbc3b7adfa8466e08b698b5577571133e # v1.0.2
        continue-on-error: true
        with:
          project-url: ${{ secrets.PROJECT_URL }}
          github-token: ${{ secrets.ADD_TO_PROJECT_PAT || secrets.GITHUB_TOKEN }}
      - name: Skip summary
        if: steps.project_check.outputs.has_project == 'false'
        run: echo "PROJECT_URL secret missing; skipping project assignment." >> "$GITHUB_STEP_SUMMARY"

View File

@@ -1,26 +0,0 @@
# Keeps a draft GitHub release updated via Release Drafter.
name: Auto Changelog (Release Drafter)

on:
  # Run after the Docker build workflow completes on main, and whenever a
  # release is published.
  workflow_run:
    workflows: ["Docker Build, Publish & Test"]
    types: [completed]
    branches: [ main ]
  release:
    types: [published]

concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.workflow_run.head_branch || github.head_ref || github.ref_name }}
  cancel-in-progress: true

jobs:
  update-draft:
    runs-on: ubuntu-latest
    # For workflow_run events, only proceed when the upstream build succeeded
    # on main; release events pass through unconditionally.
    if: ${{ github.event_name != 'workflow_run' || (github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.head_branch == 'main') }}
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          # Check out the SHA the upstream run built; fall back to the
          # current SHA for non-workflow_run events.
          ref: ${{ github.event.workflow_run.head_sha || github.sha }}
      - name: Draft Release
        uses: release-drafter/release-drafter@6db134d15f3909ccc9eefd369f02bd1e9cffdf97 # v6
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -1,78 +0,0 @@
# Applies priority / milestone / category labels to issues based on keywords
# found in the issue title and body.
name: Auto-label Issues

on:
  issues:
    types: [opened, edited]

concurrency:
  group: ${{ github.workflow }}-${{ github.event.issue.number }}
  cancel-in-progress: true

jobs:
  auto-label:
    runs-on: ubuntu-latest
    permissions:
      issues: write
    steps:
      - name: Auto-label based on title and body
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
        with:
          script: |
            const issue = context.payload.issue;
            const title = issue.title.toLowerCase();
            const body = issue.body ? issue.body.toLowerCase() : '';
            const labels = [];
            // FIX: short tokens ('ui', 'ssl', 'sso', 'waf', 'test', 'docs')
            // previously used bare substring matching, so e.g. "build"
            // matched 'ui' and "latest" matched 'test'. Use word-boundary
            // regexes for those short tokens; longer keywords keep the
            // original substring behavior.
            const inTitle = (re) => re.test(title);
            const inBody = (re) => re.test(body);
            // Priority detection (explicit [tag] markers or "priority: X")
            if (title.includes('[critical]') || body.includes('priority: critical')) {
              labels.push('critical');
            } else if (title.includes('[high]') || body.includes('priority: high')) {
              labels.push('high');
            } else if (title.includes('[medium]') || body.includes('priority: medium')) {
              labels.push('medium');
            } else if (title.includes('[low]') || body.includes('priority: low')) {
              labels.push('low');
            }
            // Milestone detection
            if (title.includes('[alpha]') || body.includes('milestone: alpha')) {
              labels.push('alpha');
            } else if (title.includes('[beta]') || body.includes('milestone: beta')) {
              labels.push('beta');
            } else if (title.includes('[post-beta]') || body.includes('milestone: post-beta')) {
              labels.push('post-beta');
            }
            // Category detection
            if (title.includes('architecture') || body.includes('architecture')) labels.push('architecture');
            if (title.includes('backend') || body.includes('backend')) labels.push('backend');
            if (title.includes('frontend') || body.includes('frontend')) labels.push('frontend');
            if (title.includes('security') || body.includes('security')) labels.push('security');
            if (inTitle(/\bssl\b/) || inTitle(/\btls\b/) || body.includes('certificate')) labels.push('ssl');
            if (inTitle(/\bsso\b/) || body.includes('single sign-on')) labels.push('sso');
            if (inTitle(/\bwaf\b/) || body.includes('web application firewall')) labels.push('waf');
            if (title.includes('crowdsec') || body.includes('crowdsec')) labels.push('crowdsec');
            if (title.includes('caddy') || body.includes('caddy')) labels.push('caddy');
            if (title.includes('database') || body.includes('database')) labels.push('database');
            if (inTitle(/\bui\b/) || title.includes('interface')) labels.push('ui');
            if (title.includes('docker') || title.includes('deployment')) labels.push('deployment');
            if (title.includes('monitoring') || title.includes('logging')) labels.push('monitoring');
            if (title.includes('documentation') || inTitle(/\bdocs\b/)) labels.push('documentation');
            if (inTitle(/\btests?\b/) || body.includes('testing')) labels.push('testing');
            if (title.includes('performance') || body.includes('optimization')) labels.push('performance');
            if (title.includes('plus') || body.includes('premium feature')) labels.push('plus');
            // Feature detection
            if (title.includes('feature') || body.includes('feature request')) labels.push('feature');
            // Only add labels if we detected any
            if (labels.length > 0) {
              await github.rest.issues.addLabels({
                owner: context.repo.owner,
                repo: context.repo.repo,
                issue_number: issue.number,
                labels: labels
              });
              console.log(`Added labels: ${labels.join(', ')}`);
            }

View File

@@ -1,107 +0,0 @@
name: Auto Versioning and Release

# SEMANTIC VERSIONING RULES:
# - PATCH (0.14.1 → 0.14.2): fix:, perf:, refactor:, docs:, style:, test:, build:, ci:
# - MINOR (0.14.1 → 0.15.0): feat:, feat(...):
# - MAJOR (0.14.1 → 1.0.0): MANUAL ONLY - Create git tag manually when ready for 1.0.0
#
# ⚠️ Major version bumps are intentionally disabled in automation to prevent accidents.

on:
  workflow_run:
    workflows: ["Docker Build, Publish & Test"]
    types: [completed]
    branches: [ main ]

concurrency:
  group: ${{ github.workflow }}-${{ github.event.workflow_run.head_branch || github.ref }}
  cancel-in-progress: false # Don't cancel in-progress releases

permissions:
  contents: write # Required for creating releases via API (removed unused pull-requests: write)

jobs:
  version:
    runs-on: ubuntu-latest
    # Only release when the upstream Docker build succeeded on main.
    if: ${{ github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.head_branch == 'main' }}
    steps:
      - name: Checkout
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          # Full history so the semantic-version action can walk past tags.
          fetch-depth: 0
          ref: ${{ github.event.workflow_run.head_sha || github.sha }}
      - name: Calculate Semantic Version
        id: semver
        uses: paulhatch/semantic-version@f29500c9d60a99ed5168e39ee367e0976884c46e # v6.0.1
        with:
          # The prefix to use to create tags
          tag_prefix: "v"
          # Regex pattern for major version bump - DISABLED (manual only)
          # Use a pattern that will never match to prevent automated major bumps
          major_pattern: "/__MANUAL_MAJOR_BUMP_ONLY__/"
          # Regex pattern for minor version bump (new features)
          # Matches: "feat:" prefix in commit messages (Conventional Commits)
          minor_pattern: "/^feat(\\(.+\\))?:/"
          # Patch bumps: All other commits (fix:, chore:, etc.) are treated as patches by default
          # Pattern to determine formatting
          version_format: "${major}.${minor}.${patch}"
          # NOTE(review): per the action's documentation, version_from_branch
          # controls deriving the version from the branch name — it is not an
          # "initial version when no tags exist" fallback as the original
          # comment claimed. Confirm this input/value is intended.
          version_from_branch: "0.0.0"
          # Match bump patterns against commit bodies as well as subject
          # lines. (The original comment claimed this searches history for
          # the last tag, which is not what this input does.)
          search_commit_body: true
          # NOTE(review): the original comment claimed this flag enables the
          # 'changed' output; the 'changed' output is produced regardless —
          # this flag only toggles prerelease-mode versioning, kept disabled.
          enable_prerelease_mode: false
      - name: Show version
        run: |
          echo "Next version: ${{ steps.semver.outputs.version }}"
          echo "Version changed: ${{ steps.semver.outputs.changed }}"
      - name: Determine tag name
        id: determine_tag
        run: |
          # Normalize the version: remove any leading 'v' so we don't end up with 'vvX.Y.Z'
          RAW="${{ steps.semver.outputs.version }}"
          VERSION_NO_V="${RAW#v}"
          TAG="v${VERSION_NO_V}"
          echo "Determined tag: $TAG"
          echo "tag=$TAG" >> "$GITHUB_OUTPUT"
      - name: Check for existing GitHub Release
        id: check_release
        run: |
          # Probe the Releases API: HTTP 200 means a release already exists
          # for this tag and creation must be skipped (idempotent reruns).
          TAG="${{ steps.determine_tag.outputs.tag }}"
          echo "Checking for release for tag: ${TAG}"
          STATUS=$(curl -s -o /dev/null -w "%{http_code}" \
            -H "Authorization: token ${GITHUB_TOKEN}" \
            -H "Accept: application/vnd.github+json" \
            "https://api.github.com/repos/${GITHUB_REPOSITORY}/releases/tags/${TAG}") || true
          if [ "${STATUS}" = "200" ]; then
            echo "exists=true" >> "$GITHUB_OUTPUT"
            echo " Release already exists for tag: ${TAG}"
          else
            echo "exists=false" >> "$GITHUB_OUTPUT"
            echo "✅ No existing release found for tag: ${TAG}"
          fi
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Create GitHub Release (creates tag via API)
        if: ${{ steps.semver.outputs.changed == 'true' && steps.check_release.outputs.exists == 'false' }}
        uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2
        with:
          tag_name: ${{ steps.determine_tag.outputs.tag }}
          name: Release ${{ steps.determine_tag.outputs.tag }}
          generate_release_notes: true
          make_latest: true
          draft: false
          prerelease: false
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Output release information
        if: ${{ steps.semver.outputs.changed == 'true' && steps.check_release.outputs.exists == 'false' }}
        run: |
          echo "✅ Successfully created release: ${{ steps.determine_tag.outputs.tag }}"
          echo "📦 Release URL: https://github.com/${{ github.repository }}/releases/tag/${{ steps.determine_tag.outputs.tag }}"

View File

@@ -1,79 +0,0 @@
# Runs Go benchmarks on PRs and pushes to main; stores results (with
# regression alerting) only for pushes to main.
name: Go Benchmark

on:
  pull_request:
  push:
    branches:
      - main
  workflow_dispatch:

# FIX: the original concurrency group and job `if:` referenced
# github.event.workflow_run.*, but this workflow has no workflow_run
# trigger, so those expressions were always empty. In particular the job
# condition required workflow_run.conclusion == 'success' for anything that
# was not a dispatch or PR, so pushes to main never ran the job — and the
# "Store Benchmark Result" step's workflow_run-based condition meant results
# were never stored.
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref }}
  cancel-in-progress: true

env:
  GO_VERSION: '1.26.0'
  GOTOOLCHAIN: auto

# Minimal permissions at workflow level; write permissions granted at job level for push only
permissions:
  contents: read

jobs:
  benchmark:
    name: Performance Regression Check
    runs-on: ubuntu-latest
    # Grant write permissions for storing benchmark results (only used on push via step condition)
    # Note: GitHub Actions doesn't support dynamic expressions in permissions block
    permissions:
      contents: write
      deployments: write
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      - name: Set up Go
        uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
        with:
          go-version: ${{ env.GO_VERSION }}
          cache-dependency-path: backend/go.sum
      - name: Run Benchmark
        working-directory: backend
        env:
          CHARON_ENCRYPTION_KEY: ${{ secrets.CHARON_ENCRYPTION_KEY_TEST }}
        # -run='^$' skips unit tests so only benchmarks execute.
        run: go test -bench=. -benchmem -run='^$' ./... | tee output.txt
      - name: Store Benchmark Result
        # Only store results on pushes to main - PRs just run benchmarks without storage
        # This avoids gh-pages branch errors and permission issues on fork PRs
        # FIX: use github.event_name/github.ref — the original checked
        # workflow_run fields that are never populated for these triggers.
        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
        # Security: Pinned to full SHA for supply chain security
        uses: benchmark-action/github-action-benchmark@4e0b38bc48375986542b13c0d8976b7b80c60c00 # v1
        with:
          name: Go Benchmark
          tool: 'go'
          output-file-path: backend/output.txt
          github-token: ${{ secrets.GITHUB_TOKEN }}
          auto-push: true
          # Show alert with commit comment on detection of performance regression
          # Threshold increased to 175% to account for CI variability
          alert-threshold: '175%'
          comment-on-alert: true
          fail-on-alert: false
          # Enable Job Summary
          summary-always: true
      - name: Run Perf Asserts
        working-directory: backend
        env:
          PERF_MAX_MS_GETSTATUS_P95: 500ms
          PERF_MAX_MS_GETSTATUS_P95_PARALLEL: 1500ms
          PERF_MAX_MS_LISTDECISIONS_P95: 2000ms
          CHARON_ENCRYPTION_KEY: ${{ secrets.CHARON_ENCRYPTION_KEY_TEST }}
        # Propagate the go test exit code through the tee pipe.
        run: |
          echo "## 🔍 Running performance assertions (TestPerf)" >> "$GITHUB_STEP_SUMMARY"
          go test -run TestPerf -v ./internal/api/handlers -count=1 | tee perf-output.txt
          exit "${PIPESTATUS[0]}"

View File

@@ -1,66 +0,0 @@
# Weekly check for an upstream Caddy v3 release; opens a single tracking
# issue when one is detected.
name: Monitor Caddy Major Release

on:
  schedule:
    - cron: '17 7 * * 1' # Mondays at 07:17 UTC
  workflow_dispatch: {}

concurrency:
  group: ${{ github.workflow }}
  cancel-in-progress: false

permissions:
  contents: read
  issues: write

jobs:
  check-caddy-major:
    runs-on: ubuntu-latest
    steps:
      - name: Check for Caddy v3 and open issue
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
        with:
          script: |
            const upstream = { owner: 'caddyserver', repo: 'caddy' };
            const { data: releases } = await github.rest.repos.listReleases({
              ...upstream,
              per_page: 50,
            });
            const latestV3 = releases.find(r => /^v3\./.test(r.tag_name));
            if (!latestV3) {
              core.info('No Caddy v3 release detected.');
              return;
            }
            const issueTitle = `Track upgrade to Caddy v3 (${latestV3.tag_name})`;
            // FIX: the duplicate check previously fetched only the first page
            // (up to 100) of open issues, so a repo with more open issues
            // could accumulate duplicate tracking issues. Paginate instead.
            const existing = await github.paginate(github.rest.issues.listForRepo, {
              owner: context.repo.owner,
              repo: context.repo.repo,
              state: 'open',
              per_page: 100,
            });
            if (existing.some(i => i.title === issueTitle)) {
              core.info('Issue already exists — nothing to do.');
              return;
            }
            const body = [
              'Caddy v3 has been released upstream and detected by the scheduled monitor.',
              '',
              `Detected release: ${latestV3.tag_name} (${latestV3.html_url})`,
              '',
              '- Create a feature branch to evaluate the v3 migration.',
              '- Review breaking changes and update Docker base images/workflows.',
              '- Validate Trivy scans and update any policies as needed.',
              '',
              'Current policy: remain on latest 2.x until v3 is validated.'
            ].join('\n');
            await github.rest.issues.create({
              owner: context.repo.owner,
              repo: context.repo.repo,
              title: issueTitle,
              body,
            });

View File

@@ -1,109 +0,0 @@
name: Cerberus Integration
# Builds the charon image locally and runs the Cerberus security-stack
# integration test suite against it.
# NOTE(review): the original header claimed this workflow "uses the registry
# image instead of building" and "waits for docker-build.yml" — the steps
# below build locally with `docker build`. The `image_tag` input and the
# workflow_run references in the concurrency group appear to be leftovers
# from that registry-image design; confirm whether they should be removed.

on:
  workflow_dispatch:
    inputs:
      image_tag:
        description: 'Docker image tag to test (e.g., pr-123-abc1234, latest)'
        required: false
        type: string
  pull_request:
  push:
    branches:
      - main

# Prevent race conditions when PR is updated mid-test
# Cancels old test runs when new build completes with different SHA
# NOTE(review): github.event.workflow_run.* is never populated for these
# triggers, so the group effectively falls back to event name + ref.
concurrency:
  group: ${{ github.workflow }}-${{ github.event.workflow_run.event || github.event_name }}-${{ github.event.workflow_run.head_branch || github.ref }}
  cancel-in-progress: true

jobs:
  cerberus-integration:
    name: Cerberus Security Stack Integration
    runs-on: ubuntu-latest
    timeout-minutes: 20
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      - name: Build Docker image (Local)
        run: |
          echo "Building image locally for integration tests..."
          docker build -t charon:local .
          echo "✅ Successfully built charon:local"
      - name: Run Cerberus integration tests
        id: cerberus-test
        # Propagate the script's exit code through the tee pipe.
        run: |
          chmod +x scripts/cerberus_integration.sh
          scripts/cerberus_integration.sh 2>&1 | tee cerberus-test-output.txt
          exit "${PIPESTATUS[0]}"
      - name: Dump Debug Info on Failure
        if: failure()
        run: |
          {
            echo "## 🔍 Debug Information"
            echo ""
            echo "### Container Status"
            echo '```'
            docker ps -a --filter "name=charon" --filter "name=cerberus" --filter "name=backend" 2>&1 || true
            echo '```'
            echo ""
            echo "### Security Status API"
            echo '```json'
            curl -s http://localhost:8480/api/v1/security/status 2>/dev/null | head -100 || echo "Could not retrieve security status"
            echo '```'
            echo ""
            echo "### Caddy Admin Config"
            echo '```json'
            curl -s http://localhost:2319/config 2>/dev/null | head -200 || echo "Could not retrieve Caddy config"
            echo '```'
            echo ""
            echo "### Charon Container Logs (last 100 lines)"
            echo '```'
            docker logs charon-cerberus-test 2>&1 | tail -100 || echo "No container logs available"
            echo '```'
          } >> "$GITHUB_STEP_SUMMARY"
      - name: Cerberus Integration Summary
        if: always()
        run: |
          {
            echo "## 🔱 Cerberus Integration Test Results"
            if [ "${{ steps.cerberus-test.outcome }}" == "success" ]; then
              echo "✅ **All Cerberus tests passed**"
              echo ""
              echo "### Test Results:"
              echo '```'
              grep -E "✓|PASS|TC-[0-9]|=== ALL" cerberus-test-output.txt || echo "See logs for details"
              echo '```'
              echo ""
              echo "### Features Tested:"
              echo "- WAF (Coraza) payload inspection"
              echo "- Rate limiting enforcement"
              echo "- Security handler ordering"
              echo "- Legitimate traffic flow"
            else
              echo "❌ **Cerberus tests failed**"
              echo ""
              echo "### Failure Details:"
              echo '```'
              grep -E "✗|FAIL|Error|failed" cerberus-test-output.txt | head -30 || echo "See logs for details"
              echo '```'
            fi
          } >> "$GITHUB_STEP_SUMMARY"
      - name: Cleanup
        if: always()
        # Best-effort teardown of containers, volumes, and network.
        run: |
          docker rm -f charon-cerberus-test || true
          docker rm -f cerberus-backend || true
          docker volume rm charon_cerberus_test_data caddy_cerberus_test_data caddy_cerberus_test_config 2>/dev/null || true
          docker network rm containers_default || true

View File

@@ -1,179 +0,0 @@
name: Upload Coverage to Codecov

on:
  pull_request:
  push:
    branches:
      - main
  workflow_dispatch:
    inputs:
      run_backend:
        description: 'Run backend coverage upload'
        required: false
        default: true
        type: boolean
      run_frontend:
        description: 'Run frontend coverage upload'
        required: false
        default: true
        type: boolean

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  GO_VERSION: '1.26.0'
  NODE_VERSION: '24.12.0'
  GOTOOLCHAIN: auto

permissions:
  contents: read
  pull-requests: write

jobs:
  backend-codecov:
    name: Backend Codecov Upload
    runs-on: ubuntu-latest
    timeout-minutes: 15
    # Allow manual dispatch to opt out of the backend job.
    if: ${{ github.event_name != 'workflow_dispatch' || inputs.run_backend }}
    steps:
      - name: Checkout
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          fetch-depth: 0
          ref: ${{ github.sha }}
      - name: Set up Go
        uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
        with:
          go-version: ${{ env.GO_VERSION }}
          cache-dependency-path: backend/go.sum
      # SECURITY: Keep pull_request (not pull_request_target) for secret-bearing backend tests.
      # Untrusted code (fork PRs and Dependabot PRs) gets ephemeral workflow-only keys.
      - name: Resolve encryption key for backend coverage
        shell: bash
        env:
          EVENT_NAME: ${{ github.event_name }}
          ACTOR: ${{ github.actor }}
          REPO: ${{ github.repository }}
          PR_HEAD_REPO: ${{ github.event.pull_request.head.repo.full_name }}
          PR_HEAD_FORK: ${{ github.event.pull_request.head.repo.fork }}
          WORKFLOW_SECRET_KEY: ${{ secrets.CHARON_ENCRYPTION_KEY_TEST }}
        run: |
          set -euo pipefail
          # Classify the triggering context into trusted / untrusted.
          # Trusted (same-repo PR, dispatch, push): use the repo secret.
          # Untrusted (fork or Dependabot PR): generate a throwaway key.
          is_same_repo_pr=false
          if [[ "$EVENT_NAME" == "pull_request" && -n "${PR_HEAD_REPO:-}" && "$PR_HEAD_REPO" == "$REPO" ]]; then
            is_same_repo_pr=true
          fi
          is_workflow_dispatch=false
          if [[ "$EVENT_NAME" == "workflow_dispatch" ]]; then
            is_workflow_dispatch=true
          fi
          is_push_event=false
          if [[ "$EVENT_NAME" == "push" ]]; then
            is_push_event=true
          fi
          is_dependabot_pr=false
          if [[ "$EVENT_NAME" == "pull_request" && "$ACTOR" == "dependabot[bot]" ]]; then
            is_dependabot_pr=true
          fi
          is_fork_pr=false
          if [[ "$EVENT_NAME" == "pull_request" && "${PR_HEAD_FORK:-false}" == "true" ]]; then
            is_fork_pr=true
          fi
          # Untrusted overrides trusted: a Dependabot PR from the same repo
          # still gets an ephemeral key.
          is_untrusted=false
          if [[ "$is_fork_pr" == "true" || "$is_dependabot_pr" == "true" ]]; then
            is_untrusted=true
          fi
          is_trusted=false
          if [[ "$is_untrusted" == "false" && ( "$is_same_repo_pr" == "true" || "$is_workflow_dispatch" == "true" || "$is_push_event" == "true" ) ]]; then
            is_trusted=true
          fi
          resolved_key=""
          if [[ "$is_trusted" == "true" ]]; then
            if [[ -z "${WORKFLOW_SECRET_KEY:-}" ]]; then
              echo "::error title=Missing required secret::Trusted backend CI context requires CHARON_ENCRYPTION_KEY_TEST. Add repository secret CHARON_ENCRYPTION_KEY_TEST."
              exit 1
            fi
            resolved_key="$WORKFLOW_SECRET_KEY"
          elif [[ "$is_untrusted" == "true" ]]; then
            resolved_key="$(openssl rand -base64 32)"
          else
            echo "::error title=Unsupported event context::Unable to classify trust for backend key resolution (event=${EVENT_NAME})."
            exit 1
          fi
          if [[ -z "$resolved_key" ]]; then
            echo "::error title=Key resolution failure::Resolved encryption key is empty."
            exit 1
          fi
          # Mask the key in logs, then export it to later steps via a
          # heredoc-style GITHUB_ENV entry (safe for multi-line values).
          echo "::add-mask::$resolved_key"
          {
            echo "CHARON_ENCRYPTION_KEY<<__CHARON_EOF__"
            echo "$resolved_key"
            echo "__CHARON_EOF__"
          } >> "$GITHUB_ENV"
      - name: Run Go tests with coverage
        working-directory: ${{ github.workspace }}
        env:
          CGO_ENABLED: 1
        # Propagate the script's exit code through the tee pipe.
        run: |
          bash scripts/go-test-coverage.sh 2>&1 | tee backend/test-output.txt
          exit "${PIPESTATUS[0]}"
      - name: Upload backend coverage to Codecov
        uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: ./backend/coverage.txt
          flags: backend
          fail_ci_if_error: true
  frontend-codecov:
    name: Frontend Codecov Upload
    runs-on: ubuntu-latest
    timeout-minutes: 15
    # Allow manual dispatch to opt out of the frontend job.
    if: ${{ github.event_name != 'workflow_dispatch' || inputs.run_frontend }}
    steps:
      - name: Checkout
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          fetch-depth: 0
          ref: ${{ github.sha }}
      - name: Set up Node.js
        uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'npm'
          cache-dependency-path: frontend/package-lock.json
      - name: Install dependencies
        working-directory: frontend
        run: npm ci
      - name: Run frontend tests and coverage
        working-directory: ${{ github.workspace }}
        run: |
          bash scripts/frontend-test-coverage.sh 2>&1 | tee frontend/test-output.txt
          exit "${PIPESTATUS[0]}"
      - name: Upload frontend coverage to Codecov
        uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          directory: ./frontend/coverage
          flags: frontend
          fail_ci_if_error: true

View File

@@ -1,235 +0,0 @@
# Runs CodeQL over Go and JS/TS, summarizes findings, and fails the build on
# any effective-level "error" result.
name: CodeQL - Analyze

on:
  pull_request:
    branches: [main, nightly, development]
  push:
    branches: [main]
  workflow_dispatch:
  schedule:
    - cron: '0 3 * * 1' # Mondays 03:00 UTC

concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref_name }}
  cancel-in-progress: true

env:
  GOTOOLCHAIN: auto

permissions:
  contents: read
  security-events: write
  actions: read
  pull-requests: read

jobs:
  analyze:
    name: CodeQL analysis (${{ matrix.language }})
    runs-on: ubuntu-latest
    permissions:
      contents: read
      security-events: write
      actions: read
      pull-requests: read
    strategy:
      fail-fast: false
      matrix:
        language: [ 'go', 'javascript-typescript' ]
    steps:
      - name: Checkout repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          # Use github.ref (full ref path) instead of github.ref_name:
          # - push/schedule: resolves to refs/heads/<branch>, checking out latest HEAD
          # - pull_request: resolves to refs/pull/<n>/merge, the correct PR merge ref
          # github.ref_name fails for PRs because it yields "<n>/merge" which checkout
          # interprets as a branch name (refs/heads/<n>/merge) that does not exist.
          ref: ${{ github.ref }}
      - name: Verify CodeQL parity guard
        if: matrix.language == 'go'
        run: bash scripts/ci/check-codeql-parity.sh
      - name: Initialize CodeQL
        uses: github/codeql-action/init@c793b717bc78562f491db7b0e93a3a178b099162 # v4
        with:
          languages: ${{ matrix.language }}
          queries: security-and-quality
          # Use CodeQL config to exclude documented false positives
          # Go: Excludes go/request-forgery for url_testing.go (has 4-layer SSRF defense)
          # See: .github/codeql/codeql-config.yml for full justification
          config-file: ./.github/codeql/codeql-config.yml
      - name: Setup Go
        if: matrix.language == 'go'
        uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
        with:
          go-version: 1.26.0
          cache-dependency-path: backend/go.sum
      - name: Verify Go toolchain and build
        if: matrix.language == 'go'
        # Fails fast when the installed Go toolchain does not satisfy go.mod,
        # then builds so the CodeQL extractor observes a compiling tree.
        run: |
          set -euo pipefail
          cd backend
          go version
          MOD_GO_VERSION="$(awk '/^go / {print $2; exit}' go.mod)"
          ACTIVE_GO_VERSION="$(go env GOVERSION | sed 's/^go//')"
          case "$ACTIVE_GO_VERSION" in
            "$MOD_GO_VERSION"|"$MOD_GO_VERSION".*)
              ;;
            *)
              echo "::error::Go toolchain mismatch: go.mod requires ${MOD_GO_VERSION}, active is ${ACTIVE_GO_VERSION}"
              exit 1
              ;;
          esac
          go build ./...
      - name: Prepare SARIF output directory
        run: mkdir -p sarif-results
      - name: Autobuild
        uses: github/codeql-action/autobuild@c793b717bc78562f491db7b0e93a3a178b099162 # v4
      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@c793b717bc78562f491db7b0e93a3a178b099162 # v4
        with:
          category: "/language:${{ matrix.language }}"
          output: sarif-results/${{ matrix.language }}
      - name: Check CodeQL Results
        if: always()
        # Summarizes SARIF findings. A result's "effective level" is its own
        # level, else the rule's defaultConfiguration.level (looked up by
        # ruleIndex or ruleId), else empty.
        run: |
          set -euo pipefail
          SARIF_DIR="sarif-results/${{ matrix.language }}"
          if [ ! -d "$SARIF_DIR" ]; then
            echo "::error::Expected SARIF output directory is missing: $SARIF_DIR"
            echo "❌ **ERROR:** SARIF output directory is missing: $SARIF_DIR" >> "$GITHUB_STEP_SUMMARY"
            exit 1
          fi
          SARIF_FILE="$(find "$SARIF_DIR" -maxdepth 1 -type f -name '*.sarif' | head -n 1 || true)"
          {
            echo "## 🔒 CodeQL Security Analysis Results"
            echo ""
            echo "**Language:** ${{ matrix.language }}"
            echo "**Query Suite:** security-and-quality"
            echo ""
          } >> "$GITHUB_STEP_SUMMARY"
          if [ -z "$SARIF_FILE" ] || [ ! -r "$SARIF_FILE" ]; then
            echo "::error::Expected SARIF file is missing or unreadable: $SARIF_FILE"
            echo "❌ **ERROR:** SARIF file is missing or unreadable: $SARIF_FILE" >> "$GITHUB_STEP_SUMMARY"
            exit 1
          fi
          # shellcheck disable=SC2016
          EFFECTIVE_LEVELS_JQ='[
            .runs[] as $run
            | $run.results[]
            | . as $result
            | ($run.tool.driver.rules // []) as $rules
            | ((
                $result.level
                // (if (($result.ruleIndex | type) == "number") then ($rules[$result.ruleIndex].defaultConfiguration.level // empty) else empty end)
                // ([
                    $rules[]?
                    | select((.id // "") == ($result.ruleId // ""))
                    | (.defaultConfiguration.level // empty)
                  ][0] // empty)
                // ""
              ) | ascii_downcase)
          ]'
          echo "Found SARIF file: $SARIF_FILE"
          ERROR_COUNT=$(jq -r "${EFFECTIVE_LEVELS_JQ} | map(select(. == \"error\")) | length" "$SARIF_FILE")
          WARNING_COUNT=$(jq -r "${EFFECTIVE_LEVELS_JQ} | map(select(. == \"warning\")) | length" "$SARIF_FILE")
          NOTE_COUNT=$(jq -r "${EFFECTIVE_LEVELS_JQ} | map(select(. == \"note\")) | length" "$SARIF_FILE")
          {
            echo "**Findings:**"
            echo "- 🔴 Errors: $ERROR_COUNT"
            echo "- 🟡 Warnings: $WARNING_COUNT"
            echo "- 🔵 Notes: $NOTE_COUNT"
            echo ""
            if [ "$ERROR_COUNT" -gt 0 ]; then
              echo "❌ **BLOCKING:** CodeQL error-level security issues found"
              echo ""
              echo "### Top Issues:"
              echo '```'
              # FIX: the interpolation previously used \"<unknown-rule>\"
              # inside \(...), which is a jq syntax error (inside the shell
              # single quotes, jq receives the literal backslashes). Plain
              # double quotes are correct here.
              # shellcheck disable=SC2016
              jq -r '
                .runs[] as $run
                | $run.results[]
                | . as $result
                | ($run.tool.driver.rules // []) as $rules
                | ((
                    $result.level
                    // (if (($result.ruleIndex | type) == "number") then ($rules[$result.ruleIndex].defaultConfiguration.level // empty) else empty end)
                    // ([
                        $rules[]?
                        | select((.id // "") == ($result.ruleId // ""))
                        | (.defaultConfiguration.level // empty)
                      ][0] // empty)
                    // ""
                  ) | ascii_downcase) as $effectiveLevel
                | select($effectiveLevel == "error")
                | "\($effectiveLevel): \($result.ruleId // "<unknown-rule>"): \($result.message.text)"
              ' "$SARIF_FILE" | head -5
              echo '```'
            else
              echo "✅ No blocking CodeQL issues found"
            fi
          } >> "$GITHUB_STEP_SUMMARY"
          {
            echo ""
            echo "View full results in the [Security tab](https://github.com/${{ github.repository }}/security/code-scanning)"
          } >> "$GITHUB_STEP_SUMMARY"
      - name: Fail on High-Severity Findings
        if: always()
        # Gate step: exit non-zero when any finding's effective level is
        # "error" (same resolution logic as the summary step above).
        run: |
          set -euo pipefail
          SARIF_DIR="sarif-results/${{ matrix.language }}"
          if [ ! -d "$SARIF_DIR" ]; then
            echo "::error::Expected SARIF output directory is missing: $SARIF_DIR"
            exit 1
          fi
          SARIF_FILE="$(find "$SARIF_DIR" -maxdepth 1 -type f -name '*.sarif' | head -n 1 || true)"
          if [ -z "$SARIF_FILE" ] || [ ! -r "$SARIF_FILE" ]; then
            echo "::error::Expected SARIF file is missing or unreadable: $SARIF_FILE"
            exit 1
          fi
          # shellcheck disable=SC2016
          ERROR_COUNT=$(jq -r '[
            .runs[] as $run
            | $run.results[]
            | . as $result
            | ($run.tool.driver.rules // []) as $rules
            | ((
                $result.level
                // (if (($result.ruleIndex | type) == "number") then ($rules[$result.ruleIndex].defaultConfiguration.level // empty) else empty end)
                // ([
                    $rules[]?
                    | select((.id // "") == ($result.ruleId // ""))
                    | (.defaultConfiguration.level // empty)
                  ][0] // empty)
                // ""
              ) | ascii_downcase) as $effectiveLevel
            | select($effectiveLevel == "error")
          ] | length' "$SARIF_FILE")
          if [ "$ERROR_COUNT" -gt 0 ]; then
            echo "::error::CodeQL found $ERROR_COUNT blocking findings (effective-level=error). Fix before merging. Policy: .github/security-severity-policy.yml"
            exit 1
          fi

View File

@@ -1,226 +0,0 @@
name: Container Registry Prune

on:
  schedule:
    - cron: '0 3 * * 0' # Weekly: Sundays at 03:00 UTC
  workflow_dispatch:
    inputs:
      keep_days:
        description: 'Number of days to retain images (unprotected)'
        required: false
        default: '30'
      dry_run:
        description: 'If true, only logs candidates and does not delete (default: false for active cleanup)'
        required: false
        default: 'false'
      keep_last_n:
        description: 'Keep last N newest images (global)'
        required: false
        default: '30'

permissions:
  packages: write
  contents: read

jobs:
  prune-ghcr:
    runs-on: ubuntu-latest
    env:
      OWNER: ${{ github.repository_owner }}
      IMAGE_NAME: charon
      KEEP_DAYS: ${{ github.event.inputs.keep_days || '30' }}
      KEEP_LAST_N: ${{ github.event.inputs.keep_last_n || '30' }}
      # NOTE(review): there is no pull_request trigger on this workflow, so
      # the pull_request branch of this expression is dead — confirm intent.
      DRY_RUN: ${{ github.event_name == 'pull_request' && 'true' || github.event.inputs.dry_run || 'false' }}
      # JSON array of tag patterns that must never be deleted.
      PROTECTED_REGEX: '["^v?[0-9]+\\.[0-9]+\\.[0-9]+$","^latest$","^main$","^develop$"]'
      PRUNE_UNTAGGED: 'true'
      PRUNE_SBOM_TAGS: 'true'
    steps:
      - name: Checkout
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      - name: Install tools
        run: |
          sudo apt-get update && sudo apt-get install -y jq curl
      - name: Run GHCR prune
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          chmod +x scripts/prune-ghcr.sh
          ./scripts/prune-ghcr.sh 2>&1 | tee prune-ghcr-${{ github.run_id }}.log
      - name: Summarize GHCR results
        if: always()
        # Prefer the machine-readable summary file written by the prune
        # script; fall back to scraping the log when it is absent.
        run: |
          set -euo pipefail
          SUMMARY_FILE=prune-summary-ghcr.env
          LOG_FILE=prune-ghcr-${{ github.run_id }}.log
          # Format a byte count as a human-readable size.
          human() {
            local bytes=${1:-0}
            if [ -z "$bytes" ] || [ "$bytes" -eq 0 ]; then
              echo "0 B"
              return
            fi
            awk -v b="$bytes" 'BEGIN { split("B KiB MiB GiB TiB",u," "); i=0; x=b; while(x>1024){x/=1024;i++} printf "%0.2f %s", x, u[i+1] }'
          }
          if [ -f "$SUMMARY_FILE" ]; then
            TOTAL_CANDIDATES=$(grep -E '^TOTAL_CANDIDATES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
            TOTAL_CANDIDATES_BYTES=$(grep -E '^TOTAL_CANDIDATES_BYTES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
            TOTAL_DELETED=$(grep -E '^TOTAL_DELETED=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
            TOTAL_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
            {
              echo "## GHCR prune summary"
              echo "- candidates: ${TOTAL_CANDIDATES} (≈ $(human "${TOTAL_CANDIDATES_BYTES}"))"
              echo "- deleted: ${TOTAL_DELETED} (≈ $(human "${TOTAL_DELETED_BYTES}"))"
            } >> "$GITHUB_STEP_SUMMARY"
          else
            deleted_bytes=$(grep -oE '\( *approx +[0-9]+ bytes\)' "$LOG_FILE" | sed -E 's/.*approx +([0-9]+) bytes.*/\1/' | awk '{s+=$1} END {print s+0}' || true)
            deleted_count=$(grep -cE 'deleting |DRY RUN: would delete' "$LOG_FILE" || true)
            {
              echo "## GHCR prune summary"
              echo "- deleted (approx): ${deleted_count} (≈ $(human "${deleted_bytes}"))"
            } >> "$GITHUB_STEP_SUMMARY"
          fi
      - name: Upload GHCR prune artifacts
        if: always()
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
        with:
          name: prune-ghcr-log-${{ github.run_id }}
          path: |
            prune-ghcr-${{ github.run_id }}.log
            prune-summary-ghcr.env
  prune-dockerhub:
    runs-on: ubuntu-latest
    env:
      OWNER: ${{ github.repository_owner }}
      IMAGE_NAME: charon
      KEEP_DAYS: ${{ github.event.inputs.keep_days || '30' }}
      KEEP_LAST_N: ${{ github.event.inputs.keep_last_n || '30' }}
      # NOTE(review): dead pull_request branch — same as prune-ghcr above.
      DRY_RUN: ${{ github.event_name == 'pull_request' && 'true' || github.event.inputs.dry_run || 'false' }}
      PROTECTED_REGEX: '["^v?[0-9]+\\.[0-9]+\\.[0-9]+$","^latest$","^main$","^develop$"]'
    steps:
      - name: Checkout
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      - name: Install tools
        run: |
          sudo apt-get update && sudo apt-get install -y jq curl
      - name: Run Docker Hub prune
        env:
          DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
          DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
        run: |
          chmod +x scripts/prune-dockerhub.sh
          ./scripts/prune-dockerhub.sh 2>&1 | tee prune-dockerhub-${{ github.run_id }}.log
      - name: Summarize Docker Hub results
        if: always()
        # Mirrors the GHCR summarize step for the Docker Hub log/summary.
        run: |
          set -euo pipefail
          SUMMARY_FILE=prune-summary-dockerhub.env
          LOG_FILE=prune-dockerhub-${{ github.run_id }}.log
          # Format a byte count as a human-readable size.
          human() {
            local bytes=${1:-0}
            if [ -z "$bytes" ] || [ "$bytes" -eq 0 ]; then
              echo "0 B"
              return
            fi
            awk -v b="$bytes" 'BEGIN { split("B KiB MiB GiB TiB",u," "); i=0; x=b; while(x>1024){x/=1024;i++} printf "%0.2f %s", x, u[i+1] }'
          }
          if [ -f "$SUMMARY_FILE" ]; then
            TOTAL_CANDIDATES=$(grep -E '^TOTAL_CANDIDATES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
            TOTAL_CANDIDATES_BYTES=$(grep -E '^TOTAL_CANDIDATES_BYTES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
            TOTAL_DELETED=$(grep -E '^TOTAL_DELETED=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
            TOTAL_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
            {
              echo "## Docker Hub prune summary"
              echo "- candidates: ${TOTAL_CANDIDATES} (≈ $(human "${TOTAL_CANDIDATES_BYTES}"))"
              echo "- deleted: ${TOTAL_DELETED} (≈ $(human "${TOTAL_DELETED_BYTES}"))"
            } >> "$GITHUB_STEP_SUMMARY"
          else
            deleted_bytes=$(grep -oE '\( *approx +[0-9]+ bytes\)' "$LOG_FILE" | sed -E 's/.*approx +([0-9]+) bytes.*/\1/' | awk '{s+=$1} END {print s+0}' || true)
            deleted_count=$(grep -cE 'deleting |DRY RUN: would delete' "$LOG_FILE" || true)
            {
              echo "## Docker Hub prune summary"
              echo "- deleted (approx): ${deleted_count} (≈ $(human "${deleted_bytes}"))"
            } >> "$GITHUB_STEP_SUMMARY"
          fi
      - name: Upload Docker Hub prune artifacts
        if: always()
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
        with:
          name: prune-dockerhub-log-${{ github.run_id }}
          path: |
            prune-dockerhub-${{ github.run_id }}.log
            prune-summary-dockerhub.env
  summarize:
    runs-on: ubuntu-latest
    needs: [prune-ghcr, prune-dockerhub]
    if: always()
    steps:
      - name: Download all artifacts
        uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8
        with:
          pattern: prune-*-log-${{ github.run_id }}
          merge-multiple: true
      - name: Combined summary
        # Aggregates both registries' summary files into one table.
        run: |
          set -euo pipefail
          # Format a byte count as a human-readable size.
          human() {
            local bytes=${1:-0}
            if [ -z "$bytes" ] || [ "$bytes" -eq 0 ]; then
              echo "0 B"
              return
            fi
            awk -v b="$bytes" 'BEGIN { split("B KiB MiB GiB TiB",u," "); i=0; x=b; while(x>1024){x/=1024;i++} printf "%0.2f %s", x, u[i+1] }'
          }
          GHCR_CANDIDATES=0 GHCR_CANDIDATES_BYTES=0 GHCR_DELETED=0 GHCR_DELETED_BYTES=0
          if [ -f prune-summary-ghcr.env ]; then
            GHCR_CANDIDATES=$(grep -E '^TOTAL_CANDIDATES=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
            GHCR_CANDIDATES_BYTES=$(grep -E '^TOTAL_CANDIDATES_BYTES=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
            GHCR_DELETED=$(grep -E '^TOTAL_DELETED=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
            GHCR_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
          fi
          HUB_CANDIDATES=0 HUB_CANDIDATES_BYTES=0 HUB_DELETED=0 HUB_DELETED_BYTES=0
          if [ -f prune-summary-dockerhub.env ]; then
            HUB_CANDIDATES=$(grep -E '^TOTAL_CANDIDATES=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
            HUB_CANDIDATES_BYTES=$(grep -E '^TOTAL_CANDIDATES_BYTES=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
            HUB_DELETED=$(grep -E '^TOTAL_DELETED=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
            HUB_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
          fi
          TOTAL_CANDIDATES=$((GHCR_CANDIDATES + HUB_CANDIDATES))
          TOTAL_CANDIDATES_BYTES=$((GHCR_CANDIDATES_BYTES + HUB_CANDIDATES_BYTES))
          TOTAL_DELETED=$((GHCR_DELETED + HUB_DELETED))
          TOTAL_DELETED_BYTES=$((GHCR_DELETED_BYTES + HUB_DELETED_BYTES))
          {
            echo "## Combined container prune summary"
            echo ""
            echo "| Registry | Candidates | Deleted | Space Reclaimed |"
            echo "|----------|------------|---------|-----------------|"
            echo "| GHCR | ${GHCR_CANDIDATES} | ${GHCR_DELETED} | $(human "${GHCR_DELETED_BYTES}") |"
            echo "| Docker Hub | ${HUB_CANDIDATES} | ${HUB_DELETED} | $(human "${HUB_DELETED_BYTES}") |"
            echo "| **Total** | **${TOTAL_CANDIDATES}** | **${TOTAL_DELETED}** | **$(human "${TOTAL_DELETED_BYTES}")** |"
          } >> "$GITHUB_STEP_SUMMARY"
          printf 'PRUNE_SUMMARY: candidates=%s candidates_bytes=%s deleted=%s deleted_bytes=%s\n' \
            "${TOTAL_CANDIDATES}" "${TOTAL_CANDIDATES_BYTES}" "${TOTAL_DELETED}" "${TOTAL_DELETED_BYTES}"
          echo "Total space reclaimed: $(human "${TOTAL_DELETED_BYTES}")"

View File

@@ -1,82 +0,0 @@
name: Create Project Labels
# Manually-triggered, idempotent label bootstrap: creates every project label
# and updates any that already exist so colors/descriptions stay in sync.
on:
  workflow_dispatch:
concurrency:
  group: ${{ github.workflow }}
  cancel-in-progress: false
jobs:
  create-labels:
    runs-on: ubuntu-latest
    permissions:
      issues: write
    steps:
      - name: Create all project labels
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
        with:
          script: |
            const labels = [
              // Priority labels
              { name: 'critical', color: 'B60205', description: 'Must have for the release, blocks other work' },
              { name: 'high', color: 'D93F0B', description: 'Important feature, should be included' },
              { name: 'medium', color: 'FBCA04', description: 'Nice to have, can be deferred' },
              { name: 'low', color: '0E8A16', description: 'Future enhancement, not urgent' },
              // Milestone labels
              { name: 'alpha', color: '5319E7', description: 'Part of initial alpha release' },
              { name: 'beta', color: '0052CC', description: 'Part of beta release' },
              { name: 'post-beta', color: '006B75', description: 'Post-beta enhancement' },
              // Category labels
              { name: 'architecture', color: 'C5DEF5', description: 'System design and structure' },
              { name: 'backend', color: '1D76DB', description: 'Server-side code' },
              { name: 'frontend', color: '5EBEFF', description: 'UI/UX code' },
              { name: 'feature', color: 'A2EEEF', description: 'New functionality' },
              { name: 'security', color: 'EE0701', description: 'Security-related' },
              { name: 'ssl', color: 'F9D0C4', description: 'SSL/TLS certificates' },
              { name: 'sso', color: 'D4C5F9', description: 'Single Sign-On' },
              { name: 'waf', color: 'B60205', description: 'Web Application Firewall' },
              { name: 'crowdsec', color: 'FF6B6B', description: 'CrowdSec integration' },
              { name: 'caddy', color: '1F6FEB', description: 'Caddy-specific' },
              { name: 'database', color: '006B75', description: 'Database-related' },
              { name: 'ui', color: '7057FF', description: 'User interface' },
              { name: 'deployment', color: '0E8A16', description: 'Docker, installation' },
              { name: 'monitoring', color: 'FEF2C0', description: 'Logging and statistics' },
              { name: 'documentation', color: '0075CA', description: 'Docs and guides' },
              { name: 'testing', color: 'BFD4F2', description: 'Test suite' },
              { name: 'performance', color: 'EDEDED', description: 'Optimization' },
              { name: 'community', color: 'D876E3', description: 'Community building' },
              { name: 'plus', color: 'FFD700', description: 'Premium/"Plus" feature' },
              { name: 'enterprise', color: '8B4513', description: 'Enterprise-grade feature' }
            ];
            // Track labels that could not be created or updated so the step
            // can fail at the end instead of silently succeeding.
            const failures = [];
            for (const label of labels) {
              try {
                await github.rest.issues.createLabel({
                  owner: context.repo.owner,
                  repo: context.repo.repo,
                  name: label.name,
                  color: label.color,
                  description: label.description
                });
                console.log(`✓ Created label: ${label.name}`);
              } catch (error) {
                if (error.status === 422) {
                  // 422 from createLabel means the label already exists;
                  // refresh its color/description to match our definition.
                  console.log(`⚠ Label already exists: ${label.name}`);
                  try {
                    await github.rest.issues.updateLabel({
                      owner: context.repo.owner,
                      repo: context.repo.repo,
                      name: label.name,
                      color: label.color,
                      description: label.description
                    });
                    console.log(`✓ Updated label: ${label.name}`);
                  } catch (updateError) {
                    // FIX: an unguarded updateLabel failure previously aborted
                    // the whole script mid-loop; record it and keep going.
                    console.error(`✗ Error updating label ${label.name}:`, updateError.message);
                    failures.push(label.name);
                  }
                } else {
                  console.error(`✗ Error creating label ${label.name}:`, error.message);
                  failures.push(label.name);
                }
              }
            }
            // FIX: errors were previously only logged, so the job always
            // reported success. Surface any failure as a failed step.
            if (failures.length > 0) {
              core.setFailed(`Failed to create/update labels: ${failures.join(', ')}`);
            }

View File

@@ -1,136 +0,0 @@
name: CrowdSec Integration
# Runs the CrowdSec bouncer integration suite against a locally-built image.
# NOTE: despite the older "Build Once, Test Many" plan, this workflow currently
# builds the image itself (see the "Build Docker image (Local)" step) and is
# NOT triggered by docker-build.yml.
on:
  workflow_dispatch:
    inputs:
      image_tag:
        # NOTE(review): this input is currently unused — the job always builds
        # `charon:local`. Kept for dispatch compatibility; wire it up or remove
        # it deliberately.
        description: 'Docker image tag to test (e.g., pr-123-abc1234, latest)'
        required: false
        type: string
  pull_request:
  push:
    branches:
      - main
# Cancel superseded runs for the same trigger and ref.
# FIX: the previous group referenced github.event.workflow_run.*, but this
# workflow has no workflow_run trigger, so those expressions were always empty
# and the `||` fallbacks applied unconditionally. Use the fallbacks directly.
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  crowdsec-integration:
    name: CrowdSec Bouncer Integration
    runs-on: ubuntu-latest
    timeout-minutes: 15
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      - name: Build Docker image (Local)
        run: |
          echo "Building image locally for integration tests..."
          docker build -t charon:local .
          echo "✅ Successfully built charon:local"
      - name: Run CrowdSec integration tests
        id: crowdsec-test
        run: |
          chmod +x .github/skills/scripts/skill-runner.sh
          .github/skills/scripts/skill-runner.sh integration-test-crowdsec 2>&1 | tee crowdsec-test-output.txt
          # Propagate the skill runner's exit code, not tee's.
          exit "${PIPESTATUS[0]}"
      - name: Run CrowdSec Startup and LAPI Tests
        id: lapi-test
        run: |
          chmod +x .github/skills/scripts/skill-runner.sh
          .github/skills/scripts/skill-runner.sh integration-test-crowdsec-startup 2>&1 | tee lapi-test-output.txt
          # Propagate the skill runner's exit code, not tee's.
          exit "${PIPESTATUS[0]}"
      - name: Dump Debug Info on Failure
        if: failure()
        run: |
          {
            echo "## 🔍 Debug Information"
            echo ""
            echo "### Container Status"
            echo '```'
            docker ps -a --filter "name=charon" --filter "name=crowdsec" 2>&1 || true
            echo '```'
            echo ""
            # Check which test container exists and dump its logs
            if docker ps -a --filter "name=charon-crowdsec-startup-test" --format "{{.Names}}" | grep -q "charon-crowdsec-startup-test"; then
              echo "### Charon Startup Test Container Logs (last 100 lines)"
              echo '```'
              docker logs charon-crowdsec-startup-test 2>&1 | tail -100 || echo "No container logs available"
              echo '```'
            elif docker ps -a --filter "name=charon-debug" --format "{{.Names}}" | grep -q "charon-debug"; then
              echo "### Charon Container Logs (last 100 lines)"
              echo '```'
              docker logs charon-debug 2>&1 | tail -100 || echo "No container logs available"
              echo '```'
            fi
            echo ""
            # Check for CrowdSec specific logs if LAPI test ran
            if [ -f "lapi-test-output.txt" ]; then
              echo "### CrowdSec LAPI Test Failures"
              echo '```'
              grep -E "✗ FAIL|✗ CRITICAL|CROWDSEC.*BROKEN" lapi-test-output.txt 2>&1 || echo "No critical failures found in LAPI test"
              echo '```'
            fi
          } >> "$GITHUB_STEP_SUMMARY"
      - name: CrowdSec Integration Summary
        if: always()
        run: |
          {
            echo "## 🛡️ CrowdSec Integration Test Results"
            # CrowdSec Preset Integration Tests
            if [ "${{ steps.crowdsec-test.outcome }}" == "success" ]; then
              echo "✅ **CrowdSec Hub Presets: Passed**"
              echo ""
              echo "### Preset Test Results:"
              echo '```'
              grep -E "^✓|^===|^Pull|^Apply" crowdsec-test-output.txt || echo "See logs for details"
              echo '```'
            else
              echo "❌ **CrowdSec Hub Presets: Failed**"
              echo ""
              echo "### Preset Failure Details:"
              echo '```'
              grep -E "^✗|Unexpected|Error|failed|FAIL" crowdsec-test-output.txt | head -20 || echo "See logs for details"
              echo '```'
            fi
            echo ""
            # CrowdSec Startup and LAPI Tests
            if [ "${{ steps.lapi-test.outcome }}" == "success" ]; then
              echo "✅ **CrowdSec Startup & LAPI: Passed**"
              echo ""
              echo "### LAPI Test Results:"
              echo '```'
              grep -E "^\[TEST\]|✓ PASS|Check [0-9]|CrowdSec LAPI" lapi-test-output.txt || echo "See logs for details"
              echo '```'
            else
              echo "❌ **CrowdSec Startup & LAPI: Failed**"
              echo ""
              echo "### LAPI Failure Details:"
              echo '```'
              grep -E "✗ FAIL|✗ CRITICAL|Error|failed" lapi-test-output.txt | head -20 || echo "See logs for details"
              echo '```'
            fi
          } >> "$GITHUB_STEP_SUMMARY"
      - name: Cleanup
        if: always()
        run: |
          # Best-effort teardown of anything the tests left behind.
          docker rm -f charon-debug || true
          docker rm -f charon-crowdsec-startup-test || true
          docker rm -f crowdsec || true
          docker network rm containers_default || true

View File

@@ -1,761 +0,0 @@
name: Docker Build, Publish & Test
# This workflow replaced .github/workflows/docker-publish.yml (deleted in commit f640524b on Dec 21, 2025)
# Enhancements over the previous workflow:
# - SBOM generation and attestation for supply chain security
# - CVE-2025-68156 verification for Caddy security patches
# - Enhanced PR handling with dedicated scanning
# - Improved workflow orchestration with supply-chain-verify.yml
#
# PHASE 1 OPTIMIZATION (February 2026):
# - PR images now pushed to GHCR registry (enables downstream workflow consumption)
# - Immutable PR tagging: pr-{number}-{short-sha} (prevents race conditions)
# - Feature branch tagging: {sanitized-branch-name}-{short-sha} (enables unique testing)
# - Tag sanitization per spec Section 3.2 (handles special chars, slashes, etc.)
# - Mandatory security scanning for PR images (blocks on CRITICAL/HIGH vulnerabilities)
# - Retry logic for registry pushes (3 attempts, 10s wait - handles transient failures)
# - Enhanced metadata labels for image freshness validation
# - Artifact upload retained as fallback during migration period
# - Reduced build timeout from 30min to 25min for faster feedback (with retry buffer)
#
# See: docs/plans/current_spec.md (Section 4.1 - docker-build.yml changes)
on:
pull_request:
push:
branches: [main]
workflow_dispatch:
workflow_run:
workflows: ["Docker Lint"]
types: [completed]
concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.head_ref || github.ref_name }}
cancel-in-progress: true
env:
GHCR_REGISTRY: ghcr.io
DOCKERHUB_REGISTRY: docker.io
IMAGE_NAME: wikid82/charon
TRIGGER_EVENT: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.event || github.event_name }}
TRIGGER_HEAD_BRANCH: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref_name }}
TRIGGER_HEAD_SHA: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }}
TRIGGER_REF: ${{ github.event_name == 'workflow_run' && format('refs/heads/{0}', github.event.workflow_run.head_branch) || github.ref }}
TRIGGER_HEAD_REF: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.head_ref }}
TRIGGER_PR_NUMBER: ${{ github.event_name == 'workflow_run' && join(github.event.workflow_run.pull_requests.*.number, '') || github.event.pull_request.number }}
TRIGGER_ACTOR: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.actor.login || github.actor }}
jobs:
build-and-push:
if: ${{ github.event_name != 'workflow_run' || (github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.name == 'Docker Lint' && github.event.workflow_run.path == '.github/workflows/docker-lint.yml') }}
env:
HAS_DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN != '' }}
runs-on: ubuntu-latest
timeout-minutes: 20 # Phase 1: Reduced timeout for faster feedback
permissions:
contents: read
packages: write
security-events: write
id-token: write # Required for SBOM attestation
attestations: write # Required for SBOM attestation
outputs:
skip_build: ${{ steps.skip.outputs.skip_build }}
digest: ${{ steps.build-and-push.outputs.digest }}
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ env.TRIGGER_HEAD_SHA }}
- name: Normalize image name
run: |
IMAGE_NAME=$(echo "${{ env.IMAGE_NAME }}" | tr '[:upper:]' '[:lower:]')
echo "IMAGE_NAME=${IMAGE_NAME}" >> "$GITHUB_ENV"
- name: Determine skip condition
id: skip
env:
ACTOR: ${{ env.TRIGGER_ACTOR }}
EVENT: ${{ env.TRIGGER_EVENT }}
REF: ${{ env.TRIGGER_REF }}
HEAD_REF: ${{ env.TRIGGER_HEAD_REF }}
PR_NUMBER: ${{ env.TRIGGER_PR_NUMBER }}
REPO: ${{ github.repository }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
should_skip=false
pr_title=""
head_msg=$(git log -1 --pretty=%s)
if [ "$EVENT" = "pull_request" ] && [ -n "$PR_NUMBER" ]; then
pr_title=$(curl -sS \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "Accept: application/vnd.github+json" \
"https://api.github.com/repos/${REPO}/pulls/${PR_NUMBER}" | jq -r '.title // empty')
fi
if [ "$ACTOR" = "renovate[bot]" ]; then should_skip=true; fi
if echo "$head_msg" | grep -Ei '^chore\(deps' >/dev/null 2>&1; then should_skip=true; fi
if echo "$head_msg" | grep -Ei '^chore:' >/dev/null 2>&1; then should_skip=true; fi
if echo "$pr_title" | grep -Ei '^chore\(deps' >/dev/null 2>&1; then should_skip=true; fi
if echo "$pr_title" | grep -Ei '^chore:' >/dev/null 2>&1; then should_skip=true; fi
# Always build on feature branches to ensure artifacts for testing
# For PRs: use HEAD_REF (actual source branch)
# For pushes: use REF (refs/heads/branch-name)
is_feature_push=false
if [[ "$EVENT" != "pull_request" && "$REF" == refs/heads/feature/* ]]; then
should_skip=false
is_feature_push=true
echo "Force building on feature branch (push)"
elif [[ "$HEAD_REF" == feature/* ]]; then
should_skip=false
echo "Force building on feature branch (PR)"
fi
echo "skip_build=$should_skip" >> "$GITHUB_OUTPUT"
echo "is_feature_push=$is_feature_push" >> "$GITHUB_OUTPUT"
- name: Set up QEMU
if: steps.skip.outputs.skip_build != 'true'
uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
- name: Set up Docker Buildx
if: steps.skip.outputs.skip_build != 'true'
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Resolve Alpine base image digest
if: steps.skip.outputs.skip_build != 'true'
id: caddy
run: |
docker pull alpine:3.23.3
DIGEST=$(docker inspect --format='{{index .RepoDigests 0}}' alpine:3.23.3)
echo "image=$DIGEST" >> "$GITHUB_OUTPUT"
- name: Log in to GitHub Container Registry
if: steps.skip.outputs.skip_build != 'true'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ${{ env.GHCR_REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Log in to Docker Hub
if: steps.skip.outputs.skip_build != 'true' && env.HAS_DOCKERHUB_TOKEN == 'true'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: docker.io
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Compute branch tags
if: steps.skip.outputs.skip_build != 'true'
id: branch-tags
run: |
if [[ "$TRIGGER_EVENT" == "pull_request" ]]; then
BRANCH_NAME="${TRIGGER_HEAD_REF}"
else
BRANCH_NAME="${TRIGGER_REF#refs/heads/}"
fi
SHORT_SHA="$(echo "${{ env.TRIGGER_HEAD_SHA }}" | cut -c1-7)"
sanitize_tag() {
local raw="$1"
local max_len="$2"
local sanitized
sanitized=$(echo "$raw" | tr '[:upper:]' '[:lower:]')
sanitized=${sanitized//[^a-z0-9-]/-}
while [[ "$sanitized" == *"--"* ]]; do
sanitized=${sanitized//--/-}
done
sanitized=${sanitized##[^a-z0-9]*}
sanitized=${sanitized%%[^a-z0-9-]*}
if [ -z "$sanitized" ]; then
sanitized="branch"
fi
sanitized=$(echo "$sanitized" | cut -c1-"$max_len")
sanitized=${sanitized##[^a-z0-9]*}
if [ -z "$sanitized" ]; then
sanitized="branch"
fi
echo "$sanitized"
}
SANITIZED_BRANCH=$(sanitize_tag "${BRANCH_NAME}" 128)
BASE_BRANCH=$(sanitize_tag "${BRANCH_NAME}" 120)
BRANCH_SHA_TAG="${BASE_BRANCH}-${SHORT_SHA}"
if [[ "$TRIGGER_EVENT" == "pull_request" ]]; then
if [[ "$BRANCH_NAME" == feature/* ]]; then
echo "pr_feature_branch_sha_tag=${BRANCH_SHA_TAG}" >> "$GITHUB_OUTPUT"
fi
else
echo "branch_sha_tag=${BRANCH_SHA_TAG}" >> "$GITHUB_OUTPUT"
if [[ "$TRIGGER_REF" == refs/heads/feature/* ]]; then
echo "feature_branch_tag=${SANITIZED_BRANCH}" >> "$GITHUB_OUTPUT"
echo "feature_branch_sha_tag=${BRANCH_SHA_TAG}" >> "$GITHUB_OUTPUT"
fi
fi
- name: Generate Docker metadata
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
with:
images: |
${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}
${{ env.DOCKERHUB_REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=raw,value=latest,enable=${{ env.TRIGGER_REF == 'refs/heads/main' }}
type=raw,value=dev,enable=${{ env.TRIGGER_REF == 'refs/heads/development' }}
type=raw,value=nightly,enable=${{ env.TRIGGER_REF == 'refs/heads/nightly' }}
type=raw,value=${{ steps.branch-tags.outputs.pr_feature_branch_sha_tag }},enable=${{ env.TRIGGER_EVENT == 'pull_request' && steps.branch-tags.outputs.pr_feature_branch_sha_tag != '' }}
type=raw,value=${{ steps.branch-tags.outputs.feature_branch_tag }},enable=${{ env.TRIGGER_EVENT != 'pull_request' && startsWith(env.TRIGGER_REF, 'refs/heads/feature/') && steps.branch-tags.outputs.feature_branch_tag != '' }}
type=raw,value=${{ steps.branch-tags.outputs.branch_sha_tag }},enable=${{ env.TRIGGER_EVENT != 'pull_request' && steps.branch-tags.outputs.branch_sha_tag != '' }}
type=raw,value=pr-${{ env.TRIGGER_PR_NUMBER }}-{{sha}},enable=${{ env.TRIGGER_EVENT == 'pull_request' }},prefix=,suffix=
type=sha,format=short,prefix=,suffix=,enable=${{ env.TRIGGER_EVENT != 'pull_request' && (env.TRIGGER_REF == 'refs/heads/main' || env.TRIGGER_REF == 'refs/heads/development' || env.TRIGGER_REF == 'refs/heads/nightly') }}
flavor: |
latest=false
labels: |
org.opencontainers.image.revision=${{ env.TRIGGER_HEAD_SHA }}
io.charon.pr.number=${{ env.TRIGGER_PR_NUMBER }}
io.charon.build.timestamp=${{ github.event.repository.updated_at }}
io.charon.feature.branch=${{ steps.branch-tags.outputs.feature_branch_tag }}
# Phase 1 Optimization: Build once, test many
# - For PRs: Multi-platform (amd64, arm64) + immutable tags (pr-{number}-{short-sha})
# - For feature branches: Multi-platform (amd64, arm64) + sanitized tags ({branch}-{short-sha})
# - For main/dev: Multi-platform (amd64, arm64) for production
# - Always push to registry (enables downstream workflow consumption)
# - Retry logic handles transient registry failures (3 attempts, 10s wait)
# See: docs/plans/current_spec.md Section 4.1
- name: Build and push Docker image (with retry)
if: steps.skip.outputs.skip_build != 'true'
id: build-and-push
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # v3.0.2
with:
timeout_minutes: 25
max_attempts: 3
retry_wait_seconds: 10
retry_on: error
warning_on_retry: true
command: |
set -euo pipefail
echo "🔨 Building Docker image with retry logic..."
PLATFORMS="linux/amd64,linux/arm64"
echo "Platform: ${PLATFORMS}"
# Build tag arguments array from metadata output (properly quoted)
TAG_ARGS_ARRAY=()
while IFS= read -r tag; do
[[ -n "$tag" ]] && TAG_ARGS_ARRAY+=("--tag" "$tag")
done <<< "${{ steps.meta.outputs.tags }}"
# Build label arguments array from metadata output (properly quoted)
LABEL_ARGS_ARRAY=()
while IFS= read -r label; do
[[ -n "$label" ]] && LABEL_ARGS_ARRAY+=("--label" "$label")
done <<< "${{ steps.meta.outputs.labels }}"
# Build the complete command as an array (handles spaces in label values correctly)
BUILD_CMD=(
docker buildx build
--platform "${PLATFORMS}"
--push
"${TAG_ARGS_ARRAY[@]}"
"${LABEL_ARGS_ARRAY[@]}"
--no-cache
--pull
--build-arg "VERSION=${{ steps.meta.outputs.version }}"
--build-arg "BUILD_DATE=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.created'] }}"
--build-arg "VCS_REF=${{ env.TRIGGER_HEAD_SHA }}"
--build-arg "CADDY_IMAGE=${{ steps.caddy.outputs.image }}"
--iidfile /tmp/image-digest.txt
.
)
# Execute build
echo "Executing: ${BUILD_CMD[*]}"
"${BUILD_CMD[@]}"
# Extract digest for downstream jobs (format: sha256:xxxxx)
DIGEST=$(cat /tmp/image-digest.txt)
echo "digest=${DIGEST}" >> "$GITHUB_OUTPUT"
echo "✅ Build complete. Digest: ${DIGEST}"
# For PRs only, pull the image back locally for artifact creation
# Feature branches now build multi-platform and cannot be loaded locally
# This enables backward compatibility with workflows that use artifacts
if [[ "${{ env.TRIGGER_EVENT }}" == "pull_request" ]]; then
echo "📥 Pulling image back for artifact creation..."
FIRST_TAG=$(echo "${{ steps.meta.outputs.tags }}" | head -n1)
docker pull "${FIRST_TAG}"
echo "✅ Image pulled: ${FIRST_TAG}"
fi
# Critical Fix: Use exact tag from metadata instead of manual reconstruction
# WHY: docker/build-push-action with load:true applies the exact tags from
# docker/metadata-action. Manual reconstruction can cause mismatches due to:
# - Case sensitivity variations (owner name normalization)
# - Tag format differences in Buildx internal behavior
# - Registry prefix inconsistencies
#
# SOLUTION: Extract the first tag from metadata output (which is the PR tag)
# and use it directly with docker save. This guarantees we reference the
# exact image that was loaded into the local Docker daemon.
#
# VALIDATION: Added defensive checks to fail fast with diagnostics if:
# 1. No tag found in metadata output
# 2. Image doesn't exist locally after build
# 3. Artifact creation fails
- name: Save Docker Image as Artifact
if: success() && steps.skip.outputs.skip_build != 'true' && env.TRIGGER_EVENT == 'pull_request'
run: |
# Extract the first tag from metadata action (PR tag)
IMAGE_TAG=$(echo "${{ steps.meta.outputs.tags }}" | head -n 1)
if [[ -z "${IMAGE_TAG}" ]]; then
echo "❌ ERROR: No image tag found in metadata output"
echo "Metadata tags output:"
echo "${{ steps.meta.outputs.tags }}"
exit 1
fi
echo "🔍 Detected image tag: ${IMAGE_TAG}"
# Verify the image exists locally
if ! docker image inspect "${IMAGE_TAG}" >/dev/null 2>&1; then
echo "❌ ERROR: Image ${IMAGE_TAG} not found locally"
echo "📋 Available images:"
docker images
exit 1
fi
# Save the image using the exact tag from metadata
echo "💾 Saving image: ${IMAGE_TAG}"
docker save "${IMAGE_TAG}" -o /tmp/charon-pr-image.tar
# Verify the artifact was created
echo "✅ Artifact created:"
ls -lh /tmp/charon-pr-image.tar
- name: Upload Image Artifact
if: success() && steps.skip.outputs.skip_build != 'true' && env.TRIGGER_EVENT == 'pull_request'
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: ${{ env.TRIGGER_EVENT == 'pull_request' && format('pr-image-{0}', env.TRIGGER_PR_NUMBER) || 'push-image' }}
path: /tmp/charon-pr-image.tar
retention-days: 1 # Only needed for workflow duration
- name: Verify Caddy Security Patches (CVE-2025-68156)
if: steps.skip.outputs.skip_build != 'true'
timeout-minutes: 2
continue-on-error: true
run: |
echo "🔍 Verifying Caddy binary contains patched expr-lang/expr@v1.17.7..."
echo ""
# Determine the image reference based on event type
if [ "${{ env.TRIGGER_EVENT }}" = "pull_request" ]; then
PR_NUM="${{ env.TRIGGER_PR_NUMBER }}"
if [ -z "${PR_NUM}" ]; then
echo "❌ ERROR: Pull request number is empty"
exit 1
fi
IMAGE_REF="${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:pr-${PR_NUM}"
echo "Using PR image: $IMAGE_REF"
else
if [ -z "${{ steps.build-and-push.outputs.digest }}" ]; then
echo "❌ ERROR: Build digest is empty"
exit 1
fi
IMAGE_REF="${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}"
echo "Using digest: $IMAGE_REF"
fi
echo ""
echo "==> Caddy version:"
timeout 30s docker run --rm --pull=never "$IMAGE_REF" caddy version || echo "⚠️ Caddy version check timed out or failed"
echo ""
echo "==> Extracting Caddy binary for inspection..."
CONTAINER_ID=$(docker create --pull=never "$IMAGE_REF")
docker cp "${CONTAINER_ID}:/usr/bin/caddy" ./caddy_binary
docker rm "$CONTAINER_ID"
# Determine the image reference based on event type
if [ "${{ env.TRIGGER_EVENT }}" = "pull_request" ]; then
PR_NUM="${{ env.TRIGGER_PR_NUMBER }}"
if [ -z "${PR_NUM}" ]; then
echo "❌ ERROR: Pull request number is empty"
exit 1
fi
IMAGE_REF="${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:pr-${PR_NUM}"
echo "Using PR image: $IMAGE_REF"
else
if [ -z "${{ steps.build-and-push.outputs.digest }}" ]; then
echo "❌ ERROR: Build digest is empty"
exit 1
fi
IMAGE_REF="${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}"
echo "Using digest: $IMAGE_REF"
fi
echo ""
echo "==> Checking if Go toolchain is available locally..."
if command -v go >/dev/null 2>&1; then
echo "✅ Go found locally, inspecting binary dependencies..."
go version -m ./caddy_binary > caddy_deps.txt
echo ""
echo "==> Searching for expr-lang/expr dependency:"
if grep -i "expr-lang/expr" caddy_deps.txt; then
EXPR_VERSION=$(grep "expr-lang/expr" caddy_deps.txt | awk '{print $3}')
echo ""
echo "✅ Found expr-lang/expr: $EXPR_VERSION"
# Check if version is v1.17.7 or higher (vulnerable version is v1.16.9)
if echo "$EXPR_VERSION" | grep -E "^v1\.(1[7-9]|[2-9][0-9])\.[0-9]+$" >/dev/null; then
echo "✅ PASS: expr-lang version $EXPR_VERSION is patched (>= v1.17.7)"
else
echo "⚠️ WARNING: expr-lang version $EXPR_VERSION may be vulnerable (< v1.17.7)"
echo "Expected: v1.17.7 or higher to mitigate CVE-2025-68156"
exit 1
fi
else
echo "⚠️ expr-lang/expr not found in binary dependencies"
echo "This could mean:"
echo " 1. The dependency was stripped/optimized out"
echo " 2. Caddy was built without the expression evaluator"
echo " 3. Binary inspection failed"
echo ""
echo "Displaying all dependencies for review:"
cat caddy_deps.txt
fi
else
echo "⚠️ Go toolchain not available in CI environment"
echo "Cannot inspect binary modules - skipping dependency verification"
echo "Note: Runtime image does not require Go as Caddy is a standalone binary"
fi
# Cleanup
rm -f ./caddy_binary caddy_deps.txt
echo ""
echo "==> Verification complete"
- name: Verify CrowdSec Security Patches (CVE-2025-68156)
if: success()
continue-on-error: true
run: |
echo "🔍 Verifying CrowdSec binaries contain patched expr-lang/expr@v1.17.7..."
echo ""
# Determine the image reference based on event type
if [ "${{ env.TRIGGER_EVENT }}" = "pull_request" ]; then
PR_NUM="${{ env.TRIGGER_PR_NUMBER }}"
if [ -z "${PR_NUM}" ]; then
echo "❌ ERROR: Pull request number is empty"
exit 1
fi
IMAGE_REF="${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:pr-${PR_NUM}"
echo "Using PR image: $IMAGE_REF"
else
if [ -z "${{ steps.build-and-push.outputs.digest }}" ]; then
echo "❌ ERROR: Build digest is empty"
exit 1
fi
IMAGE_REF="${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}"
echo "Using digest: $IMAGE_REF"
fi
echo ""
echo "==> CrowdSec cscli version:"
timeout 30s docker run --rm --pull=never "$IMAGE_REF" cscli version || echo "⚠️ CrowdSec version check timed out or failed (may not be installed for this architecture)"
echo ""
echo "==> Extracting cscli binary for inspection..."
CONTAINER_ID=$(docker create --pull=never "$IMAGE_REF")
docker cp "${CONTAINER_ID}:/usr/local/bin/cscli" ./cscli_binary 2>/dev/null || {
echo "⚠️ cscli binary not found - CrowdSec may not be available for this architecture"
docker rm "$CONTAINER_ID"
exit 0
}
docker rm "$CONTAINER_ID"
echo ""
echo "==> Checking if Go toolchain is available locally..."
if command -v go >/dev/null 2>&1; then
echo "✅ Go found locally, inspecting binary dependencies..."
go version -m ./cscli_binary > cscli_deps.txt
echo ""
echo "==> Searching for expr-lang/expr dependency:"
if grep -i "expr-lang/expr" cscli_deps.txt; then
EXPR_VERSION=$(grep "expr-lang/expr" cscli_deps.txt | awk '{print $3}')
echo ""
echo "✅ Found expr-lang/expr: $EXPR_VERSION"
# Check if version is v1.17.7 or higher (vulnerable version is v1.17.2)
if echo "$EXPR_VERSION" | grep -E "^v1\.(1[7-9]|[2-9][0-9])\.[7-9][0-9]*$|^v1\.17\.([7-9]|[1-9][0-9]+)$" >/dev/null; then
echo "✅ PASS: expr-lang version $EXPR_VERSION is patched (>= v1.17.7)"
else
echo "❌ FAIL: expr-lang version $EXPR_VERSION is vulnerable (< v1.17.7)"
echo "⚠️ WARNING: expr-lang version $EXPR_VERSION may be vulnerable (< v1.17.7)"
echo "Expected: v1.17.7 or higher to mitigate CVE-2025-68156"
exit 1
fi
else
echo "⚠️ expr-lang/expr not found in binary dependencies"
echo "This could mean:"
echo " 1. The dependency was stripped/optimized out"
echo " 2. CrowdSec was built without the expression evaluator"
echo " 3. Binary inspection failed"
echo ""
echo "Displaying all dependencies for review:"
cat cscli_deps.txt
fi
else
echo "⚠️ Go toolchain not available in CI environment"
echo "Cannot inspect binary modules - skipping dependency verification"
echo "Note: Runtime image does not require Go as CrowdSec is a standalone binary"
fi
# Cleanup
rm -f ./cscli_binary cscli_deps.txt
echo ""
echo "==> CrowdSec verification complete"
- name: Run Trivy scan (table output)
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
with:
image-ref: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}
format: 'table'
severity: 'CRITICAL,HIGH'
exit-code: '0'
continue-on-error: true
- name: Run Trivy vulnerability scanner (SARIF)
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
id: trivy
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
with:
image-ref: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}
format: 'sarif'
output: 'trivy-results.sarif'
severity: 'CRITICAL,HIGH'
continue-on-error: true
- name: Check Trivy SARIF exists
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
id: trivy-check
run: |
if [ -f trivy-results.sarif ]; then
echo "exists=true" >> "$GITHUB_OUTPUT"
else
echo "exists=false" >> "$GITHUB_OUTPUT"
fi
- name: Upload Trivy results
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.trivy-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4.32.5
with:
sarif_file: 'trivy-results.sarif'
category: '.github/workflows/docker-build.yml:build-and-push'
token: ${{ secrets.GITHUB_TOKEN }}
# Generate SBOM (Software Bill of Materials) for supply chain security
# Only for production builds (main/development) - feature branches use downstream supply-chain-pr.yml
- name: Generate SBOM
uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
with:
image: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}
format: cyclonedx-json
output-file: sbom.cyclonedx.json
# Create verifiable attestation for the SBOM
- name: Attest SBOM
uses: actions/attest-sbom@07e74fc4e78d1aad915e867f9a094073a9f71527 # v4.0.0
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
with:
subject-name: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}
subject-digest: ${{ steps.build-and-push.outputs.digest }}
sbom-path: sbom.cyclonedx.json
push-to-registry: true
# Install Cosign for keyless signing
- name: Install Cosign
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
# Sign GHCR image with keyless signing (Sigstore/Fulcio)
- name: Sign GHCR Image
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
run: |
echo "Signing GHCR image with keyless signing..."
cosign sign --yes ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}
echo "✅ GHCR image signed successfully"
# Sign Docker Hub image with keyless signing (Sigstore/Fulcio)
- name: Sign Docker Hub Image
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true' && env.HAS_DOCKERHUB_TOKEN == 'true'
run: |
echo "Signing Docker Hub image with keyless signing..."
cosign sign --yes ${{ env.DOCKERHUB_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}
echo "✅ Docker Hub image signed successfully"
# Attach SBOM to Docker Hub image
- name: Attach SBOM to Docker Hub
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true' && env.HAS_DOCKERHUB_TOKEN == 'true'
run: |
echo "Attaching SBOM to Docker Hub image..."
cosign attach sbom --sbom sbom.cyclonedx.json ${{ env.DOCKERHUB_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}
echo "✅ SBOM attached to Docker Hub image"
- name: Create summary
if: steps.skip.outputs.skip_build != 'true'
run: |
{
echo "## 🎉 Docker Image Built Successfully!"
echo ""
echo "### 📦 Image Details"
echo "- **GHCR**: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}"
echo "- **Docker Hub**: ${{ env.DOCKERHUB_REGISTRY }}/${{ env.IMAGE_NAME }}"
echo "- **Tags**: "
echo '```'
echo "${{ steps.meta.outputs.tags }}"
echo '```'
} >> "$GITHUB_STEP_SUMMARY"
# Scans the freshly-built PR image with Trivy and uploads SARIF results.
# Runs only for pull_request events after a successful, non-skipped build.
scan-pr-image:
  name: Security Scan PR Image
  needs: build-and-push
  if: needs.build-and-push.outputs.skip_build != 'true' && needs.build-and-push.result == 'success' && github.event_name == 'pull_request'
  runs-on: ubuntu-latest
  timeout-minutes: 10
  permissions:
    contents: read
    packages: read
    # Needed to upload SARIF to the code-scanning dashboard.
    security-events: write
  steps:
    # GHCR requires lowercase repository paths; normalize once for all steps.
    - name: Normalize image name
      run: |
        IMAGE_NAME=$(echo "${{ env.IMAGE_NAME }}" | tr '[:upper:]' '[:lower:]')
        echo "IMAGE_NAME=${IMAGE_NAME}" >> "$GITHUB_ENV"
    # Reconstruct the tag the build job pushed: pr-<number>-<7-char sha>.
    - name: Determine PR image tag
      id: pr-image
      run: |
        SHORT_SHA="$(echo "${{ env.TRIGGER_HEAD_SHA }}" | cut -c1-7)"
        PR_TAG="pr-${{ env.TRIGGER_PR_NUMBER }}-${SHORT_SHA}"
        echo "tag=${PR_TAG}" >> "$GITHUB_OUTPUT"
        echo "image_ref=${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:${PR_TAG}" >> "$GITHUB_OUTPUT"
    - name: Log in to GitHub Container Registry
      uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
      with:
        registry: ${{ env.GHCR_REGISTRY }}
        username: ${{ github.actor }}
        password: ${{ secrets.GITHUB_TOKEN }}
    # Compares the image's revision label against the PR head SHA.
    # A mismatch is reported but deliberately NOT fatal (exit 1 is commented
    # out) so stale images can still be triaged by the scan below.
    - name: Validate image freshness
      run: |
        echo "🔍 Validating image freshness for PR #${{ env.TRIGGER_PR_NUMBER }}..."
        echo "Expected SHA: ${{ env.TRIGGER_HEAD_SHA }}"
        echo "Image: ${{ steps.pr-image.outputs.image_ref }}"
        # Pull image to inspect
        docker pull "${{ steps.pr-image.outputs.image_ref }}"
        # Extract commit SHA from image label
        LABEL_SHA=$(docker inspect "${{ steps.pr-image.outputs.image_ref }}" \
          --format '{{index .Config.Labels "org.opencontainers.image.revision"}}')
        echo "Image label SHA: ${LABEL_SHA}"
        if [[ "${LABEL_SHA}" != "${{ env.TRIGGER_HEAD_SHA }}" ]]; then
          echo "⚠️ WARNING: Image SHA mismatch!"
          echo "  Expected: ${{ env.TRIGGER_HEAD_SHA }}"
          echo "  Got: ${LABEL_SHA}"
          echo "Image may be stale. Resuming for triage (Bypassing failure)."
          # exit 1
        fi
        echo "✅ Image freshness validated"
    # Human-readable scan in the job log; never fails the job (exit-code 0).
    - name: Run Trivy scan on PR image (table output)
      uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
      with:
        image-ref: ${{ steps.pr-image.outputs.image_ref }}
        format: 'table'
        severity: 'CRITICAL,HIGH'
        exit-code: '0'
    # Machine-readable SARIF scan. exit-code 1 marks the step outcome as
    # failure on findings, but continue-on-error keeps the job green for now.
    - name: Run Trivy scan on PR image (SARIF - blocking)
      id: trivy-scan
      uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
      with:
        image-ref: ${{ steps.pr-image.outputs.image_ref }}
        format: 'sarif'
        output: 'trivy-pr-results.sarif'
        severity: 'CRITICAL,HIGH'
        exit-code: '1' # Intended to block, but continued on error for now
      continue-on-error: true
    # Guard the uploads below: the SARIF file is absent if Trivy crashed.
    - name: Check Trivy PR SARIF exists
      if: always()
      id: trivy-pr-check
      run: |
        if [ -f trivy-pr-results.sarif ]; then
          echo "exists=true" >> "$GITHUB_OUTPUT"
        else
          echo "exists=false" >> "$GITHUB_OUTPUT"
        fi
    - name: Upload Trivy scan results
      if: always() && steps.trivy-pr-check.outputs.exists == 'true'
      uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4.32.5
      with:
        sarif_file: 'trivy-pr-results.sarif'
        category: 'docker-pr-image'
    # The three uploads below re-publish the same SARIF under legacy category
    # names so historical code-scanning alert streams stay continuous.
    - name: Upload Trivy compatibility results (docker-build category)
      if: always() && steps.trivy-pr-check.outputs.exists == 'true'
      uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4.32.5
      with:
        sarif_file: 'trivy-pr-results.sarif'
        category: '.github/workflows/docker-build.yml:build-and-push'
      continue-on-error: true
    - name: Upload Trivy compatibility results (docker-publish alias)
      if: always() && steps.trivy-pr-check.outputs.exists == 'true'
      uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4.32.5
      with:
        sarif_file: 'trivy-pr-results.sarif'
        category: '.github/workflows/docker-publish.yml:build-and-push'
      continue-on-error: true
    - name: Upload Trivy compatibility results (nightly alias)
      if: always() && steps.trivy-pr-check.outputs.exists == 'true'
      uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4.32.5
      with:
        sarif_file: 'trivy-pr-results.sarif'
        category: 'trivy-nightly'
      continue-on-error: true
    - name: Create scan summary
      if: always()
      run: |
        {
          echo "## 🔒 PR Image Security Scan"
          echo ""
          echo "- **Image**: ${{ steps.pr-image.outputs.image_ref }}"
          echo "- **PR**: #${{ env.TRIGGER_PR_NUMBER }}"
          echo "- **Commit**: ${{ env.TRIGGER_HEAD_SHA }}"
          echo "- **Scan Status**: ${{ steps.trivy-scan.outcome == 'success' && '✅ No critical vulnerabilities' || '❌ Vulnerabilities detected' }}"
        } >> "$GITHUB_STEP_SUMMARY"

View File

@@ -1,24 +0,0 @@
# Manually-triggered Dockerfile lint using Hadolint.
name: Docker Lint
on:
  workflow_dispatch:
# Cancel superseded runs for the same ref to save runner minutes.
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref_name }}
  cancel-in-progress: true
permissions:
  contents: read
jobs:
  hadolint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      - name: Run Hadolint
        uses: hadolint/hadolint-action@2332a7b74a6de0dda2e2221d575162eba76ba5e5 # v3.3.0
        with:
          dockerfile: Dockerfile
          config: .hadolint.yaml
          # Fail the job on warnings and above, not just errors.
          failure-threshold: warning

View File

@@ -1,378 +0,0 @@
name: Convert Docs to Issues
on:
workflow_run:
workflows: ["Docker Build, Publish & Test"]
types: [completed]
# Allow manual trigger
workflow_dispatch:
inputs:
dry_run:
description: 'Dry run (no issues created)'
required: false
default: false
type: boolean
file_path:
description: 'Specific file to process (optional)'
required: false
type: string
concurrency:
group: ${{ github.workflow }}-${{ github.event.workflow_run.head_branch || github.ref }}
cancel-in-progress: false
env:
NODE_VERSION: '24.12.0'
permissions:
contents: write
issues: write
pull-requests: write
jobs:
convert-docs:
name: Convert Markdown to Issues
runs-on: ubuntu-latest
if: github.actor != 'github-actions[bot]' && (github.event_name != 'workflow_run' || github.event.workflow_run.conclusion == 'success')
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
fetch-depth: 2
ref: ${{ github.event.workflow_run.head_sha || github.sha }}
- name: Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: ${{ env.NODE_VERSION }}
- name: Install dependencies
run: npm install gray-matter
- name: Detect changed files
id: changes
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
COMMIT_SHA: ${{ github.event.workflow_run.head_sha || github.sha }}
with:
script: |
const fs = require('fs');
const path = require('path');
const commitSha = process.env.COMMIT_SHA || context.sha;
// Manual file specification
const manualFile = '${{ github.event.inputs.file_path }}';
if (manualFile) {
if (fs.existsSync(manualFile)) {
core.setOutput('files', JSON.stringify([manualFile]));
return;
} else {
core.setFailed(`File not found: ${manualFile}`);
return;
}
}
// Get changed files from commit
const { data: commit } = await github.rest.repos.getCommit({
owner: context.repo.owner,
repo: context.repo.repo,
ref: commitSha
});
const changedFiles = (commit.files || [])
.filter(f => f.filename.startsWith('docs/issues/'))
.filter(f => !f.filename.startsWith('docs/issues/created/'))
.filter(f => !f.filename.includes('_TEMPLATE'))
.filter(f => !f.filename.includes('README'))
.filter(f => f.filename.endsWith('.md'))
.filter(f => f.status !== 'removed')
.map(f => f.filename);
console.log('Changed issue files:', changedFiles);
core.setOutput('files', JSON.stringify(changedFiles));
- name: Process issue files
id: process
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
DRY_RUN: ${{ github.event.inputs.dry_run || 'false' }}
with:
script: |
const fs = require('fs');
const path = require('path');
const matter = require('gray-matter');
const files = JSON.parse('${{ steps.changes.outputs.files }}');
const isDryRun = process.env.DRY_RUN === 'true';
const createdIssues = [];
const errors = [];
if (files.length === 0) {
console.log('No issue files to process');
core.setOutput('created_count', 0);
core.setOutput('created_issues', '[]');
core.setOutput('errors', '[]');
return;
}
// Label color map
const labelColors = {
testing: 'BFD4F2',
feature: 'A2EEEF',
enhancement: '84B6EB',
bug: 'D73A4A',
documentation: '0075CA',
backend: '1D76DB',
frontend: '5EBEFF',
security: 'EE0701',
ui: '7057FF',
caddy: '1F6FEB',
'needs-triage': 'FBCA04',
acl: 'C5DEF5',
regression: 'D93F0B',
'manual-testing': 'BFD4F2',
'bulk-acl': '006B75',
'error-handling': 'D93F0B',
'ui-ux': '7057FF',
integration: '0E8A16',
performance: 'EDEDED',
'cross-browser': '5319E7',
plus: 'FFD700',
beta: '0052CC',
alpha: '5319E7',
high: 'D93F0B',
medium: 'FBCA04',
low: '0E8A16',
critical: 'B60205',
architecture: '006B75',
database: '006B75',
'post-beta': '006B75'
};
// Helper: Ensure label exists
async function ensureLabel(name) {
try {
await github.rest.issues.getLabel({
owner: context.repo.owner,
repo: context.repo.repo,
name: name
});
} catch (e) {
if (e.status === 404) {
await github.rest.issues.createLabel({
owner: context.repo.owner,
repo: context.repo.repo,
name: name,
color: labelColors[name.toLowerCase()] || '666666'
});
console.log(`Created label: ${name}`);
}
}
}
// Helper: Parse markdown file
function parseIssueFile(filePath) {
const content = fs.readFileSync(filePath, 'utf8');
const { data: frontmatter, content: body } = matter(content);
// Extract title: frontmatter > first H1 > filename
let title = frontmatter.title;
if (!title) {
const h1Match = body.match(/^#\s+(.+)$/m);
title = h1Match ? h1Match[1] : path.basename(filePath, '.md').replace(/-/g, ' ');
}
// Build labels array
const labels = [...(frontmatter.labels || [])];
if (frontmatter.priority) labels.push(frontmatter.priority);
if (frontmatter.type) labels.push(frontmatter.type);
return {
title,
body: body.trim(),
labels: [...new Set(labels)],
assignees: frontmatter.assignees || [],
milestone: frontmatter.milestone,
parent_issue: frontmatter.parent_issue,
create_sub_issues: frontmatter.create_sub_issues || false
};
}
// Helper: split a markdown body into sub-issue sections keyed by H2 headings.
// Every "## ..." line (optionally prefixed "Sub-Issue #N:") opens a new
// section; lines before the first heading are discarded. Each section
// receives its own copy of the parent's labels.
function extractSubIssues(body, parentLabels) {
  const headingRe = /^##\s+(?:Sub-Issue\s*#?\d*:?\s*)?(.+)$/;
  const sections = [];
  let pendingTitle = null;
  let pendingLines = [];

  // Push the section accumulated so far (truthy-title check mirrors the
  // original behavior: an empty/falsy title drops the section).
  const flush = () => {
    if (pendingTitle) {
      sections.push({
        title: pendingTitle,
        body: pendingLines.join('\n').trim(),
        labels: [...parentLabels]
      });
    }
  };

  for (const line of body.split('\n')) {
    const match = headingRe.exec(line);
    if (match) {
      flush();
      pendingTitle = match[1].trim();
      pendingLines = [];
    } else if (pendingTitle) {
      pendingLines.push(line);
    }
  }
  flush();
  return sections;
}
// Process each file
for (const filePath of files) {
console.log(`\nProcessing: ${filePath}`);
try {
const parsed = parseIssueFile(filePath);
console.log(` Title: ${parsed.title}`);
console.log(` Labels: ${parsed.labels.join(', ')}`);
if (isDryRun) {
console.log(' [DRY RUN] Would create issue');
createdIssues.push({ file: filePath, title: parsed.title, dryRun: true });
continue;
}
// Ensure labels exist
for (const label of parsed.labels) {
await ensureLabel(label);
}
// Create the main issue
const issueBody = parsed.body +
`\n\n---\n*Auto-created from [${path.basename(filePath)}](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/${context.sha}/${filePath})*`;
const issueResponse = await github.rest.issues.create({
owner: context.repo.owner,
repo: context.repo.repo,
title: parsed.title,
body: issueBody,
labels: parsed.labels,
assignees: parsed.assignees
});
const issueNumber = issueResponse.data.number;
console.log(` Created issue #${issueNumber}`);
// Handle sub-issues
if (parsed.create_sub_issues) {
const subIssues = extractSubIssues(parsed.body, parsed.labels);
for (const sub of subIssues) {
for (const label of sub.labels) {
await ensureLabel(label);
}
const subResponse = await github.rest.issues.create({
owner: context.repo.owner,
repo: context.repo.repo,
title: `[${parsed.title}] ${sub.title}`,
body: sub.body + `\n\n---\n*Sub-issue of #${issueNumber}*`,
labels: sub.labels,
assignees: parsed.assignees
});
console.log(` Created sub-issue #${subResponse.data.number}: ${sub.title}`);
}
}
// Link to parent issue if specified
if (parsed.parent_issue) {
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: parsed.parent_issue,
body: `Sub-issue created: #${issueNumber}`
});
}
createdIssues.push({
file: filePath,
title: parsed.title,
issueNumber
});
} catch (error) {
console.error(` Error processing ${filePath}: ${error.message}`);
errors.push({ file: filePath, error: error.message });
}
}
core.setOutput('created_count', createdIssues.length);
core.setOutput('created_issues', JSON.stringify(createdIssues));
core.setOutput('errors', JSON.stringify(errors));
if (errors.length > 0) {
core.warning(`${errors.length} file(s) had errors`);
}
# Archive processed issue files into docs/issues/created/ with a date prefix
# so re-running the conversion never touches them again (the detect step
# filters that directory out).
- name: Move processed files
  if: steps.process.outputs.created_count != '0' && github.event.inputs.dry_run != 'true'
  run: |
    mkdir -p docs/issues/created
    CREATED_ISSUES='${{ steps.process.outputs.created_issues }}'
    echo "$CREATED_ISSUES" | jq -r '.[].file' | while IFS= read -r file; do
      if [ -f "$file" ] && [ -n "$file" ]; then
        filename=$(basename "$file")
        timestamp=$(date +%Y%m%d)
        # Fix: use the computed ${filename} (was a garbled placeholder);
        # the date prefix keeps repeated conversions collision-free.
        mv "$file" "docs/issues/created/${timestamp}-${filename}"
        echo "Moved: $file -> docs/issues/created/${timestamp}-${filename}"
      fi
    done
- name: Commit moved files
if: steps.process.outputs.created_count != '0' && github.event.inputs.dry_run != 'true'
run: |
git config --local user.email "github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
git add docs/issues/
# Removed [skip ci] to allow CI checks to run on PRs
# Infinite loop protection: path filter excludes docs/issues/created/** AND github.actor guard prevents bot loops
git diff --staged --quiet || git commit -m "chore: move processed issue files to created/"
git push
- name: Summary
if: always()
run: |
CREATED='${{ steps.process.outputs.created_issues }}'
ERRORS='${{ steps.process.outputs.errors }}'
DRY_RUN='${{ github.event.inputs.dry_run }}'
{
echo "## Docs to Issues Summary"
echo ""
if [ "$DRY_RUN" = "true" ]; then
echo "🔍 **Dry Run Mode** - No issues were actually created"
echo ""
fi
echo "### Created Issues"
if [ -n "$CREATED" ] && [ "$CREATED" != "[]" ] && [ "$CREATED" != "null" ]; then
echo "$CREATED" | jq -r '.[] | "- \(.title) (#\(.issueNumber // "dry-run"))"' || echo "_Parse error_"
else
echo "_No issues created_"
fi
echo ""
echo "### Errors"
if [ -n "$ERRORS" ] && [ "$ERRORS" != "[]" ] && [ "$ERRORS" != "null" ]; then
echo "$ERRORS" | jq -r '.[] | "- ❌ \(.file): \(.error)"' || echo "_Parse error_"
else
echo "_No errors_"
fi
} >> "$GITHUB_STEP_SUMMARY"

View File

@@ -1,393 +0,0 @@
name: Deploy Documentation to GitHub Pages
on:
workflow_run:
workflows: ["Docker Build, Publish & Test"]
types: [completed]
workflow_dispatch: # Allow manual trigger
# Sets permissions to allow deployment to GitHub Pages
permissions:
contents: read
pages: write
id-token: write
# Allow only one concurrent deployment
concurrency:
group: "pages-${{ github.event_name }}-${{ github.event.workflow_run.head_branch || github.ref }}"
cancel-in-progress: false
env:
NODE_VERSION: '24.12.0'
jobs:
build:
name: Build Documentation
runs-on: ubuntu-latest
timeout-minutes: 10
if: ${{ github.event_name == 'workflow_dispatch' || github.event.workflow_run.conclusion == 'success' }}
env:
REPO_NAME: ${{ github.event.repository.name }}
steps:
# Step 1: Get the code
- name: 📥 Checkout code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
ref: ${{ github.event.workflow_run.head_sha || github.sha }}
# Step 2: Set up Node.js (for building any JS-based doc tools)
- name: 🔧 Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: ${{ env.NODE_VERSION }}
# Step 3: Create a beautiful docs site structure
- name: 📝 Build documentation site
run: |
# Create output directory
mkdir -p _site
# Copy all markdown files
cp README.md _site/
cp -r docs _site/
# Create a simple HTML index that looks nice
cat > _site/index.html << 'EOF'
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Charon - Documentation</title>
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@picocss/pico@2/css/pico.min.css">
<style>
:root {
--primary: #1d4ed8;
--primary-hover: #1e40af;
}
body {
background-color: #0f172a;
color: #e2e8f0;
}
header {
background: linear-gradient(135deg, #1e3a8a 0%, #1d4ed8 100%);
padding: 3rem 0;
text-align: center;
margin-bottom: 2rem;
}
header h1 {
color: white;
font-size: 2.5rem;
margin-bottom: 0.5rem;
}
header p {
color: #e0e7ff;
font-size: 1.25rem;
}
.container {
max-width: 1200px;
margin: 0 auto;
padding: 2rem;
}
.card {
background: #1e293b;
border: 1px solid #334155;
border-radius: 12px;
padding: 1.5rem;
margin-bottom: 1.5rem;
transition: transform 0.2s, box-shadow 0.2s;
}
.card:hover {
transform: translateY(-4px);
box-shadow: 0 8px 16px rgba(0, 0, 0, 0.3);
}
.card h3 {
color: #60a5fa;
margin-top: 0;
display: flex;
align-items: center;
gap: 0.5rem;
}
.card p {
color: #cbd5e1;
margin-bottom: 1rem;
}
.card a {
color: #60a5fa;
text-decoration: none;
font-weight: 600;
display: inline-flex;
align-items: center;
gap: 0.5rem;
}
.card a:hover {
color: #93c5fd;
}
.badge {
display: inline-block;
padding: 0.25rem 0.75rem;
border-radius: 9999px;
font-size: 0.875rem;
font-weight: 600;
margin-left: 0.5rem;
}
.badge-beginner {
background: #10b981;
color: white;
}
.badge-advanced {
background: #f59e0b;
color: white;
}
.grid {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
gap: 1.5rem;
}
footer {
text-align: center;
padding: 3rem 0;
color: #64748b;
border-top: 1px solid #334155;
margin-top: 4rem;
}
</style>
</head>
<body>
<header>
<h1>🚀 Charon</h1>
<p>Make your websites easy to reach - No coding required!</p>
</header>
<div class="container">
<section>
<h2>👋 Welcome!</h2>
<p style="font-size: 1.1rem; color: #cbd5e1;">
This documentation will help you get started with Charon.
Whether you're a complete beginner or an experienced developer, we've got you covered!
</p>
</section>
<h2 style="margin-top: 3rem;">📚 Getting Started</h2>
<div class="grid">
<div class="card">
<h3>🏠 Getting Started Guide <span class="badge badge-beginner">Start Here</span></h3>
<p>Your first setup in just 5 minutes! We'll walk you through everything step by step.</p>
<a href="docs/getting-started.html">Read the Guide →</a>
</div>
<div class="card">
<h3>📖 README <span class="badge badge-beginner">Essential</span></h3>
<p>Learn what the app does, how to install it, and see examples of what you can build.</p>
<a href="README.html">Read More →</a>
</div>
<div class="card">
<h3>📥 Import Guide</h3>
<p>Already using Caddy? Learn how to bring your existing configuration into the app.</p>
<a href="docs/import-guide.html">Import Your Configs →</a>
</div>
</div>
<h2 style="margin-top: 3rem;">🔧 Developer Documentation</h2>
<div class="grid">
<div class="card">
<h3>🔌 API Reference <span class="badge badge-advanced">Advanced</span></h3>
<p>Complete REST API documentation with examples in JavaScript and Python.</p>
<a href="docs/api.html">View API Docs →</a>
</div>
<div class="card">
<h3>💾 Database Schema <span class="badge badge-advanced">Advanced</span></h3>
<p>Understand how data is stored, relationships, and backup strategies.</p>
<a href="docs/database-schema.html">View Schema →</a>
</div>
<div class="card">
<h3>✨ Contributing Guide</h3>
<p>Want to help make this better? Learn how to contribute code, docs, or ideas.</p>
<a href="CONTRIBUTING.html">Start Contributing →</a>
</div>
</div>
<h2 style="margin-top: 3rem;">📋 All Documentation</h2>
<div class="card">
<h3>📚 Documentation Index</h3>
<p>Browse all available documentation organized by topic and skill level.</p>
<a href="docs/index.html">View Full Index →</a>
</div>
<h2 style="margin-top: 3rem;">🆘 Need Help?</h2>
<div class="card" style="background: #1e3a8a; border-color: #1e40af;">
<h3 style="color: #dbeafe;">Get Support</h3>
<p style="color: #bfdbfe;">
Stuck? Have questions? We're here to help!
</p>
<div style="display: flex; gap: 1rem; flex-wrap: wrap; margin-top: 1rem;">
<a href="https://github.com/Wikid82/charon/discussions"
style="background: white; color: #1e40af; padding: 0.5rem 1rem; border-radius: 6px; text-decoration: none;">
💬 Ask a Question
</a>
<a href="https://github.com/Wikid82/charon/issues"
style="background: white; color: #1e40af; padding: 0.5rem 1rem; border-radius: 6px; text-decoration: none;">
🐛 Report a Bug
</a>
<a href="https://github.com/Wikid82/charon"
style="background: white; color: #1e40af; padding: 0.5rem 1rem; border-radius: 6px; text-decoration: none;">
⭐ View on GitHub
</a>
</div>
</div>
</div>
<footer>
<p>Built with ❤️ by <a href="https://github.com/Wikid82" style="color: #60a5fa;">@Wikid82</a></p>
<p>Made for humans, not just techies!</p>
</footer>
</body>
</html>
EOF
# Convert markdown files to HTML using a simple converter
npm install -g marked
# Convert each markdown file to a same-named .html next to it.
# Fix: output path uses the computed ${filename} (was a garbled placeholder),
# so docs/foo.md renders to docs/foo.html instead of a literal bad name.
for file in _site/docs/*.md; do
  if [ -f "$file" ]; then
    filename=$(basename "$file" .md)
    marked "$file" -o "_site/docs/${filename}.html" --gfm
  fi
done
# Convert README and CONTRIBUTING
marked _site/README.md -o _site/README.html --gfm
if [ -f "CONTRIBUTING.md" ]; then
cp CONTRIBUTING.md _site/
marked _site/CONTRIBUTING.md -o _site/CONTRIBUTING.html --gfm
fi
# Add simple styling to all HTML files
for html_file in _site/*.html _site/docs/*.html; do
if [ -f "$html_file" ] && [ "$html_file" != "_site/index.html" ]; then
# Add a header with navigation to each page
temp_file="${html_file}.tmp"
cat > "$temp_file" << 'HEADER'
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Charon - Documentation</title>
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@picocss/pico@2/css/pico.min.css">
<style>
body { background-color: #0f172a; color: #e2e8f0; }
nav { background: #1e293b; padding: 1rem; margin-bottom: 2rem; }
nav a { color: #60a5fa; margin-right: 1rem; text-decoration: none; }
nav a:hover { color: #93c5fd; }
main { max-width: 900px; margin: 0 auto; padding: 2rem; }
a { color: #60a5fa; }
code { background: #1e293b; color: #fbbf24; padding: 0.2rem 0.4rem; border-radius: 4px; }
pre { background: #1e293b; padding: 1rem; border-radius: 8px; overflow-x: auto; }
pre code { background: none; padding: 0; }
</style>
</head>
<body>
<nav>
<a href="/charon/">🏠 Home</a>
<a href="/charon/docs/index.html">📚 Docs</a>
<a href="/charon/docs/getting-started.html">🚀 Get Started</a>
<a href="https://github.com/Wikid82/charon">⭐ GitHub</a>
</nav>
<main>
HEADER
# Append original content
cat "$html_file" >> "$temp_file"
# Add footer
cat >> "$temp_file" << 'FOOTER'
</main>
<footer style="text-align: center; padding: 2rem; color: #64748b;">
<p>Charon - Built with ❤️ for the community</p>
</footer>
</body>
</html>
FOOTER
mv "$temp_file" "$html_file"
fi
done
# --- 🚀 ROBUST DYNAMIC PATH FIX ---
echo "🔧 Calculating paths..."
# 1. Determine BASE_PATH
if [[ "${REPO_NAME}" == *".github.io" ]]; then
echo " - Mode: Root domain (e.g. user.github.io)"
BASE_PATH="/"
else
echo " - Mode: Sub-path (e.g. user.github.io/repo)"
BASE_PATH="/${REPO_NAME}/"
fi
# 2. Define standard repo variables
FULL_REPO="${{ github.repository }}"
REPO_URL="https://github.com/${FULL_REPO}"
echo " - Repo: ${FULL_REPO}"
echo " - URL: ${REPO_URL}"
echo " - Base: ${BASE_PATH}"
# 3. Fix paths in all HTML files
find _site -name "*.html" -exec sed -i \
-e "s|/charon/|${BASE_PATH}|g" \
-e "s|https://github.com/Wikid82/charon|${REPO_URL}|g" \
-e "s|Wikid82/charon|${FULL_REPO}|g" \
{} +
echo "✅ Paths fixed successfully!"
echo "✅ Documentation site built successfully!"
# Step 4: Upload the built site
- name: 📤 Upload artifact
uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b # v4
with:
path: '_site'
deploy:
name: Deploy to GitHub Pages
if: >-
(github.event_name == 'workflow_run' && github.event.workflow_run.head_branch == 'main') ||
(github.event_name != 'workflow_run' && github.ref == 'refs/heads/main')
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
timeout-minutes: 5
needs: build
steps:
# Deploy to GitHub Pages
- name: 🚀 Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # v4
# Create a summary
- name: 📋 Create deployment summary
run: |
{
echo "## 🎉 Documentation Deployed!"
echo ""
echo "Your documentation is now live at:"
echo "🔗 ${{ steps.deployment.outputs.page_url }}"
echo ""
echo "### 📚 What's Included"
echo "- Getting Started Guide"
echo "- Complete README"
echo "- API Documentation"
echo "- Database Schema"
echo "- Import Guide"
echo "- Contributing Guidelines"
} >> "$GITHUB_STEP_SUMMARY"

View File

@@ -1,41 +0,0 @@
# Previews what a git-history rewrite would strip (large CodeQL DB paths)
# without mutating anything. Runs after successful main builds, nightly,
# and on demand.
name: History Rewrite Dry-Run
on:
  workflow_run:
    workflows: ["Docker Build, Publish & Test"]
    types: [completed]
  schedule:
    - cron: '0 2 * * *' # daily at 02:00 UTC
  workflow_dispatch:
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.workflow_run.head_branch || github.head_ref || github.ref_name }}
  cancel-in-progress: true
permissions:
  contents: read
jobs:
  preview-history:
    name: Dry-run preview for history rewrite
    runs-on: ubuntu-latest
    # workflow_run events only proceed after a successful upstream build;
    # schedule/dispatch triggers always run.
    if: ${{ github.event_name != 'workflow_run' || github.event.workflow_run.conclusion == 'success' }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          # Full history is required to analyze rewrite candidates.
          fetch-depth: 0
          ref: ${{ github.event.workflow_run.head_sha || github.sha }}
      - name: Debug git info
        run: |
          git --version
          git rev-parse --is-shallow-repository || true
          git status --porcelain
      - name: Make CI script executable
        run: chmod +x scripts/ci/dry_run_history_rewrite.sh
      - name: Run dry-run history check
        run: |
          scripts/ci/dry_run_history_rewrite.sh --paths 'backend/codeql-db,codeql-db,codeql-db-js,codeql-db-go' --strip-size 50

File diff suppressed because it is too large Load Diff

View File

@@ -1,31 +0,0 @@
# Deletes all Actions caches associated with a closed PR's merge ref,
# freeing the repository's cache quota. Manually triggered with a PR number.
name: Cleanup github runner caches on closed pull requests
on:
  workflow_dispatch:
    inputs:
      pr_number:
        description: 'PR number to clean caches for'
        required: true
        type: string
jobs:
  cleanup:
    runs-on: ubuntu-latest
    permissions:
      # Required by `gh cache delete`.
      actions: write
    steps:
      - name: Cleanup
        run: |
          echo "Fetching list of cache keys"
          cacheKeysForPR=$(gh cache list --ref "$BRANCH" --limit 100 --json id --jq '.[].id')
          ## Setting this to not fail the workflow while deleting cache keys.
          set +e
          echo "Deleting caches..."
          while IFS= read -r cacheKey; do
            # Guard: an empty cache list still yields one empty line from the
            # here-string, which would otherwise call `gh cache delete ""`.
            [ -z "$cacheKey" ] && continue
            gh cache delete "$cacheKey"
          done <<< "$cacheKeysForPR"
          echo "Done"
        env:
          GH_TOKEN: ${{ github.token }}
          GH_REPO: ${{ github.repository }}
          # The synthetic merge ref GitHub creates for each PR.
          BRANCH: refs/pull/${{ inputs.pr_number }}/merge

View File

@@ -1,34 +0,0 @@
# Runs Bats tests and ShellCheck over the history-rewrite scripts after each
# successful upstream build.
name: History Rewrite Tests
on:
  workflow_run:
    workflows: ["Docker Build, Publish & Test"]
    types: [completed]
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.workflow_run.head_branch || github.head_ref || github.ref_name }}
  cancel-in-progress: true
jobs:
  test:
    runs-on: ubuntu-latest
    if: ${{ github.event.workflow_run.conclusion == 'success' }}
    steps:
      - name: Checkout with full history
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          # History-rewrite tests need the full commit graph.
          fetch-depth: 0
          ref: ${{ github.event.workflow_run.head_sha || github.sha }}
      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y bats shellcheck
      - name: Run Bats tests
        run: |
          bats ./scripts/history-rewrite/tests || exit 1
      - name: ShellCheck scripts
        run: |
          # Best-effort lint: `|| true` keeps findings advisory, not blocking.
          shellcheck scripts/history-rewrite/*.sh || true

View File

@@ -1,523 +0,0 @@
name: Nightly Build & Package
on:
schedule:
# Daily at 09:00 UTC (4am EST / 5am EDT)
- cron: '0 9 * * *'
workflow_dispatch:
inputs:
reason:
description: "Why are you running this manually?"
required: true
default: "manual trigger"
skip_tests:
description: "Skip test-nightly-image job?"
required: false
default: "false"
env:
GO_VERSION: '1.26.0'
NODE_VERSION: '24.12.0'
GOTOOLCHAIN: auto
GHCR_REGISTRY: ghcr.io
DOCKERHUB_REGISTRY: docker.io
IMAGE_NAME: wikid82/charon
jobs:
sync-development-to-nightly:
runs-on: ubuntu-latest
permissions:
contents: write
outputs:
has_changes: ${{ steps.sync.outputs.has_changes }}
steps:
- name: Checkout nightly branch
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: nightly
fetch-depth: 0
token: ${{ secrets.CHARON_CI_TRIGGER_TOKEN || secrets.GITHUB_TOKEN }}
- name: Configure Git
run: |
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
- name: Sync development to nightly
id: sync
env:
HAS_TRIGGER_TOKEN: ${{ secrets.CHARON_CI_TRIGGER_TOKEN != '' }}
run: |
# Fetch both branches to ensure we have the latest remote state
git fetch origin development
git fetch origin nightly
# Sync local nightly with remote nightly to prevent non-fast-forward errors
echo "Syncing local nightly with remote nightly..."
git reset --hard origin/nightly
# Check if there are differences between remote branches
if git diff --quiet origin/nightly origin/development; then
echo "No changes to sync from development to nightly"
echo "has_changes=false" >> "$GITHUB_OUTPUT"
else
echo "Syncing changes from development to nightly"
# Fast-forward merge development into nightly
git merge origin/development --ff-only -m "chore: sync from development branch [skip ci]" || {
# If fast-forward fails, force reset to development
echo "Fast-forward not possible, resetting nightly to development"
git reset --hard origin/development
}
if [[ "$HAS_TRIGGER_TOKEN" != "true" ]]; then
echo "::warning title=Using GITHUB_TOKEN fallback::Set CHARON_CI_TRIGGER_TOKEN to ensure push-triggered workflows run on nightly."
fi
# Force push to handle cases where nightly diverged from development
git push --force origin nightly
echo "has_changes=true" >> "$GITHUB_OUTPUT"
fi
trigger-nightly-validation:
name: Trigger Nightly Validation Workflows
needs: sync-development-to-nightly
if: needs.sync-development-to-nightly.outputs.has_changes == 'true'
runs-on: ubuntu-latest
permissions:
actions: write
contents: read
steps:
- name: Dispatch Missing Nightly Validation Workflows
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const owner = context.repo.owner;
const repo = context.repo.repo;
const { data: nightlyBranch } = await github.rest.repos.getBranch({
owner,
repo,
branch: 'nightly',
});
const nightlyHeadSha = nightlyBranch.commit.sha;
core.info(`Current nightly HEAD: ${nightlyHeadSha}`);
const workflows = [
{ id: 'e2e-tests-split.yml' },
{ id: 'codecov-upload.yml', inputs: { run_backend: 'true', run_frontend: 'true' } },
{ id: 'supply-chain-verify.yml' },
{ id: 'codeql.yml' },
];
core.info('Skipping security-pr.yml: PR-only workflow intentionally excluded from nightly non-PR dispatch');
for (const workflow of workflows) {
const { data: workflowRuns } = await github.rest.actions.listWorkflowRuns({
owner,
repo,
workflow_id: workflow.id,
branch: 'nightly',
per_page: 50,
});
const hasRunForHead = workflowRuns.workflow_runs.some(
(run) => run.head_sha === nightlyHeadSha,
);
if (hasRunForHead) {
core.info(`Skipping dispatch for ${workflow.id}; run already exists for nightly HEAD`);
continue;
}
await github.rest.actions.createWorkflowDispatch({
owner,
repo,
workflow_id: workflow.id,
ref: 'nightly',
...(workflow.inputs ? { inputs: workflow.inputs } : {}),
});
core.info(`Dispatched ${workflow.id} on nightly (missing run for HEAD)`);
}
build-and-push-nightly:
needs: sync-development-to-nightly
runs-on: ubuntu-latest
env:
HAS_DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN != '' }}
permissions:
contents: read
packages: write
id-token: write
outputs:
version: ${{ steps.meta.outputs.version }}
tags: ${{ steps.meta.outputs.tags }}
digest: ${{ steps.build.outputs.digest }}
steps:
- name: Checkout nightly branch
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: nightly
fetch-depth: 0
- name: Set lowercase image name
run: echo "IMAGE_NAME_LC=${IMAGE_NAME,,}" >> "$GITHUB_ENV"
- name: Set up QEMU
uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Log in to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ${{ env.GHCR_REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Log in to Docker Hub
if: env.HAS_DOCKERHUB_TOKEN == 'true'
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: docker.io
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
with:
images: |
${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}
${{ env.DOCKERHUB_REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=raw,value=nightly
type=raw,value=nightly-{{date 'YYYY-MM-DD'}}
type=sha,prefix=nightly-,format=short
labels: |
org.opencontainers.image.title=Charon Nightly
org.opencontainers.image.description=Nightly build of Charon
- name: Build and push Docker image
id: build
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: |
VERSION=nightly-${{ github.sha }}
VCS_REF=${{ github.sha }}
BUILD_DATE=${{ github.event.repository.pushed_at }}
cache-from: type=gha
cache-to: type=gha,mode=max
provenance: true
sbom: true
- name: Record nightly image digest
run: |
echo "## 🧾 Nightly Image Digest" >> "$GITHUB_STEP_SUMMARY"
echo "- ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:nightly@${{ steps.build.outputs.digest }}" >> "$GITHUB_STEP_SUMMARY"
- name: Generate SBOM
id: sbom_primary
continue-on-error: true
uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
with:
image: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:nightly@${{ steps.build.outputs.digest }}
format: cyclonedx-json
output-file: sbom-nightly.json
syft-version: v1.42.1
- name: Generate SBOM fallback with pinned Syft
if: always()
run: |
set -euo pipefail
if [[ "${{ steps.sbom_primary.outcome }}" == "success" ]] && [[ -s sbom-nightly.json ]] && jq -e . sbom-nightly.json >/dev/null 2>&1; then
echo "Primary SBOM generation succeeded with valid JSON; skipping fallback"
exit 0
fi
echo "Primary SBOM generation failed or produced missing/invalid output; using deterministic Syft fallback"
SYFT_VERSION="v1.42.1"
OS="$(uname -s | tr '[:upper:]' '[:lower:]')"
ARCH="$(uname -m)"
case "$ARCH" in
x86_64) ARCH="amd64" ;;
aarch64|arm64) ARCH="arm64" ;;
*) echo "Unsupported architecture: $ARCH"; exit 1 ;;
esac
TARBALL="syft_${SYFT_VERSION#v}_${OS}_${ARCH}.tar.gz"
BASE_URL="https://github.com/anchore/syft/releases/download/${SYFT_VERSION}"
curl -fsSLo "$TARBALL" "${BASE_URL}/${TARBALL}"
curl -fsSLo checksums.txt "${BASE_URL}/syft_${SYFT_VERSION#v}_checksums.txt"
grep " ${TARBALL}$" checksums.txt > checksum_line.txt
sha256sum -c checksum_line.txt
tar -xzf "$TARBALL" syft
chmod +x syft
./syft "${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:nightly@${{ steps.build.outputs.digest }}" -o cyclonedx-json=sbom-nightly.json
- name: Verify SBOM artifact
if: always()
run: |
set -euo pipefail
test -s sbom-nightly.json
jq -e . sbom-nightly.json >/dev/null
jq -e '
.bomFormat == "CycloneDX"
and (.specVersion | type == "string" and length > 0)
and has("version")
and has("metadata")
and (.components | type == "array")
' sbom-nightly.json >/dev/null
- name: Upload SBOM artifact
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: sbom-nightly
path: sbom-nightly.json
retention-days: 30
# Install Cosign for keyless signing
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
# Sign GHCR image with keyless signing (Sigstore/Fulcio)
- name: Sign GHCR Image
run: |
echo "Signing GHCR nightly image with keyless signing..."
cosign sign --yes "${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build.outputs.digest }}"
echo "✅ GHCR nightly image signed successfully"
# Sign Docker Hub image with keyless signing (Sigstore/Fulcio)
- name: Sign Docker Hub Image
if: env.HAS_DOCKERHUB_TOKEN == 'true'
run: |
echo "Signing Docker Hub nightly image with keyless signing..."
cosign sign --yes "${{ env.DOCKERHUB_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build.outputs.digest }}"
echo "✅ Docker Hub nightly image signed successfully"
# Attach SBOM to Docker Hub image
- name: Attach SBOM to Docker Hub
if: env.HAS_DOCKERHUB_TOKEN == 'true'
run: |
echo "Attaching SBOM to Docker Hub nightly image..."
cosign attach sbom --sbom sbom-nightly.json "${{ env.DOCKERHUB_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build.outputs.digest }}"
echo "✅ SBOM attached to Docker Hub nightly image"
test-nightly-image:
needs: build-and-push-nightly
runs-on: ubuntu-latest
permissions:
contents: read
packages: read
steps:
- name: Checkout nightly branch
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: nightly
- name: Set lowercase image name
run: echo "IMAGE_NAME_LC=${IMAGE_NAME,,}" >> "$GITHUB_ENV"
- name: Log in to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ${{ env.GHCR_REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Pull nightly image
run: docker pull "${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:nightly@${{ needs.build-and-push-nightly.outputs.digest }}"
- name: Run container smoke test
run: |
docker run --name charon-nightly -d \
-p 8080:8080 \
"${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:nightly@${{ needs.build-and-push-nightly.outputs.digest }}"
# Wait for container to start
sleep 10
# Check container is running
docker ps | grep charon-nightly
# Basic health check
curl -f http://localhost:8080/health || exit 1
# Cleanup
docker stop charon-nightly
docker rm charon-nightly
# NOTE: Standalone binary builds removed - Charon uses Docker-only deployment
# The build-nightly-release job that ran GoReleaser for Windows/macOS/Linux binaries
# was removed because:
# 1. Charon is distributed exclusively via Docker images
# 2. Cross-compilation was failing due to Unix-specific syscalls
# 3. No users download standalone binaries (all use Docker)
# If standalone binaries are needed in the future, re-add the job with Linux-only targets
verify-nightly-supply-chain:
needs: build-and-push-nightly
runs-on: ubuntu-latest
permissions:
contents: read
packages: read
security-events: write
steps:
- name: Checkout nightly branch
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: nightly
- name: Set lowercase image name
run: echo "IMAGE_NAME_LC=${IMAGE_NAME,,}" >> "$GITHUB_ENV"
- name: Download SBOM
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
with:
name: sbom-nightly
- name: Scan with Grype
uses: anchore/scan-action@7037fa011853d5a11690026fb85feee79f4c946c # v7.3.2
with:
sbom: sbom-nightly.json
fail-build: false
severity-cutoff: high
- name: Scan with Trivy
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
with:
image-ref: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ needs.build-and-push-nightly.outputs.digest }}
format: 'sarif'
output: 'trivy-nightly.sarif'
- name: Upload Trivy results
uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4.32.5
with:
sarif_file: 'trivy-nightly.sarif'
category: 'trivy-nightly'
- name: Security severity policy summary
run: |
{
echo "## 🔐 Nightly Supply Chain Severity Policy"
echo ""
echo "- Blocking: Critical, High"
echo "- Medium: non-blocking by default (report + triage SLA)"
echo "- Policy file: .github/security-severity-policy.yml"
} >> "$GITHUB_STEP_SUMMARY"
- name: Check for Critical/High CVEs
run: |
set -euo pipefail
jq -e . trivy-nightly.sarif >/dev/null
CRITICAL_COUNT=$(jq -r '
[
.runs[] as $run
| ($run.tool.driver.rules // []) as $rules
| $run.results[]?
| . as $result
| (
(
if (($result.ruleIndex | type) == "number") then
($rules[$result.ruleIndex].properties["security-severity"] // empty)
else
empty
end
)
// ([
$rules[]?
| select((.id // "") == ($result.ruleId // ""))
| .properties["security-severity"]
][0] // empty)
// empty
) as $securitySeverity
| (try ($securitySeverity | tonumber) catch empty) as $score
| select($score != null and $score >= 9.0)
] | length
' trivy-nightly.sarif)
HIGH_COUNT=$(jq -r '
[
.runs[] as $run
| ($run.tool.driver.rules // []) as $rules
| $run.results[]?
| . as $result
| (
(
if (($result.ruleIndex | type) == "number") then
($rules[$result.ruleIndex].properties["security-severity"] // empty)
else
empty
end
)
// ([
$rules[]?
| select((.id // "") == ($result.ruleId // ""))
| .properties["security-severity"]
][0] // empty)
// empty
) as $securitySeverity
| (try ($securitySeverity | tonumber) catch empty) as $score
| select($score != null and $score >= 7.0 and $score < 9.0)
] | length
' trivy-nightly.sarif)
MEDIUM_COUNT=$(jq -r '
[
.runs[] as $run
| ($run.tool.driver.rules // []) as $rules
| $run.results[]?
| . as $result
| (
(
if (($result.ruleIndex | type) == "number") then
($rules[$result.ruleIndex].properties["security-severity"] // empty)
else
empty
end
)
// ([
$rules[]?
| select((.id // "") == ($result.ruleId // ""))
| .properties["security-severity"]
][0] // empty)
// empty
) as $securitySeverity
| (try ($securitySeverity | tonumber) catch empty) as $score
| select($score != null and $score >= 4.0 and $score < 7.0)
] | length
' trivy-nightly.sarif)
{
echo "- Structured SARIF counts: CRITICAL=${CRITICAL_COUNT}, HIGH=${HIGH_COUNT}, MEDIUM=${MEDIUM_COUNT}"
} >> "$GITHUB_STEP_SUMMARY"
if [ "$CRITICAL_COUNT" -gt 0 ]; then
echo "❌ Critical vulnerabilities found in nightly build (${CRITICAL_COUNT})"
exit 1
fi
if [ "$HIGH_COUNT" -gt 0 ]; then
echo "❌ High vulnerabilities found in nightly build (${HIGH_COUNT})"
exit 1
fi
if [ "$MEDIUM_COUNT" -gt 0 ]; then
echo "::warning::Medium vulnerabilities found in nightly build (${MEDIUM_COUNT}). Non-blocking by policy; triage with SLA per .github/security-severity-policy.yml"
fi
echo "✅ No Critical/High vulnerabilities found"

View File

@@ -1,68 +0,0 @@
name: PR Checklist Validation (History Rewrite)
on:
workflow_dispatch:
inputs:
pr_number:
description: 'PR number to validate'
required: true
type: string
concurrency:
group: ${{ github.workflow }}-${{ inputs.pr_number || github.event.pull_request.number }}
cancel-in-progress: true
jobs:
validate:
name: Validate history-rewrite checklist (conditional)
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Validate PR checklist (only for history-rewrite changes)
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
PR_NUMBER: ${{ inputs.pr_number }}
with:
script: |
const owner = context.repo.owner;
const repo = context.repo.repo;
const prNumber = Number(process.env.PR_NUMBER || context.issue.number);
if (!prNumber) {
core.setFailed('Missing PR number input for workflow_dispatch.');
return;
}
const pr = await github.rest.pulls.get({owner, repo, pull_number: prNumber});
const body = (pr.data && pr.data.body) || '';
// Determine if this PR modifies history-rewrite related files
// Exclude the template file itself - it shouldn't trigger its own validation
const filesResp = await github.rest.pulls.listFiles({ owner, repo, pull_number: prNumber });
const files = filesResp.data.map(f => f.filename.toLowerCase());
const relevant = files.some(fn => {
// Skip the PR template itself
if (fn === '.github/pull_request_template/history-rewrite.md') return false;
// Check for actual history-rewrite implementation files
return fn.startsWith('scripts/history-rewrite/') || fn === 'docs/plans/history_rewrite.md';
});
if (!relevant) {
core.info('No history-rewrite related files changed; skipping checklist validation.');
return;
}
// Use a set of named checks with robust regex patterns for checkbox and phrase variants
const checks = [
{ name: 'preview_removals.sh mention', pattern: /preview_removals\.sh/i },
{ name: 'data/backups mention', pattern: /data\/?backups/i },
// Accept checked checkbox variants and inline code/backtick usage for the '--force' phrase
{ name: 'explicit non-run of --force', pattern: /(?:\[\s*[xX]\s*\]\s*)?(?:i will not run|will not run|do not run|don'?t run|won'?t run)\b[^\n]*--force/i },
];
const missing = checks.filter(c => !c.pattern.test(body)).map(c => c.name);
if (missing.length > 0) {
// Post a comment to the PR with instructions for filling the checklist
const commentBody = `Hi! This PR touches history-rewrite artifacts and requires the checklist in .github/PULL_REQUEST_TEMPLATE/history-rewrite.md. The following items are missing in your PR body: ${missing.join(', ')}\n\nPlease update the PR description using the history-rewrite template and re-run checks.`;
await github.rest.issues.createComment({ owner, repo, issue_number: prNumber, body: commentBody });
core.setFailed('Missing required checklist items: ' + missing.join(', '));
}

View File

@@ -1,208 +0,0 @@
name: Propagate Changes Between Branches
on:
workflow_run:
workflows: ["Docker Build, Publish & Test"]
types: [completed]
branches: [ main, development ]
concurrency:
group: ${{ github.workflow }}-${{ github.event.workflow_run.head_branch || github.ref }}
cancel-in-progress: false
env:
NODE_VERSION: '24.12.0'
permissions:
contents: write
pull-requests: write
issues: write
jobs:
propagate:
name: Create PR to synchronize branches
runs-on: ubuntu-latest
if: >-
github.actor != 'github-actions[bot]' &&
github.event.workflow_run.conclusion == 'success' &&
(github.event.workflow_run.head_branch == 'main' || github.event.workflow_run.head_branch == 'development')
steps:
- name: Set up Node (for github-script)
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
with:
node-version: ${{ env.NODE_VERSION }}
- name: Propagate Changes
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
CURRENT_BRANCH: ${{ github.event.workflow_run.head_branch || github.ref_name }}
CURRENT_SHA: ${{ github.event.workflow_run.head_sha || github.sha }}
with:
script: |
const currentBranch = process.env.CURRENT_BRANCH || context.ref.replace('refs/heads/', '');
let excludedBranch = null;
// Loop Prevention: Identify if this commit is from a merged PR
try {
const associatedPRs = await github.rest.repos.listPullRequestsAssociatedWithCommit({
owner: context.repo.owner,
repo: context.repo.repo,
commit_sha: process.env.CURRENT_SHA || context.sha,
});
// If the commit comes from a PR, we identify the source branch
// so we don't try to merge changes back into it immediately.
if (associatedPRs.data.length > 0) {
excludedBranch = associatedPRs.data[0].head.ref;
core.info(`Commit ${process.env.CURRENT_SHA || context.sha} is associated with PR #${associatedPRs.data[0].number} coming from '${excludedBranch}'. This branch will be excluded from propagation to prevent loops.`);
}
} catch (err) {
core.warning(`Failed to check associated PRs: ${err.message}`);
}
async function createPR(src, base) {
if (src === base) return;
core.info(`Checking propagation from ${src} to ${base}...`);
// Check for existing open PRs
const { data: pulls } = await github.rest.pulls.list({
owner: context.repo.owner,
repo: context.repo.repo,
state: 'open',
head: `${context.repo.owner}:${src}`,
base: base,
});
if (pulls.length > 0) {
core.info(`Existing PR found for ${src} -> ${base}. Skipping.`);
return;
}
// Compare commits to see if src is ahead of base
try {
const compare = await github.rest.repos.compareCommits({
owner: context.repo.owner,
repo: context.repo.repo,
base: base,
head: src,
});
// If src is not ahead, nothing to merge
if (compare.data.ahead_by === 0) {
core.info(`${src} is not ahead of ${base}. No propagation needed.`);
return;
}
// If files changed include history-rewrite or other sensitive scripts,
// avoid automatic propagation. This prevents bypassing checklist validation
// and manual review for potentially destructive changes.
let files = (compare.data.files || []).map(f => (f.filename || '').toLowerCase());
// Fallback: if compare.files is empty/truncated, aggregate files from the commit list
if (files.length === 0 && Array.isArray(compare.data.commits) && compare.data.commits.length > 0) {
for (const commit of compare.data.commits) {
const commitData = await github.rest.repos.getCommit({ owner: context.repo.owner, repo: context.repo.repo, ref: commit.sha });
for (const f of (commitData.data.files || [])) {
files.push((f.filename || '').toLowerCase());
}
}
files = Array.from(new Set(files));
}
// Load propagation config (list of sensitive paths) from .github/propagate-config.yml when available
// NOTE: .github/workflows/ was removed from defaults - workflow updates SHOULD propagate
// to ensure downstream branches have correct CI/CD configurations
let configPaths = ['scripts/history-rewrite/', 'data/backups', 'docs/plans/history_rewrite.md'];
try {
const configResp = await github.rest.repos.getContent({ owner: context.repo.owner, repo: context.repo.repo, path: '.github/propagate-config.yml', ref: src });
const contentStr = Buffer.from(configResp.data.content, 'base64').toString('utf8');
const lines = contentStr.split(/\r?\n/);
let inSensitive = false;
const parsedPaths = [];
for (const line of lines) {
const trimmed = line.trim();
if (!inSensitive && trimmed.startsWith('sensitive_paths:')) { inSensitive = true; continue; }
if (inSensitive) {
if (trimmed.startsWith('-')) parsedPaths.push(trimmed.substring(1).trim());
else if (trimmed.length === 0) continue; else break;
}
}
if (parsedPaths.length > 0) configPaths = parsedPaths.map(p => p.toLowerCase());
} catch (err) { core.info('No .github/propagate-config.yml or parse failure; using defaults.'); }
const sensitive = files.some(fn => configPaths.some(sp => fn.startsWith(sp) || fn.includes(sp)));
if (sensitive) {
core.info(`${src} -> ${base} contains sensitive changes (${files.join(', ')}). Skipping automatic propagation.`);
return;
}
} catch (error) {
// If base branch doesn't exist, etc.
core.warning(`Error comparing ${src} to ${base}: ${error.message}`);
return;
}
// Create PR
try {
const pr = await github.rest.pulls.create({
owner: context.repo.owner,
repo: context.repo.repo,
title: `Propagate changes from ${src} into ${base}`,
head: src,
base: base,
body: `Automated PR to propagate changes from ${src} into ${base}.\n\nTriggered by push to ${currentBranch}.`,
draft: true,
});
core.info(`Created PR #${pr.data.number} to merge ${src} into ${base}`);
// Add an 'auto-propagate' label to the created PR and create the label if missing
try {
try {
await github.rest.issues.getLabel({ owner: context.repo.owner, repo: context.repo.repo, name: 'auto-propagate' });
} catch (e) {
await github.rest.issues.createLabel({ owner: context.repo.owner, repo: context.repo.repo, name: 'auto-propagate', color: '7dd3fc', description: 'Automatically created propagate PRs' });
}
await github.rest.issues.addLabels({ owner: context.repo.owner, repo: context.repo.repo, issue_number: pr.data.number, labels: ['auto-propagate'] });
} catch (labelErr) {
core.warning('Failed to ensure or add auto-propagate label: ' + labelErr.message);
}
} catch (error) {
core.warning(`Failed to create PR from ${src} to ${base}: ${error.message}`);
}
}
if (currentBranch === 'main') {
// Main -> Development
// Only propagate if development is not the source (loop prevention)
if (excludedBranch !== 'development') {
await createPR('main', 'development');
} else {
core.info('Push originated from development (excluded). Skipping propagation back to development.');
}
} else if (currentBranch === 'development') {
// Development -> Feature/Hotfix branches (The Pittsburgh Model)
// We propagate changes from dev DOWN to features/hotfixes so they stay up to date.
const branches = await github.paginate(github.rest.repos.listBranches, {
owner: context.repo.owner,
repo: context.repo.repo,
});
// Filter for feature/* and hotfix/* branches using regex
// AND exclude the branch that just got merged in (if any)
const targetBranches = branches
.map(b => b.name)
.filter(name => {
const isTargetType = /^feature\/|^hotfix\//.test(name);
const isExcluded = (name === excludedBranch);
return isTargetType && !isExcluded;
});
core.info(`Found ${targetBranches.length} target branches (excluding '${excludedBranch || 'none'}'): ${targetBranches.join(', ')}`);
for (const targetBranch of targetBranches) {
await createPR('development', targetBranch);
}
}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CHARON_TOKEN: ${{ secrets.CHARON_TOKEN }}

View File

@@ -1,332 +0,0 @@
name: Quality Checks
on:
pull_request:
push:
branches:
- nightly
- main
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions:
contents: read
checks: write
env:
GO_VERSION: '1.26.0'
NODE_VERSION: '24.12.0'
GOTOOLCHAIN: auto
jobs:
auth-route-protection-contract:
name: Auth Route Protection Contract
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
fetch-depth: 0
ref: ${{ github.sha }}
- name: Set up Go
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
with:
go-version: ${{ env.GO_VERSION }}
cache-dependency-path: backend/go.sum
- name: Run auth protection contract tests
run: |
set -euo pipefail
cd backend
go test ./internal/api/routes -run 'TestRegister_StateChangingRoutesRequireAuthentication|TestRegister_StateChangingRoutesDenyByDefaultWithExplicitAllowlist|TestRegister_AuthenticatedRoutes' -count=1 -v
codecov-trigger-parity-guard:
name: Codecov Trigger/Comment Parity Guard
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
- name: Enforce Codecov trigger and comment parity
run: |
bash scripts/ci/check-codecov-trigger-parity.sh
backend-quality:
name: Backend (Go)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
fetch-depth: 0
ref: ${{ github.sha }}
# SECURITY: Do not switch this workflow to pull_request_target for backend tests.
# Untrusted code paths (fork PRs and Dependabot PRs) must never receive repository secrets.
- name: Resolve encryption key for backend tests
shell: bash
env:
EVENT_NAME: ${{ github.event_name }}
ACTOR: ${{ github.actor }}
REPO: ${{ github.repository }}
PR_HEAD_REPO: ${{ github.event.pull_request.head.repo.full_name }}
PR_HEAD_FORK: ${{ github.event.pull_request.head.repo.fork }}
WORKFLOW_SECRET_KEY: ${{ secrets.CHARON_ENCRYPTION_KEY_TEST }}
run: |
set -euo pipefail
is_same_repo_pr=false
if [[ "$EVENT_NAME" == "pull_request" && -n "${PR_HEAD_REPO:-}" && "$PR_HEAD_REPO" == "$REPO" ]]; then
is_same_repo_pr=true
fi
is_workflow_dispatch=false
if [[ "$EVENT_NAME" == "workflow_dispatch" ]]; then
is_workflow_dispatch=true
fi
is_push_event=false
if [[ "$EVENT_NAME" == "push" ]]; then
is_push_event=true
fi
is_dependabot_pr=false
if [[ "$EVENT_NAME" == "pull_request" && "$ACTOR" == "dependabot[bot]" ]]; then
is_dependabot_pr=true
fi
is_fork_pr=false
if [[ "$EVENT_NAME" == "pull_request" && "${PR_HEAD_FORK:-false}" == "true" ]]; then
is_fork_pr=true
fi
is_untrusted=false
if [[ "$is_fork_pr" == "true" || "$is_dependabot_pr" == "true" ]]; then
is_untrusted=true
fi
is_trusted=false
if [[ "$is_untrusted" == "false" && ( "$is_same_repo_pr" == "true" || "$is_workflow_dispatch" == "true" || "$is_push_event" == "true" ) ]]; then
is_trusted=true
fi
resolved_key=""
if [[ "$is_trusted" == "true" ]]; then
if [[ -z "${WORKFLOW_SECRET_KEY:-}" ]]; then
echo "::error title=Missing required secret::Trusted backend CI context requires CHARON_ENCRYPTION_KEY_TEST. Add repository secret CHARON_ENCRYPTION_KEY_TEST."
exit 1
fi
resolved_key="$WORKFLOW_SECRET_KEY"
elif [[ "$is_untrusted" == "true" ]]; then
resolved_key="$(openssl rand -base64 32)"
else
echo "::error title=Unsupported event context::Unable to classify trust for backend key resolution (event=${EVENT_NAME})."
exit 1
fi
if [[ -z "$resolved_key" ]]; then
echo "::error title=Key resolution failure::Resolved encryption key is empty."
exit 1
fi
echo "::add-mask::$resolved_key"
{
echo "CHARON_ENCRYPTION_KEY<<__CHARON_EOF__"
echo "$resolved_key"
echo "__CHARON_EOF__"
} >> "$GITHUB_ENV"
- name: Set up Go
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
with:
go-version: ${{ env.GO_VERSION }}
cache-dependency-path: backend/go.sum
- name: Repo health check
run: |
bash "scripts/repo_health_check.sh"
- name: Run Go tests
id: go-tests
working-directory: ${{ github.workspace }}
env:
CGO_ENABLED: 1
run: |
bash "scripts/go-test-coverage.sh" 2>&1 | tee backend/test-output.txt
exit "${PIPESTATUS[0]}"
- name: Go Test Summary
if: always()
working-directory: backend
run: |
{
echo "## 🔧 Backend Test Results"
if [ "${{ steps.go-tests.outcome }}" == "success" ]; then
echo "✅ **All tests passed**"
PASS_COUNT=$(grep -c "^--- PASS" test-output.txt || echo "0")
echo "- Tests passed: ${PASS_COUNT}"
else
echo "❌ **Tests failed**"
echo ""
echo "### Failed Tests:"
echo '```'
grep -E "^--- FAIL|FAIL\s+github" test-output.txt || echo "See logs for details"
echo '```'
fi
} >> "$GITHUB_STEP_SUMMARY"
# Codecov upload moved to `codecov-upload.yml` (pull_request + workflow_dispatch).
- name: Run golangci-lint
uses: golangci/golangci-lint-action@1e7e51e771db61008b38414a730f564565cf7c20 # v9.2.0
with:
version: latest
working-directory: backend
args: --timeout=5m
continue-on-error: true
- name: GORM Security Scanner
id: gorm-scan
run: |
chmod +x scripts/scan-gorm-security.sh
./scripts/scan-gorm-security.sh --check
continue-on-error: false
- name: GORM Security Scan Summary
if: always()
run: |
{
echo "## 🔒 GORM Security Scan Results"
if [ "${{ steps.gorm-scan.outcome }}" == "success" ]; then
echo "✅ **No GORM security issues detected**"
echo ""
echo "All models follow secure GORM patterns:"
echo "- ✅ No exposed internal database IDs"
echo "- ✅ No exposed API keys or secrets"
echo "- ✅ Response DTOs properly structured"
else
echo "❌ **GORM security issues found**"
echo ""
echo "Run locally for details:"
echo '```bash'
echo "./scripts/scan-gorm-security.sh --report"
echo '```'
echo ""
echo "See [GORM Security Scanner docs](docs/implementation/gorm_security_scanner_complete.md) for remediation guidance."
fi
} >> "$GITHUB_STEP_SUMMARY"
- name: Annotate GORM Security Issues
if: failure() && steps.gorm-scan.outcome == 'failure'
run: |
echo "::error title=GORM Security Issues Detected::Run './scripts/scan-gorm-security.sh --report' locally for detailed findings. See docs/implementation/gorm_security_scanner_complete.md for remediation guidance."
- name: Run Perf Asserts
working-directory: backend
env:
# Conservative defaults to avoid flakiness on CI; tune as necessary
PERF_MAX_MS_GETSTATUS_P95: 500ms
PERF_MAX_MS_GETSTATUS_P95_PARALLEL: 1500ms
PERF_MAX_MS_LISTDECISIONS_P95: 2000ms
run: |
{
echo "## 🔍 Running performance assertions (TestPerf)"
go test -run TestPerf -v ./internal/api/handlers -count=1 | tee perf-output.txt
} >> "$GITHUB_STEP_SUMMARY"
exit "${PIPESTATUS[0]}"
frontend-quality:
name: Frontend (React)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
fetch-depth: 0
- name: Repo health check
run: |
bash "scripts/repo_health_check.sh"
- name: Set up Node.js
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
cache-dependency-path: frontend/package-lock.json
- name: Check if frontend was modified in PR
id: check-frontend
run: |
if [ "${{ github.event_name }}" = "push" ]; then
echo "frontend_changed=true" >> "$GITHUB_OUTPUT"
exit 0
fi
# Try to fetch the PR base ref. This may fail for forked PRs or other cases.
git fetch origin "${{ github.event.pull_request.base.ref }}" --depth=1 || true
# Compute changed files against the PR base ref, fallback to origin/main, then fallback to last 10 commits
CHANGED=$(git diff --name-only "origin/${{ github.event.pull_request.base.ref }}...HEAD" 2>/dev/null || echo "")
printf "Changed files (base ref):\n%s\n" "$CHANGED"
if [ -z "$CHANGED" ]; then
echo "Base ref diff empty or failed; fetching origin/main for fallback..."
git fetch origin main --depth=1 || true
CHANGED=$(git diff --name-only origin/main...HEAD 2>/dev/null || echo "")
printf "Changed files (main fallback):\n%s\n" "$CHANGED"
fi
if [ -z "$CHANGED" ]; then
echo "Still empty; falling back to diffing last 10 commits from HEAD..."
CHANGED=$(git diff --name-only HEAD~10...HEAD 2>/dev/null || echo "")
printf "Changed files (HEAD~10 fallback):\n%s\n" "$CHANGED"
fi
if echo "$CHANGED" | grep -q '^frontend/'; then
echo "frontend_changed=true" >> "$GITHUB_OUTPUT"
else
echo "frontend_changed=false" >> "$GITHUB_OUTPUT"
fi
- name: Install dependencies
working-directory: frontend
run: npm ci
- name: Run frontend tests and coverage
id: frontend-tests
working-directory: ${{ github.workspace }}
run: |
bash scripts/frontend-test-coverage.sh 2>&1 | tee frontend/test-output.txt
exit "${PIPESTATUS[0]}"
- name: Frontend Test Summary
if: always()
working-directory: frontend
run: |
{
echo "## ⚛️ Frontend Test Results"
if [ "${{ steps.frontend-tests.outcome }}" == "success" ]; then
echo "✅ **All tests passed**"
# Extract test counts from vitest output
if grep -q "Tests:" test-output.txt; then
grep "Tests:" test-output.txt | tail -1
fi
else
echo "❌ **Tests failed**"
echo ""
echo "### Failed Tests:"
echo '```'
# Extract failed test info from vitest output
grep -E "FAIL|✕|×|AssertionError|Error:" test-output.txt | head -30 || echo "See logs for details"
echo '```'
fi
} >> "$GITHUB_STEP_SUMMARY"
# Codecov upload moved to `codecov-upload.yml` (pull_request + workflow_dispatch).
- name: Run frontend lint
working-directory: frontend
run: npm run lint
continue-on-error: true

View File

@@ -1,115 +0,0 @@
name: Rate Limit integration

# NOTE: This workflow builds the image locally in the "Build Docker image
# (Local)" step below. The previous header comment ("Build Once, Test Many —
# use registry image from docker-build.yml") was stale and has been removed
# to match actual behavior.
on:
  workflow_dispatch:
    inputs:
      # Currently unused: the job always builds and tests `charon:local`.
      # Kept for backward compatibility with existing manual-dispatch callers.
      image_tag:
        description: 'Docker image tag to test (e.g., pr-123-abc1234, latest)'
        required: false
        type: string
  pull_request:
  push:
    branches:
      - main

# Cancel superseded runs for the same ref. The old group expression referenced
# github.event.workflow_run.*, which is never populated for the triggers
# declared above, so those fallbacks were dead and have been removed.
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  rate-limit-integration:
    name: Rate Limiting Integration
    runs-on: ubuntu-latest
    timeout-minutes: 15
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6

      - name: Build Docker image (Local)
        run: |
          echo "Building image locally for integration tests..."
          docker build -t charon:local .
          echo "✅ Successfully built charon:local"

      - name: Run rate limit integration tests
        id: ratelimit-test
        run: |
          chmod +x scripts/rate_limit_integration.sh
          scripts/rate_limit_integration.sh 2>&1 | tee ratelimit-test-output.txt
          exit "${PIPESTATUS[0]}"

      # Collect container/API state into the step summary to make CI failures
      # debuggable without re-running locally.
      - name: Dump Debug Info on Failure
        if: failure()
        run: |
          {
            echo "## 🔍 Debug Information"
            echo ""
            echo "### Container Status"
            echo '```'
            docker ps -a --filter "name=charon" --filter "name=ratelimit" --filter "name=backend" 2>&1 || true
            echo '```'
            echo ""
            echo "### Security Config API"
            echo '```json'
            curl -s http://localhost:8280/api/v1/security/config 2>/dev/null | head -100 || echo "Could not retrieve security config"
            echo '```'
            echo ""
            echo "### Security Status API"
            echo '```json'
            curl -s http://localhost:8280/api/v1/security/status 2>/dev/null | head -100 || echo "Could not retrieve security status"
            echo '```'
            echo ""
            echo "### Caddy Admin Config (rate_limit handlers)"
            echo '```json'
            curl -s http://localhost:2119/config 2>/dev/null | grep -A 20 '"handler":"rate_limit"' | head -30 || echo "Could not retrieve Caddy config"
            echo '```'
            echo ""
            echo "### Charon Container Logs (last 100 lines)"
            echo '```'
            docker logs charon-ratelimit-test 2>&1 | tail -100 || echo "No container logs available"
            echo '```'
          } >> "$GITHUB_STEP_SUMMARY"

      - name: Rate Limit Integration Summary
        if: always()
        run: |
          {
            echo "## ⏱️ Rate Limit Integration Test Results"
            if [ "${{ steps.ratelimit-test.outcome }}" == "success" ]; then
              echo "✅ **All rate limit tests passed**"
              echo ""
              echo "### Test Results:"
              echo '```'
              grep -E "✓|=== ALL|HTTP 429|HTTP 200" ratelimit-test-output.txt | head -30 || echo "See logs for details"
              echo '```'
              echo ""
              echo "### Verified Behaviors:"
              echo "- Requests within limit return HTTP 200"
              echo "- Requests exceeding limit return HTTP 429"
              echo "- Retry-After header present on blocked responses"
              echo "- Rate limit window resets correctly"
            else
              echo "❌ **Rate limit tests failed**"
              echo ""
              echo "### Failure Details:"
              echo '```'
              grep -E "✗|FAIL|Error|failed|expected" ratelimit-test-output.txt | head -30 || echo "See logs for details"
              echo '```'
            fi
          } >> "$GITHUB_STEP_SUMMARY"

      # Best-effort teardown of containers, volumes, and the test network.
      - name: Cleanup
        if: always()
        run: |
          docker rm -f charon-ratelimit-test || true
          docker rm -f ratelimit-backend || true
          docker volume rm charon_ratelimit_data caddy_ratelimit_data caddy_ratelimit_config 2>/dev/null || true
          docker network rm containers_default || true

View File

@@ -1,84 +0,0 @@
name: Release (GoReleaser)

# Builds and publishes release artifacts when a final release tag (v*) is pushed.
on:
  push:
    tags:
      - 'v*'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: false

env:
  GO_VERSION: '1.26.0'
  NODE_VERSION: '24.12.0'
  GOTOOLCHAIN: auto

permissions:
  contents: write
  packages: write

jobs:
  goreleaser:
    # Skip pre-release tags (release candidates); only final tags are released.
    if: ${{ !contains(github.ref_name, '-candidate') && !contains(github.ref_name, '-rc') }}
    runs-on: ubuntu-latest
    env:
      # Use the built-in GITHUB_TOKEN by default for GitHub API operations.
      # If you need to provide a PAT with elevated permissions, add a GITHUB_TOKEN secret
      # at the repo or organization level and update the env here accordingly.
      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    steps:
      - name: Checkout
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          # Full history so GoReleaser can compute the changelog between tags.
          fetch-depth: 0
      # Hard gate: releases are blocked until the repository variable
      # CHARON_PR2_GATES_PASSED is set to "true".
      - name: Enforce PR-2 release promotion guard
        env:
          REPO_VARS_JSON: ${{ toJSON(vars) }}
        run: |
          PR2_GATE_STATUS="$(printf '%s' "$REPO_VARS_JSON" | jq -r '.CHARON_PR2_GATES_PASSED // "false"')"
          if [[ "$PR2_GATE_STATUS" != "true" ]]; then
            echo "::error::Releasable tag promotion is blocked until PR-2 security/retirement gates pass."
            echo "::error::Set repository variable CHARON_PR2_GATES_PASSED=true only after PR-2 approval."
            exit 1
          fi
      - name: Set up Go
        uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
        with:
          go-version: ${{ env.GO_VERSION }}
          cache-dependency-path: backend/go.sum
      - name: Set up Node.js
        uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6
        with:
          node-version: ${{ env.NODE_VERSION }}
      - name: Build Frontend
        working-directory: frontend
        run: |
          # Inject version into frontend build from tag (if present)
          VERSION=${GITHUB_REF#refs/tags/}
          echo "VITE_APP_VERSION=${VERSION}" >> "$GITHUB_ENV"
          npm ci
          npm run build
      - name: Install Cross-Compilation Tools (Zig)
        # Security: Pinned to full SHA for supply chain security
        uses: goto-bus-stop/setup-zig@abea47f85e598557f500fa1fd2ab7464fcb39406 # v2
        with:
          version: 0.13.0
      # NOTE(review): an earlier revision chose between GITHUB_TOKEN and a
      # CHARON_TOKEN fallback; today only secrets.GITHUB_TOKEN is used (set at
      # the job level and again explicitly below). The old comment claiming a
      # CHARON_TOKEN fallback was stale.
      - name: Run GoReleaser
        uses: goreleaser/goreleaser-action@ec59f474b9834571250b370d4735c50f8e2d1e29 # v7
        with:
          distribution: goreleaser
          version: '~> v2.5'
          args: release --clean
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          # CGO settings are handled in .goreleaser.yaml via Zig

View File

@@ -1,33 +0,0 @@
name: Renovate

# Self-hosted Renovate run: opens/updates dependency PRs on a daily schedule
# or on demand.
on:
  schedule:
    - cron: '0 5 * * *' # daily 05:00 UTC
  workflow_dispatch:

# Only one Renovate run at a time; a newly queued run waits rather than
# cancelling an in-flight one.
concurrency:
  group: ${{ github.workflow }}
  cancel-in-progress: false

permissions:
  contents: write
  pull-requests: write
  issues: write

jobs:
  renovate:
    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:
      - name: Checkout repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          # Shallow clone is enough: Renovate reads the config file, not history.
          fetch-depth: 1
      - name: Run Renovate
        uses: renovatebot/github-action@7b4b65bf31e07d4e3e51708d07700fb41bc03166 # v46.1.3
        with:
          configurationFile: .github/renovate.json
          # Prefer a PAT (RENOVATE_TOKEN) if configured — presumably so
          # Renovate-authored PRs can trigger other workflows; fall back to the
          # built-in token otherwise. TODO confirm the PAT's intended scopes.
          token: ${{ secrets.RENOVATE_TOKEN || secrets.GITHUB_TOKEN }}
        env:
          # NOTE(review): debug logging is very verbose for a daily scheduled
          # run; consider "info" once the setup is stable.
          LOG_LEVEL: debug

View File

@@ -1,101 +0,0 @@
name: "Prune Renovate Branches"

# Deletes stale renovate/* branches that have no open pull request.
on:
  workflow_dispatch:
  schedule:
    - cron: '0 3 * * *' # daily at 03:00 UTC

permissions:
  contents: write # required to delete branch refs
  pull-requests: read

jobs:
  prune:
    runs-on: ubuntu-latest
    concurrency:
      group: prune-renovate-branches
      cancel-in-progress: true
    env:
      BRANCH_PREFIX: "renovate/" # adjust if you use a different prefix
    steps:
      # The former "Choose GitHub Token" step was dead code: secrets.GITHUB_TOKEN
      # is always provided by Actions, so the CHARON_TOKEN fallback branch could
      # never run — and exporting the token via GITHUB_ENV needlessly persisted
      # a credential in the job environment. Pass the token directly instead.
      - name: Prune renovate branches
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const owner = context.repo.owner;
            const repo = context.repo.repo;
            const branchPrefix = (process.env.BRANCH_PREFIX || 'renovate/').replace(/^refs\/heads\//, '');
            const refPrefix = `heads/${branchPrefix}`; // e.g. "heads/renovate/"
            core.info(`Searching for refs with prefix: ${refPrefix}`);
            // Paginate so repos with more than one page (30) of renovate
            // branches are fully pruned; the previous single listMatchingRefs
            // call silently capped the run at the first page.
            let refs = [];
            try {
              refs = await github.paginate(github.rest.git.listMatchingRefs, {
                owner,
                repo,
                ref: refPrefix
              });
            } catch (err) {
              core.info(`No matching refs or API error: ${err.message}`);
            }
            for (const r of refs) {
              const fullRef = r.ref; // "refs/heads/renovate/..."
              const branchName = fullRef.replace('refs/heads/', '');
              core.info(`Evaluating branch: ${branchName}`);
              // Find PRs for this branch (head = "owner:branch")
              const prs = await github.rest.pulls.list({
                owner,
                repo,
                head: `${owner}:${branchName}`,
                state: 'all',
                per_page: 100
              });
              let shouldDelete = false;
              if (!prs.data || prs.data.length === 0) {
                core.info(`No PRs found for ${branchName} — marking for deletion.`);
                shouldDelete = true;
              } else {
                // If none of the PRs are open, safe to delete
                const hasOpen = prs.data.some(p => p.state === 'open');
                if (!hasOpen) {
                  core.info(`All PRs for ${branchName} are closed — marking for deletion.`);
                  shouldDelete = true;
                } else {
                  core.info(`Open PR(s) exist for ${branchName} — skipping deletion.`);
                }
              }
              if (shouldDelete) {
                try {
                  await github.rest.git.deleteRef({
                    owner,
                    repo,
                    ref: `heads/${branchName}`
                  });
                  core.info(`Deleted branch: ${branchName}`);
                } catch (delErr) {
                  core.warning(`Failed to delete ${branchName}: ${delErr.message}`);
                }
              }
            }
      - name: Done
        run: echo "Prune run completed."

View File

@@ -1,41 +0,0 @@
name: Repo Health Check

# Daily repository hygiene scan (large files / LFS usage) via
# scripts/repo_health_check.sh.
on:
  schedule:
    - cron: '0 0 * * *' # daily at 00:00 UTC
  workflow_dispatch: {}

concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.head_ref || github.ref_name }}
  cancel-in-progress: true

jobs:
  repo_health:
    name: Repo health
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
        with:
          # Full history plus LFS so the health script can inspect all objects.
          fetch-depth: 0
          lfs: true
      - name: Set up Git
        run: |
          git --version
          git lfs install --local || true
      - name: Run repo health check
        env:
          # Size thresholds consumed by scripts/repo_health_check.sh —
          # presumably MB limits for plain blobs and LFS-tracked objects;
          # confirm against the script.
          MAX_MB: 100
          LFS_ALLOW_MB: 50
        run: |
          bash scripts/repo_health_check.sh
      - name: Upload health output
        if: always()
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
        with:
          name: repo-health-output
          path: |
            /tmp/repo_big_files.txt

View File

@@ -1,443 +0,0 @@
# Security Scan for Pull Requests
# Runs Trivy security scanning on PR Docker images after the build workflow completes
# This workflow extracts the charon binary from the container and performs filesystem scanning
name: Security Scan (PR)
on:
workflow_run:
workflows: ["Docker Build, Publish & Test"]
types: [completed]
workflow_dispatch:
inputs:
pr_number:
description: 'PR number to scan'
required: true
type: string
pull_request:
push:
branches: [main]
concurrency:
group: security-pr-${{ github.event_name == 'workflow_run' && github.event.workflow_run.event || github.event_name }}-${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref }}
cancel-in-progress: true
jobs:
security-scan:
name: Trivy Binary Scan
runs-on: ubuntu-latest
timeout-minutes: 10
# Run for manual dispatch, direct PR/push, or successful upstream workflow_run
if: >-
github.event_name == 'workflow_dispatch' ||
github.event_name == 'pull_request' ||
github.event_name == 'push' ||
(github.event_name == 'workflow_run' &&
github.event.workflow_run.event == 'pull_request' &&
github.event.workflow_run.status == 'completed' &&
github.event.workflow_run.conclusion == 'success')
permissions:
contents: read
security-events: write
actions: read
steps:
- name: Checkout repository
# actions/checkout v4.2.2
uses: actions/checkout@0c366fd6a839edf440554fa01a7085ccba70ac98
with:
ref: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }}
- name: Extract PR number from workflow_run
id: pr-info
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
if [[ "${{ github.event_name }}" == "push" ]]; then
echo "pr_number=" >> "$GITHUB_OUTPUT"
echo "is_push=true" >> "$GITHUB_OUTPUT"
echo "✅ Push event detected; using local image path"
exit 0
fi
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
echo "pr_number=${{ github.event.pull_request.number }}" >> "$GITHUB_OUTPUT"
echo "is_push=false" >> "$GITHUB_OUTPUT"
echo "✅ Pull request event detected: PR #${{ github.event.pull_request.number }}"
exit 0
fi
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
INPUT_PR_NUMBER="${{ inputs.pr_number }}"
if [[ -z "${INPUT_PR_NUMBER}" ]]; then
echo "❌ workflow_dispatch requires inputs.pr_number"
exit 1
fi
if [[ ! "${INPUT_PR_NUMBER}" =~ ^[0-9]+$ ]]; then
echo "❌ reason_category=invalid_input"
echo "reason=workflow_dispatch pr_number must be digits-only"
exit 1
fi
PR_NUMBER="${INPUT_PR_NUMBER}"
echo "pr_number=${PR_NUMBER}" >> "$GITHUB_OUTPUT"
echo "is_push=false" >> "$GITHUB_OUTPUT"
echo "✅ Using manually provided PR number: ${PR_NUMBER}"
exit 0
fi
if [[ "${{ github.event_name }}" == "workflow_run" ]]; then
if [[ "${{ github.event.workflow_run.event }}" != "pull_request" ]]; then
# Explicit contract validation happens in the dedicated guard step.
echo "pr_number=" >> "$GITHUB_OUTPUT"
echo "is_push=false" >> "$GITHUB_OUTPUT"
exit 0
fi
if [[ -n "${{ github.event.workflow_run.pull_requests[0].number || '' }}" ]]; then
echo "pr_number=${{ github.event.workflow_run.pull_requests[0].number }}" >> "$GITHUB_OUTPUT"
echo "is_push=false" >> "$GITHUB_OUTPUT"
echo "✅ Found PR number from workflow_run payload: ${{ github.event.workflow_run.pull_requests[0].number }}"
exit 0
fi
fi
# Extract PR number from context
HEAD_SHA="${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.event.pull_request.head.sha || github.sha }}"
echo "🔍 Looking for PR with head SHA: ${HEAD_SHA}"
# Query GitHub API for PR associated with this commit
PR_NUMBER=$(gh api \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
"/repos/${{ github.repository }}/commits/${HEAD_SHA}/pulls" \
--jq '.[0].number // empty' 2>/dev/null || echo "")
if [[ -n "${PR_NUMBER}" ]]; then
echo "pr_number=${PR_NUMBER}" >> "$GITHUB_OUTPUT"
echo "is_push=false" >> "$GITHUB_OUTPUT"
echo "✅ Found PR number: ${PR_NUMBER}"
else
echo "❌ Could not determine PR number for workflow_run SHA: ${HEAD_SHA}"
exit 1
fi
- name: Validate workflow_run trust boundary and event contract
if: github.event_name == 'workflow_run'
run: |
if [[ "${{ github.event.workflow_run.name }}" != "Docker Build, Publish & Test" ]]; then
echo "❌ reason_category=unexpected_upstream_workflow"
echo "workflow_name=${{ github.event.workflow_run.name }}"
exit 1
fi
if [[ "${{ github.event.workflow_run.event }}" != "pull_request" ]]; then
echo "❌ reason_category=unsupported_upstream_event"
echo "upstream_event=${{ github.event.workflow_run.event }}"
echo "run_id=${{ github.event.workflow_run.id }}"
exit 1
fi
if [[ "${{ github.event.workflow_run.head_repository.full_name }}" != "${{ github.repository }}" ]]; then
echo "❌ reason_category=untrusted_upstream_repository"
echo "upstream_head_repository=${{ github.event.workflow_run.head_repository.full_name }}"
echo "expected_repository=${{ github.repository }}"
exit 1
fi
echo "✅ workflow_run trust boundary and event contract validated"
- name: Build Docker image (Local)
if: github.event_name == 'push' || github.event_name == 'pull_request'
run: |
echo "Building image locally for security scan..."
docker build -t charon:local .
echo "✅ Successfully built charon:local"
- name: Check for PR image artifact
id: check-artifact
if: github.event_name == 'workflow_run' || github.event_name == 'workflow_dispatch'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PR_NUMBER="${{ steps.pr-info.outputs.pr_number }}"
if [[ ! "${PR_NUMBER}" =~ ^[0-9]+$ ]]; then
echo "❌ reason_category=invalid_input"
echo "reason=Resolved PR number must be digits-only"
exit 1
fi
ARTIFACT_NAME="pr-image-${PR_NUMBER}"
RUN_ID="${{ github.event_name == 'workflow_run' && github.event.workflow_run.id || '' }}"
echo "🔍 Checking for artifact: ${ARTIFACT_NAME}"
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
# Manual replay path: find latest successful docker-build pull_request run for this PR.
RUNS_JSON=$(gh api \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
"/repos/${{ github.repository }}/actions/workflows/docker-build.yml/runs?event=pull_request&status=success&per_page=100" 2>&1)
RUNS_STATUS=$?
if [[ ${RUNS_STATUS} -ne 0 ]]; then
echo "❌ reason_category=api_error"
echo "reason=Failed to query workflow runs for PR lookup"
echo "upstream_run_id=unknown"
echo "artifact_name=${ARTIFACT_NAME}"
echo "api_output=${RUNS_JSON}"
exit 1
fi
RUN_ID=$(printf '%s' "${RUNS_JSON}" | jq -r --argjson pr "${PR_NUMBER}" '.workflow_runs[] | select((.pull_requests // []) | any(.number == $pr)) | .id' | head -n 1)
if [[ -z "${RUN_ID}" ]]; then
echo "❌ reason_category=not_found"
echo "reason=No successful docker-build pull_request run found for PR #${PR_NUMBER}"
echo "upstream_run_id=unknown"
echo "artifact_name=${ARTIFACT_NAME}"
exit 1
fi
fi
echo "run_id=${RUN_ID}" >> "$GITHUB_OUTPUT"
# Check if the artifact exists in the workflow run
ARTIFACTS_JSON=$(gh api \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
"/repos/${{ github.repository }}/actions/runs/${RUN_ID}/artifacts" 2>&1)
ARTIFACTS_STATUS=$?
if [[ ${ARTIFACTS_STATUS} -ne 0 ]]; then
echo "❌ reason_category=api_error"
echo "reason=Failed to query artifacts for upstream run"
echo "upstream_run_id=${RUN_ID}"
echo "artifact_name=${ARTIFACT_NAME}"
echo "api_output=${ARTIFACTS_JSON}"
exit 1
fi
ARTIFACT_ID=$(printf '%s' "${ARTIFACTS_JSON}" | jq -r --arg name "${ARTIFACT_NAME}" '.artifacts[] | select(.name == $name) | .id' | head -n 1)
if [[ -z "${ARTIFACT_ID}" ]]; then
echo "❌ reason_category=not_found"
echo "reason=Required artifact was not found"
echo "upstream_run_id=${RUN_ID}"
echo "artifact_name=${ARTIFACT_NAME}"
exit 1
fi
{
echo "artifact_exists=true"
echo "artifact_id=${ARTIFACT_ID}"
echo "artifact_name=${ARTIFACT_NAME}"
} >> "$GITHUB_OUTPUT"
echo "✅ Found artifact: ${ARTIFACT_NAME} (ID: ${ARTIFACT_ID})"
- name: Download PR image artifact
if: github.event_name == 'workflow_run' || github.event_name == 'workflow_dispatch'
# actions/download-artifact v4.1.8
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3
with:
name: ${{ steps.check-artifact.outputs.artifact_name }}
run-id: ${{ steps.check-artifact.outputs.run_id }}
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Load Docker image
if: github.event_name == 'workflow_run' || github.event_name == 'workflow_dispatch'
id: load-image
run: |
echo "📦 Loading Docker image..."
if [[ ! -r "charon-pr-image.tar" ]]; then
echo "❌ ERROR: Artifact image tar is missing or unreadable"
exit 1
fi
MANIFEST_TAGS=""
if tar -tf charon-pr-image.tar | grep -qx "manifest.json"; then
MANIFEST_TAGS=$(tar -xOf charon-pr-image.tar manifest.json 2>/dev/null | jq -r '.[]?.RepoTags[]?' 2>/dev/null | sed '/^$/d' || true)
else
echo "⚠️ manifest.json not found in artifact tar; will try docker-load-image-id fallback"
fi
LOAD_OUTPUT=$(docker load < charon-pr-image.tar 2>&1)
echo "${LOAD_OUTPUT}"
SOURCE_IMAGE_REF=""
SOURCE_RESOLUTION_MODE=""
while IFS= read -r tag; do
[[ -z "${tag}" ]] && continue
if docker image inspect "${tag}" >/dev/null 2>&1; then
SOURCE_IMAGE_REF="${tag}"
SOURCE_RESOLUTION_MODE="manifest_tag"
break
fi
done <<< "${MANIFEST_TAGS}"
if [[ -z "${SOURCE_IMAGE_REF}" ]]; then
LOAD_IMAGE_ID=$(printf '%s\n' "${LOAD_OUTPUT}" | sed -nE 's/^Loaded image ID: (sha256:[0-9a-f]+)$/\1/p' | head -n1)
if [[ -n "${LOAD_IMAGE_ID}" ]] && docker image inspect "${LOAD_IMAGE_ID}" >/dev/null 2>&1; then
SOURCE_IMAGE_REF="${LOAD_IMAGE_ID}"
SOURCE_RESOLUTION_MODE="load_image_id"
fi
fi
if [[ -z "${SOURCE_IMAGE_REF}" ]]; then
echo "❌ ERROR: Could not resolve a valid image reference from manifest tags or docker load image ID"
exit 1
fi
docker tag "${SOURCE_IMAGE_REF}" "charon:artifact"
{
echo "source_image_ref=${SOURCE_IMAGE_REF}"
echo "source_resolution_mode=${SOURCE_RESOLUTION_MODE}"
echo "image_ref=charon:artifact"
} >> "$GITHUB_OUTPUT"
echo "✅ Docker image resolved via ${SOURCE_RESOLUTION_MODE} and tagged as charon:artifact"
docker images | grep charon
- name: Extract charon binary from container
if: steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request'
id: extract
run: |
# Use local image for Push/PR events
if [[ "${{ github.event_name }}" == "push" || "${{ github.event_name }}" == "pull_request" ]]; then
echo "Using local image: charon:local"
CONTAINER_ID=$(docker create "charon:local")
echo "container_id=${CONTAINER_ID}" >> "$GITHUB_OUTPUT"
# Extract the charon binary
mkdir -p ./scan-target
docker cp "${CONTAINER_ID}:/app/charon" ./scan-target/charon
docker rm "${CONTAINER_ID}"
if [[ -f "./scan-target/charon" ]]; then
echo "✅ Binary extracted successfully"
ls -lh ./scan-target/charon
echo "binary_path=./scan-target" >> "$GITHUB_OUTPUT"
else
echo "❌ Failed to extract binary"
exit 1
fi
exit 0
fi
# For workflow_run artifact path, always use locally tagged image from loaded artifact.
IMAGE_REF="${{ steps.load-image.outputs.image_ref }}"
if [[ -z "${IMAGE_REF}" ]]; then
echo "❌ ERROR: Loaded artifact image reference is empty"
exit 1
fi
echo "🔍 Extracting binary from: ${IMAGE_REF}"
# Create container without starting it
CONTAINER_ID=$(docker create "${IMAGE_REF}")
echo "container_id=${CONTAINER_ID}" >> "$GITHUB_OUTPUT"
# Extract the charon binary
mkdir -p ./scan-target
docker cp "${CONTAINER_ID}:/app/charon" ./scan-target/charon
# Cleanup container
docker rm "${CONTAINER_ID}"
# Verify extraction
if [[ -f "./scan-target/charon" ]]; then
echo "✅ Binary extracted successfully"
ls -lh ./scan-target/charon
echo "binary_path=./scan-target" >> "$GITHUB_OUTPUT"
else
echo "❌ Failed to extract binary"
exit 1
fi
- name: Run Trivy filesystem scan (SARIF output)
if: steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request'
# aquasecurity/trivy-action v0.33.1
uses: aquasecurity/trivy-action@57a97c7e7821a5776cebc9bb87c984fa69cba8f1
with:
scan-type: 'fs'
scan-ref: ${{ steps.extract.outputs.binary_path }}
format: 'sarif'
output: 'trivy-binary-results.sarif'
severity: 'CRITICAL,HIGH,MEDIUM'
continue-on-error: true
- name: Check Trivy SARIF output exists
if: always() && (steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request')
id: trivy-sarif-check
run: |
if [[ -f trivy-binary-results.sarif ]]; then
echo "exists=true" >> "$GITHUB_OUTPUT"
else
echo "exists=false" >> "$GITHUB_OUTPUT"
echo " No Trivy SARIF output found; skipping SARIF/artifact upload steps"
fi
- name: Upload Trivy SARIF to GitHub Security
if: always() && steps.trivy-sarif-check.outputs.exists == 'true'
# github/codeql-action v4
uses: github/codeql-action/upload-sarif@a5b959e10d29aec4f277040b4d27d0f6bea2322a
with:
sarif_file: 'trivy-binary-results.sarif'
category: ${{ steps.pr-info.outputs.is_push == 'true' && format('security-scan-{0}', github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref_name) || format('security-scan-pr-{0}', steps.pr-info.outputs.pr_number) }}
continue-on-error: true
- name: Run Trivy filesystem scan (fail on CRITICAL/HIGH)
if: steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request'
# aquasecurity/trivy-action v0.33.1
uses: aquasecurity/trivy-action@57a97c7e7821a5776cebc9bb87c984fa69cba8f1
with:
scan-type: 'fs'
scan-ref: ${{ steps.extract.outputs.binary_path }}
format: 'table'
severity: 'CRITICAL,HIGH'
exit-code: '1'
- name: Upload scan artifacts
if: always() && steps.trivy-sarif-check.outputs.exists == 'true'
# actions/upload-artifact v4.4.3
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
with:
name: ${{ steps.pr-info.outputs.is_push == 'true' && format('security-scan-{0}', github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref_name) || format('security-scan-pr-{0}', steps.pr-info.outputs.pr_number) }}
path: |
trivy-binary-results.sarif
retention-days: 14
- name: Create job summary
if: always() && (steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request')
run: |
{
if [[ "${{ steps.pr-info.outputs.is_push }}" == "true" ]]; then
echo "## 🔒 Security Scan Results - Branch: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref_name }}"
else
echo "## 🔒 Security Scan Results - PR #${{ steps.pr-info.outputs.pr_number }}"
fi
echo ""
echo "**Scan Type**: Trivy Filesystem Scan"
echo "**Target**: \`/app/charon\` binary"
echo "**Severity Filter**: CRITICAL, HIGH"
echo ""
if [[ "${{ job.status }}" == "success" ]]; then
echo "✅ **PASSED**: No CRITICAL or HIGH vulnerabilities found"
else
echo "❌ **FAILED**: CRITICAL or HIGH vulnerabilities detected"
echo ""
echo "Please review the Trivy scan output and address the vulnerabilities."
fi
} >> "$GITHUB_STEP_SUMMARY"
- name: Cleanup
if: always() && steps.check-artifact.outputs.artifact_exists == 'true'
run: |
echo "🧹 Cleaning up..."
rm -rf ./scan-target
echo "✅ Cleanup complete"

View File

@@ -1,165 +0,0 @@
name: Weekly Security Rebuild
# Note: This workflow filename has remained consistent. The related docker-publish.yml
# was replaced by docker-build.yml in commit f640524b (Dec 21, 2025).
# GitHub Advanced Security may show warnings about the old filename until its tracking updates.
on:
schedule:
- cron: '0 12 * * 2' # Tuesdays at 12:00 UTC
workflow_dispatch:
inputs:
force_rebuild:
description: 'Force rebuild without cache'
required: false
type: boolean
default: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: false
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository_owner }}/charon
jobs:
security-rebuild:
name: Security Rebuild & Scan
runs-on: ubuntu-latest
timeout-minutes: 60
permissions:
contents: read
packages: write
security-events: write
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
# Explicitly fetch the current HEAD of the ref at run time, not the
# SHA that was frozen when this scheduled job was queued. Without this,
# a queued job can run days later with stale code.
ref: ${{ github.ref_name }}
- name: Normalize image name
run: |
echo "IMAGE_NAME=$(echo "${{ env.IMAGE_NAME }}" | tr '[:upper:]' '[:lower:]')" >> "$GITHUB_ENV"
- name: Set up QEMU
uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- name: Resolve Debian base image digest
id: base-image
run: |
docker pull debian:trixie-slim
DIGEST=$(docker inspect --format='{{index .RepoDigests 0}}' debian:trixie-slim)
echo "digest=$DIGEST" >> "$GITHUB_OUTPUT"
echo "Base image digest: $DIGEST"
- name: Log in to Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=raw,value=security-scan-{{date 'YYYYMMDD'}}
- name: Build Docker image (NO CACHE)
id: build
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6
with:
context: .
platforms: linux/amd64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
no-cache: ${{ github.event_name == 'schedule' || inputs.force_rebuild }}
pull: true # Always pull fresh base images to get latest security patches
build-args: |
VERSION=security-scan
BUILD_DATE=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.created'] }}
VCS_REF=${{ github.sha }}
BASE_IMAGE=${{ steps.base-image.outputs.digest }}
      - name: Run Trivy vulnerability scanner (CRITICAL+HIGH)
        uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
        with:
          image-ref: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build.outputs.digest }}
          format: 'table'
          severity: 'CRITICAL,HIGH'
          exit-code: '1' # Fail workflow if vulnerabilities found
        # NOTE(review): `continue-on-error: true` neutralizes the exit-code gate
        # above — scan findings never fail the job, so the `if: failure()`
        # notify step at the end of this job cannot fire on findings alone.
        # Confirm intent: if this step should gate the job, drop
        # continue-on-error and mark the downstream reporting steps
        # `if: always()` instead.
        continue-on-error: true
- name: Run Trivy vulnerability scanner (SARIF)
id: trivy-sarif
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
with:
image-ref: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build.outputs.digest }}
format: 'sarif'
output: 'trivy-weekly-results.sarif'
severity: 'CRITICAL,HIGH,MEDIUM'
- name: Upload Trivy results to GitHub Security
uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4.32.5
with:
sarif_file: 'trivy-weekly-results.sarif'
- name: Run Trivy vulnerability scanner (JSON for artifact)
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
with:
image-ref: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build.outputs.digest }}
format: 'json'
output: 'trivy-weekly-results.json'
severity: 'CRITICAL,HIGH,MEDIUM,LOW'
- name: Upload Trivy JSON results
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
with:
name: trivy-weekly-scan-${{ github.run_number }}
path: trivy-weekly-results.json
retention-days: 90
- name: Check Debian package versions
run: |
{
echo "## 📦 Installed Package Versions"
echo ""
echo "Checking key security packages:"
echo '```'
docker run --rm --entrypoint "" "${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build.outputs.digest }}" \
sh -c "dpkg -l | grep -E 'libc-ares|curl|libcurl|openssl|libssl' || echo 'No matching packages found'"
echo '```'
} >> "$GITHUB_STEP_SUMMARY"
- name: Create security scan summary
if: always()
run: |
{
echo "## 🔒 Weekly Security Rebuild Complete"
echo ""
echo "- **Build Date:** $(date -u +"%Y-%m-%d %H:%M:%S UTC")"
echo "- **Image:** ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build.outputs.digest }}"
echo "- **Cache Used:** No (forced fresh build)"
echo "- **Trivy Scan:** Completed (see Security tab for details)"
echo ""
echo "### Next Steps:"
echo "1. Review Security tab for new vulnerabilities"
echo "2. Check Trivy JSON artifact for detailed package info"
echo "3. If critical CVEs found, trigger production rebuild"
} >> "$GITHUB_STEP_SUMMARY"
- name: Notify on security issues (optional)
if: failure()
run: |
echo "::warning::Weekly security scan found HIGH or CRITICAL vulnerabilities. Review the Security tab."

View File

@@ -1,477 +0,0 @@
# yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json
---
name: Supply Chain Verification (PR)
on:
  workflow_dispatch:
    inputs:
      pr_number:
        description: "PR number to verify (optional, will auto-detect from workflow_run)"
        required: false
        type: string
  pull_request:
  push:
    branches:
      - main
# NOTE(review): expressions below reference github.event.workflow_run, but this
# workflow declares no workflow_run trigger, so those references always resolve
# empty and fall through to the || fallbacks. Confirm whether a workflow_run
# trigger was intended (several step conditions also test for it).
concurrency:
  group: supply-chain-pr-${{ github.event.workflow_run.event || github.event_name }}-${{ github.event.workflow_run.head_branch || github.ref }}
  cancel-in-progress: true
permissions:
  contents: read
  pull-requests: write
  security-events: write
  actions: read
jobs:
  verify-supply-chain:
    name: Verify Supply Chain
    runs-on: ubuntu-latest
    timeout-minutes: 15
    # Run for: manual dispatch, or successful workflow_run triggered by push/PR
    if: >
      github.event_name == 'workflow_dispatch' ||
      github.event_name == 'pull_request' ||
      (github.event_name == 'workflow_run' &&
      (github.event.workflow_run.event == 'push' || github.event.workflow_run.pull_requests[0].number != null) &&
      (github.event.workflow_run.status != 'completed' || github.event.workflow_run.conclusion == 'success'))
    steps:
      - name: Checkout repository
        # actions/checkout v4.2.2
        uses: actions/checkout@0c366fd6a839edf440554fa01a7085ccba70ac98
      # Resolve the PR number: a manual input wins; otherwise look up an open PR
      # by head branch, then fall back to a lookup by commit SHA. Also emits
      # is_push=true for non-PR builds so later steps pick artifact names.
      - name: Extract PR number from workflow_run
        id: pr-number
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          INPUT_PR_NUMBER: ${{ inputs.pr_number }}
          EVENT_NAME: ${{ github.event_name }}
          HEAD_SHA: ${{ github.event.workflow_run.head_sha || github.event.pull_request.head.sha || github.sha }}
          HEAD_BRANCH: ${{ github.event.workflow_run.head_branch || github.head_ref || github.ref_name }}
          WORKFLOW_RUN_EVENT: ${{ github.event.workflow_run.event }}
          REPO_OWNER: ${{ github.repository_owner }}
          REPO_NAME: ${{ github.repository }}
        run: |
          if [[ -n "${INPUT_PR_NUMBER}" ]]; then
            echo "pr_number=${INPUT_PR_NUMBER}" >> "$GITHUB_OUTPUT"
            echo "📋 Using manually provided PR number: ${INPUT_PR_NUMBER}"
            exit 0
          fi
          if [[ "${EVENT_NAME}" != "workflow_run" && "${EVENT_NAME}" != "push" && "${EVENT_NAME}" != "pull_request" ]]; then
            echo "❌ No PR number provided and not triggered by workflow_run/push/pr"
            echo "pr_number=" >> "$GITHUB_OUTPUT"
            exit 0
          fi
          echo "🔍 Looking for PR with head SHA: ${HEAD_SHA}"
          echo "🔍 Head branch: ${HEAD_BRANCH}"
          # Search for PR by head SHA
          PR_NUMBER=$(gh api \
            -H "Accept: application/vnd.github+json" \
            -H "X-GitHub-Api-Version: 2022-11-28" \
            "/repos/${REPO_NAME}/pulls?state=open&head=${REPO_OWNER}:${HEAD_BRANCH}" \
            --jq '.[0].number // empty' 2>/dev/null || echo "")
          if [[ -z "${PR_NUMBER}" ]]; then
            # Fallback: search by commit SHA
            PR_NUMBER=$(gh api \
              -H "Accept: application/vnd.github+json" \
              -H "X-GitHub-Api-Version: 2022-11-28" \
              "/repos/${REPO_NAME}/commits/${HEAD_SHA}/pulls" \
              --jq '.[0].number // empty' 2>/dev/null || echo "")
          fi
          if [[ -z "${PR_NUMBER}" ]]; then
            echo "⚠️ Could not find PR number for this workflow run"
            echo "pr_number=" >> "$GITHUB_OUTPUT"
          else
            echo "pr_number=${PR_NUMBER}" >> "$GITHUB_OUTPUT"
            echo "✅ Found PR number: ${PR_NUMBER}"
          fi
          # Check if this is a push event (not a PR)
          if [[ "${WORKFLOW_RUN_EVENT}" == "push" || "${EVENT_NAME}" == "push" || -z "${PR_NUMBER}" ]]; then
            echo "is_push=true" >> "$GITHUB_OUTPUT"
            echo "✅ Detected push build from branch: ${HEAD_BRANCH}"
          else
            echo "is_push=false" >> "$GITHUB_OUTPUT"
          fi
      # Branch names may contain '/', which is invalid in artifact/image refs.
      - name: Sanitize branch name
        id: sanitize
        env:
          BRANCH_NAME: ${{ github.event.workflow_run.head_branch || github.head_ref || github.ref_name }}
        run: |
          # Sanitize branch name for use in artifact names
          # Replace / with - to avoid invalid reference format errors
          SANITIZED=$(echo "$BRANCH_NAME" | tr '/' '-')
          echo "branch=${SANITIZED}" >> "$GITHUB_OUTPUT"
          echo "📋 Sanitized branch name: ${BRANCH_NAME} -> ${SANITIZED}"
      # Locate the image tarball artifact produced by the docker-build workflow.
      # Lookup order: artifacts of the triggering run; a recent successful
      # docker-build run for the same SHA (with retries); finally a repo-wide
      # search by artifact name. Emits artifact_found / artifact_id / artifact_name.
      - name: Check for PR image artifact
        id: check-artifact
        if: github.event_name == 'workflow_run' && (steps.pr-number.outputs.pr_number != '' || steps.pr-number.outputs.is_push == 'true')
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          IS_PUSH: ${{ steps.pr-number.outputs.is_push }}
          PR_NUMBER: ${{ steps.pr-number.outputs.pr_number }}
          RUN_ID: ${{ github.event.workflow_run.id }}
          HEAD_SHA: ${{ github.event.workflow_run.head_sha || github.event.pull_request.head.sha || github.sha }}
          REPO_NAME: ${{ github.repository }}
        run: |
          # Determine artifact name based on event type
          if [[ "${IS_PUSH}" == "true" ]]; then
            ARTIFACT_NAME="push-image"
          else
            ARTIFACT_NAME="pr-image-${PR_NUMBER}"
          fi
          echo "🔍 Looking for artifact: ${ARTIFACT_NAME}"
          if [[ -n "${RUN_ID}" ]]; then
            # Search in the triggering workflow run
            ARTIFACT_ID=$(gh api \
              -H "Accept: application/vnd.github+json" \
              -H "X-GitHub-Api-Version: 2022-11-28" \
              "/repos/${REPO_NAME}/actions/runs/${RUN_ID}/artifacts" \
              --jq ".artifacts[] | select(.name == \"${ARTIFACT_NAME}\") | .id" 2>/dev/null || echo "")
          else
            # If RUN_ID is empty (push/pr trigger), try to find a recent successful run for this SHA
            echo "🔍 Searching for workflow run for SHA: ${HEAD_SHA}"
            # Retry a few times as the run might be just starting or finishing
            for i in {1..3}; do
              RUN_ID=$(gh api \
                -H "Accept: application/vnd.github+json" \
                -H "X-GitHub-Api-Version: 2022-11-28" \
                "/repos/${REPO_NAME}/actions/workflows/docker-build.yml/runs?head_sha=${HEAD_SHA}&status=success&per_page=1" \
                --jq '.workflow_runs[0].id // empty' 2>/dev/null || echo "")
              if [[ -n "${RUN_ID}" ]]; then
                echo "✅ Found Run ID: ${RUN_ID}"
                break
              fi
              echo "⏳ Waiting for workflow run to appear/complete... ($i/3)"
              sleep 5
            done
            if [[ -n "${RUN_ID}" ]]; then
              ARTIFACT_ID=$(gh api \
                -H "Accept: application/vnd.github+json" \
                -H "X-GitHub-Api-Version: 2022-11-28" \
                "/repos/${REPO_NAME}/actions/runs/${RUN_ID}/artifacts" \
                --jq ".artifacts[] | select(.name == \"${ARTIFACT_NAME}\") | .id" 2>/dev/null || echo "")
            fi
          fi
          if [[ -z "${ARTIFACT_ID}" ]]; then
            # Fallback for manual or missing info: search recent artifacts by name
            echo "🔍 Falling back to search by artifact name..."
            ARTIFACT_ID=$(gh api \
              -H "Accept: application/vnd.github+json" \
              -H "X-GitHub-Api-Version: 2022-11-28" \
              "/repos/${REPO_NAME}/actions/artifacts?name=${ARTIFACT_NAME}" \
              --jq '.artifacts[0].id // empty' 2>/dev/null || echo "")
          fi
          if [[ -z "${ARTIFACT_ID}" ]]; then
            echo "⚠️ No artifact found: ${ARTIFACT_NAME}"
            echo "artifact_found=false" >> "$GITHUB_OUTPUT"
            exit 0
          fi
          {
            echo "artifact_found=true"
            echo "artifact_id=${ARTIFACT_ID}"
            echo "artifact_name=${ARTIFACT_NAME}"
          } >> "$GITHUB_OUTPUT"
          echo "✅ Found artifact: ${ARTIFACT_NAME} (ID: ${ARTIFACT_ID})"
      # Graceful no-op when no image artifact exists for this run.
      - name: Skip if no artifact
        if: github.event_name == 'workflow_run' && ((steps.pr-number.outputs.pr_number == '' && steps.pr-number.outputs.is_push != 'true') || steps.check-artifact.outputs.artifact_found != 'true')
        run: |
          echo " No PR image artifact found - skipping supply chain verification"
          echo "This is expected if the Docker build did not produce an artifact for this PR"
          exit 0
      # Fetch the artifact ZIP via the REST API and extract the image tarball.
      - name: Download PR image artifact
        if: github.event_name == 'workflow_run' && steps.check-artifact.outputs.artifact_found == 'true'
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          ARTIFACT_ID: ${{ steps.check-artifact.outputs.artifact_id }}
          ARTIFACT_NAME: ${{ steps.check-artifact.outputs.artifact_name }}
          REPO_NAME: ${{ github.repository }}
        run: |
          echo "📦 Downloading artifact: ${ARTIFACT_NAME}"
          gh api \
            -H "Accept: application/vnd.github+json" \
            -H "X-GitHub-Api-Version: 2022-11-28" \
            "/repos/${REPO_NAME}/actions/artifacts/${ARTIFACT_ID}/zip" \
            > artifact.zip
          unzip -o artifact.zip
          echo "✅ Artifact downloaded and extracted"
      # Load the downloaded tarball into the local Docker daemon and recover the
      # image ref from `docker load` output (two output formats, then a fallback).
      - name: Load Docker image (Artifact)
        if: github.event_name == 'workflow_run' && steps.check-artifact.outputs.artifact_found == 'true'
        id: load-image-artifact
        run: |
          if [[ ! -f "charon-pr-image.tar" ]]; then
            echo "❌ charon-pr-image.tar not found in artifact"
            ls -la
            exit 1
          fi
          echo "🐳 Loading Docker image..."
          LOAD_OUTPUT=$(docker load -i charon-pr-image.tar)
          echo "${LOAD_OUTPUT}"
          # Extract image name from load output
          IMAGE_NAME=$(echo "${LOAD_OUTPUT}" | grep -oP 'Loaded image: \K.*' || echo "")
          if [[ -z "${IMAGE_NAME}" ]]; then
            # Try alternative format
            IMAGE_NAME=$(echo "${LOAD_OUTPUT}" | grep -oP 'Loaded image ID: \K.*' || echo "")
          fi
          if [[ -z "${IMAGE_NAME}" ]]; then
            # Fallback: list recent images
            IMAGE_NAME=$(docker images --format "{{.Repository}}:{{.Tag}}" | head -1)
          fi
          echo "image_name=${IMAGE_NAME}" >> "$GITHUB_OUTPUT"
          echo "✅ Loaded image: ${IMAGE_NAME}"
      # Direct pull_request/push/dispatch runs have no artifact; build from source.
      - name: Build Docker image (Local)
        if: github.event_name != 'workflow_run'
        id: build-image-local
        run: |
          echo "🐳 Building Docker image locally..."
          docker build -t charon:local .
          echo "image_name=charon:local" >> "$GITHUB_OUTPUT"
          echo "✅ Built image: charon:local"
      # Single source of truth for downstream steps: steps.set-target.outputs.image_name.
      - name: Set Target Image
        id: set-target
        run: |
          if [[ "${{ github.event_name }}" == "workflow_run" ]]; then
            echo "image_name=${{ steps.load-image-artifact.outputs.image_name }}" >> "$GITHUB_OUTPUT"
          else
            echo "image_name=${{ steps.build-image-local.outputs.image_name }}" >> "$GITHUB_OUTPUT"
          fi
      # Generate SBOM using official Anchore action (auto-updated by Renovate)
      - name: Generate SBOM
        if: steps.set-target.outputs.image_name != ''
        uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
        id: sbom
        with:
          image: ${{ steps.set-target.outputs.image_name }}
          format: cyclonedx-json
          output-file: sbom.cyclonedx.json
      # Component count feeds the PR comment later.
      - name: Count SBOM components
        if: steps.set-target.outputs.image_name != ''
        id: sbom-count
        run: |
          COMPONENT_COUNT=$(jq '.components | length' sbom.cyclonedx.json 2>/dev/null || echo "0")
          echo "component_count=${COMPONENT_COUNT}" >> "$GITHUB_OUTPUT"
          echo "✅ SBOM generated with ${COMPONENT_COUNT} components"
      # Scan for vulnerabilities using manual Grype installation (pinned to v0.107.1)
      - name: Install Grype
        if: steps.set-target.outputs.image_name != ''
        run: |
          curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin v0.107.1
      # Scan the SBOM (not the image) — JSON for processing, SARIF for upload.
      - name: Scan for vulnerabilities
        if: steps.set-target.outputs.image_name != ''
        id: grype-scan
        run: |
          echo "🔍 Scanning SBOM for vulnerabilities..."
          grype sbom:sbom.cyclonedx.json -o json > grype-results.json
          grype sbom:sbom.cyclonedx.json -o sarif > grype-results.sarif
      - name: Debug Output Files
        if: steps.set-target.outputs.image_name != ''
        run: |
          echo "📂 Listing workspace files:"
          ls -la
      # Tally findings by severity into step outputs for the gate and PR comment.
      - name: Process vulnerability results
        if: steps.set-target.outputs.image_name != ''
        id: vuln-summary
        run: |
          # Verify scan actually produced output
          if [[ ! -f "grype-results.json" ]]; then
            echo "❌ Error: grype-results.json not found!"
            echo "Available files:"
            ls -la
            exit 1
          fi
          # Debug content (head)
          echo "📄 Grype JSON Preview:"
          head -n 20 grype-results.json
          # Count vulnerabilities by severity - strict failing if file is missing (already checked above)
          CRITICAL_COUNT=$(jq '[.matches[] | select(.vulnerability.severity == "Critical")] | length' grype-results.json 2>/dev/null || echo "0")
          HIGH_COUNT=$(jq '[.matches[] | select(.vulnerability.severity == "High")] | length' grype-results.json 2>/dev/null || echo "0")
          MEDIUM_COUNT=$(jq '[.matches[] | select(.vulnerability.severity == "Medium")] | length' grype-results.json 2>/dev/null || echo "0")
          LOW_COUNT=$(jq '[.matches[] | select(.vulnerability.severity == "Low")] | length' grype-results.json 2>/dev/null || echo "0")
          TOTAL_COUNT=$(jq '.matches | length' grype-results.json 2>/dev/null || echo "0")
          {
            echo "critical_count=${CRITICAL_COUNT}"
            echo "high_count=${HIGH_COUNT}"
            echo "medium_count=${MEDIUM_COUNT}"
            echo "low_count=${LOW_COUNT}"
            echo "total_count=${TOTAL_COUNT}"
          } >> "$GITHUB_OUTPUT"
          echo "📊 Vulnerability Summary:"
          echo "  Critical: ${CRITICAL_COUNT}"
          echo "  High: ${HIGH_COUNT}"
          echo "  Medium: ${MEDIUM_COUNT}"
          echo "  Low: ${LOW_COUNT}"
          echo "  Total: ${TOTAL_COUNT}"
      # Document the enforcement policy in the step summary; mediums warn only.
      - name: Security severity policy summary
        if: steps.set-target.outputs.image_name != ''
        run: |
          CRITICAL_COUNT="${{ steps.vuln-summary.outputs.critical_count }}"
          HIGH_COUNT="${{ steps.vuln-summary.outputs.high_count }}"
          MEDIUM_COUNT="${{ steps.vuln-summary.outputs.medium_count }}"
          {
            echo "## 🔐 Supply Chain Severity Policy"
            echo ""
            echo "- Blocking: Critical, High"
            echo "- Medium: non-blocking by default (report + triage SLA)"
            echo "- Policy file: .github/security-severity-policy.yml"
            echo ""
            echo "Current scan counts: Critical=${CRITICAL_COUNT}, High=${HIGH_COUNT}, Medium=${MEDIUM_COUNT}"
          } >> "$GITHUB_STEP_SUMMARY"
          if [[ "${MEDIUM_COUNT}" -gt 0 ]]; then
            echo "::warning::${MEDIUM_COUNT} medium vulnerabilities found. Non-blocking by policy; create/maintain triage issue with SLA per .github/security-severity-policy.yml"
          fi
- name: Upload SARIF to GitHub Security
if: steps.check-artifact.outputs.artifact_found == 'true'
uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4
continue-on-error: true
with:
sarif_file: grype-results.sarif
category: supply-chain-pr
      # Archive the SBOM and raw Grype JSON; artifact name depends on push vs PR.
      - name: Upload supply chain artifacts
        if: steps.set-target.outputs.image_name != ''
        # actions/upload-artifact v4.6.0
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
        with:
          name: ${{ steps.pr-number.outputs.is_push == 'true' && format('supply-chain-{0}', steps.sanitize.outputs.branch) || format('supply-chain-pr-{0}', steps.pr-number.outputs.pr_number) }}
          path: |
            sbom.cyclonedx.json
            grype-results.json
          retention-days: 14
      # Post (or update in place) a single sticky results comment on the PR.
      - name: Comment on PR
        if: steps.set-target.outputs.image_name != '' && steps.pr-number.outputs.is_push != 'true' && steps.pr-number.outputs.pr_number != ''
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          PR_NUMBER="${{ steps.pr-number.outputs.pr_number }}"
          COMPONENT_COUNT="${{ steps.sbom-count.outputs.component_count }}"
          CRITICAL_COUNT="${{ steps.vuln-summary.outputs.critical_count }}"
          HIGH_COUNT="${{ steps.vuln-summary.outputs.high_count }}"
          MEDIUM_COUNT="${{ steps.vuln-summary.outputs.medium_count }}"
          LOW_COUNT="${{ steps.vuln-summary.outputs.low_count }}"
          TOTAL_COUNT="${{ steps.vuln-summary.outputs.total_count }}"
          # Determine status emoji
          if [[ "${CRITICAL_COUNT}" -gt 0 ]]; then
            STATUS="❌ **FAILED**"
            STATUS_EMOJI="🚨"
          elif [[ "${HIGH_COUNT}" -gt 0 ]]; then
            STATUS="⚠️ **WARNING**"
            STATUS_EMOJI="⚠️"
          else
            STATUS="✅ **PASSED**"
            STATUS_EMOJI="✅"
          fi
          COMMENT_BODY=$(cat <<EOF
          ## ${STATUS_EMOJI} Supply Chain Verification Results
          ${STATUS}
          ### 📦 SBOM Summary
          - **Components**: ${COMPONENT_COUNT}
          ### 🔍 Vulnerability Scan
          | Severity | Count |
          |----------|-------|
          | 🔴 Critical | ${CRITICAL_COUNT} |
          | 🟠 High | ${HIGH_COUNT} |
          | 🟡 Medium | ${MEDIUM_COUNT} |
          | 🟢 Low | ${LOW_COUNT} |
          | **Total** | **${TOTAL_COUNT}** |
          ### 📎 Artifacts
          - SBOM (CycloneDX JSON) and Grype results available in workflow artifacts
          ---
          <sub>Generated by Supply Chain Verification workflow • [View Details](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})</sub>
          EOF
          )
          # Find and update existing comment or create new one
          COMMENT_ID=$(gh api \
            -H "Accept: application/vnd.github+json" \
            -H "X-GitHub-Api-Version: 2022-11-28" \
            "/repos/${{ github.repository }}/issues/${PR_NUMBER}/comments" \
            --jq '.[] | select(.body | contains("Supply Chain Verification Results")) | .id' | head -1)
          if [[ -n "${COMMENT_ID}" ]]; then
            echo "📝 Updating existing comment..."
            gh api \
              --method PATCH \
              -H "Accept: application/vnd.github+json" \
              -H "X-GitHub-Api-Version: 2022-11-28" \
              "/repos/${{ github.repository }}/issues/comments/${COMMENT_ID}" \
              -f body="${COMMENT_BODY}"
          else
            echo "📝 Creating new comment..."
            gh api \
              --method POST \
              -H "Accept: application/vnd.github+json" \
              -H "X-GitHub-Api-Version: 2022-11-28" \
              "/repos/${{ github.repository }}/issues/${PR_NUMBER}/comments" \
              -f body="${COMMENT_BODY}"
          fi
          echo "✅ PR comment posted"
      # Enforcement gate: fail the job on any Critical or High finding.
      - name: Fail on Critical/High vulnerabilities
        if: steps.set-target.outputs.image_name != ''
        run: |
          CRITICAL_COUNT="${{ steps.vuln-summary.outputs.critical_count }}"
          HIGH_COUNT="${{ steps.vuln-summary.outputs.high_count }}"
          if [[ "${CRITICAL_COUNT}" -gt 0 ]]; then
            echo "🚨 Found ${CRITICAL_COUNT} CRITICAL vulnerabilities!"
            echo "Please review the vulnerability report and address critical issues before merging."
            exit 1
          fi
          if [[ "${HIGH_COUNT}" -gt 0 ]]; then
            echo "🚨 Found ${HIGH_COUNT} HIGH vulnerabilities!"
            echo "Please review the vulnerability report and address high severity issues before merging."
            exit 1
          fi
          echo "✅ No Critical/High vulnerabilities found"

View File

@@ -1,827 +0,0 @@
name: Supply Chain Verification
on:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * 1' # Mondays 00:00 UTC
  workflow_run:
    workflows:
      - Docker Build, Publish & Test
    types:
      - completed
  release:
    types:
      - published
      - prereleased
permissions:
  contents: read
  packages: read
  id-token: write # OIDC token for keyless verification
  attestations: write # Create/verify attestations
  security-events: write
  pull-requests: write # Comment on PRs
jobs:
  verify-sbom:
    name: Verify SBOM
    runs-on: ubuntu-latest
    outputs:
      image_exists: ${{ steps.image-check.outputs.exists }}
    # Only run on scheduled scans for main branch, or if workflow_run completed successfully
    # Critical Fix #5: Exclude PR builds to prevent duplicate verification (now handled inline in docker-build.yml)
    if: |
      (github.event_name != 'schedule' || github.ref == 'refs/heads/main') &&
      (github.event_name != 'workflow_run' ||
      (github.event.workflow_run.event != 'pull_request' &&
      (github.event.workflow_run.status != 'completed' || github.event.workflow_run.conclusion == 'success')))
    steps:
      - name: Checkout
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      # Debug: Log workflow_run context for initial validation (can be removed after confidence)
      - name: Debug Workflow Run Context
        if: github.event_name == 'workflow_run'
        run: |
          echo "Workflow Run Event Details:"
          echo "  Workflow: ${{ github.event.workflow_run.name }}"
          echo "  Conclusion: ${{ github.event.workflow_run.conclusion }}"
          echo "  Head Branch: ${{ github.event.workflow_run.head_branch }}"
          echo "  Head SHA: ${{ github.event.workflow_run.head_sha }}"
          echo "  Event: ${{ github.event.workflow_run.event }}"
          echo "  PR Count: ${{ toJson(github.event.workflow_run.pull_requests) }}"
      # Map the triggering event to the image tag that should be verified:
      # release tag / branch aliases (main→latest, development→dev, nightly) /
      # pr-N or sha-prefix for PR-triggered workflow_run / sanitized branch name.
      - name: Determine Image Tag
        id: tag
        run: |
          if [[ "${{ github.event_name }}" == "release" ]]; then
            TAG="${{ github.event.release.tag_name }}"
          elif [[ "${{ github.event_name }}" == "workflow_run" ]]; then
            BRANCH="${{ github.event.workflow_run.head_branch }}"
            # Extract tag from the workflow that triggered us
            if [[ "${BRANCH}" == "main" ]]; then
              TAG="latest"
            elif [[ "${BRANCH}" == "development" ]]; then
              TAG="dev"
            elif [[ "${BRANCH}" == "nightly" ]]; then
              TAG="nightly"
            elif [[ "${{ github.event.workflow_run.event }}" == "pull_request" ]]; then
              # Extract PR number from workflow_run context with null handling
              PR_NUMBER=$(jq -r '.pull_requests[0].number // empty' <<< '${{ toJson(github.event.workflow_run.pull_requests) }}')
              if [[ -n "${PR_NUMBER}" ]]; then
                TAG="pr-${PR_NUMBER}"
              else
                # Fallback to SHA-based tag if PR number not available
                TAG="sha-$(echo "${{ github.event.workflow_run.head_sha }}" | cut -c1-7)"
              fi
            else
              # For feature branches and other pushes, sanitize branch name for Docker tag
              # Replace / with - to avoid invalid reference format errors
              TAG=$(echo "${BRANCH}" | tr '/' '-')
            fi
          elif [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
            BRANCH="${{ github.ref_name }}"
            if [[ "${BRANCH}" == "main" ]]; then
              TAG="latest"
            elif [[ "${BRANCH}" == "development" ]]; then
              TAG="dev"
            elif [[ "${BRANCH}" == "nightly" ]]; then
              TAG="nightly"
            else
              TAG=$(echo "${BRANCH}" | tr '/' '-')
            fi
          else
            TAG="latest"
          fi
          echo "tag=${TAG}" >> "$GITHUB_OUTPUT"
          echo "Determined image tag: ${TAG}"
      # Probe GHCR for the tag; missing images (e.g. PR builds not yet pushed)
      # cause downstream scan steps to be skipped rather than fail.
      - name: Check Image Availability
        id: image-check
        env:
          IMAGE: ghcr.io/${{ github.repository_owner }}/charon:${{ steps.tag.outputs.tag }}
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          echo "Checking if image exists: ${IMAGE}"
          # Authenticate with GHCR using GitHub token
          echo "${GH_TOKEN}" | docker login ghcr.io -u "${{ github.actor }}" --password-stdin
          if docker manifest inspect "${IMAGE}" >/dev/null 2>&1; then
            echo "✅ Image exists and is accessible"
            echo "exists=true" >> "$GITHUB_OUTPUT"
          else
            echo "⚠️ Image not found - likely not built yet"
            echo "This is normal for PR workflows before docker-build completes"
            echo "exists=false" >> "$GITHUB_OUTPUT"
          fi
      # Generate SBOM using official Anchore action (auto-updated by Renovate)
      - name: Generate and Verify SBOM
        if: steps.image-check.outputs.exists == 'true'
        uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
        with:
          image: ghcr.io/${{ github.repository_owner }}/charon:${{ steps.tag.outputs.tag }}
          format: cyclonedx-json
          output-file: sbom-verify.cyclonedx.json
      # Quick sanity check on component count before the full validation step.
      - name: Verify SBOM Completeness
        if: steps.image-check.outputs.exists == 'true'
        run: |
          echo "Verifying SBOM completeness..."
          echo ""
          # Count components
          COMPONENT_COUNT=$(jq '.components | length' sbom-verify.cyclonedx.json 2>/dev/null || echo "0")
          echo "SBOM components: ${COMPONENT_COUNT}"
          if [[ ${COMPONENT_COUNT} -eq 0 ]]; then
            echo "⚠️ SBOM contains no components - may indicate an issue"
          else
            echo "✅ SBOM contains ${COMPONENT_COUNT} components"
          fi
      # Archive the SBOM for 30 days regardless of later step outcomes.
      - name: Upload SBOM Artifact
        if: steps.image-check.outputs.exists == 'true' && always()
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: sbom-${{ steps.tag.outputs.tag }}
          path: sbom-verify.cyclonedx.json
          retention-days: 30
- name: Validate SBOM File
id: validate-sbom
if: steps.image-check.outputs.exists == 'true'
run: |
echo "Validating SBOM file..."
echo ""
# Check jq availability
if ! command -v jq &> /dev/null; then
echo "❌ jq is not available"
echo "valid=false" >> "$GITHUB_OUTPUT"
exit 1
fi
# Check file exists
if [[ ! -f sbom-verify.cyclonedx.json ]]; then
echo "❌ SBOM file does not exist"
echo "valid=false" >> "$GITHUB_OUTPUT"
exit 0
fi
# Check file is non-empty
if [[ ! -s sbom-verify.cyclonedx.json ]]; then
echo "❌ SBOM file is empty"
echo "valid=false" >> "$GITHUB_OUTPUT"
exit 0
fi
# Validate JSON structure
if ! jq empty sbom-verify.cyclonedx.json 2>/dev/null; then
echo "❌ SBOM file contains invalid JSON"
echo "SBOM content:"
cat sbom-verify.cyclonedx.json
echo "valid=false" >> "$GITHUB_OUTPUT"
exit 0
fi
# Validate CycloneDX structure
BOMFORMAT=$(jq -r '.bomFormat // "missing"' sbom-verify.cyclonedx.json)
SPECVERSION=$(jq -r '.specVersion // "missing"' sbom-verify.cyclonedx.json)
COMPONENTS=$(jq '.components // [] | length' sbom-verify.cyclonedx.json)
echo "SBOM Format: ${BOMFORMAT}"
echo "Spec Version: ${SPECVERSION}"
echo "Components: ${COMPONENTS}"
echo ""
if [[ "${BOMFORMAT}" != "CycloneDX" ]]; then
echo "❌ Invalid bomFormat: expected 'CycloneDX', got '${BOMFORMAT}'"
echo "valid=false" >> "$GITHUB_OUTPUT"
exit 0
fi
if [[ "${COMPONENTS}" == "0" ]]; then
echo "⚠️ SBOM has no components - may indicate incomplete scan"
echo "valid=partial" >> "$GITHUB_OUTPUT"
else
echo "✅ SBOM is valid with ${COMPONENTS} components"
echo "valid=true" >> "$GITHUB_OUTPUT"
fi
echo "SBOM Format: ${BOMFORMAT}"
echo "Spec Version: ${SPECVERSION}"
echo "Components: ${COMPONENTS}"
echo ""
if [[ "${BOMFORMAT}" != "CycloneDX" ]]; then
echo "❌ Invalid bomFormat: expected 'CycloneDX', got '${BOMFORMAT}'"
echo "valid=false" >> "$GITHUB_OUTPUT"
exit 0
fi
if [[ "${COMPONENTS}" == "0" ]]; then
echo "⚠️ SBOM has no components - may indicate incomplete scan"
echo "valid=partial" >> "$GITHUB_OUTPUT"
else
echo "✅ SBOM is valid with ${COMPONENTS} components"
echo "valid=true" >> "$GITHUB_OUTPUT"
fi
      # Scan for vulnerabilities using official Anchore action (auto-updated by Renovate)
      - name: Scan for Vulnerabilities
        if: steps.validate-sbom.outputs.valid == 'true'
        uses: anchore/scan-action@7037fa011853d5a11690026fb85feee79f4c946c # v7.3.2
        id: scan
        with:
          sbom: sbom-verify.cyclonedx.json
          fail-build: false
          output-format: json
      # Normalize scan-action output filenames, tally severities, and export
      # counts as env vars for the PR comment step.
      - name: Process Vulnerability Results
        if: steps.validate-sbom.outputs.valid == 'true'
        run: |
          echo "Processing vulnerability results..."
          # The scan-action outputs results.json and results.sarif
          # Rename for consistency
          if [[ -f results.json ]]; then
            mv results.json vuln-scan.json
          fi
          if [[ -f results.sarif ]]; then
            mv results.sarif vuln-scan.sarif
          fi
          # Parse and categorize results
          CRITICAL=$(jq '[.matches[] | select(.vulnerability.severity == "Critical")] | length' vuln-scan.json 2>/dev/null || echo "0")
          HIGH=$(jq '[.matches[] | select(.vulnerability.severity == "High")] | length' vuln-scan.json 2>/dev/null || echo "0")
          MEDIUM=$(jq '[.matches[] | select(.vulnerability.severity == "Medium")] | length' vuln-scan.json 2>/dev/null || echo "0")
          LOW=$(jq '[.matches[] | select(.vulnerability.severity == "Low")] | length' vuln-scan.json 2>/dev/null || echo "0")
          echo ""
          echo "Vulnerability counts:"
          echo "  Critical: ${CRITICAL}"
          echo "  High: ${HIGH}"
          echo "  Medium: ${MEDIUM}"
          echo "  Low: ${LOW}"
          # Set warnings for critical vulnerabilities
          if [[ ${CRITICAL} -gt 0 ]]; then
            echo "::warning::${CRITICAL} critical vulnerabilities found"
          fi
          # Store for PR comment
          {
            echo "CRITICAL_VULNS=${CRITICAL}"
            echo "HIGH_VULNS=${HIGH}"
            echo "MEDIUM_VULNS=${MEDIUM}"
            echo "LOW_VULNS=${LOW}"
          } >> "$GITHUB_ENV"
      # Render per-severity markdown table rows (capped at 20 each) for reports.
      - name: Parse Vulnerability Details
        if: steps.validate-sbom.outputs.valid == 'true'
        run: |
          echo "Parsing detailed vulnerability information..."
          # Generate detailed vulnerability tables grouped by severity
          # Limit to first 20 per severity to keep PR comment readable
          # Critical vulnerabilities
          jq -r '
            [.matches[] | select(.vulnerability.severity == "Critical")] |
            sort_by(.vulnerability.id) |
            limit(20; .[]) |
            "| \(.vulnerability.id) | \(.artifact.name) | \(.artifact.version) | \(.vulnerability.fix.versions[0] // "No fix available") | \(.vulnerability.description[0:80] // "N/A") |"
          ' vuln-scan.json > critical-vulns.txt
          # High severity vulnerabilities
          jq -r '
            [.matches[] | select(.vulnerability.severity == "High")] |
            sort_by(.vulnerability.id) |
            limit(20; .[]) |
            "| \(.vulnerability.id) | \(.artifact.name) | \(.artifact.version) | \(.vulnerability.fix.versions[0] // "No fix available") | \(.vulnerability.description[0:80] // "N/A") |"
          ' vuln-scan.json > high-vulns.txt
          # Medium severity vulnerabilities
          jq -r '
            [.matches[] | select(.vulnerability.severity == "Medium")] |
            sort_by(.vulnerability.id) |
            limit(20; .[]) |
            "| \(.vulnerability.id) | \(.artifact.name) | \(.artifact.version) | \(.vulnerability.fix.versions[0] // "No fix available") | \(.vulnerability.description[0:80] // "N/A") |"
          ' vuln-scan.json > medium-vulns.txt
          # Low severity vulnerabilities
          jq -r '
            [.matches[] | select(.vulnerability.severity == "Low")] |
            sort_by(.vulnerability.id) |
            limit(20; .[]) |
            "| \(.vulnerability.id) | \(.artifact.name) | \(.artifact.version) | \(.vulnerability.fix.versions[0] // "No fix available") | \(.vulnerability.description[0:80] // "N/A") |"
          ' vuln-scan.json > low-vulns.txt
          echo "✅ Vulnerability details parsed and saved"
      # Archive raw scan output and the rendered tables for 30 days.
      - name: Upload Vulnerability Scan Artifact
        if: steps.validate-sbom.outputs.valid == 'true' && always()
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: vulnerability-scan-${{ steps.tag.outputs.tag }}
          path: |
            vuln-scan.json
            critical-vulns.txt
            high-vulns.txt
            medium-vulns.txt
            low-vulns.txt
          retention-days: 30
      # Explain in the step summary why the scan was skipped (missing image or
      # failed SBOM validation) so the job still reads as an expected success.
      - name: Report Skipped Scan
        if: steps.image-check.outputs.exists != 'true' || steps.validate-sbom.outputs.valid != 'true'
        run: |
          {
            echo "## ⚠️ Vulnerability Scan Skipped"
            echo ""
            if [[ "${{ steps.image-check.outputs.exists }}" != "true" ]]; then
              echo "**Reason**: Docker image not available yet"
              echo ""
              echo "This is expected for PR workflows. The image will be scanned"
              echo "after it's built by the docker-build workflow."
            elif [[ "${{ steps.validate-sbom.outputs.valid }}" != "true" ]]; then
              echo "**Reason**: SBOM validation failed"
              echo ""
              echo "Check the 'Validate SBOM File' step for details."
            fi
            echo ""
            echo "✅ Workflow completed successfully (scan skipped)"
          } >> "$GITHUB_STEP_SUMMARY"
      # Resolve the PR number from either a direct pull_request event or the
      # workflow_run payload; returns '' (string encoding) when none is found.
      # NOTE(review): this workflow's `on:` block has no pull_request trigger,
      # so the pull_request branch here appears unreachable — confirm intent.
      - name: Determine PR Number
        id: pr-number
        if: |
          github.event_name == 'pull_request' ||
          (github.event_name == 'workflow_run' && github.event.workflow_run.event == 'pull_request')
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          result-encoding: string
          script: |
            // Determine PR number from context
            let prNumber;
            if (context.eventName === 'pull_request') {
              prNumber = context.issue.number;
            } else if (context.eventName === 'workflow_run') {
              const pullRequests = context.payload.workflow_run.pull_requests;
              if (pullRequests && pullRequests.length > 0) {
                prNumber = pullRequests[0].number;
              }
            }
            if (!prNumber) {
              console.log('No PR number found');
              return '';
            }
            console.log(`Found PR number: ${prNumber}`);
            return prNumber;
# Assembles the sticky PR comment (markdown) into /tmp/comment-body.txt.
# NOTE(review): the multi-line COMMENT_BODY+="..." strings embed markdown
# whose exact line content is user-facing — do not re-wrap or re-indent them.
- name: Build PR Comment Body
id: comment-body
if: steps.pr-number.outputs.result != ''
run: |
# Counts come from *_VULNS env vars exported by the earlier parse step;
# default to 0 so arithmetic below never fails on a skipped scan.
TIMESTAMP=$(date -u +"%Y-%m-%d %H:%M:%S UTC")
IMAGE_EXISTS="${{ steps.image-check.outputs.exists }}"
SBOM_VALID="${{ steps.validate-sbom.outputs.valid }}"
CRITICAL="${CRITICAL_VULNS:-0}"
HIGH="${HIGH_VULNS:-0}"
MEDIUM="${MEDIUM_VULNS:-0}"
LOW="${LOW_VULNS:-0}"
TOTAL=$((CRITICAL + HIGH + MEDIUM + LOW))
# Build comment body
COMMENT_BODY="## 🔒 Supply Chain Security Scan
**Last Updated**: ${TIMESTAMP}
**Workflow Run**: [#${{ github.run_number }}](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
---
"
# Pick a status section: waiting for image / SBOM invalid / scan completed.
if [[ "${IMAGE_EXISTS}" != "true" ]]; then
COMMENT_BODY+="### ⏳ Status: Waiting for Image
The Docker image has not been built yet. This scan will run automatically once the docker-build workflow completes.
_This is normal for PR workflows._
"
elif [[ "${SBOM_VALID}" != "true" ]]; then
COMMENT_BODY+="### ⚠️ Status: SBOM Validation Failed
The Software Bill of Materials (SBOM) could not be validated. Please check the [workflow logs](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) for details.
**Action Required**: Review and resolve SBOM generation issues.
"
else
# Scan completed successfully
if [[ ${TOTAL} -eq 0 ]]; then
COMMENT_BODY+="### ✅ Status: No Vulnerabilities Detected
🎉 Great news! No security vulnerabilities were found in this image.
| Severity | Count |
|----------|-------|
| 🔴 Critical | 0 |
| 🟠 High | 0 |
| 🟡 Medium | 0 |
| 🔵 Low | 0 |
"
else
# Vulnerabilities found
if [[ ${CRITICAL} -gt 0 ]]; then
COMMENT_BODY+="### 🚨 Status: Critical Vulnerabilities Detected
⚠️ **Action Required**: ${CRITICAL} critical vulnerabilities require immediate attention!
"
elif [[ ${HIGH} -gt 0 ]]; then
COMMENT_BODY+="### ⚠️ Status: High-Severity Vulnerabilities Detected
${HIGH} high-severity vulnerabilities found. Please review and address.
"
else
COMMENT_BODY+="### 📊 Status: Vulnerabilities Detected
Security scan found ${TOTAL} vulnerabilities.
"
fi
# Summary table of counts per severity, then the per-severity detail tables.
COMMENT_BODY+="
| Severity | Count |
|----------|-------|
| 🔴 Critical | ${CRITICAL} |
| 🟠 High | ${HIGH} |
| 🟡 Medium | ${MEDIUM} |
| 🔵 Low | ${LOW} |
| **Total** | **${TOTAL}** |
## 🔍 Detailed Findings
"
# Add detailed vulnerability tables by severity
# Critical Vulnerabilities
if [[ ${CRITICAL} -gt 0 ]]; then
COMMENT_BODY+="<details>
<summary>🔴 <b>Critical Vulnerabilities (${CRITICAL})</b></summary>
| CVE | Package | Current Version | Fixed Version | Description |
|-----|---------|----------------|---------------|-------------|
"
if [[ -f critical-vulns.txt && -s critical-vulns.txt ]]; then
COMMENT_BODY+="$(cat critical-vulns.txt)"
# If more than 20, add truncation message
if [[ ${CRITICAL} -gt 20 ]]; then
REMAINING=$((CRITICAL - 20))
COMMENT_BODY+="
_...and ${REMAINING} more. View the [full scan results](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) for complete details._
"
fi
else
COMMENT_BODY+="| N/A | N/A | N/A | N/A | Details unavailable |
"
fi
COMMENT_BODY+="
</details>
"
fi
# High Severity Vulnerabilities
if [[ ${HIGH} -gt 0 ]]; then
COMMENT_BODY+="<details>
<summary>🟠 <b>High Severity Vulnerabilities (${HIGH})</b></summary>
| CVE | Package | Current Version | Fixed Version | Description |
|-----|---------|----------------|---------------|-------------|
"
if [[ -f high-vulns.txt && -s high-vulns.txt ]]; then
COMMENT_BODY+="$(cat high-vulns.txt)"
if [[ ${HIGH} -gt 20 ]]; then
REMAINING=$((HIGH - 20))
COMMENT_BODY+="
_...and ${REMAINING} more. View the [full scan results](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) for complete details._
"
fi
else
COMMENT_BODY+="| N/A | N/A | N/A | N/A | Details unavailable |
"
fi
COMMENT_BODY+="
</details>
"
fi
# Medium Severity Vulnerabilities
if [[ ${MEDIUM} -gt 0 ]]; then
COMMENT_BODY+="<details>
<summary>🟡 <b>Medium Severity Vulnerabilities (${MEDIUM})</b></summary>
| CVE | Package | Current Version | Fixed Version | Description |
|-----|---------|----------------|---------------|-------------|
"
if [[ -f medium-vulns.txt && -s medium-vulns.txt ]]; then
COMMENT_BODY+="$(cat medium-vulns.txt)"
if [[ ${MEDIUM} -gt 20 ]]; then
REMAINING=$((MEDIUM - 20))
COMMENT_BODY+="
_...and ${REMAINING} more. View the [full scan results](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) for complete details._
"
fi
else
COMMENT_BODY+="| N/A | N/A | N/A | N/A | Details unavailable |
"
fi
COMMENT_BODY+="
</details>
"
fi
# Low Severity Vulnerabilities
if [[ ${LOW} -gt 0 ]]; then
COMMENT_BODY+="<details>
<summary>🔵 <b>Low Severity Vulnerabilities (${LOW})</b></summary>
| CVE | Package | Current Version | Fixed Version | Description |
|-----|---------|----------------|---------------|-------------|
"
if [[ -f low-vulns.txt && -s low-vulns.txt ]]; then
COMMENT_BODY+="$(cat low-vulns.txt)"
if [[ ${LOW} -gt 20 ]]; then
REMAINING=$((LOW - 20))
COMMENT_BODY+="
_...and ${REMAINING} more. View the [full scan results](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) for complete details._
"
fi
else
COMMENT_BODY+="| N/A | N/A | N/A | N/A | Details unavailable |
"
fi
COMMENT_BODY+="
</details>
"
fi
COMMENT_BODY+="
📋 [View detailed vulnerability report](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
"
fi
fi
# Hidden HTML marker lets the find-comment step locate this comment later.
COMMENT_BODY+="
---
<sub><!-- supply-chain-security-comment --></sub>
"
# Save to file for the next step (handles multi-line)
echo "$COMMENT_BODY" > /tmp/comment-body.txt
# Also output for debugging
echo "Generated comment body:"
cat /tmp/comment-body.txt
- name: Find Existing PR Comment
id: find-comment
if: steps.pr-number.outputs.result != ''
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
with:
issue-number: ${{ steps.pr-number.outputs.result }}
comment-author: 'github-actions[bot]'
body-includes: '<!-- supply-chain-security-comment -->'
- name: Update or Create PR Comment
if: steps.pr-number.outputs.result != ''
uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 # v5.0.0
with:
issue-number: ${{ steps.pr-number.outputs.result }}
body-path: /tmp/comment-body.txt
edit-mode: replace
comment-id: ${{ steps.find-comment.outputs.comment-id }}
verify-docker-image:
name: Verify Docker Image Supply Chain
runs-on: ubuntu-latest
if: github.event_name == 'release'
needs: verify-sbom
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Install Verification Tools
run: |
# Install Cosign
curl -sLO https://github.com/sigstore/cosign/releases/download/v2.4.1/cosign-linux-amd64
echo "4e84f155f98be2c2d3e63dea0e80b0ca5b4d843f5f4b1d3e8c9b7e4e7c0e0e0e cosign-linux-amd64" | sha256sum -c || {
echo "⚠️ Checksum verification skipped (update with actual hash)"
}
sudo install cosign-linux-amd64 /usr/local/bin/cosign
rm cosign-linux-amd64
# Install SLSA Verifier
curl -sLO https://github.com/slsa-framework/slsa-verifier/releases/download/v2.6.0/slsa-verifier-linux-amd64
sudo install slsa-verifier-linux-amd64 /usr/local/bin/slsa-verifier
rm slsa-verifier-linux-amd64
- name: Determine Image Tag
id: tag
run: |
TAG="${{ github.event.release.tag_name }}"
echo "tag=${TAG}" >> "$GITHUB_OUTPUT"
- name: Verify Cosign Signature with Rekor Fallback
env:
IMAGE: ghcr.io/${{ github.repository_owner }}/charon:${{ steps.tag.outputs.tag }}
run: |
echo "Verifying Cosign signature for ${IMAGE}..."
# Try with Rekor
if cosign verify "${IMAGE}" \
--certificate-identity-regexp="https://github.com/${{ github.repository }}" \
--certificate-oidc-issuer="https://token.actions.githubusercontent.com" 2>&1; then
echo "✅ Cosign signature verified (with Rekor)"
else
echo "⚠️ Rekor verification failed, trying offline verification..."
# Fallback: verify without Rekor
if cosign verify "${IMAGE}" \
--certificate-identity-regexp="https://github.com/${{ github.repository }}" \
--certificate-oidc-issuer="https://token.actions.githubusercontent.com" \
--insecure-ignore-tlog 2>&1; then
echo "✅ Cosign signature verified (offline mode)"
echo "::warning::Verified without Rekor - transparency log unavailable"
else
echo "❌ Signature verification failed"
exit 1
fi
fi
- name: Verify Docker Hub Image Signature
if: needs.verify-sbom.outputs.image_exists == 'true'
continue-on-error: true
run: |
echo "Verifying Docker Hub image signature..."
cosign verify "docker.io/wikid82/charon:${{ steps.tag.outputs.tag }}" \
--certificate-identity-regexp="https://github.com/Wikid82/Charon" \
--certificate-oidc-issuer="https://token.actions.githubusercontent.com" && \
echo "✅ Docker Hub signature verified" || \
echo "⚠️ Docker Hub signature verification failed (image may not exist or not signed)"
- name: Verify SLSA Provenance
env:
IMAGE: ghcr.io/${{ github.repository_owner }}/charon:${{ steps.tag.outputs.tag }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
echo "Verifying SLSA provenance for ${IMAGE}..."
# This will be enabled once provenance generation is added
echo "⚠️ SLSA provenance verification not yet implemented"
echo "Will be enabled after Phase 3 workflow updates"
- name: Create Verification Report
if: always()
run: |
cat << EOF > verification-report.md
# Supply Chain Verification Report
**Image**: ghcr.io/${{ github.repository_owner }}/charon:${{ steps.tag.outputs.tag }}
**Date**: $(date -u +"%Y-%m-%d %H:%M:%S UTC")
**Workflow**: ${{ github.workflow }}
**Run**: ${{ github.run_id }}
## Results
- **SBOM Verification**: ${{ needs.verify-sbom.result }}
- **Cosign Signature**: ${{ job.status }}
- **SLSA Provenance**: Not yet implemented (Phase 3)
## Verification Failure Recovery
If verification failed:
1. Check workflow logs for detailed error messages
2. Verify signing steps ran successfully in build workflow
3. Confirm attestations were pushed to registry
4. Check Rekor status: https://status.sigstore.dev
5. For Rekor outages, manual verification may be required
6. Re-run build if signatures/provenance are missing
EOF
cat verification-report.md >> "$GITHUB_STEP_SUMMARY"
verify-release-artifacts:
name: Verify Release Artifacts
runs-on: ubuntu-latest
if: github.event_name == 'release'
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Install Verification Tools
run: |
# Install Cosign
curl -sLO https://github.com/sigstore/cosign/releases/download/v2.4.1/cosign-linux-amd64
sudo install cosign-linux-amd64 /usr/local/bin/cosign
rm cosign-linux-amd64
- name: Download Release Assets
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
TAG="${{ github.event.release.tag_name }}"
mkdir -p ./release-assets
gh release download "${TAG}" --dir ./release-assets || {
echo "⚠️ No release assets found or download failed"
exit 0
}
- name: Verify Artifact Signatures with Fallback
continue-on-error: true
run: |
if [[ ! -d ./release-assets ]] || [[ -z "$(ls -A ./release-assets 2>/dev/null)" ]]; then
echo "⚠️ No release assets to verify"
exit 0
fi
echo "Verifying Cosign signatures for release artifacts..."
VERIFIED_COUNT=0
FAILED_COUNT=0
for artifact in ./release-assets/*; do
# Skip signature and certificate files
if [[ "$artifact" == *.sig || "$artifact" == *.pem || "$artifact" == *provenance* || "$artifact" == *.txt || "$artifact" == *.md ]]; then
continue
fi
if [[ -f "$artifact" ]]; then
echo "Verifying: $(basename "$artifact")"
# Check if signature files exist
if [[ ! -f "${artifact}.sig" ]] || [[ ! -f "${artifact}.pem" ]]; then
echo "⚠️ No signature files found for $(basename "$artifact")"
FAILED_COUNT=$((FAILED_COUNT + 1))
continue
fi
# Try with Rekor
if cosign verify-blob "$artifact" \
--signature "${artifact}.sig" \
--certificate "${artifact}.pem" \
--certificate-identity-regexp="https://github.com/${{ github.repository }}" \
--certificate-oidc-issuer="https://token.actions.githubusercontent.com" 2>&1; then
echo "✅ Verified with Rekor"
VERIFIED_COUNT=$((VERIFIED_COUNT + 1))
else
echo "⚠️ Rekor unavailable, trying offline..."
if cosign verify-blob "$artifact" \
--signature "${artifact}.sig" \
--certificate "${artifact}.pem" \
--certificate-identity-regexp="https://github.com/${{ github.repository }}" \
--certificate-oidc-issuer="https://token.actions.githubusercontent.com" \
--insecure-ignore-tlog 2>&1; then
echo "✅ Verified offline"
VERIFIED_COUNT=$((VERIFIED_COUNT + 1))
else
echo "❌ Verification failed"
FAILED_COUNT=$((FAILED_COUNT + 1))
fi
fi
fi
done
echo ""
echo "Verification summary: ${VERIFIED_COUNT} verified, ${FAILED_COUNT} failed"
if [[ ${FAILED_COUNT} -gt 0 ]]; then
echo "⚠️ Some artifacts failed verification"
else
echo "✅ All artifacts verified successfully"
fi

View File

@@ -1,221 +0,0 @@
---
# Weekly check of the upstream GeoLite2-Country database: if its SHA-256
# differs from the value pinned in the Dockerfile, open an automated PR.
name: Update GeoLite2 Checksum

on:
  schedule:
    - cron: '0 2 * * 1' # Weekly on Mondays at 2 AM UTC
  workflow_dispatch:

permissions:
  contents: write
  pull-requests: write
  issues: write

jobs:
  update-checksum:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6

      # Download upstream (3 attempts) and compare its SHA-256 against the
      # value pinned in the Dockerfile; exports old/current/needs_update.
      - name: Download and calculate checksum
        id: checksum
        run: |
          set -euo pipefail
          echo "📥 Downloading GeoLite2-Country.mmdb..."
          DOWNLOAD_URL="https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb"
          # Download with retry logic
          for i in {1..3}; do
            if curl -fsSL "$DOWNLOAD_URL" -o /tmp/geolite2.mmdb; then
              echo "✅ Download successful on attempt $i"
              break
            else
              echo "❌ Download failed on attempt $i"
              if [ "$i" -eq 3 ]; then
                echo "error=download_failed" >> "$GITHUB_OUTPUT"
                exit 1
              fi
              sleep 5
            fi
          done
          # Calculate checksum
          CURRENT=$(sha256sum /tmp/geolite2.mmdb | cut -d' ' -f1)
          # Validate checksum format (64 hex characters)
          if ! [[ "$CURRENT" =~ ^[a-f0-9]{64}$ ]]; then
            echo "❌ Invalid checksum format: $CURRENT"
            echo "error=invalid_checksum_format" >> "$GITHUB_OUTPUT"
            exit 1
          fi
          # Extract current checksum from Dockerfile (first match only)
          OLD=$(grep -m1 "ARG GEOLITE2_COUNTRY_SHA256=" Dockerfile | cut -d'=' -f2)
          # Validate old checksum format
          if ! [[ "$OLD" =~ ^[a-f0-9]{64}$ ]]; then
            echo "❌ Invalid old checksum format in Dockerfile: $OLD"
            echo "error=invalid_dockerfile_checksum" >> "$GITHUB_OUTPUT"
            exit 1
          fi
          echo "🔍 Checksum comparison:"
          echo "  Current (Dockerfile): $OLD"
          echo "  Latest (Downloaded): $CURRENT"
          echo "current=$CURRENT" >> "$GITHUB_OUTPUT"
          echo "old=$OLD" >> "$GITHUB_OUTPUT"
          if [ "$CURRENT" != "$OLD" ]; then
            echo "needs_update=true" >> "$GITHUB_OUTPUT"
            echo "⚠️ Checksum mismatch detected - update required"
          else
            echo "needs_update=false" >> "$GITHUB_OUTPUT"
            echo "✅ Checksum matches - no update needed"
          fi

      - name: Update Dockerfile
        if: steps.checksum.outputs.needs_update == 'true'
        run: |
          set -euo pipefail
          echo "📝 Updating Dockerfile with new checksum..."
          sed -i "s/ARG GEOLITE2_COUNTRY_SHA256=.*/ARG GEOLITE2_COUNTRY_SHA256=${{ steps.checksum.outputs.current }}/" Dockerfile
          # Verify the change was applied
          if ! grep -q "ARG GEOLITE2_COUNTRY_SHA256=${{ steps.checksum.outputs.current }}" Dockerfile; then
            echo "❌ Failed to update Dockerfile"
            exit 1
          fi
          echo "✅ Dockerfile updated successfully"

      - name: Verify Dockerfile syntax
        if: steps.checksum.outputs.needs_update == 'true'
        run: |
          set -euo pipefail
          echo "🔍 Verifying Dockerfile syntax..."
          # Use BuildKit's --check flag for syntax validation (no actual build)
          DOCKER_BUILDKIT=1 docker build --check -f Dockerfile . 2>&1 || {
            echo "❌ Dockerfile syntax validation failed"
            exit 1
          }
          echo "✅ Dockerfile syntax is valid"

      # The action commits the modified Dockerfile to a bot branch and opens
      # (or updates) the PR; labels are created by the action if missing.
      - name: Create Pull Request
        if: steps.checksum.outputs.needs_update == 'true'
        uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
        with:
          title: "chore(docker): update GeoLite2-Country.mmdb checksum"
          body: |
            🤖 **Automated GeoLite2 Database Checksum Update**
            The GeoLite2-Country.mmdb database has been updated upstream.
            ### Changes
            - **Old checksum:** `${{ steps.checksum.outputs.old }}`
            - **New checksum:** `${{ steps.checksum.outputs.current }}`
            - **File modified:** `Dockerfile` (line 352)
            ### Verification Required
            - [ ] Local build passes: `docker build --no-cache -t test .`
            - [ ] Container starts successfully
            - [ ] API health check responds: `curl http://localhost:8080/api/v1/health`
            - [ ] CI build passes
            ### Testing Commands
            ```bash
            # Verify checksum locally
            curl -fsSL "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" | sha256sum
            # Build and test
            docker build --no-cache --pull -t charon:test-geolite2 .
            docker run --rm charon:test-geolite2 /app/charon --version
            ```
            ### Related Documentation
            - [Dockerfile](/Dockerfile#L352)
            - [Implementation Plan](/docs/plans/current_spec.md)
            ---
            **Auto-generated by:** `.github/workflows/update-geolite2.yml`
            **Trigger:** Scheduled weekly check (Mondays 2 AM UTC)
          branch: bot/update-geolite2-checksum
          delete-branch: true
          commit-message: |
            chore(docker): update GeoLite2-Country.mmdb checksum
            Automated checksum update for GeoLite2-Country.mmdb database.
            Old: ${{ steps.checksum.outputs.old }}
            New: ${{ steps.checksum.outputs.current }}
            Auto-generated by: .github/workflows/update-geolite2.yml
          labels: |
            dependencies
            automated
            docker
# Files a tracking issue when any earlier step failed, keyed on the `error`
# output exported by the checksum step (empty string falls back to 'unknown').
# NOTE(review): the template literal below is the issue body — its exact line
# content is user-facing markdown; do not re-indent or re-wrap it.
- name: Report failure via GitHub Issue
if: failure()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
// Map the step-level error output to a human-readable message.
const errorType = '${{ steps.checksum.outputs.error }}' || 'unknown';
const runUrl = `https://github.com/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}`;
const errorMessages = {
'download_failed': '❌ Failed to download GeoLite2-Country.mmdb after 3 attempts',
'invalid_checksum_format': '❌ Downloaded file produced invalid checksum format',
'invalid_dockerfile_checksum': '❌ Current Dockerfile contains invalid checksum format',
'unknown': '❌ Workflow failed with unknown error'
};
const title = `🚨 GeoLite2 Checksum Update Failed (${errorType})`;
const body = `
## Automated GeoLite2 Update Workflow Failed
**Error Type:** \`${errorType}\`
**Error Message:** ${errorMessages[errorType] || errorMessages.unknown}
### Workflow Details
- **Run URL:** ${runUrl}
- **Triggered:** ${context.eventName === 'schedule' ? 'Scheduled (weekly)' : 'Manual dispatch'}
- **Timestamp:** ${new Date().toISOString()}
### Required Actions
1. Review workflow logs: ${runUrl}
2. Check upstream source availability: https://github.com/P3TERX/GeoLite.mmdb
3. Verify network connectivity from GitHub Actions runners
4. If upstream is unavailable, consider alternative sources
### Manual Update (if needed)
\`\`\`bash
# Download and verify checksum
curl -fsSL "https://github.com/P3TERX/GeoLite.mmdb/raw/download/GeoLite2-Country.mmdb" | sha256sum
# Update Dockerfile line 352
vim Dockerfile # or use sed
# Test build
docker build --no-cache -t test .
\`\`\`
### Related Documentation
- [Implementation Plan](/docs/plans/current_spec.md)
- [Workflow File](/.github/workflows/update-geolite2.yml)
---
**Auto-generated by:** \`.github/workflows/update-geolite2.yml\`
`;
// Open the tracking issue so a failed scheduled run is not silently missed.
await github.rest.issues.create({
owner: context.repo.owner,
repo: context.repo.repo,
title: title,
body: body,
labels: ['bug', 'automated', 'ci-cd', 'docker']
});

View File

@@ -1,102 +0,0 @@
---
name: WAF integration

# Runs the Coraza WAF integration suite against a locally built image.
# NOTE: earlier revisions pulled a registry image built by docker-build.yml;
# this revision builds locally, so no workflow_run trigger remains.

on:
  workflow_dispatch:
    inputs:
      image_tag:
        description: 'Docker image tag to test (e.g., pr-123-abc1234, latest)'
        required: false
        type: string
  pull_request:
  push:
    branches:
      - main

# Prevent race conditions when a PR is updated mid-test: newer runs cancel
# older in-flight runs for the same ref. (Simplified from the old
# workflow_run-based fallbacks, which always evaluated to these values.)
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  waf-integration:
    name: Coraza WAF Integration
    runs-on: ubuntu-latest
    timeout-minutes: 15
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6

      - name: Build Docker image (Local)
        run: |
          echo "Building image locally for integration tests..."
          docker build -t charon:local .
          echo "✅ Successfully built charon:local"

      - name: Run WAF integration tests
        id: waf-test
        run: |
          chmod +x scripts/coraza_integration.sh
          scripts/coraza_integration.sh 2>&1 | tee waf-test-output.txt
          # Propagate the test script's exit status, not tee's.
          exit "${PIPESTATUS[0]}"

      # Capture container/Caddy state in the job summary to ease triage.
      - name: Dump Debug Info on Failure
        if: failure()
        run: |
          {
            echo "## 🔍 Debug Information"
            echo ""
            echo "### Container Status"
            echo '```'
            docker ps -a --filter "name=charon" --filter "name=coraza" 2>&1 || true
            echo '```'
            echo ""
            echo "### Caddy Admin Config"
            echo '```json'
            curl -s http://localhost:2019/config 2>/dev/null | head -200 || echo "Could not retrieve Caddy config"
            echo '```'
            echo ""
            echo "### Charon Container Logs (last 100 lines)"
            echo '```'
            docker logs charon-debug 2>&1 | tail -100 || echo "No container logs available"
            echo '```'
            echo ""
            echo "### WAF Ruleset Files"
            echo '```'
            docker exec charon-debug sh -c 'ls -la /app/data/caddy/coraza/rulesets/ 2>/dev/null && echo "---" && cat /app/data/caddy/coraza/rulesets/*.conf 2>/dev/null' || echo "No ruleset files found"
            echo '```'
          } >> "$GITHUB_STEP_SUMMARY"

      - name: WAF Integration Summary
        if: always()
        run: |
          {
            echo "## 🛡️ WAF Integration Test Results"
            if [ "${{ steps.waf-test.outcome }}" == "success" ]; then
              echo "✅ **All WAF tests passed**"
              echo ""
              echo "### Test Results:"
              echo '```'
              grep -E "^✓|^===|^Coraza" waf-test-output.txt || echo "See logs for details"
              echo '```'
            else
              echo "❌ **WAF tests failed**"
              echo ""
              echo "### Failure Details:"
              echo '```'
              grep -E "^✗|Unexpected|Error|failed" waf-test-output.txt | head -20 || echo "See logs for details"
              echo '```'
            fi
          } >> "$GITHUB_STEP_SUMMARY"

      # Best-effort teardown of anything the integration script left running.
      - name: Cleanup
        if: always()
        run: |
          docker rm -f charon-debug || true
          docker rm -f coraza-backend || true
          docker network rm containers_default || true

View File

@@ -1,629 +0,0 @@
---
name: Weekly Nightly to Main Promotion

# Creates a PR from nightly → main every Monday for scheduled release promotion.
# Includes safety checks for workflow status and provides manual trigger option.

on:
  schedule:
    # Every Monday at 12:00 UTC (7:00am EST / 8:00am EDT)
    # Offset from nightly sync (09:00 UTC) to avoid schedule race and allow
    # validation completion.
    - cron: '0 12 * * 1'
  workflow_dispatch:
    inputs:
      reason:
        description: 'Why are you running this manually?'
        required: true
        default: 'Ad-hoc promotion request'
      skip_workflow_check:
        description: 'Skip nightly workflow status check?'
        required: false
        type: boolean
        default: false

# Never run two promotions at once, and never cancel one mid-flight.
concurrency:
  group: ${{ github.workflow }}
  cancel-in-progress: false

env:
  NODE_VERSION: '24.12.0'
  SOURCE_BRANCH: 'nightly'
  TARGET_BRANCH: 'main'

permissions:
  contents: read
  pull-requests: write
  issues: write
  actions: read

jobs:
check-nightly-health:
name: Verify Nightly Branch Health
runs-on: ubuntu-latest
outputs:
is_healthy: ${{ steps.check.outputs.is_healthy }}
latest_run_url: ${{ steps.check.outputs.latest_run_url }}
failure_reason: ${{ steps.check.outputs.failure_reason }}
steps:
- name: Check Nightly Workflow Status
id: check
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const skipCheck = '${{ inputs.skip_workflow_check }}' === 'true';
if (skipCheck) {
core.info('Skipping workflow health check as requested');
core.setOutput('is_healthy', 'true');
core.setOutput('latest_run_url', 'N/A - check skipped');
core.setOutput('failure_reason', '');
return;
}
core.info('Checking nightly branch workflow health...');
// Resolve current nightly HEAD SHA and evaluate workflow health for that exact commit.
// This prevents stale failures from older nightly runs from blocking promotion.
const { data: nightlyBranch } = await github.rest.repos.getBranch({
owner: context.repo.owner,
repo: context.repo.repo,
branch: 'nightly',
});
const nightlyHeadSha = nightlyBranch.commit.sha;
core.info(`Current nightly HEAD: ${nightlyHeadSha}`);
// Check critical workflows on the current nightly HEAD only.
// Nightly build itself is scheduler-driven and not a reliable per-commit gate.
const criticalWorkflows = [
{
workflowFile: 'quality-checks.yml',
fallbackNames: ['Quality Checks'],
},
{
workflowFile: 'e2e-tests-split.yml',
fallbackNames: ['E2E Tests'],
},
{
workflowFile: 'codeql.yml',
fallbackNames: ['CodeQL - Analyze'],
},
];
// Retry window to avoid race conditions where required checks are not yet materialized.
const maxAttempts = 6;
const waitMs = 20000;
let branchRuns = [];
for (let attempt = 1; attempt <= maxAttempts; attempt += 1) {
const { data: completedRuns } = await github.rest.actions.listWorkflowRunsForRepo({
owner: context.repo.owner,
repo: context.repo.repo,
branch: 'nightly',
status: 'completed',
per_page: 100,
});
branchRuns = completedRuns.workflow_runs;
const allWorkflowsPresentForHead = criticalWorkflows.every((workflow) => {
const workflowPath = `.github/workflows/${workflow.workflowFile}`;
return branchRuns.some(
(r) =>
r.head_sha === nightlyHeadSha &&
(
r.path === workflowPath ||
(typeof r.path === 'string' && r.path.endsWith(`/${workflowPath}`)) ||
workflow.fallbackNames.includes(r.name)
),
);
});
if (allWorkflowsPresentForHead) {
core.info(`Required workflow runs found for nightly HEAD on attempt ${attempt}`);
break;
}
if (attempt < maxAttempts) {
core.info(
`Waiting for required runs to appear for nightly HEAD (attempt ${attempt}/${maxAttempts})`,
);
await new Promise((resolve) => setTimeout(resolve, waitMs));
}
}
if (branchRuns.length === 0) {
core.setOutput('is_healthy', 'false');
core.setOutput('latest_run_url', 'No completed runs found');
core.setOutput('failure_reason', 'No completed workflow runs found on nightly');
core.warning('No completed workflow runs found on nightly - blocking promotion');
return;
}
let hasFailure = false;
let failureReason = '';
let latestRunUrl = branchRuns[0]?.html_url || 'N/A';
for (const workflow of criticalWorkflows) {
const workflowPath = `.github/workflows/${workflow.workflowFile}`;
core.info(
`Evaluating required workflow ${workflow.workflowFile} (path match first, names fallback: ${workflow.fallbackNames.join(', ')})`,
);
const latestRunForHead = branchRuns.find(
(r) =>
r.head_sha === nightlyHeadSha &&
(
r.path === workflowPath ||
(typeof r.path === 'string' && r.path.endsWith(`/${workflowPath}`)) ||
workflow.fallbackNames.includes(r.name)
),
);
if (!latestRunForHead) {
hasFailure = true;
failureReason = `${workflow.workflowFile} has no completed run for nightly HEAD ${nightlyHeadSha}`;
latestRunUrl = `https://github.com/${context.repo.owner}/${context.repo.repo}/actions/workflows/${workflow.workflowFile}`;
core.warning(
`Required workflow ${workflow.workflowFile} has no completed run for current nightly HEAD`,
);
break;
}
if (latestRunForHead.conclusion !== 'success') {
hasFailure = true;
failureReason = `${workflow.workflowFile} ${latestRunForHead.conclusion} (${latestRunForHead.html_url})`;
latestRunUrl = latestRunForHead.html_url;
core.warning(
`Required workflow ${workflow.workflowFile} is ${latestRunForHead.conclusion} on nightly HEAD`,
);
break;
}
core.info(
`Required workflow ${workflow.workflowFile} passed for nightly HEAD via run ${latestRunForHead.id}`,
);
}
core.setOutput('is_healthy', hasFailure ? 'false' : 'true');
core.setOutput('latest_run_url', latestRunUrl);
core.setOutput('failure_reason', failureReason);
if (hasFailure) {
core.warning(`Nightly branch has failing workflows: ${failureReason}`);
} else {
core.info('Nightly branch is healthy - all critical workflows passing');
}
create-promotion-pr:
name: Create Promotion PR
needs: check-nightly-health
runs-on: ubuntu-latest
if: needs.check-nightly-health.outputs.is_healthy == 'true'
outputs:
pr_number: ${{ steps.create-pr.outputs.pr_number }}
pr_url: ${{ steps.create-pr.outputs.pr_url }}
skipped: ${{ steps.check-diff.outputs.skipped }}
steps:
- name: Checkout Repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ env.TARGET_BRANCH }}
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
- name: Check for Differences
id: check-diff
run: |
git fetch origin "${{ env.SOURCE_BRANCH }}"
# Compare the branches
AHEAD_COUNT=$(git rev-list --count "origin/${{ env.TARGET_BRANCH }}..origin/${{ env.SOURCE_BRANCH }}")
BEHIND_COUNT=$(git rev-list --count "origin/${{ env.SOURCE_BRANCH }}..origin/${{ env.TARGET_BRANCH }}")
echo "Nightly is $AHEAD_COUNT commits ahead of main"
echo "Nightly is $BEHIND_COUNT commits behind main"
if [ "$AHEAD_COUNT" -eq 0 ]; then
echo "No changes to promote - nightly is up-to-date with main"
echo "skipped=true" >> "$GITHUB_OUTPUT"
echo "skip_reason=No changes to promote" >> "$GITHUB_OUTPUT"
else
echo "skipped=false" >> "$GITHUB_OUTPUT"
echo "ahead_count=$AHEAD_COUNT" >> "$GITHUB_OUTPUT"
fi
- name: Generate Commit Summary
id: commits
if: steps.check-diff.outputs.skipped != 'true'
run: |
# Get the date for the PR title
DATE=$(date -u +%Y-%m-%d)
echo "date=$DATE" >> "$GITHUB_OUTPUT"
# Generate commit log
COMMIT_LOG=$(git log --oneline "origin/${{ env.TARGET_BRANCH }}..origin/${{ env.SOURCE_BRANCH }}" | head -50)
COMMIT_COUNT=$(git rev-list --count "origin/${{ env.TARGET_BRANCH }}..origin/${{ env.SOURCE_BRANCH }}")
# Store commit log in a file to preserve formatting
cat > /tmp/commit_log.md << 'COMMITS_EOF'
## Commits Being Promoted
COMMITS_EOF
{
if [ "$COMMIT_COUNT" -gt 50 ]; then
echo "_Showing first 50 of $COMMIT_COUNT commits:_"
fi
echo '```'
echo "$COMMIT_LOG"
echo '```'
if [ "$COMMIT_COUNT" -gt 50 ]; then
echo ""
echo "_...and $((COMMIT_COUNT - 50)) more commits_"
fi
} >> /tmp/commit_log.md
# Get files changed summary
FILES_CHANGED=$(git diff --stat "origin/${{ env.TARGET_BRANCH }}..origin/${{ env.SOURCE_BRANCH }}" | tail -1)
echo "files_changed=$FILES_CHANGED" >> "$GITHUB_OUTPUT"
echo "commit_count=$COMMIT_COUNT" >> "$GITHUB_OUTPUT"
- name: Check for Existing PR
id: existing-pr
if: steps.check-diff.outputs.skipped != 'true'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const { data: pulls } = await github.rest.pulls.list({
owner: context.repo.owner,
repo: context.repo.repo,
state: 'open',
head: `${context.repo.owner}:${{ env.SOURCE_BRANCH }}`,
base: '${{ env.TARGET_BRANCH }}',
});
if (pulls.length > 0) {
core.info(`Existing PR found: #${pulls[0].number}`);
core.setOutput('exists', 'true');
core.setOutput('pr_number', pulls[0].number);
core.setOutput('pr_url', pulls[0].html_url);
} else {
core.setOutput('exists', 'false');
}
- name: Create Promotion PR
  id: create-pr
  if: steps.check-diff.outputs.skipped != 'true' && steps.existing-pr.outputs.exists != 'true'
  uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
  env:
    # Step outputs and user-supplied input are passed via env instead of being
    # text-substituted into the script with ${{ }}: a quote or backtick in
    # (e.g.) the dispatch "reason" could otherwise break or inject into the JS.
    PROMOTION_DATE: ${{ steps.commits.outputs.date }}
    COMMIT_COUNT: ${{ steps.commits.outputs.commit_count }}
    FILES_CHANGED: ${{ steps.commits.outputs.files_changed }}
    TRIGGER_REASON: ${{ inputs.reason }}
    RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
  with:
    script: |
      const fs = require('fs');
      // Context arrives through the step's env block (see above) — read it
      // from process.env rather than inline workflow expressions.
      const date = process.env.PROMOTION_DATE;
      const commitCount = process.env.COMMIT_COUNT;
      const filesChanged = process.env.FILES_CHANGED;
      const runUrl = process.env.RUN_URL;
      // Commit summary is prepared by an earlier step and read from disk.
      const commitLog = fs.readFileSync('/tmp/commit_log.md', 'utf8');
      const triggerReason = process.env.TRIGGER_REASON || 'Scheduled weekly promotion';
      const body = `## 🚀 Weekly Nightly to Main Promotion
      **Date:** ${date}
      **Trigger:** ${triggerReason}
      **Commits:** ${commitCount} commits to promote
      **Changes:** ${filesChanged}
      ---
      ${commitLog}
      ---
      ## Pre-Merge Checklist
      - [ ] All status checks pass
      - [ ] No critical security issues identified
      - [ ] Changelog is up-to-date (auto-generated via workflow)
      - [ ] Version bump is appropriate (if applicable)
      ## Merge Instructions
      This PR promotes changes from \`nightly\` to \`main\`. Once all checks pass:
      1. **Review** the commit summary above
      2. **Approve** if changes look correct
      3. **Merge** using "Merge commit" to preserve history
      ---
      _This PR was automatically created by the [Weekly Nightly Promotion](${runUrl}) workflow._
      `;
      // Creating the PR is this step's core job; only a failure here should
      // fail the step.
      let pr;
      try {
        pr = await github.rest.pulls.create({
          owner: context.repo.owner,
          repo: context.repo.repo,
          title: `Weekly: Promote nightly to main (${date})`,
          head: '${{ env.SOURCE_BRANCH }}',
          base: '${{ env.TARGET_BRANCH }}',
          body: body,
          draft: false,
        });
      } catch (error) {
        core.setFailed(`Failed to create PR: ${error.message}`);
        return;
      }
      core.info(`Created PR #${pr.data.number}: ${pr.data.html_url}`);
      core.setOutput('pr_number', pr.data.number);
      core.setOutput('pr_url', pr.data.html_url);
      // Labeling is best-effort: the PR already exists at this point, so a
      // label API error must not fail the run with a misleading
      // "Failed to create PR" message — warn and carry on instead.
      try {
        const labels = ['automated', 'weekly-promotion'];
        for (const label of labels) {
          try {
            await github.rest.issues.getLabel({
              owner: context.repo.owner,
              repo: context.repo.repo,
              name: label,
            });
          } catch (e) {
            // Label doesn't exist yet — create it with a stable color/description.
            const colors = {
              'automated': '0e8a16',
              'weekly-promotion': '5319e7',
            };
            await github.rest.issues.createLabel({
              owner: context.repo.owner,
              repo: context.repo.repo,
              name: label,
              color: colors[label] || 'ededed',
              description: label === 'automated'
                ? 'Automatically generated by CI/CD'
                : 'Weekly promotion from nightly to main',
            });
          }
        }
        await github.rest.issues.addLabels({
          owner: context.repo.owner,
          repo: context.repo.repo,
          issue_number: pr.data.number,
          labels: labels,
        });
        core.info('Labels added successfully');
      } catch (error) {
        core.warning(`PR #${pr.data.number} created, but labeling failed: ${error.message}`);
      }
# Runs instead of "Create Promotion PR" when an open promotion PR already
# exists: posts a reminder comment and re-exports the PR coordinates so
# downstream jobs see the same outputs either way.
- name: Update Existing PR
  if: steps.check-diff.outputs.skipped != 'true' && steps.existing-pr.outputs.exists == 'true'
  uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
  with:
    script: |
      // pr_number comes from the "existing-pr" step and is inlined as a raw
      // numeric literal (GitHub-API-sourced, so safe to interpolate).
      const prNumber = ${{ steps.existing-pr.outputs.pr_number }};
      core.info(`PR #${prNumber} already exists - adding comment with update`);
      await github.rest.issues.createComment({
        owner: context.repo.owner,
        repo: context.repo.repo,
        issue_number: prNumber,
        body: `🔄 **Weekly check:** This PR is still open. New commits may have been added to \`nightly\` since the original PR was created.\n\n_Triggered by [workflow run](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})_`,
      });
      // Mirror the create-pr step's outputs for the existing PR.
      core.setOutput('pr_number', prNumber);
      core.setOutput('pr_url', '${{ steps.existing-pr.outputs.pr_url }}');
# Branch protection on main requires status checks that may only run on
# PR/push events; this job dispatches any required workflow that has no run
# recorded for the current nightly HEAD, so the promotion PR isn't stuck
# waiting on checks that will never report.
trigger-required-checks:
  name: Trigger Missing Required Checks
  needs: create-promotion-pr
  if: needs.create-promotion-pr.outputs.skipped != 'true'
  runs-on: ubuntu-latest
  permissions:
    actions: write   # needed for createWorkflowDispatch
    contents: read
  steps:
    - name: Dispatch missing required workflows on nightly head
      uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
      with:
        script: |
          const owner = context.repo.owner;
          const repo = context.repo.repo;
          // Required checks must exist for this exact SHA or branch
          // protection will block the promotion PR.
          const { data: nightlyBranch } = await github.rest.repos.getBranch({
            owner,
            repo,
            branch: 'nightly',
          });
          const nightlyHeadSha = nightlyBranch.commit.sha;
          core.info(`Current nightly HEAD for dispatch fallback: ${nightlyHeadSha}`);
          const requiredWorkflows = [
            { id: 'e2e-tests-split.yml' },
            { id: 'codeql.yml' },
            { id: 'codecov-upload.yml', inputs: { run_backend: 'true', run_frontend: 'true' } },
            { id: 'security-pr.yml' },
            { id: 'supply-chain-verify.yml' },
          ];
          for (const workflow of requiredWorkflows) {
            // Filter by head_sha server-side instead of scanning the N most
            // recent branch runs client-side: exact, and immune to the
            // pagination window on busy branches.
            const { data: runs } = await github.rest.actions.listWorkflowRuns({
              owner,
              repo,
              workflow_id: workflow.id,
              branch: 'nightly',
              head_sha: nightlyHeadSha,
              per_page: 1,
            });
            if (runs.total_count > 0) {
              core.info(`Skipping ${workflow.id}; run already exists for nightly HEAD`);
              continue;
            }
            await github.rest.actions.createWorkflowDispatch({
              owner,
              repo,
              workflow_id: workflow.id,
              ref: 'nightly',
              ...(workflow.inputs ? { inputs: workflow.inputs } : {}),
            });
            core.info(`Dispatched ${workflow.id}; missing for nightly HEAD`);
          }
# Opens (or updates) a tracking issue when the weekly promotion is blocked by
# an unhealthy nightly branch or the PR-creation job itself fails.
notify-on-failure:
  name: Notify on Failure
  needs: [check-nightly-health, create-promotion-pr, trigger-required-checks]
  runs-on: ubuntu-latest
  # always() keeps this job eligible even when upstream jobs fail or skip;
  # the second clause narrows it to the two failure modes reported on.
  if: |
    always() &&
    (needs.check-nightly-health.outputs.is_healthy == 'false' ||
    needs.create-promotion-pr.result == 'failure')
  steps:
    - name: Create Failure Issue
      uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
      env:
        # Upstream outputs arrive via env rather than inline ${{ }}
        # interpolation, so quotes/backticks in e.g. failure_reason cannot
        # break or inject into this script.
        IS_HEALTHY: ${{ needs.check-nightly-health.outputs.is_healthy }}
        FAILURE_REASON: ${{ needs.check-nightly-health.outputs.failure_reason }}
        LATEST_RUN_URL: ${{ needs.check-nightly-health.outputs.latest_run_url }}
        PR_RESULT: ${{ needs.create-promotion-pr.result }}
        RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
      with:
        script: |
          const isHealthy = process.env.IS_HEALTHY;
          const failureReason = process.env.FAILURE_REASON;
          const latestRunUrl = process.env.LATEST_RUN_URL;
          const prResult = process.env.PR_RESULT;
          const runUrl = process.env.RUN_URL;
          // Pick the issue title/body for the failure mode that triggered us.
          let title, body;
          if (isHealthy === 'false') {
            title = '🚨 Weekly Promotion Blocked: Nightly Branch Unhealthy';
            body = `## Weekly Promotion Failed
          The weekly promotion from \`nightly\` to \`main\` was **blocked** because the nightly branch has failing workflows.
          ### Failure Details
          - **Reason:** ${failureReason}
          - **Latest Run:** ${latestRunUrl}
          - **Workflow Run:** ${runUrl}
          ### Required Actions
          1. Investigate the failing workflow on the nightly branch
          2. Fix the underlying issue
          3. Re-run the failed workflow
          4. Manually trigger the weekly promotion workflow once nightly is healthy
          ---
          _This issue was automatically created by the Weekly Nightly Promotion workflow._
          `;
          } else {
            title = '🚨 Weekly Promotion Failed: PR Creation Error';
            body = `## Weekly Promotion Failed
          The weekly promotion workflow encountered an error while trying to create the PR.
          ### Details
          - **PR Creation Result:** ${prResult}
          - **Workflow Run:** ${runUrl}
          ### Required Actions
          1. Check the workflow logs for detailed error information
          2. Manually create the promotion PR if needed
          3. Investigate and fix any configuration issues
          ---
          _This issue was automatically created by the Weekly Nightly Promotion workflow._
          `;
          }
          // Deduplicate: reuse an open issue with the same title, if any.
          const { data: issues } = await github.rest.issues.listForRepo({
            owner: context.repo.owner,
            repo: context.repo.repo,
            state: 'open',
            labels: 'weekly-promotion-failure',
          });
          const existingIssue = issues.find(i => i.title === title);
          if (existingIssue) {
            // Add comment to existing issue instead of opening a duplicate.
            await github.rest.issues.createComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: existingIssue.number,
              body: `🔄 **Update:** This issue occurred again.\n\n${body}`,
            });
            core.info(`Updated existing issue #${existingIssue.number}`);
          } else {
            // Ensure the label exists before creating a labeled issue.
            try {
              await github.rest.issues.getLabel({
                owner: context.repo.owner,
                repo: context.repo.repo,
                name: 'weekly-promotion-failure',
              });
            } catch (e) {
              await github.rest.issues.createLabel({
                owner: context.repo.owner,
                repo: context.repo.repo,
                name: 'weekly-promotion-failure',
                color: 'd73a4a',
                description: 'Weekly promotion workflow failure',
              });
            }
            // Create a new tracking issue.
            const issue = await github.rest.issues.create({
              owner: context.repo.owner,
              repo: context.repo.repo,
              title: title,
              body: body,
              labels: ['weekly-promotion-failure', 'automated'],
            });
            core.info(`Created issue #${issue.data.number}`);
          }
# Writes a human-readable run summary to $GITHUB_STEP_SUMMARY regardless of
# how the upstream jobs finished.
summary:
  name: Workflow Summary
  needs: [check-nightly-health, create-promotion-pr, trigger-required-checks]
  runs-on: ubuntu-latest
  if: always()
  steps:
    - name: Generate Summary
      env:
        # Outputs are passed as environment variables rather than
        # text-substituted into the shell script with ${{ }}: a quote or
        # $(...) inside e.g. failure_reason could otherwise break the script
        # or execute arbitrary commands.
        HEALTH: ${{ needs.check-nightly-health.outputs.is_healthy }}
        SKIPPED: ${{ needs.create-promotion-pr.outputs.skipped }}
        PR_URL: ${{ needs.create-promotion-pr.outputs.pr_url }}
        PR_NUMBER: ${{ needs.create-promotion-pr.outputs.pr_number }}
        FAILURE_REASON: ${{ needs.check-nightly-health.outputs.failure_reason }}
        RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
      run: |
        {
          echo "## 📋 Weekly Nightly Promotion Summary"
          echo ""
          echo "| Step | Status |"
          echo "|------|--------|"
          if [ "$HEALTH" = "true" ]; then
            echo "| Nightly Health Check | ✅ Healthy |"
          else
            echo "| Nightly Health Check | ❌ Unhealthy: $FAILURE_REASON |"
          fi
          if [ "$SKIPPED" = "true" ]; then
            echo "| PR Creation | ⏭️ Skipped (no changes) |"
          elif [ -n "$PR_URL" ]; then
            echo "| PR Creation | ✅ [PR #$PR_NUMBER]($PR_URL) |"
          else
            echo "| PR Creation | ❌ Failed |"
          fi
          echo ""
          echo "---"
          echo "_Workflow run: ${RUN_URL}_"
        } >> "$GITHUB_STEP_SUMMARY"