Compare commits
253 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f58c96d29f | ||
|
|
2ecd6dd9d4 | ||
|
|
409dc0526f | ||
|
|
10259146df | ||
|
|
8cbd907d82 | ||
|
|
ff5ef35a0f | ||
|
|
fbb86b1cc3 | ||
|
|
0f995edbd1 | ||
|
|
aaddb88488 | ||
|
|
f79f0218c5 | ||
|
|
d94c9ba623 | ||
|
|
0241de69f4 | ||
|
|
f20e789a16 | ||
|
|
6f5c8873f9 | ||
|
|
7a12ab7928 | ||
|
|
871adca270 | ||
|
|
dbff270d22 | ||
|
|
8e1b9d91e2 | ||
|
|
67bcef32e4 | ||
|
|
739104e029 | ||
|
|
2204b7bd35 | ||
|
|
fdbba5b838 | ||
|
|
4ff65c83be | ||
|
|
3409e204eb | ||
|
|
61bb19e6f3 | ||
|
|
3cc979f5b8 | ||
|
|
ef8f237233 | ||
|
|
43a63007a7 | ||
|
|
404aa92ea0 | ||
|
|
94356e7d4e | ||
|
|
63c9976e5f | ||
|
|
09ef4f579e | ||
|
|
fbd94a031e | ||
|
|
6483a25555 | ||
|
|
61b73bc57b | ||
|
|
d77d618de0 | ||
|
|
2cd19d8964 | ||
|
|
61d4e12c56 | ||
|
|
5c5c1eabfc | ||
|
|
d9cc0ead71 | ||
|
|
b78798b877 | ||
|
|
e90ad34c28 | ||
|
|
1a559e3c64 | ||
|
|
a83967daa3 | ||
|
|
e374d6f7d2 | ||
|
|
7723d291ce | ||
|
|
386fcd8276 | ||
|
|
10f5e5dd1d | ||
|
|
89281c4255 | ||
|
|
de7861abea | ||
|
|
25443d3319 | ||
|
|
be279ba864 | ||
|
|
5fe1cf9265 | ||
|
|
cdf7948575 | ||
|
|
b04b94e429 | ||
|
|
0ff19f66b6 | ||
|
|
bf583927c1 | ||
|
|
6ed8d8054f | ||
|
|
5c4a558486 | ||
|
|
2024ad1373 | ||
|
|
5c0185d5eb | ||
|
|
c9e4916d43 | ||
|
|
75d945f706 | ||
|
|
99ab2202a2 | ||
|
|
feaae052ac | ||
|
|
476e65e7dd | ||
|
|
24a5773637 | ||
|
|
0eb0e43d60 | ||
|
|
6f98962981 | ||
|
|
2b3b5c3ff2 | ||
|
|
eb5518092f | ||
|
|
1b10198d50 | ||
|
|
449d316174 | ||
|
|
9356756065 | ||
|
|
5b3e005f2b | ||
|
|
7654acc710 | ||
|
|
afb2901618 | ||
|
|
117fd51082 | ||
|
|
b66ba3ad4d | ||
|
|
cbe238b27d | ||
|
|
f814706fe2 | ||
|
|
fc508d01d7 | ||
|
|
ba880083be | ||
|
|
b657235870 | ||
|
|
132b78b317 | ||
|
|
25cb0528e2 | ||
|
|
e9acaa61cc | ||
|
|
218ce5658e | ||
|
|
08a17d7716 | ||
|
|
f9c43d50c6 | ||
|
|
e348b5b2a3 | ||
|
|
678b442f5e | ||
|
|
2470861c4a | ||
|
|
9e201126a9 | ||
|
|
5b67808d13 | ||
|
|
68e3bee684 | ||
|
|
4081003051 | ||
|
|
bd2b1bd8b7 | ||
|
|
5e033e4bef | ||
|
|
06ba9bc438 | ||
|
|
3339208e53 | ||
|
|
4fad52aef5 | ||
|
|
9664e379ea | ||
|
|
1e126996cb | ||
|
|
f4115a2977 | ||
|
|
c6fd201f90 | ||
|
|
6ed988dc5b | ||
|
|
f34a9c4f37 | ||
|
|
940c42f341 | ||
|
|
759cff5e7f | ||
|
|
5a626715d6 | ||
|
|
82d18f11a5 | ||
|
|
fb5fdb8c4e | ||
|
|
8ff3f305db | ||
|
|
06ceb9ef6f | ||
|
|
5a3b143127 | ||
|
|
d28add1a73 | ||
|
|
70d2465429 | ||
|
|
3cc5126267 | ||
|
|
26fde2d649 | ||
|
|
da2db85bfc | ||
|
|
ccdc719501 | ||
|
|
ac720f95df | ||
|
|
1913e9d739 | ||
|
|
a7be6c304d | ||
|
|
d89b86675c | ||
|
|
fb69f3da12 | ||
|
|
e1c0173e3d | ||
|
|
46fe59cf0a | ||
|
|
4a398185c2 | ||
|
|
122030269e | ||
|
|
5b436a883d | ||
|
|
a1c88de3c4 | ||
|
|
a6c6ce550e | ||
|
|
1af04987e0 | ||
|
|
ad31bacc1c | ||
|
|
bab8414666 | ||
|
|
0deffd37e7 | ||
|
|
a98c9ed311 | ||
|
|
12a04b4744 | ||
|
|
d97c08bada | ||
|
|
ce335ff342 | ||
|
|
cb16ac05a2 | ||
|
|
0917edb863 | ||
|
|
4d0df36e5e | ||
|
|
7b1861f5a9 | ||
|
|
29f6664ab0 | ||
|
|
690480e181 | ||
|
|
c156183666 | ||
|
|
d8e6d8d9a9 | ||
|
|
7591d2cda8 | ||
|
|
aa2e7a1685 | ||
|
|
9a683c3231 | ||
|
|
e5cebc091d | ||
|
|
15cdaa8294 | ||
|
|
32f2d25d58 | ||
|
|
a9dcc007e5 | ||
|
|
bf53712b7c | ||
|
|
2b4f60615f | ||
|
|
bbaad17e97 | ||
|
|
bc4c7c1406 | ||
|
|
e13b49cfd2 | ||
|
|
4d4a5d3adb | ||
|
|
7983de9f2a | ||
|
|
0034968919 | ||
|
|
6cec0a67eb | ||
|
|
f56fa41301 | ||
|
|
b1a1a7a238 | ||
|
|
8381790b0b | ||
|
|
65228c5ee8 | ||
|
|
b531a840e8 | ||
|
|
5a2e11878b | ||
|
|
fcc60a0aa3 | ||
|
|
fdbf1a66cd | ||
|
|
e8a513541f | ||
|
|
bc9f2cf882 | ||
|
|
1329b00ed5 | ||
|
|
a9c5b5b2d8 | ||
|
|
4b9508a9be | ||
|
|
dc1426ae31 | ||
|
|
72bfca2dc3 | ||
|
|
09f9f7eb3d | ||
|
|
9e71dd218b | ||
|
|
ee5350d675 | ||
|
|
9424aca5e2 | ||
|
|
8fa0950138 | ||
|
|
1315d7a3ef | ||
|
|
63d7c5c0c4 | ||
|
|
79c8e660f5 | ||
|
|
7b640cc0af | ||
|
|
1f2b4c7d5e | ||
|
|
441c3dc947 | ||
|
|
735b9fdd0e | ||
|
|
45458df1bf | ||
|
|
4004c6bc08 | ||
|
|
427babd3c1 | ||
|
|
2486dc24a1 | ||
|
|
3fa1074ea9 | ||
|
|
51d997c6fb | ||
|
|
cdcd1b6639 | ||
|
|
9634eb65ad | ||
|
|
a52ba29f02 | ||
|
|
f5db7ad0e4 | ||
|
|
7497cbecd0 | ||
|
|
b14f6f040f | ||
|
|
89a1768496 | ||
|
|
57e7aa3e81 | ||
|
|
ff88ae9fd8 | ||
|
|
cddec19862 | ||
|
|
1bbd71cac3 | ||
|
|
a21351cd0f | ||
|
|
783956cb78 | ||
|
|
9094d3b99b | ||
|
|
718358314f | ||
|
|
f11cd689a5 | ||
|
|
3a3c06a5ff | ||
|
|
c48ced8c03 | ||
|
|
4ea22c11b3 | ||
|
|
a558c36853 | ||
|
|
1e14dcd59c | ||
|
|
1d909afe41 | ||
|
|
0d9ca68a94 | ||
|
|
105338ef67 | ||
|
|
8e88d9feae | ||
|
|
1309189523 | ||
|
|
a278ae1287 | ||
|
|
12dd09b32b | ||
|
|
0dfbb74c3c | ||
|
|
5429d85e8a | ||
|
|
82c1737d4b | ||
|
|
1a477f90f4 | ||
|
|
efbbf46a7a | ||
|
|
6b03ffc4bc | ||
|
|
7f53c27344 | ||
|
|
127a81a748 | ||
|
|
8f4298951a | ||
|
|
c68804d37e | ||
|
|
1189fa59b6 | ||
|
|
7070ea6f44 | ||
|
|
a3cdc70453 | ||
|
|
3e2df57fd1 | ||
|
|
2944cd6bed | ||
|
|
72c4dee12f | ||
|
|
2e85325d08 | ||
|
|
e2e3cc3dcf | ||
|
|
5ee3ce8b0d | ||
|
|
f4ef79def3 | ||
|
|
745d3afab5 | ||
|
|
9a4b4632c0 | ||
|
|
28e32d5aee | ||
|
|
c484e7d6d3 | ||
|
|
7845602907 | ||
|
|
b9c1a106d5 |
@@ -1,297 +0,0 @@
|
||||
/**
|
||||
* Security Dashboard Mobile Responsive E2E Tests
|
||||
* Test IDs: MR-01 through MR-10
|
||||
*
|
||||
* Tests mobile viewport (375x667), tablet viewport (768x1024),
|
||||
* touch targets, scrolling, and layout responsiveness.
|
||||
*/
|
||||
import { test, expect } from '@bgotink/playwright-coverage'
|
||||
|
||||
const base = process.env.CHARON_BASE_URL || 'http://localhost:8080'
|
||||
|
||||
// Mobile-viewport (375x667) suite for the Security Dashboard.
// Covers card stacking (MR-01), touch-target sizing (MR-04/MR-05),
// scrollability (MR-06), navigation presence (MR-10), and the
// transient loading overlay (MR-06b).
test.describe('Security Dashboard Mobile (375x667)', () => {
  test.use({ viewport: { width: 375, height: 667 } })

  test('MR-01: cards stack vertically on mobile', async ({ page }) => {
    await page.goto(`${base}/security`)

    // Wait for page to load
    await page.waitForSelector('[data-testid="toggle-crowdsec"]', { timeout: 10000 })

    // On mobile, grid should be single column
    const grid = page.locator('.grid.grid-cols-1')
    await expect(grid).toBeVisible()

    // Get the computed grid-template-columns
    const cardsContainer = page.locator('.grid').first()
    const gridStyle = await cardsContainer.evaluate((el) => {
      const style = window.getComputedStyle(el)
      return style.gridTemplateColumns
    })

    // Single column should have just one value (not multiple columns like "repeat(4, ...)")
    const columns = gridStyle.split(' ').filter((s) => s.trim().length > 0)
    expect(columns.length).toBeLessThanOrEqual(2) // Single column or flexible
  })

  test('MR-04: toggle switches have accessible touch targets', async ({ page }) => {
    await page.goto(`${base}/security`)
    await page.waitForSelector('[data-testid="toggle-crowdsec"]', { timeout: 10000 })

    // Check CrowdSec toggle
    const crowdsecToggle = page.getByTestId('toggle-crowdsec')
    const crowdsecBox = await crowdsecToggle.boundingBox()

    // Touch target should be at least 24px (component) + padding
    // Most switches have a reasonable touch target
    expect(crowdsecBox).not.toBeNull()
    if (crowdsecBox) {
      expect(crowdsecBox.height).toBeGreaterThanOrEqual(20)
      expect(crowdsecBox.width).toBeGreaterThanOrEqual(35)
    }

    // Check WAF toggle
    const wafToggle = page.getByTestId('toggle-waf')
    const wafBox = await wafToggle.boundingBox()
    expect(wafBox).not.toBeNull()
    if (wafBox) {
      expect(wafBox.height).toBeGreaterThanOrEqual(20)
    }
  })

  test('MR-05: config buttons are tappable on mobile', async ({ page }) => {
    await page.goto(`${base}/security`)
    await page.waitForSelector('[data-testid="toggle-crowdsec"]', { timeout: 10000 })

    // Find config/configure buttons
    const configButtons = page.locator('button:has-text("Config"), button:has-text("Configure")')
    const buttonCount = await configButtons.count()

    expect(buttonCount).toBeGreaterThan(0)

    // Check first config button has reasonable size
    const firstButton = configButtons.first()
    const box = await firstButton.boundingBox()
    expect(box).not.toBeNull()
    if (box) {
      expect(box.height).toBeGreaterThanOrEqual(28) // Minimum tap height
    }
  })

  test('MR-06: page content is scrollable on mobile', async ({ page }) => {
    await page.goto(`${base}/security`)
    await page.waitForSelector('[data-testid="toggle-crowdsec"]', { timeout: 10000 })

    // Check if page is scrollable (content height > viewport)
    const bodyHeight = await page.evaluate(() => document.body.scrollHeight)
    const viewportHeight = 667

    // If content is taller than viewport, page should scroll
    // (when content fits, the test passes vacuously — no scroll assertion is made)
    if (bodyHeight > viewportHeight) {
      // Attempt to scroll down
      await page.evaluate(() => window.scrollBy(0, 200))
      const scrollY = await page.evaluate(() => window.scrollY)
      expect(scrollY).toBeGreaterThan(0)
    }
  })

  test('MR-10: navigation is accessible on mobile', async ({ page }) => {
    await page.goto(`${base}/security`)
    await page.waitForSelector('[data-testid="toggle-crowdsec"]', { timeout: 10000 })

    // On mobile, there should be some form of navigation
    // Check if sidebar or mobile menu toggle exists
    const sidebar = page.locator('nav, aside, [role="navigation"]')
    const sidebarCount = await sidebar.count()

    // Navigation should exist in some form
    // NOTE(review): count >= 0 is always true, so this assertion can never
    // fail; tighten to > 0 once mobile navigation is guaranteed to render.
    expect(sidebarCount).toBeGreaterThanOrEqual(0) // May be hidden on mobile
  })

  test('MR-06b: overlay renders correctly on mobile', async ({ page }) => {
    await page.goto(`${base}/security`)
    await page.waitForSelector('[data-testid="toggle-crowdsec"]', { timeout: 10000 })

    // Skip if Cerberus is disabled (toggles would be disabled)
    const cerberusDisabled = await page.locator('text=Cerberus Disabled').isVisible()
    if (cerberusDisabled) {
      test.skip()
      return
    }

    // Trigger loading state by clicking a toggle
    const wafToggle = page.getByTestId('toggle-waf')
    const isDisabled = await wafToggle.isDisabled()

    if (!isDisabled) {
      await wafToggle.click()

      // Check for overlay (may appear briefly)
      // Use a short timeout since it might disappear quickly
      try {
        const overlay = page.locator('.fixed.inset-0')
        await overlay.waitFor({ state: 'visible', timeout: 2000 })

        // If overlay appeared, verify it fits screen
        const box = await overlay.boundingBox()
        if (box) {
          expect(box.width).toBeLessThanOrEqual(375 + 10) // Allow small margin
        }
      } catch {
        // Overlay might have disappeared before we could check
        // This is acceptable for a fast operation
      }
    }
  })
})
|
||||
|
||||
// Tablet-viewport (768x1024) suite: the card grid should pick up the
// md: breakpoint (two columns) and keep its gap utility.
test.describe('Security Dashboard Tablet (768x1024)', () => {
  test.use({ viewport: { width: 768, height: 1024 } })

  test('MR-02: cards show 2 columns on tablet', async ({ page }) => {
    await page.goto(`${base}/security`)
    await page.waitForSelector('[data-testid="toggle-crowdsec"]', { timeout: 10000 })

    // The md: breakpoint applies at 768px, so the grid should render md:grid-cols-2.
    const cardGrid = page.locator('.grid').first()
    await expect(cardGrid).toBeVisible()

    // Read the resolved grid-template-columns from the browser.
    const template = await cardGrid.evaluate(
      (el) => window.getComputedStyle(el).gridTemplateColumns
    )

    // Count concrete column tracks; at least two are expected at this width.
    const trackCount = template
      .split(' ')
      .filter((track) => track.trim().length > 0 && track !== 'none').length
    expect(trackCount).toBeGreaterThanOrEqual(2)
  })

  test('MR-08: cards have proper spacing on tablet', async ({ page }) => {
    await page.goto(`${base}/security`)
    await page.waitForSelector('[data-testid="toggle-crowdsec"]', { timeout: 10000 })

    // The grid carries Tailwind's gap-6 class; its visibility confirms
    // the spacing utility is applied to the card container.
    const spacedGrid = page.locator('.grid.gap-6').first()
    expect(await spacedGrid.isVisible()).toBe(true)
  })
})
|
||||
|
||||
// Desktop-viewport (1920x1080) suite: the card grid should pick up the
// lg: breakpoint and render four columns.
test.describe('Security Dashboard Desktop (1920x1080)', () => {
  test.use({ viewport: { width: 1920, height: 1080 } })

  test('MR-03: cards show 4 columns on desktop', async ({ page }) => {
    await page.goto(`${base}/security`)
    await page.waitForSelector('[data-testid="toggle-crowdsec"]', { timeout: 10000 })

    // At the lg breakpoint the card grid should resolve to lg:grid-cols-4.
    const cardGrid = page.locator('.grid').first()
    await expect(cardGrid).toBeVisible()

    // Resolved grid-template-columns as computed by the browser.
    const template = await cardGrid.evaluate(
      (el) => window.getComputedStyle(el).gridTemplateColumns
    )

    // Count concrete column tracks; four or more are expected at this width.
    const trackCount = template
      .split(' ')
      .filter((track) => track.trim().length > 0 && track !== 'none').length
    expect(trackCount).toBeGreaterThanOrEqual(4)
  })
})
|
||||
|
||||
// Cross-viewport layout suite: card ordering must be stable as the
// viewport changes, and every card must be reachable by scrolling.
test.describe('Security Dashboard Layout Tests', () => {
  test('cards maintain correct order across viewports', async ({ page }) => {
    // Test on mobile
    await page.setViewportSize({ width: 375, height: 667 })
    await page.goto(`${base}/security`)
    await page.waitForSelector('[data-testid="toggle-crowdsec"]', { timeout: 10000 })

    // Get card headings, keeping only the four known security card titles
    // (filters out any unrelated h3 elements on the page).
    const getCardOrder = async () => {
      const headings = await page.locator('h3').allTextContents()
      return headings.filter((h) => ['CrowdSec', 'Access Control', 'Coraza', 'Rate Limiting'].includes(h))
    }

    const mobileOrder = await getCardOrder()

    // Test on tablet
    await page.setViewportSize({ width: 768, height: 1024 })
    // NOTE(review): fixed 100ms waits assume reflow completes in time —
    // a flakiness risk on slow CI; consider an explicit layout-based wait.
    await page.waitForTimeout(100) // Allow reflow
    const tabletOrder = await getCardOrder()

    // Test on desktop
    await page.setViewportSize({ width: 1920, height: 1080 })
    await page.waitForTimeout(100) // Allow reflow
    const desktopOrder = await getCardOrder()

    // Order should be consistent across all three viewports and match
    // the canonical card sequence.
    expect(mobileOrder).toEqual(tabletOrder)
    expect(tabletOrder).toEqual(desktopOrder)
    expect(desktopOrder).toEqual(['CrowdSec', 'Access Control', 'Coraza', 'Rate Limiting'])
  })

  test('MR-09: all security cards are visible on scroll', async ({ page }) => {
    await page.setViewportSize({ width: 375, height: 667 })
    await page.goto(`${base}/security`)
    await page.waitForSelector('[data-testid="toggle-crowdsec"]', { timeout: 10000 })

    // Scroll to each card type and assert it becomes visible.
    const cardTypes = ['CrowdSec', 'Access Control', 'Coraza', 'Rate Limiting']

    for (const cardType of cardTypes) {
      const card = page.locator(`h3:has-text("${cardType}")`)
      await card.scrollIntoViewIfNeeded()
      await expect(card).toBeVisible()
    }
  })
})
|
||||
|
||||
// Mobile interaction suite: Configure-button navigation (MR-07) and the
// external Documentation link. Both tests are conditional — they only
// assert when the relevant control is actually visible/enabled.
test.describe('Security Dashboard Interaction Tests', () => {
  test.use({ viewport: { width: 375, height: 667 } })

  test('MR-07: config buttons navigate correctly on mobile', async ({ page }) => {
    await page.goto(`${base}/security`)
    await page.waitForSelector('[data-testid="toggle-crowdsec"]', { timeout: 10000 })

    // Skip if Cerberus disabled
    const cerberusDisabled = await page.locator('text=Cerberus Disabled').isVisible()
    if (cerberusDisabled) {
      test.skip()
      return
    }

    // Find and click WAF Configure button
    const configureButton = page.locator('button:has-text("Configure")').first()

    if (await configureButton.isVisible()) {
      await configureButton.click()

      // Should navigate to a config page
      // NOTE(review): fixed 500ms wait assumes client-side navigation
      // finishes in time — page.waitForURL would be more robust.
      await page.waitForTimeout(500)
      const url = page.url()

      // URL should include security/waf or security/rate-limiting etc
      expect(url).toMatch(/security\/(waf|rate-limiting|access-lists|crowdsec)/i)
    }
  })

  test('documentation button works on mobile', async ({ page }) => {
    await page.goto(`${base}/security`)
    await page.waitForSelector('[data-testid="toggle-crowdsec"]', { timeout: 10000 })

    // Find documentation button (may be rendered as a button or an anchor)
    const docButton = page.locator('button:has-text("Documentation"), a:has-text("Documentation")').first()

    if (await docButton.isVisible()) {
      // Check it has correct external link behavior
      const href = await docButton.getAttribute('href')

      // Should open external docs; href is only present on anchor elements,
      // so the assertion is skipped when the control is a plain button.
      if (href) {
        expect(href).toContain('wikid82.github.io')
      }
    }
  })
})
|
||||
@@ -1,34 +0,0 @@
|
||||
import { test, expect } from '@bgotink/playwright-coverage'
|
||||
|
||||
const base = process.env.CHARON_BASE_URL || 'http://localhost:8080'
|
||||
|
||||
// Hit an API route inside /api/v1 to ensure Cerberus middleware executes.
|
||||
const targetPath = '/api/v1/system/my-ip'
|
||||
|
||||
// End-to-end checks for the WAF middleware: block mode, monitor mode,
// and Prometheus counter exposure on /metrics.
test.describe('WAF blocking and monitoring', () => {
  test('blocks malicious query when mode=block', async ({ request }) => {
    // A literal '<script>' parameter name is enough to trip the naive WAF check.
    const response = await request.get(`${base}${targetPath}?<script>=x`)
    expect([400, 401]).toContain(response.status())
    // 400 means the WAF fired before auth; 401 means auth rejected first.
    // Either way the server refused the request; only inspect the body on 400.
    if (response.status() === 400) {
      const payload = await response.json()
      expect(payload?.error).toMatch(/WAF: suspicious payload/i)
    }
  })

  test('does not block when mode=monitor (returns 401 due to auth)', async ({ request }) => {
    // A benign query must reach the auth layer (401/403) rather than a WAF 400.
    const response = await request.get(`${base}${targetPath}?safe=yes`)
    expect([401, 403]).toContain(response.status())
  })

  test('metrics endpoint exposes Prometheus counters', async ({ request }) => {
    const response = await request.get(`${base}/metrics`)
    expect(response.status()).toBe(200)
    const metricsText = await response.text()
    // Every WAF counter must be registered and exported.
    const expectedCounters = [
      'charon_waf_requests_total',
      'charon_waf_blocked_total',
      'charon_waf_monitored_total',
    ]
    for (const counter of expectedCounters) {
      expect(metricsText).toContain(counter)
    }
  })
})
|
||||
@@ -1,26 +0,0 @@
|
||||
import { test, expect } from '@bgotink/playwright-coverage'
|
||||
|
||||
test.describe('Login - smoke', () => {
|
||||
test('renders and has no console errors on load', async ({ page }) => {
|
||||
const consoleErrors: string[] = []
|
||||
|
||||
page.on('console', msg => {
|
||||
if (msg.type() === 'error') {
|
||||
consoleErrors.push(msg.text())
|
||||
}
|
||||
})
|
||||
|
||||
await page.goto('/login', { waitUntil: 'domcontentloaded' })
|
||||
|
||||
await expect(page).toHaveURL(/\/login(?:\?|$)/)
|
||||
|
||||
const emailInput = page.getByRole('textbox', { name: /email/i })
|
||||
const passwordInput = page.getByLabel(/password/i)
|
||||
|
||||
await expect(emailInput).toBeVisible()
|
||||
await expect(passwordInput).toBeVisible()
|
||||
await expect(page.getByRole('button', { name: /sign in/i })).toBeVisible()
|
||||
|
||||
expect(consoleErrors, 'Console errors during /login load').toEqual([])
|
||||
})
|
||||
})
|
||||
@@ -94,7 +94,7 @@ Configure the application via `docker-compose.yml`:
|
||||
| `CHARON_ENV` | `production` | Set to `development` for verbose logging (`CPM_ENV` supported for backward compatibility). |
|
||||
| `CHARON_HTTP_PORT` | `8080` | Port for the Web UI (`CPM_HTTP_PORT` supported for backward compatibility). |
|
||||
| `CHARON_DB_PATH` | `/app/data/charon.db` | Path to the SQLite database (`CPM_DB_PATH` supported for backward compatibility). |
|
||||
| `CHARON_CADDY_ADMIN_API` | `http://localhost:2019` | Internal URL for Caddy API (`CPM_CADDY_ADMIN_API` supported for backward compatibility). |
|
||||
| `CHARON_CADDY_ADMIN_API` | `http://localhost:2019` | Internal URL for Caddy API (`CPM_CADDY_ADMIN_API` supported for backward compatibility). Must resolve to an internal allowlisted host on port `2019`. |
|
||||
| `CHARON_CADDY_CONFIG_ROOT` | `/config` | Path to Caddy autosave configuration directory. |
|
||||
| `CHARON_CADDY_LOG_DIR` | `/var/log/caddy` | Directory for Caddy access logs. |
|
||||
| `CHARON_CROWDSEC_LOG_DIR` | `/var/log/crowdsec` | Directory for CrowdSec logs. |
|
||||
@@ -218,6 +218,8 @@ environment:
|
||||
- CPM_CADDY_ADMIN_API=http://your-caddy-host:2019
|
||||
```
|
||||
|
||||
If using a non-localhost internal hostname, add it to `CHARON_SSRF_INTERNAL_HOST_ALLOWLIST`.
|
||||
|
||||
**Warning**: Charon will replace Caddy's entire configuration. Backup first!
|
||||
|
||||
## Performance Tuning
|
||||
|
||||
@@ -32,6 +32,8 @@ services:
|
||||
#- CPM_SECURITY_RATELIMIT_ENABLED=false
|
||||
#- CPM_SECURITY_ACL_ENABLED=false
|
||||
- FEATURE_CERBERUS_ENABLED=true
|
||||
# Docker socket group access: copy docker-compose.override.example.yml
|
||||
# to docker-compose.override.yml and set your host's docker GID.
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro # For local container discovery
|
||||
- crowdsec_data:/app/data/crowdsec
|
||||
|
||||
@@ -27,6 +27,8 @@ services:
|
||||
- FEATURE_CERBERUS_ENABLED=true
|
||||
# Emergency "break-glass" token for security reset when ACL blocks access
|
||||
- CHARON_EMERGENCY_TOKEN=03e4682c1164f0c1cb8e17c99bd1a2d9156b59824dde41af3bb67c513e5c5e92
|
||||
# Docker socket group access: copy docker-compose.override.example.yml
|
||||
# to docker-compose.override.yml and set your host's docker GID.
|
||||
extra_hosts:
|
||||
- "host.docker.internal:host-gateway"
|
||||
cap_add:
|
||||
|
||||
26
.docker/compose/docker-compose.override.example.yml
Normal file
26
.docker/compose/docker-compose.override.example.yml
Normal file
@@ -0,0 +1,26 @@
|
||||
# Docker Compose override — copy to docker-compose.override.yml to activate.
|
||||
#
|
||||
# Use case: grant the container access to the host Docker socket so that
|
||||
# Charon can discover running containers.
|
||||
#
|
||||
# 1. cp docker-compose.override.example.yml docker-compose.override.yml
|
||||
# 2. Uncomment the service that matches your compose file:
|
||||
# - "charon" for docker-compose.local.yml
|
||||
# - "app" for docker-compose.dev.yml
|
||||
# 3. Replace <GID> with the output of: stat -c '%g' /var/run/docker.sock
|
||||
# 4. docker compose up -d
|
||||
|
||||
services:
|
||||
# Uncomment for docker-compose.local.yml
|
||||
charon:
|
||||
group_add:
|
||||
- "<GID>" # e.g. "988" — run: stat -c '%g' /var/run/docker.sock
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||
|
||||
# Uncomment for docker-compose.dev.yml
|
||||
app:
|
||||
group_add:
|
||||
- "<GID>" # e.g. "988" — run: stat -c '%g' /var/run/docker.sock
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||
@@ -85,6 +85,7 @@ services:
|
||||
- playwright_data:/app/data
|
||||
- playwright_caddy_data:/data
|
||||
- playwright_caddy_config:/config
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro # For container discovery in tests
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-sf", "http://localhost:8080/api/v1/health"]
|
||||
interval: 5s
|
||||
@@ -111,6 +112,7 @@ services:
|
||||
volumes:
|
||||
- playwright_crowdsec_data:/var/lib/crowdsec/data
|
||||
- playwright_crowdsec_config:/etc/crowdsec
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro # For container discovery in tests
|
||||
healthcheck:
|
||||
test: ["CMD", "cscli", "version"]
|
||||
interval: 10s
|
||||
|
||||
@@ -49,6 +49,8 @@ services:
|
||||
# True tmpfs for E2E test data - fresh on every run, in-memory only
|
||||
# mode=1777 allows any user to write (container runs as non-root)
|
||||
- /app/data:size=100M,mode=1777
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro # For container discovery in tests
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "curl -fsS http://localhost:8080/api/v1/health || exit 1"]
|
||||
interval: 5s
|
||||
|
||||
@@ -27,30 +27,24 @@ get_group_by_gid() {
|
||||
}
|
||||
|
||||
create_group_with_gid() {
|
||||
local gid="$1"
|
||||
local name="$2"
|
||||
|
||||
if command -v addgroup >/dev/null 2>&1; then
|
||||
addgroup -g "$gid" "$name" 2>/dev/null || true
|
||||
addgroup -g "$1" "$2" 2>/dev/null || true
|
||||
return
|
||||
fi
|
||||
|
||||
if command -v groupadd >/dev/null 2>&1; then
|
||||
groupadd -g "$gid" "$name" 2>/dev/null || true
|
||||
groupadd -g "$1" "$2" 2>/dev/null || true
|
||||
fi
|
||||
}
|
||||
|
||||
add_user_to_group() {
|
||||
local user="$1"
|
||||
local group="$2"
|
||||
|
||||
if command -v addgroup >/dev/null 2>&1; then
|
||||
addgroup "$user" "$group" 2>/dev/null || true
|
||||
addgroup "$1" "$2" 2>/dev/null || true
|
||||
return
|
||||
fi
|
||||
|
||||
if command -v usermod >/dev/null 2>&1; then
|
||||
usermod -aG "$group" "$user" 2>/dev/null || true
|
||||
usermod -aG "$2" "$1" 2>/dev/null || true
|
||||
fi
|
||||
}
|
||||
|
||||
@@ -142,8 +136,15 @@ if [ -S "/var/run/docker.sock" ] && is_root; then
|
||||
fi
|
||||
fi
|
||||
elif [ -S "/var/run/docker.sock" ]; then
|
||||
echo "Note: Docker socket mounted but container is running non-root; skipping docker.sock group setup."
|
||||
echo " If Docker discovery is needed, run with matching group permissions (e.g., --group-add)"
|
||||
DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo "unknown")
|
||||
echo "Note: Docker socket mounted (GID=$DOCKER_SOCK_GID) but container is running non-root; skipping docker.sock group setup."
|
||||
echo " If Docker discovery is needed, add 'group_add: [\"$DOCKER_SOCK_GID\"]' to your compose service."
|
||||
if [ "$DOCKER_SOCK_GID" = "0" ]; then
|
||||
if [ "${ALLOW_DOCKER_SOCK_GID_0:-false}" != "true" ]; then
|
||||
echo "⚠️ WARNING: Docker socket GID is 0 (root group). group_add: [\"0\"] grants root-group access."
|
||||
echo " Set ALLOW_DOCKER_SOCK_GID_0=true to acknowledge this risk."
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "Note: Docker socket not found. Docker container discovery will be unavailable."
|
||||
fi
|
||||
@@ -191,7 +192,7 @@ if command -v cscli >/dev/null; then
|
||||
echo "Initializing persistent CrowdSec configuration..."
|
||||
|
||||
# Check if .dist has content
|
||||
if [ -d "/etc/crowdsec.dist" ] && [ -n "$(ls -A /etc/crowdsec.dist 2>/dev/null)" ]; then
|
||||
if [ -d "/etc/crowdsec.dist" ] && find /etc/crowdsec.dist -mindepth 1 -maxdepth 1 -print -quit 2>/dev/null | grep -q .; then
|
||||
echo "Copying config from /etc/crowdsec.dist..."
|
||||
if ! cp -r /etc/crowdsec.dist/* "$CS_CONFIG_DIR/"; then
|
||||
echo "ERROR: Failed to copy config from /etc/crowdsec.dist"
|
||||
@@ -208,7 +209,7 @@ if command -v cscli >/dev/null; then
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ Successfully initialized config from .dist directory"
|
||||
elif [ -d "/etc/crowdsec" ] && [ ! -L "/etc/crowdsec" ] && [ -n "$(ls -A /etc/crowdsec 2>/dev/null)" ]; then
|
||||
elif [ -d "/etc/crowdsec" ] && [ ! -L "/etc/crowdsec" ] && find /etc/crowdsec -mindepth 1 -maxdepth 1 -print -quit 2>/dev/null | grep -q .; then
|
||||
echo "Copying config from /etc/crowdsec (fallback)..."
|
||||
if ! cp -r /etc/crowdsec/* "$CS_CONFIG_DIR/"; then
|
||||
echo "ERROR: Failed to copy config from /etc/crowdsec (fallback)"
|
||||
@@ -248,7 +249,7 @@ if command -v cscli >/dev/null; then
|
||||
echo "Expected: /etc/crowdsec -> /app/data/crowdsec/config"
|
||||
echo "This indicates a critical build-time issue. Symlink must be created at build time as root."
|
||||
echo "DEBUG: Directory check:"
|
||||
ls -la /etc/ | grep crowdsec || echo " (no crowdsec entry found)"
|
||||
find /etc -mindepth 1 -maxdepth 1 -name '*crowdsec*' -exec ls -ld {} \; 2>/dev/null || echo " (no crowdsec entry found)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
19
.github/agents/Backend_Dev.agent.md
vendored
19
.github/agents/Backend_Dev.agent.md
vendored
File diff suppressed because one or more lines are too long
3
.github/agents/DevOps.agent.md
vendored
3
.github/agents/DevOps.agent.md
vendored
File diff suppressed because one or more lines are too long
3
.github/agents/Doc_Writer.agent.md
vendored
3
.github/agents/Doc_Writer.agent.md
vendored
File diff suppressed because one or more lines are too long
7
.github/agents/Frontend_Dev.agent.md
vendored
7
.github/agents/Frontend_Dev.agent.md
vendored
File diff suppressed because one or more lines are too long
23
.github/agents/Management.agent.md
vendored
23
.github/agents/Management.agent.md
vendored
File diff suppressed because one or more lines are too long
4
.github/agents/Planning.agent.md
vendored
4
.github/agents/Planning.agent.md
vendored
File diff suppressed because one or more lines are too long
4
.github/agents/Playwright_Dev.agent.md
vendored
4
.github/agents/Playwright_Dev.agent.md
vendored
File diff suppressed because one or more lines are too long
20
.github/agents/QA_Security.agent.md
vendored
20
.github/agents/QA_Security.agent.md
vendored
File diff suppressed because one or more lines are too long
6
.github/agents/Supervisor.agent.md
vendored
6
.github/agents/Supervisor.agent.md
vendored
File diff suppressed because one or more lines are too long
7
.github/badges/ghcr-downloads.json
vendored
7
.github/badges/ghcr-downloads.json
vendored
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"schemaVersion": 1,
|
||||
"label": "GHCR pulls",
|
||||
"message": "0",
|
||||
"color": "blue",
|
||||
"cacheSeconds": 3600
|
||||
}
|
||||
32
.github/instructions/copilot-instructions.md
vendored
32
.github/instructions/copilot-instructions.md
vendored
@@ -17,6 +17,23 @@ Every session should improve the codebase, not just add to it. Actively refactor
|
||||
- **READABLE**: Maintain comments and clear naming for complex logic. Favor clarity over cleverness.
|
||||
- **CONVENTIONAL COMMITS**: Write commit messages using `feat:`, `fix:`, `chore:`, `refactor:`, or `docs:` prefixes.
|
||||
|
||||
## Governance & Precedence
|
||||
|
||||
When policy statements conflict across documentation sources, resolve using this precedence hierarchy:
|
||||
|
||||
1. **Highest Precedence**: `.github/instructions/**` files (canonical source of truth)
|
||||
2. **Agent Overrides**: `.github/agents/**` files (agent-specific customizations)
|
||||
3. **Operator Documentation**: `SECURITY.md`, `docs/security.md`,
|
||||
`docs/features/notifications.md` (user-facing guidance)
|
||||
|
||||
**Reconciliation Rule**: When conflicts arise, the stricter security requirement
|
||||
wins. Update downstream documentation to match canonical text in
|
||||
`.github/instructions/**`.
|
||||
|
||||
**Example**: If `.github/instructions/security.instructions.md` mandates token
|
||||
redaction but operator docs suggest logging is acceptable, token redaction
|
||||
requirement takes precedence and operator docs must be updated.
|
||||
|
||||
## 🚨 CRITICAL ARCHITECTURE RULES 🚨
|
||||
|
||||
- **Single Frontend Source**: All frontend code MUST reside in `frontend/`. NEVER create `backend/frontend/` or any other nested frontend directory.
|
||||
@@ -150,6 +167,21 @@ Before marking an implementation task as complete, perform the following in orde
|
||||
- **Base URL**: Uses `PLAYWRIGHT_BASE_URL` or default from `playwright.config.js`
|
||||
- All E2E tests must pass before proceeding to unit tests
|
||||
|
||||
1.5. **GORM Security Scan** (CONDITIONAL, BLOCKING):
|
||||
- **Trigger Condition**: Execute this gate when changes include backend models or database interaction logic:
|
||||
- `backend/internal/models/**`
|
||||
- GORM query/service layers
|
||||
- Database migrations or seeding logic
|
||||
- **Exclusions**: Skip this gate for docs-only (`**/*.md`) or frontend-only (`frontend/**`) changes
|
||||
- **Run One Of**:
|
||||
- VS Code task: `Lint: GORM Security Scan`
|
||||
- Pre-commit: `pre-commit run --hook-stage manual gorm-security-scan --all-files`
|
||||
- Direct: `./scripts/scan-gorm-security.sh --check`
|
||||
- **Gate Enforcement**: DoD is process-blocking until scanner reports zero
|
||||
CRITICAL/HIGH findings, even while automation remains in manual stage
|
||||
- **Check Mode Required**: Gate decisions must use check mode semantics
|
||||
(`--check` flag or equivalent task wiring) for pass/fail determination
|
||||
|
||||
2. **Local Patch Coverage Preflight** (MANDATORY - Run Before Unit/Coverage Tests):
|
||||
- **Run**: VS Code task `Test: Local Patch Report` or `bash scripts/local-patch-report.sh` from repo root.
|
||||
- **Purpose**: Surface exact changed files and uncovered changed lines before adding/refining unit tests.
|
||||
|
||||
@@ -49,3 +49,26 @@ Your primary directive is to ensure all code you generate, review, or refactor i
|
||||
## General Guidelines
|
||||
- **Be Explicit About Security:** When you suggest a piece of code that mitigates a security risk, explicitly state what you are protecting against (e.g., "Using a parameterized query here to prevent SQL injection.").
|
||||
- **Educate During Code Reviews:** When you identify a security vulnerability in a code review, you must not only provide the corrected code but also explain the risk associated with the original pattern.
|
||||
|
||||
### Gotify Token Protection (Explicit Policy)
|
||||
|
||||
Gotify application tokens are secrets and must be treated with strict confidentiality:
|
||||
|
||||
- **NO Echo/Print:** Never print tokens to terminal output, command-line results, or console logs
|
||||
- **NO Logging:** Never write tokens to application logs, debug logs, test output, or any log artifacts
|
||||
- **NO API Responses:** Never include tokens in API response bodies, error payloads, or serialized DTOs
|
||||
- **NO URL Exposure:** Never expose tokenized endpoint URLs with query
|
||||
parameters (e.g., `https://gotify.example.com/message?token=...`) in:
|
||||
- Documentation examples
|
||||
- Diagnostic output
|
||||
- Screenshots or reports
|
||||
- Log files
|
||||
- **Redact Query Parameters:** Always redact URL query parameters in
|
||||
diagnostics, examples, and log output before display or storage
|
||||
- **Validation Without Revelation:** For token validation or health checks:
|
||||
- Return only non-sensitive status indicators (`valid`/`invalid` + reason category)
|
||||
- Use token length/prefix-independent masking in UX and diagnostics
|
||||
- Never reveal raw token values in validation feedback
|
||||
- **Storage:** Store and process tokens as secrets only (environment variables
|
||||
or secret management service)
|
||||
- **Rotation:** Rotate tokens immediately on suspected exposure
|
||||
|
||||
39
.github/instructions/testing.instructions.md
vendored
39
.github/instructions/testing.instructions.md
vendored
@@ -4,6 +4,10 @@ description: 'Strict protocols for test execution, debugging, and coverage valid
|
||||
---
|
||||
# Testing Protocols
|
||||
|
||||
**Governance Note**: This file is subject to the precedence hierarchy defined in
|
||||
`.github/instructions/copilot-instructions.md`. When conflicts arise, canonical
|
||||
instruction files take precedence over agent files and operator documentation.
|
||||
|
||||
## 0. E2E Verification First (Playwright)
|
||||
|
||||
**MANDATORY**: Before running unit tests, verify the application UI/UX functions correctly end-to-end.
|
||||
@@ -170,16 +174,39 @@ Before pushing code, verify E2E coverage:
|
||||
* **Threshold Compliance:** You must compare the final coverage percentage against the project's threshold (Default: 85% unless specified otherwise). If coverage drops, you must identify the "uncovered lines" and add targeted tests.
|
||||
* **Patch Coverage (Suggestion):** Codecov reports patch coverage as an indicator. While developers should aim for 100% coverage of modified lines, patch coverage is **not a hard requirement** and will not block PR approval. If patch coverage is low, consider adding targeted tests to improve the metric.
|
||||
* **Review Patch Coverage:** When reviewing patch coverage reports, assess whether missing lines represent genuine gaps or are acceptable (e.g., error handling branches, deprecated code paths). Use the report to inform testing decisions, not as an absolute gate.
|
||||
|
||||
## 4. GORM Security Validation (Manual Stage)
|
||||
|
||||
**Requirement:** All backend changes involving GORM models or database interactions must pass the GORM Security Scanner.
|
||||
**Requirement:** For any change that touches backend models or
|
||||
database-related logic, the GORM Security Scanner is a mandatory local DoD gate
|
||||
and must pass with zero CRITICAL/HIGH findings.
|
||||
|
||||
### When to Run
|
||||
**Policy vs. Automation Reconciliation:** "Manual stage" describes execution
|
||||
mechanism only (not automated pre-commit hook); policy enforcement remains
|
||||
process-blocking for DoD. Gate decisions must use check semantics
|
||||
(`./scripts/scan-gorm-security.sh --check` or equivalent task wiring).
|
||||
|
||||
* **Before Committing:** When modifying GORM models (files in `backend/internal/models/`)
|
||||
* **Before Opening PR:** Verify no security issues introduced
|
||||
* **After Code Review:** If model-related changes were requested
|
||||
* **Definition of Done:** Scanner must pass with zero CRITICAL/HIGH issues
|
||||
### When to Run (Conditional Trigger Matrix)
|
||||
|
||||
**Mandatory Trigger Paths (Include):**
|
||||
- `backend/internal/models/**` — GORM model definitions
|
||||
- Backend services/repositories with GORM query logic
|
||||
- Database migrations or seeding logic affecting model persistence behavior
|
||||
|
||||
**Explicit Exclusions:**
|
||||
- Docs-only changes (`**/*.md`, governance documentation)
|
||||
- Frontend-only changes (`frontend/**`)
|
||||
|
||||
**Gate Decision Rule:** IF any Include path matches, THEN scanner execution in
|
||||
check mode is mandatory DoD gate. IF only Exclude paths match, THEN GORM gate
|
||||
is not required for that change set.
|
||||
|
||||
### Definition of Done
|
||||
- **Before Committing:** When modifying trigger paths listed above
|
||||
- **Before Opening PR:** Verify no security issues introduced
|
||||
- **After Code Review:** If model-related changes were requested
|
||||
- **Blocking Gate:** Scanner must pass with zero CRITICAL/HIGH issues before
|
||||
task completion
|
||||
|
||||
### Running the Scanner
|
||||
|
||||
|
||||
55
.github/security-severity-policy.yml
vendored
Normal file
55
.github/security-severity-policy.yml
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
version: 1
|
||||
effective_date: 2026-02-25
|
||||
scope:
|
||||
- local pre-commit manual security hooks
|
||||
- github actions security workflows
|
||||
|
||||
defaults:
|
||||
blocking:
|
||||
- critical
|
||||
- high
|
||||
medium:
|
||||
mode: risk-based
|
||||
default_action: report
|
||||
require_sla: true
|
||||
default_sla_days: 14
|
||||
escalation:
|
||||
trigger: high-signal class or repeated finding
|
||||
action: require issue + owner + due date
|
||||
low:
|
||||
action: report
|
||||
|
||||
codeql:
|
||||
severity_mapping:
|
||||
error: high_or_critical
|
||||
warning: medium_or_lower
|
||||
note: informational
|
||||
blocking_levels:
|
||||
- error
|
||||
warning_policy:
|
||||
default_action: report
|
||||
escalation_high_signal_rule_ids:
|
||||
- go/request-forgery
|
||||
- js/missing-rate-limiting
|
||||
- js/insecure-randomness
|
||||
|
||||
trivy:
|
||||
blocking_severities:
|
||||
- CRITICAL
|
||||
- HIGH
|
||||
medium_policy:
|
||||
action: report
|
||||
escalation: issue-with-sla
|
||||
|
||||
grype:
|
||||
blocking_severities:
|
||||
- Critical
|
||||
- High
|
||||
medium_policy:
|
||||
action: report
|
||||
escalation: issue-with-sla
|
||||
|
||||
enforcement_contract:
|
||||
codeql_local_vs_ci: "local and ci block on codeql error-level findings only"
|
||||
supply_chain_medium: "medium vulnerabilities are non-blocking by default and require explicit triage"
|
||||
auth_regression_guard: "state-changing routes must remain protected by auth middleware"
|
||||
96
.github/skills/scripts/_environment_helpers.sh
vendored
96
.github/skills/scripts/_environment_helpers.sh
vendored
@@ -192,6 +192,101 @@ get_project_root() {
|
||||
return 1
|
||||
}
|
||||
|
||||
# ensure_charon_encryption_key: Ensure CHARON_ENCRYPTION_KEY is present and valid
|
||||
# for backend tests. Generates an ephemeral base64-encoded 32-byte key when
|
||||
# missing or invalid.
|
||||
ensure_charon_encryption_key() {
|
||||
local key_source="existing"
|
||||
local decoded_key_hex=""
|
||||
local decoded_key_bytes=0
|
||||
|
||||
generate_key() {
|
||||
if command -v openssl >/dev/null 2>&1; then
|
||||
openssl rand -base64 32 | tr -d '\n'
|
||||
return
|
||||
fi
|
||||
|
||||
if command -v python3 >/dev/null 2>&1; then
|
||||
python3 - <<'PY'
|
||||
import base64
|
||||
import os
|
||||
|
||||
print(base64.b64encode(os.urandom(32)).decode())
|
||||
PY
|
||||
return
|
||||
fi
|
||||
|
||||
echo ""
|
||||
}
|
||||
|
||||
if [[ -z "${CHARON_ENCRYPTION_KEY:-}" ]]; then
|
||||
key_source="generated"
|
||||
CHARON_ENCRYPTION_KEY="$(generate_key)"
|
||||
fi
|
||||
|
||||
if [[ -z "${CHARON_ENCRYPTION_KEY:-}" ]]; then
|
||||
if declare -f log_error >/dev/null 2>&1; then
|
||||
log_error "Could not auto-provision CHARON_ENCRYPTION_KEY (requires openssl or python3)"
|
||||
else
|
||||
echo "[ERROR] Could not auto-provision CHARON_ENCRYPTION_KEY (requires openssl or python3)" >&2
|
||||
fi
|
||||
return 1
|
||||
fi
|
||||
|
||||
if ! decoded_key_hex=$(printf '%s' "$CHARON_ENCRYPTION_KEY" | base64 --decode 2>/dev/null | od -An -tx1 -v | tr -d ' \n'); then
|
||||
key_source="regenerated"
|
||||
CHARON_ENCRYPTION_KEY="$(generate_key)"
|
||||
if ! decoded_key_hex=$(printf '%s' "$CHARON_ENCRYPTION_KEY" | base64 --decode 2>/dev/null | od -An -tx1 -v | tr -d ' \n'); then
|
||||
if declare -f log_error >/dev/null 2>&1; then
|
||||
log_error "CHARON_ENCRYPTION_KEY is invalid and regeneration failed"
|
||||
else
|
||||
echo "[ERROR] CHARON_ENCRYPTION_KEY is invalid and regeneration failed" >&2
|
||||
fi
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
|
||||
decoded_key_bytes=$(( ${#decoded_key_hex} / 2 ))
|
||||
if [[ "$decoded_key_bytes" -ne 32 ]]; then
|
||||
key_source="regenerated"
|
||||
CHARON_ENCRYPTION_KEY="$(generate_key)"
|
||||
if ! decoded_key_hex=$(printf '%s' "$CHARON_ENCRYPTION_KEY" | base64 --decode 2>/dev/null | od -An -tx1 -v | tr -d ' \n'); then
|
||||
if declare -f log_error >/dev/null 2>&1; then
|
||||
log_error "CHARON_ENCRYPTION_KEY has invalid length and regeneration failed"
|
||||
else
|
||||
echo "[ERROR] CHARON_ENCRYPTION_KEY has invalid length and regeneration failed" >&2
|
||||
fi
|
||||
return 1
|
||||
fi
|
||||
|
||||
decoded_key_bytes=$(( ${#decoded_key_hex} / 2 ))
|
||||
if [[ "$decoded_key_bytes" -ne 32 ]]; then
|
||||
if declare -f log_error >/dev/null 2>&1; then
|
||||
log_error "Could not provision a valid 32-byte CHARON_ENCRYPTION_KEY"
|
||||
else
|
||||
echo "[ERROR] Could not provision a valid 32-byte CHARON_ENCRYPTION_KEY" >&2
|
||||
fi
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
|
||||
export CHARON_ENCRYPTION_KEY
|
||||
|
||||
if [[ "$key_source" == "generated" ]]; then
|
||||
if declare -f log_info >/dev/null 2>&1; then
|
||||
log_info "CHARON_ENCRYPTION_KEY not set; generated ephemeral test key"
|
||||
fi
|
||||
elif [[ "$key_source" == "regenerated" ]]; then
|
||||
if declare -f log_warn >/dev/null 2>&1; then
|
||||
log_warn "CHARON_ENCRYPTION_KEY invalid; generated ephemeral test key"
|
||||
elif declare -f log_info >/dev/null 2>&1; then
|
||||
log_info "CHARON_ENCRYPTION_KEY invalid; generated ephemeral test key"
|
||||
fi
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
# Export functions
|
||||
export -f validate_go_environment
|
||||
export -f validate_python_environment
|
||||
@@ -200,3 +295,4 @@ export -f validate_docker_environment
|
||||
export -f set_default_env
|
||||
export -f validate_project_structure
|
||||
export -f get_project_root
|
||||
export -f ensure_charon_encryption_key
|
||||
|
||||
@@ -95,6 +95,7 @@ run_codeql_scan() {
|
||||
local source_root=$2
|
||||
local db_name="codeql-db-${lang}"
|
||||
local sarif_file="codeql-results-${lang}.sarif"
|
||||
local suite=""
|
||||
local build_mode_args=()
|
||||
local codescanning_config="${PROJECT_ROOT}/.github/codeql/codeql-config.yml"
|
||||
|
||||
@@ -107,6 +108,9 @@ run_codeql_scan() {
|
||||
|
||||
if [[ "${lang}" == "javascript" ]]; then
|
||||
build_mode_args=(--build-mode=none)
|
||||
suite="codeql/javascript-queries:codeql-suites/javascript-security-and-quality.qls"
|
||||
else
|
||||
suite="codeql/go-queries:codeql-suites/go-security-and-quality.qls"
|
||||
fi
|
||||
|
||||
log_step "CODEQL" "Scanning ${lang} code in ${source_root}/"
|
||||
@@ -135,8 +139,9 @@ run_codeql_scan() {
|
||||
fi
|
||||
|
||||
# Run analysis
|
||||
log_info "Analyzing with Code Scanning config (CI-aligned query filters)..."
|
||||
log_info "Analyzing with CI-aligned suite: ${suite}"
|
||||
if ! codeql database analyze "${db_name}" \
|
||||
"${suite}" \
|
||||
--format=sarif-latest \
|
||||
--output="${sarif_file}" \
|
||||
--sarif-add-baseline-file-info \
|
||||
|
||||
7
.github/skills/security-scan-codeql.SKILL.md
vendored
7
.github/skills/security-scan-codeql.SKILL.md
vendored
@@ -136,8 +136,8 @@ This skill uses the **security-and-quality** suite to match CI:
|
||||
|
||||
| Language | Suite | Queries | Coverage |
|
||||
|----------|-------|---------|----------|
|
||||
| Go | go-security-and-quality.qls | 61 | Security + quality issues |
|
||||
| JavaScript | javascript-security-and-quality.qls | 204 | Security + quality issues |
|
||||
| Go | go-security-and-quality.qls | version-dependent | Security + quality issues |
|
||||
| JavaScript | javascript-security-and-quality.qls | version-dependent | Security + quality issues |
|
||||
|
||||
**Note:** This matches GitHub Actions CodeQL default configuration exactly.
|
||||
|
||||
@@ -260,8 +260,7 @@ This skill is specifically designed to match GitHub Actions CodeQL workflow:
|
||||
| Parameter | Local | CI | Aligned |
|
||||
|-----------|-------|-----|---------|
|
||||
| Query Suite | security-and-quality | security-and-quality | ✅ |
|
||||
| Go Queries | 61 | 61 | ✅ |
|
||||
| JS Queries | 204 | 204 | ✅ |
|
||||
| Query Expansion | version-dependent | version-dependent | ✅ (when versions match) |
|
||||
| Threading | auto | auto | ✅ |
|
||||
| Baseline Info | enabled | enabled | ✅ |
|
||||
|
||||
|
||||
@@ -26,6 +26,7 @@ validate_docker_environment || error_exit "Docker is required but not available"
|
||||
# Set defaults
|
||||
set_default_env "TRIVY_SEVERITY" "CRITICAL,HIGH,MEDIUM"
|
||||
set_default_env "TRIVY_TIMEOUT" "10m"
|
||||
set_default_env "TRIVY_DOCKER_RM" "true"
|
||||
|
||||
# Parse arguments
|
||||
# Default scanners exclude misconfig to avoid non-actionable policy bundle issues
|
||||
@@ -88,8 +89,19 @@ for d in "${SKIP_DIRS[@]}"; do
|
||||
SKIP_DIR_FLAGS+=("--skip-dirs" "/app/${d}")
|
||||
done
|
||||
|
||||
log_step "PREPARE" "Pulling latest Trivy Docker image"
|
||||
if ! docker pull aquasec/trivy:latest >/dev/null; then
|
||||
log_error "Failed to pull Docker image aquasec/trivy:latest"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Run Trivy via Docker
|
||||
if docker run --rm \
|
||||
DOCKER_RUN_ARGS=(run)
|
||||
if [[ "${TRIVY_DOCKER_RM}" == "true" ]]; then
|
||||
DOCKER_RUN_ARGS+=(--rm)
|
||||
fi
|
||||
|
||||
if docker "${DOCKER_RUN_ARGS[@]}" \
|
||||
-v "$(pwd):/app:ro" \
|
||||
-e "TRIVY_SEVERITY=${TRIVY_SEVERITY}" \
|
||||
-e "TRIVY_TIMEOUT=${TRIVY_TIMEOUT}" \
|
||||
|
||||
@@ -32,7 +32,7 @@ cd "${PROJECT_ROOT}"
|
||||
validate_project_structure "backend" "scripts/go-test-coverage.sh" || error_exit "Invalid project structure"
|
||||
|
||||
# Set default environment variables
|
||||
set_default_env "CHARON_MIN_COVERAGE" "85"
|
||||
set_default_env "CHARON_MIN_COVERAGE" "87"
|
||||
set_default_env "PERF_MAX_MS_GETSTATUS_P95" "25ms"
|
||||
set_default_env "PERF_MAX_MS_GETSTATUS_P95_PARALLEL" "50ms"
|
||||
set_default_env "PERF_MAX_MS_LISTDECISIONS_P95" "75ms"
|
||||
|
||||
@@ -25,6 +25,10 @@ requirements:
|
||||
version: ">=3.8"
|
||||
optional: false
|
||||
environment_variables:
|
||||
- name: "CHARON_ENCRYPTION_KEY"
|
||||
description: "Encryption key for backend test runtime. Auto-generated ephemerally by the script if missing/invalid."
|
||||
default: "(auto-generated for test run)"
|
||||
required: false
|
||||
- name: "CHARON_MIN_COVERAGE"
|
||||
description: "Minimum coverage percentage required (overrides default)"
|
||||
default: "85"
|
||||
@@ -125,6 +129,7 @@ For use in GitHub Actions or other CI/CD pipelines:
|
||||
|
||||
| Variable | Required | Default | Description |
|
||||
|----------|----------|---------|-------------|
|
||||
| CHARON_ENCRYPTION_KEY | No | auto-generated for test run | Backend test encryption key. If missing/invalid, an ephemeral 32-byte base64 key is generated for the run. |
|
||||
| CHARON_MIN_COVERAGE | No | 85 | Minimum coverage percentage required for success |
|
||||
| CPM_MIN_COVERAGE | No | 85 | Legacy name for minimum coverage (fallback) |
|
||||
| PERF_MAX_MS_GETSTATUS_P95 | No | 25ms | Max P95 latency for GetStatus endpoint |
|
||||
|
||||
@@ -11,10 +11,13 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
# Helper scripts are in .github/skills/scripts/
|
||||
SKILLS_SCRIPTS_DIR="$(cd "${SCRIPT_DIR}/../scripts" && pwd)"
|
||||
|
||||
# shellcheck disable=SC1091
|
||||
# shellcheck source=../scripts/_logging_helpers.sh
|
||||
source "${SKILLS_SCRIPTS_DIR}/_logging_helpers.sh"
|
||||
# shellcheck disable=SC1091
|
||||
# shellcheck source=../scripts/_error_handling_helpers.sh
|
||||
source "${SKILLS_SCRIPTS_DIR}/_error_handling_helpers.sh"
|
||||
# shellcheck disable=SC1091
|
||||
# shellcheck source=../scripts/_environment_helpers.sh
|
||||
source "${SKILLS_SCRIPTS_DIR}/_environment_helpers.sh"
|
||||
|
||||
@@ -24,6 +27,7 @@ PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
|
||||
# Validate environment
|
||||
log_step "ENVIRONMENT" "Validating prerequisites"
|
||||
validate_go_environment "1.23" || error_exit "Go 1.23+ is required"
|
||||
ensure_charon_encryption_key || error_exit "Failed to provision CHARON_ENCRYPTION_KEY for backend tests"
|
||||
|
||||
# Validate project structure
|
||||
log_step "VALIDATION" "Checking project structure"
|
||||
|
||||
10
.github/skills/test-backend-unit.SKILL.md
vendored
10
.github/skills/test-backend-unit.SKILL.md
vendored
@@ -21,7 +21,11 @@ requirements:
|
||||
- name: "go"
|
||||
version: ">=1.23"
|
||||
optional: false
|
||||
environment_variables: []
|
||||
environment_variables:
|
||||
- name: "CHARON_ENCRYPTION_KEY"
|
||||
description: "Encryption key for backend test runtime. Auto-generated ephemerally if missing/invalid."
|
||||
default: "(auto-generated for test run)"
|
||||
required: false
|
||||
parameters:
|
||||
- name: "verbose"
|
||||
type: "boolean"
|
||||
@@ -106,7 +110,9 @@ For use in GitHub Actions or other CI/CD pipelines:
|
||||
|
||||
## Environment Variables
|
||||
|
||||
No environment variables are required for this skill.
|
||||
| Variable | Required | Default | Description |
|
||||
|----------|----------|---------|-------------|
|
||||
| CHARON_ENCRYPTION_KEY | No | auto-generated for test run | Backend test encryption key. If missing/invalid, an ephemeral 32-byte base64 key is generated for the run. |
|
||||
|
||||
## Outputs
|
||||
|
||||
|
||||
@@ -32,7 +32,7 @@ cd "${PROJECT_ROOT}"
|
||||
validate_project_structure "frontend" "scripts/frontend-test-coverage.sh" || error_exit "Invalid project structure"
|
||||
|
||||
# Set default environment variables
|
||||
set_default_env "CHARON_MIN_COVERAGE" "85"
|
||||
set_default_env "CHARON_MIN_COVERAGE" "87"
|
||||
|
||||
# Execute the legacy script
|
||||
log_step "EXECUTION" "Running frontend tests with coverage"
|
||||
|
||||
54
.github/workflows/badge-ghcr-downloads.yml
vendored
54
.github/workflows/badge-ghcr-downloads.yml
vendored
@@ -1,54 +0,0 @@
|
||||
name: "Badge: GHCR downloads"
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Update periodically (GitHub schedules may be delayed)
|
||||
- cron: '17 * * * *'
|
||||
workflow_dispatch: {}
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
packages: read
|
||||
|
||||
concurrency:
|
||||
group: ghcr-downloads-badge
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
update:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout (main)
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
ref: main
|
||||
|
||||
- name: Set up Node
|
||||
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
|
||||
with:
|
||||
node-version: 24.13.1
|
||||
|
||||
- name: Update GHCR downloads badge
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GHCR_OWNER: ${{ github.repository_owner }}
|
||||
GHCR_PACKAGE: charon
|
||||
BADGE_OUTPUT: .github/badges/ghcr-downloads.json
|
||||
run: node scripts/update-ghcr-downloads-badge.mjs
|
||||
|
||||
- name: Commit and push (if changed)
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
if git diff --quiet; then
|
||||
echo "No changes."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
git add .github/badges/ghcr-downloads.json
|
||||
git commit -m "chore(badges): update GHCR downloads [skip ci]"
|
||||
git push origin HEAD:main
|
||||
4
.github/workflows/benchmark.yml
vendored
4
.github/workflows/benchmark.yml
vendored
@@ -3,6 +3,8 @@ name: Go Benchmark
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
@@ -33,7 +35,7 @@ jobs:
|
||||
ref: ${{ github.event.workflow_run.head_sha || github.sha }}
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
|
||||
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
|
||||
with:
|
||||
go-version: ${{ env.GO_VERSION }}
|
||||
cache-dependency-path: backend/go.sum
|
||||
|
||||
6
.github/workflows/codecov-upload.yml
vendored
6
.github/workflows/codecov-upload.yml
vendored
@@ -3,6 +3,8 @@ name: Upload Coverage to Codecov
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
run_backend:
|
||||
@@ -17,7 +19,7 @@ on:
|
||||
type: boolean
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.run_id }}
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
@@ -43,7 +45,7 @@ jobs:
|
||||
ref: ${{ github.sha }}
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
|
||||
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
|
||||
with:
|
||||
go-version: ${{ env.GO_VERSION }}
|
||||
cache-dependency-path: backend/go.sum
|
||||
|
||||
79
.github/workflows/codeql.yml
vendored
79
.github/workflows/codeql.yml
vendored
@@ -4,7 +4,7 @@ on:
|
||||
pull_request:
|
||||
branches: [main, nightly, development]
|
||||
push:
|
||||
branches: [main, nightly, development, 'feature/**', 'fix/**']
|
||||
branches: [main]
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '0 3 * * 1' # Mondays 03:00 UTC
|
||||
@@ -46,7 +46,7 @@ jobs:
|
||||
run: bash scripts/ci/check-codeql-parity.sh
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@9e907b5e64f6b83e7804b09294d44122997950d6 # v4
|
||||
uses: github/codeql-action/init@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
queries: security-and-quality
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
|
||||
- name: Setup Go
|
||||
if: matrix.language == 'go'
|
||||
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
|
||||
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
|
||||
with:
|
||||
go-version: 1.26.0
|
||||
cache-dependency-path: backend/go.sum
|
||||
@@ -86,10 +86,10 @@ jobs:
|
||||
run: mkdir -p sarif-results
|
||||
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@9e907b5e64f6b83e7804b09294d44122997950d6 # v4
|
||||
uses: github/codeql-action/autobuild@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@9e907b5e64f6b83e7804b09294d44122997950d6 # v4
|
||||
uses: github/codeql-action/analyze@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4
|
||||
with:
|
||||
category: "/language:${{ matrix.language }}"
|
||||
output: sarif-results/${{ matrix.language }}
|
||||
@@ -122,10 +122,28 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC2016
|
||||
EFFECTIVE_LEVELS_JQ='[
|
||||
.runs[] as $run
|
||||
| $run.results[]
|
||||
| . as $result
|
||||
| ($run.tool.driver.rules // []) as $rules
|
||||
| ((
|
||||
$result.level
|
||||
// (if (($result.ruleIndex | type) == "number") then ($rules[$result.ruleIndex].defaultConfiguration.level // empty) else empty end)
|
||||
// ([
|
||||
$rules[]?
|
||||
| select((.id // "") == ($result.ruleId // ""))
|
||||
| (.defaultConfiguration.level // empty)
|
||||
][0] // empty)
|
||||
// ""
|
||||
) | ascii_downcase)
|
||||
]'
|
||||
|
||||
echo "Found SARIF file: $SARIF_FILE"
|
||||
ERROR_COUNT=$(jq '[.runs[].results[] | select(.level == "error")] | length' "$SARIF_FILE")
|
||||
WARNING_COUNT=$(jq '[.runs[].results[] | select(.level == "warning")] | length' "$SARIF_FILE")
|
||||
NOTE_COUNT=$(jq '[.runs[].results[] | select(.level == "note")] | length' "$SARIF_FILE")
|
||||
ERROR_COUNT=$(jq -r "${EFFECTIVE_LEVELS_JQ} | map(select(. == \"error\")) | length" "$SARIF_FILE")
|
||||
WARNING_COUNT=$(jq -r "${EFFECTIVE_LEVELS_JQ} | map(select(. == \"warning\")) | length" "$SARIF_FILE")
|
||||
NOTE_COUNT=$(jq -r "${EFFECTIVE_LEVELS_JQ} | map(select(. == \"note\")) | length" "$SARIF_FILE")
|
||||
|
||||
{
|
||||
echo "**Findings:**"
|
||||
@@ -135,14 +153,32 @@ jobs:
|
||||
echo ""
|
||||
|
||||
if [ "$ERROR_COUNT" -gt 0 ]; then
|
||||
echo "❌ **CRITICAL:** High-severity security issues found!"
|
||||
echo "❌ **BLOCKING:** CodeQL error-level security issues found"
|
||||
echo ""
|
||||
echo "### Top Issues:"
|
||||
echo '```'
|
||||
jq -r '.runs[].results[] | select(.level == "error") | "\(.ruleId): \(.message.text)"' "$SARIF_FILE" | head -5
|
||||
# shellcheck disable=SC2016
|
||||
jq -r '
|
||||
.runs[] as $run
|
||||
| $run.results[]
|
||||
| . as $result
|
||||
| ($run.tool.driver.rules // []) as $rules
|
||||
| ((
|
||||
$result.level
|
||||
// (if (($result.ruleIndex | type) == "number") then ($rules[$result.ruleIndex].defaultConfiguration.level // empty) else empty end)
|
||||
// ([
|
||||
$rules[]?
|
||||
| select((.id // "") == ($result.ruleId // ""))
|
||||
| (.defaultConfiguration.level // empty)
|
||||
][0] // empty)
|
||||
// ""
|
||||
) | ascii_downcase) as $effectiveLevel
|
||||
| select($effectiveLevel == "error")
|
||||
| "\($effectiveLevel): \($result.ruleId // \"<unknown-rule>\"): \($result.message.text)"
|
||||
' "$SARIF_FILE" | head -5
|
||||
echo '```'
|
||||
else
|
||||
echo "✅ No high-severity issues found"
|
||||
echo "✅ No blocking CodeQL issues found"
|
||||
fi
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
@@ -169,9 +205,26 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ERROR_COUNT=$(jq '[.runs[].results[] | select(.level == "error")] | length' "$SARIF_FILE")
|
||||
# shellcheck disable=SC2016
|
||||
ERROR_COUNT=$(jq -r '[
|
||||
.runs[] as $run
|
||||
| $run.results[]
|
||||
| . as $result
|
||||
| ($run.tool.driver.rules // []) as $rules
|
||||
| ((
|
||||
$result.level
|
||||
// (if (($result.ruleIndex | type) == "number") then ($rules[$result.ruleIndex].defaultConfiguration.level // empty) else empty end)
|
||||
// ([
|
||||
$rules[]?
|
||||
| select((.id // "") == ($result.ruleId // ""))
|
||||
| (.defaultConfiguration.level // empty)
|
||||
][0] // empty)
|
||||
// ""
|
||||
) | ascii_downcase) as $effectiveLevel
|
||||
| select($effectiveLevel == "error")
|
||||
] | length' "$SARIF_FILE")
|
||||
|
||||
if [ "$ERROR_COUNT" -gt 0 ]; then
|
||||
echo "::error::CodeQL found $ERROR_COUNT high-severity security issues. Fix before merging."
|
||||
echo "::error::CodeQL found $ERROR_COUNT blocking findings (effective-level=error). Fix before merging. Policy: .github/security-severity-policy.yml"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
186
.github/workflows/container-prune.yml
vendored
186
.github/workflows/container-prune.yml
vendored
@@ -5,10 +5,6 @@ on:
|
||||
- cron: '0 3 * * 0' # Weekly: Sundays at 03:00 UTC
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
registries:
|
||||
description: 'Comma-separated registries to prune (ghcr,dockerhub)'
|
||||
required: false
|
||||
default: 'ghcr,dockerhub'
|
||||
keep_days:
|
||||
description: 'Number of days to retain images (unprotected)'
|
||||
required: false
|
||||
@@ -27,16 +23,17 @@ permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
prune:
|
||||
prune-ghcr:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
OWNER: ${{ github.repository_owner }}
|
||||
IMAGE_NAME: charon
|
||||
REGISTRIES: ${{ github.event.inputs.registries || 'ghcr,dockerhub' }}
|
||||
KEEP_DAYS: ${{ github.event.inputs.keep_days || '30' }}
|
||||
KEEP_LAST_N: ${{ github.event.inputs.keep_last_n || '30' }}
|
||||
DRY_RUN: ${{ github.event.inputs.dry_run || 'false' }}
|
||||
PROTECTED_REGEX: '["^v","^latest$","^main$","^develop$"]'
|
||||
DRY_RUN: ${{ github.event_name == 'pull_request' && 'true' || github.event.inputs.dry_run || 'false' }}
|
||||
PROTECTED_REGEX: '["^v?[0-9]+\\.[0-9]+\\.[0-9]+$","^latest$","^main$","^develop$"]'
|
||||
PRUNE_UNTAGGED: 'true'
|
||||
PRUNE_SBOM_TAGS: 'true'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
@@ -45,21 +42,19 @@ jobs:
|
||||
run: |
|
||||
sudo apt-get update && sudo apt-get install -y jq curl
|
||||
|
||||
- name: Run container prune
|
||||
- name: Run GHCR prune
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
run: |
|
||||
chmod +x scripts/prune-container-images.sh
|
||||
./scripts/prune-container-images.sh 2>&1 | tee prune-${{ github.run_id }}.log
|
||||
chmod +x scripts/prune-ghcr.sh
|
||||
./scripts/prune-ghcr.sh 2>&1 | tee prune-ghcr-${{ github.run_id }}.log
|
||||
|
||||
- name: Summarize prune results (space reclaimed)
|
||||
if: ${{ always() }}
|
||||
- name: Summarize GHCR results
|
||||
if: always()
|
||||
run: |
|
||||
set -euo pipefail
|
||||
SUMMARY_FILE=prune-summary.env
|
||||
LOG_FILE=prune-${{ github.run_id }}.log
|
||||
SUMMARY_FILE=prune-summary-ghcr.env
|
||||
LOG_FILE=prune-ghcr-${{ github.run_id }}.log
|
||||
|
||||
human() {
|
||||
local bytes=${1:-0}
|
||||
@@ -67,7 +62,7 @@ jobs:
|
||||
echo "0 B"
|
||||
return
|
||||
fi
|
||||
awk -v b="$bytes" 'function human(x){ split("B KiB MiB GiB TiB",u," "); i=0; while(x>1024){x/=1024;i++} printf "%0.2f %s", x, u[i+1]} END{human(b)}'
|
||||
awk -v b="$bytes" 'BEGIN { split("B KiB MiB GiB TiB",u," "); i=0; x=b; while(x>1024){x/=1024;i++} printf "%0.2f %s", x, u[i+1] }'
|
||||
}
|
||||
|
||||
if [ -f "$SUMMARY_FILE" ]; then
|
||||
@@ -77,34 +72,155 @@ jobs:
|
||||
TOTAL_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
|
||||
|
||||
{
|
||||
echo "## Container prune summary"
|
||||
echo "## GHCR prune summary"
|
||||
echo "- candidates: ${TOTAL_CANDIDATES} (≈ $(human "${TOTAL_CANDIDATES_BYTES}"))"
|
||||
echo "- deleted: ${TOTAL_DELETED} (≈ $(human "${TOTAL_DELETED_BYTES}"))"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
printf 'PRUNE_SUMMARY: candidates=%s candidates_bytes=%s deleted=%s deleted_bytes=%s\n' \
|
||||
"${TOTAL_CANDIDATES}" "${TOTAL_CANDIDATES_BYTES}" "${TOTAL_DELETED}" "${TOTAL_DELETED_BYTES}"
|
||||
echo "Deleted approximately: $(human "${TOTAL_DELETED_BYTES}")"
|
||||
echo "space_saved=$(human "${TOTAL_DELETED_BYTES}")" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
deleted_bytes=$(grep -oE '\( *approx +[0-9]+ bytes\)' "$LOG_FILE" | sed -E 's/.*approx +([0-9]+) bytes.*/\1/' | awk '{s+=$1} END {print s+0}' || true)
|
||||
deleted_count=$(grep -cE 'deleting |DRY RUN: would delete' "$LOG_FILE" || true)
|
||||
|
||||
{
|
||||
echo "## Container prune summary"
|
||||
echo "## GHCR prune summary"
|
||||
echo "- deleted (approx): ${deleted_count} (≈ $(human "${deleted_bytes}"))"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
printf 'PRUNE_SUMMARY: deleted_approx=%s deleted_bytes=%s\n' "${deleted_count}" "${deleted_bytes}"
|
||||
echo "Deleted approximately: $(human "${deleted_bytes}")"
|
||||
echo "space_saved=$(human "${deleted_bytes}")" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Upload prune artifacts
|
||||
if: ${{ always() }}
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
|
||||
- name: Upload GHCR prune artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
|
||||
with:
|
||||
name: prune-log-${{ github.run_id }}
|
||||
name: prune-ghcr-log-${{ github.run_id }}
|
||||
path: |
|
||||
prune-${{ github.run_id }}.log
|
||||
prune-summary.env
|
||||
prune-ghcr-${{ github.run_id }}.log
|
||||
prune-summary-ghcr.env
|
||||
|
||||
prune-dockerhub:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
OWNER: ${{ github.repository_owner }}
|
||||
IMAGE_NAME: charon
|
||||
KEEP_DAYS: ${{ github.event.inputs.keep_days || '30' }}
|
||||
KEEP_LAST_N: ${{ github.event.inputs.keep_last_n || '30' }}
|
||||
DRY_RUN: ${{ github.event_name == 'pull_request' && 'true' || github.event.inputs.dry_run || 'false' }}
|
||||
PROTECTED_REGEX: '["^v?[0-9]+\\.[0-9]+\\.[0-9]+$","^latest$","^main$","^develop$"]'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
|
||||
- name: Install tools
|
||||
run: |
|
||||
sudo apt-get update && sudo apt-get install -y jq curl
|
||||
|
||||
- name: Run Docker Hub prune
|
||||
env:
|
||||
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
run: |
|
||||
chmod +x scripts/prune-dockerhub.sh
|
||||
./scripts/prune-dockerhub.sh 2>&1 | tee prune-dockerhub-${{ github.run_id }}.log
|
||||
|
||||
- name: Summarize Docker Hub results
|
||||
if: always()
|
||||
run: |
|
||||
set -euo pipefail
|
||||
SUMMARY_FILE=prune-summary-dockerhub.env
|
||||
LOG_FILE=prune-dockerhub-${{ github.run_id }}.log
|
||||
|
||||
human() {
|
||||
local bytes=${1:-0}
|
||||
if [ -z "$bytes" ] || [ "$bytes" -eq 0 ]; then
|
||||
echo "0 B"
|
||||
return
|
||||
fi
|
||||
awk -v b="$bytes" 'BEGIN { split("B KiB MiB GiB TiB",u," "); i=0; x=b; while(x>1024){x/=1024;i++} printf "%0.2f %s", x, u[i+1] }'
|
||||
}
|
||||
|
||||
if [ -f "$SUMMARY_FILE" ]; then
|
||||
TOTAL_CANDIDATES=$(grep -E '^TOTAL_CANDIDATES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
|
||||
TOTAL_CANDIDATES_BYTES=$(grep -E '^TOTAL_CANDIDATES_BYTES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
|
||||
TOTAL_DELETED=$(grep -E '^TOTAL_DELETED=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
|
||||
TOTAL_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' "$SUMMARY_FILE" | cut -d= -f2 || echo 0)
|
||||
|
||||
{
|
||||
echo "## Docker Hub prune summary"
|
||||
echo "- candidates: ${TOTAL_CANDIDATES} (≈ $(human "${TOTAL_CANDIDATES_BYTES}"))"
|
||||
echo "- deleted: ${TOTAL_DELETED} (≈ $(human "${TOTAL_DELETED_BYTES}"))"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
else
|
||||
deleted_bytes=$(grep -oE '\( *approx +[0-9]+ bytes\)' "$LOG_FILE" | sed -E 's/.*approx +([0-9]+) bytes.*/\1/' | awk '{s+=$1} END {print s+0}' || true)
|
||||
deleted_count=$(grep -cE 'deleting |DRY RUN: would delete' "$LOG_FILE" || true)
|
||||
|
||||
{
|
||||
echo "## Docker Hub prune summary"
|
||||
echo "- deleted (approx): ${deleted_count} (≈ $(human "${deleted_bytes}"))"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
fi
|
||||
|
||||
- name: Upload Docker Hub prune artifacts
|
||||
if: always()
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
|
||||
with:
|
||||
name: prune-dockerhub-log-${{ github.run_id }}
|
||||
path: |
|
||||
prune-dockerhub-${{ github.run_id }}.log
|
||||
prune-summary-dockerhub.env
|
||||
|
||||
summarize:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [prune-ghcr, prune-dockerhub]
|
||||
if: always()
|
||||
steps:
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8
|
||||
with:
|
||||
pattern: prune-*-log-${{ github.run_id }}
|
||||
merge-multiple: true
|
||||
|
||||
- name: Combined summary
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
human() {
|
||||
local bytes=${1:-0}
|
||||
if [ -z "$bytes" ] || [ "$bytes" -eq 0 ]; then
|
||||
echo "0 B"
|
||||
return
|
||||
fi
|
||||
awk -v b="$bytes" 'BEGIN { split("B KiB MiB GiB TiB",u," "); i=0; x=b; while(x>1024){x/=1024;i++} printf "%0.2f %s", x, u[i+1] }'
|
||||
}
|
||||
|
||||
GHCR_CANDIDATES=0 GHCR_CANDIDATES_BYTES=0 GHCR_DELETED=0 GHCR_DELETED_BYTES=0
|
||||
if [ -f prune-summary-ghcr.env ]; then
|
||||
GHCR_CANDIDATES=$(grep -E '^TOTAL_CANDIDATES=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
|
||||
GHCR_CANDIDATES_BYTES=$(grep -E '^TOTAL_CANDIDATES_BYTES=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
|
||||
GHCR_DELETED=$(grep -E '^TOTAL_DELETED=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
|
||||
GHCR_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' prune-summary-ghcr.env | cut -d= -f2 || echo 0)
|
||||
fi
|
||||
|
||||
HUB_CANDIDATES=0 HUB_CANDIDATES_BYTES=0 HUB_DELETED=0 HUB_DELETED_BYTES=0
|
||||
if [ -f prune-summary-dockerhub.env ]; then
|
||||
HUB_CANDIDATES=$(grep -E '^TOTAL_CANDIDATES=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
|
||||
HUB_CANDIDATES_BYTES=$(grep -E '^TOTAL_CANDIDATES_BYTES=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
|
||||
HUB_DELETED=$(grep -E '^TOTAL_DELETED=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
|
||||
HUB_DELETED_BYTES=$(grep -E '^TOTAL_DELETED_BYTES=' prune-summary-dockerhub.env | cut -d= -f2 || echo 0)
|
||||
fi
|
||||
|
||||
TOTAL_CANDIDATES=$((GHCR_CANDIDATES + HUB_CANDIDATES))
|
||||
TOTAL_CANDIDATES_BYTES=$((GHCR_CANDIDATES_BYTES + HUB_CANDIDATES_BYTES))
|
||||
TOTAL_DELETED=$((GHCR_DELETED + HUB_DELETED))
|
||||
TOTAL_DELETED_BYTES=$((GHCR_DELETED_BYTES + HUB_DELETED_BYTES))
|
||||
|
||||
{
|
||||
echo "## Combined container prune summary"
|
||||
echo ""
|
||||
echo "| Registry | Candidates | Deleted | Space Reclaimed |"
|
||||
echo "|----------|------------|---------|-----------------|"
|
||||
echo "| GHCR | ${GHCR_CANDIDATES} | ${GHCR_DELETED} | $(human "${GHCR_DELETED_BYTES}") |"
|
||||
echo "| Docker Hub | ${HUB_CANDIDATES} | ${HUB_DELETED} | $(human "${HUB_DELETED_BYTES}") |"
|
||||
echo "| **Total** | **${TOTAL_CANDIDATES}** | **${TOTAL_DELETED}** | **$(human "${TOTAL_DELETED_BYTES}")** |"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
printf 'PRUNE_SUMMARY: candidates=%s candidates_bytes=%s deleted=%s deleted_bytes=%s\n' \
|
||||
"${TOTAL_CANDIDATES}" "${TOTAL_CANDIDATES_BYTES}" "${TOTAL_DELETED}" "${TOTAL_DELETED_BYTES}"
|
||||
echo "Total space reclaimed: $(human "${TOTAL_DELETED_BYTES}")"
|
||||
|
||||
61
.github/workflows/docker-build.yml
vendored
61
.github/workflows/docker-build.yml
vendored
@@ -23,7 +23,11 @@ name: Docker Build, Publish & Test
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches: [main]
|
||||
workflow_dispatch:
|
||||
workflow_run:
|
||||
workflows: ["Docker Lint"]
|
||||
types: [completed]
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.head_ref || github.ref_name }}
|
||||
@@ -38,7 +42,7 @@ env:
|
||||
TRIGGER_HEAD_SHA: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }}
|
||||
TRIGGER_REF: ${{ github.event_name == 'workflow_run' && format('refs/heads/{0}', github.event.workflow_run.head_branch) || github.ref }}
|
||||
TRIGGER_HEAD_REF: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.head_ref }}
|
||||
TRIGGER_PR_NUMBER: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.pull_requests[0].number || github.event.pull_request.number }}
|
||||
TRIGGER_PR_NUMBER: ${{ github.event_name == 'workflow_run' && join(github.event.workflow_run.pull_requests.*.number, '') || github.event.pull_request.number }}
|
||||
TRIGGER_ACTOR: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.actor.login || github.actor }}
|
||||
|
||||
jobs:
|
||||
@@ -339,7 +343,7 @@ jobs:
|
||||
|
||||
- name: Upload Image Artifact
|
||||
if: success() && steps.skip.outputs.skip_build != 'true' && env.TRIGGER_EVENT == 'pull_request'
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: ${{ env.TRIGGER_EVENT == 'pull_request' && format('pr-image-{0}', env.TRIGGER_PR_NUMBER) || 'push-image' }}
|
||||
path: /tmp/charon-pr-image.tar
|
||||
@@ -527,7 +531,7 @@ jobs:
|
||||
|
||||
- name: Run Trivy scan (table output)
|
||||
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
|
||||
uses: aquasecurity/trivy-action@c1824fd6edce30d7ab345a9989de00bbd46ef284 # 0.34.0
|
||||
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
|
||||
with:
|
||||
image-ref: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}
|
||||
format: 'table'
|
||||
@@ -538,7 +542,7 @@ jobs:
|
||||
- name: Run Trivy vulnerability scanner (SARIF)
|
||||
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
|
||||
id: trivy
|
||||
uses: aquasecurity/trivy-action@c1824fd6edce30d7ab345a9989de00bbd46ef284 # 0.34.0
|
||||
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
|
||||
with:
|
||||
image-ref: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}
|
||||
format: 'sarif'
|
||||
@@ -558,15 +562,16 @@ jobs:
|
||||
|
||||
- name: Upload Trivy results
|
||||
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.trivy-check.outputs.exists == 'true'
|
||||
uses: github/codeql-action/upload-sarif@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3
|
||||
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
|
||||
with:
|
||||
sarif_file: 'trivy-results.sarif'
|
||||
category: '.github/workflows/docker-build.yml:build-and-push'
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Generate SBOM (Software Bill of Materials) for supply chain security
|
||||
# Only for production builds (main/development) - feature branches use downstream supply-chain-pr.yml
|
||||
- name: Generate SBOM
|
||||
uses: anchore/sbom-action@28d71544de8eaf1b958d335707167c5f783590ad # v0.22.2
|
||||
uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
|
||||
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
|
||||
with:
|
||||
image: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}
|
||||
@@ -575,7 +580,7 @@ jobs:
|
||||
|
||||
# Create verifiable attestation for the SBOM
|
||||
- name: Attest SBOM
|
||||
uses: actions/attest-sbom@4651f806c01d8637787e274ac3bdf724ef169f34 # v3.0.0
|
||||
uses: actions/attest-sbom@07e74fc4e78d1aad915e867f9a094073a9f71527 # v4.0.0
|
||||
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
|
||||
with:
|
||||
subject-name: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
@@ -684,7 +689,7 @@ jobs:
|
||||
echo "✅ Image freshness validated"
|
||||
|
||||
- name: Run Trivy scan on PR image (table output)
|
||||
uses: aquasecurity/trivy-action@c1824fd6edce30d7ab345a9989de00bbd46ef284 # 0.34.0
|
||||
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
|
||||
with:
|
||||
image-ref: ${{ steps.pr-image.outputs.image_ref }}
|
||||
format: 'table'
|
||||
@@ -693,7 +698,7 @@ jobs:
|
||||
|
||||
- name: Run Trivy scan on PR image (SARIF - blocking)
|
||||
id: trivy-scan
|
||||
uses: aquasecurity/trivy-action@c1824fd6edce30d7ab345a9989de00bbd46ef284 # 0.34.0
|
||||
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
|
||||
with:
|
||||
image-ref: ${{ steps.pr-image.outputs.image_ref }}
|
||||
format: 'sarif'
|
||||
@@ -702,13 +707,47 @@ jobs:
|
||||
exit-code: '1' # Intended to block, but continued on error for now
|
||||
continue-on-error: true
|
||||
|
||||
- name: Upload Trivy scan results
|
||||
- name: Check Trivy PR SARIF exists
|
||||
if: always()
|
||||
uses: github/codeql-action/upload-sarif@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3
|
||||
id: trivy-pr-check
|
||||
run: |
|
||||
if [ -f trivy-pr-results.sarif ]; then
|
||||
echo "exists=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "exists=false" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Upload Trivy scan results
|
||||
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
|
||||
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
|
||||
with:
|
||||
sarif_file: 'trivy-pr-results.sarif'
|
||||
category: 'docker-pr-image'
|
||||
|
||||
- name: Upload Trivy compatibility results (docker-build category)
|
||||
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
|
||||
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
|
||||
with:
|
||||
sarif_file: 'trivy-pr-results.sarif'
|
||||
category: '.github/workflows/docker-build.yml:build-and-push'
|
||||
continue-on-error: true
|
||||
|
||||
- name: Upload Trivy compatibility results (docker-publish alias)
|
||||
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
|
||||
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
|
||||
with:
|
||||
sarif_file: 'trivy-pr-results.sarif'
|
||||
category: '.github/workflows/docker-publish.yml:build-and-push'
|
||||
continue-on-error: true
|
||||
|
||||
- name: Upload Trivy compatibility results (nightly alias)
|
||||
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
|
||||
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
|
||||
with:
|
||||
sarif_file: 'trivy-pr-results.sarif'
|
||||
category: 'trivy-nightly'
|
||||
continue-on-error: true
|
||||
|
||||
- name: Create scan summary
|
||||
if: always()
|
||||
run: |
|
||||
|
||||
187
.github/workflows/e2e-tests-split.yml
vendored
187
.github/workflows/e2e-tests-split.yml
vendored
@@ -80,7 +80,6 @@ on:
|
||||
default: false
|
||||
type: boolean
|
||||
pull_request:
|
||||
push:
|
||||
|
||||
env:
|
||||
NODE_VERSION: '20'
|
||||
@@ -96,7 +95,7 @@ env:
|
||||
CI_LOG_LEVEL: 'verbose'
|
||||
|
||||
concurrency:
|
||||
group: e2e-split-${{ github.workflow }}-${{ github.ref }}-${{ github.event.pull_request.head.sha || github.sha }}
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
@@ -143,7 +142,7 @@ jobs:
|
||||
|
||||
- name: Set up Go
|
||||
if: steps.resolve-image.outputs.image_source == 'build'
|
||||
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
|
||||
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
|
||||
with:
|
||||
go-version: ${{ env.GO_VERSION }}
|
||||
cache: true
|
||||
@@ -191,7 +190,7 @@ jobs:
|
||||
|
||||
- name: Upload Docker image artifact
|
||||
if: steps.resolve-image.outputs.image_source == 'build'
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: docker-image
|
||||
path: charon-e2e-image.tar
|
||||
@@ -230,6 +229,7 @@ jobs:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
if: needs.build.outputs.image_source == 'registry'
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
@@ -247,7 +247,7 @@ jobs:
|
||||
|
||||
- name: Download Docker image artifact
|
||||
if: needs.build.outputs.image_source == 'build'
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
@@ -347,7 +347,7 @@ jobs:
|
||||
|
||||
- name: Upload HTML report (Chromium Security)
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: playwright-report-chromium-security
|
||||
path: playwright-report/
|
||||
@@ -355,7 +355,7 @@ jobs:
|
||||
|
||||
- name: Upload Chromium Security coverage (if enabled)
|
||||
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: e2e-coverage-chromium-security
|
||||
path: coverage/e2e/
|
||||
@@ -363,7 +363,7 @@ jobs:
|
||||
|
||||
- name: Upload test traces on failure
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: traces-chromium-security
|
||||
path: test-results/**/*.zip
|
||||
@@ -382,7 +382,7 @@ jobs:
|
||||
|
||||
- name: Upload diagnostics
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: e2e-diagnostics-chromium-security
|
||||
path: diagnostics/
|
||||
@@ -395,7 +395,7 @@ jobs:
|
||||
|
||||
- name: Upload Docker logs on failure
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: docker-logs-chromium-security
|
||||
path: docker-logs-chromium-security.txt
|
||||
@@ -431,6 +431,7 @@ jobs:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
if: needs.build.outputs.image_source == 'registry'
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
@@ -448,7 +449,7 @@ jobs:
|
||||
|
||||
- name: Download Docker image artifact
|
||||
if: needs.build.outputs.image_source == 'build'
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
@@ -556,7 +557,7 @@ jobs:
|
||||
|
||||
- name: Upload HTML report (Firefox Security)
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: playwright-report-firefox-security
|
||||
path: playwright-report/
|
||||
@@ -564,7 +565,7 @@ jobs:
|
||||
|
||||
- name: Upload Firefox Security coverage (if enabled)
|
||||
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: e2e-coverage-firefox-security
|
||||
path: coverage/e2e/
|
||||
@@ -572,7 +573,7 @@ jobs:
|
||||
|
||||
- name: Upload test traces on failure
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: traces-firefox-security
|
||||
path: test-results/**/*.zip
|
||||
@@ -591,7 +592,7 @@ jobs:
|
||||
|
||||
- name: Upload diagnostics
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: e2e-diagnostics-firefox-security
|
||||
path: diagnostics/
|
||||
@@ -604,7 +605,7 @@ jobs:
|
||||
|
||||
- name: Upload Docker logs on failure
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: docker-logs-firefox-security
|
||||
path: docker-logs-firefox-security.txt
|
||||
@@ -640,6 +641,7 @@ jobs:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
if: needs.build.outputs.image_source == 'registry'
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
@@ -657,7 +659,7 @@ jobs:
|
||||
|
||||
- name: Download Docker image artifact
|
||||
if: needs.build.outputs.image_source == 'build'
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
@@ -765,7 +767,7 @@ jobs:
|
||||
|
||||
- name: Upload HTML report (WebKit Security)
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: playwright-report-webkit-security
|
||||
path: playwright-report/
|
||||
@@ -773,7 +775,7 @@ jobs:
|
||||
|
||||
- name: Upload WebKit Security coverage (if enabled)
|
||||
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: e2e-coverage-webkit-security
|
||||
path: coverage/e2e/
|
||||
@@ -781,7 +783,7 @@ jobs:
|
||||
|
||||
- name: Upload test traces on failure
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: traces-webkit-security
|
||||
path: test-results/**/*.zip
|
||||
@@ -800,7 +802,7 @@ jobs:
|
||||
|
||||
- name: Upload diagnostics
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: e2e-diagnostics-webkit-security
|
||||
path: diagnostics/
|
||||
@@ -813,7 +815,7 @@ jobs:
|
||||
|
||||
- name: Upload Docker logs on failure
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: docker-logs-webkit-security
|
||||
path: docker-logs-webkit-security.txt
|
||||
@@ -861,6 +863,39 @@ jobs:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
|
||||
- name: Preflight disk diagnostics (before cleanup)
|
||||
run: |
|
||||
echo "Disk usage before cleanup"
|
||||
df -h
|
||||
docker system df || true
|
||||
|
||||
- name: Preflight cleanup (best effort)
|
||||
run: |
|
||||
echo "Best-effort cleanup for CI runner"
|
||||
docker system prune -af || true
|
||||
rm -rf playwright-report playwright-output coverage/e2e test-results diagnostics || true
|
||||
rm -f docker-logs-*.txt charon-e2e-image.tar || true
|
||||
|
||||
- name: Preflight disk diagnostics and threshold gate
|
||||
run: |
|
||||
set -euo pipefail
|
||||
MIN_FREE_BYTES=$((5 * 1024 * 1024 * 1024))
|
||||
echo "Disk usage after cleanup"
|
||||
df -h
|
||||
docker system df || true
|
||||
|
||||
WORKSPACE_PATH="${GITHUB_WORKSPACE:-$PWD}"
|
||||
FREE_ROOT_BYTES=$(df -PB1 / | awk 'NR==2 {print $4}')
|
||||
FREE_WORKSPACE_BYTES=$(df -PB1 "$WORKSPACE_PATH" | awk 'NR==2 {print $4}')
|
||||
|
||||
echo "Free bytes on /: $FREE_ROOT_BYTES"
|
||||
echo "Free bytes on workspace ($WORKSPACE_PATH): $FREE_WORKSPACE_BYTES"
|
||||
|
||||
if [ "$FREE_ROOT_BYTES" -lt "$MIN_FREE_BYTES" ] || [ "$FREE_WORKSPACE_BYTES" -lt "$MIN_FREE_BYTES" ]; then
|
||||
echo "::error::[CI_DISK_PRESSURE] Insufficient free disk after cleanup. Required >= 5GiB on both / and workspace. root=${FREE_ROOT_BYTES}B workspace=${FREE_WORKSPACE_BYTES}B"
|
||||
exit 42
|
||||
fi
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
if: needs.build.outputs.image_source == 'registry'
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
@@ -878,7 +913,7 @@ jobs:
|
||||
|
||||
- name: Download Docker image artifact
|
||||
if: needs.build.outputs.image_source == 'build'
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
@@ -968,7 +1003,7 @@ jobs:
|
||||
|
||||
- name: Upload HTML report (Chromium shard ${{ matrix.shard }})
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: playwright-report-chromium-shard-${{ matrix.shard }}
|
||||
path: playwright-report/
|
||||
@@ -976,7 +1011,7 @@ jobs:
|
||||
|
||||
- name: Upload Playwright output (Chromium shard ${{ matrix.shard }})
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: playwright-output-chromium-shard-${{ matrix.shard }}
|
||||
path: playwright-output/chromium-shard-${{ matrix.shard }}/
|
||||
@@ -984,7 +1019,7 @@ jobs:
|
||||
|
||||
- name: Upload Chromium coverage (if enabled)
|
||||
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: e2e-coverage-chromium-shard-${{ matrix.shard }}
|
||||
path: coverage/e2e/
|
||||
@@ -992,7 +1027,7 @@ jobs:
|
||||
|
||||
- name: Upload test traces on failure
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: traces-chromium-shard-${{ matrix.shard }}
|
||||
path: test-results/**/*.zip
|
||||
@@ -1011,7 +1046,7 @@ jobs:
|
||||
|
||||
- name: Upload diagnostics
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: e2e-diagnostics-chromium-shard-${{ matrix.shard }}
|
||||
path: diagnostics/
|
||||
@@ -1024,7 +1059,7 @@ jobs:
|
||||
|
||||
- name: Upload Docker logs on failure
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: docker-logs-chromium-shard-${{ matrix.shard }}
|
||||
path: docker-logs-chromium-shard-${{ matrix.shard }}.txt
|
||||
@@ -1065,6 +1100,39 @@ jobs:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
|
||||
- name: Preflight disk diagnostics (before cleanup)
|
||||
run: |
|
||||
echo "Disk usage before cleanup"
|
||||
df -h
|
||||
docker system df || true
|
||||
|
||||
- name: Preflight cleanup (best effort)
|
||||
run: |
|
||||
echo "Best-effort cleanup for CI runner"
|
||||
docker system prune -af || true
|
||||
rm -rf playwright-report playwright-output coverage/e2e test-results diagnostics || true
|
||||
rm -f docker-logs-*.txt charon-e2e-image.tar || true
|
||||
|
||||
- name: Preflight disk diagnostics and threshold gate
|
||||
run: |
|
||||
set -euo pipefail
|
||||
MIN_FREE_BYTES=$((5 * 1024 * 1024 * 1024))
|
||||
echo "Disk usage after cleanup"
|
||||
df -h
|
||||
docker system df || true
|
||||
|
||||
WORKSPACE_PATH="${GITHUB_WORKSPACE:-$PWD}"
|
||||
FREE_ROOT_BYTES=$(df -PB1 / | awk 'NR==2 {print $4}')
|
||||
FREE_WORKSPACE_BYTES=$(df -PB1 "$WORKSPACE_PATH" | awk 'NR==2 {print $4}')
|
||||
|
||||
echo "Free bytes on /: $FREE_ROOT_BYTES"
|
||||
echo "Free bytes on workspace ($WORKSPACE_PATH): $FREE_WORKSPACE_BYTES"
|
||||
|
||||
if [ "$FREE_ROOT_BYTES" -lt "$MIN_FREE_BYTES" ] || [ "$FREE_WORKSPACE_BYTES" -lt "$MIN_FREE_BYTES" ]; then
|
||||
echo "::error::[CI_DISK_PRESSURE] Insufficient free disk after cleanup. Required >= 5GiB on both / and workspace. root=${FREE_ROOT_BYTES}B workspace=${FREE_WORKSPACE_BYTES}B"
|
||||
exit 42
|
||||
fi
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
if: needs.build.outputs.image_source == 'registry'
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
@@ -1082,7 +1150,7 @@ jobs:
|
||||
|
||||
- name: Download Docker image artifact
|
||||
if: needs.build.outputs.image_source == 'build'
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
@@ -1180,7 +1248,7 @@ jobs:
|
||||
|
||||
- name: Upload HTML report (Firefox shard ${{ matrix.shard }})
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: playwright-report-firefox-shard-${{ matrix.shard }}
|
||||
path: playwright-report/
|
||||
@@ -1188,7 +1256,7 @@ jobs:
|
||||
|
||||
- name: Upload Playwright output (Firefox shard ${{ matrix.shard }})
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: playwright-output-firefox-shard-${{ matrix.shard }}
|
||||
path: playwright-output/firefox-shard-${{ matrix.shard }}/
|
||||
@@ -1196,7 +1264,7 @@ jobs:
|
||||
|
||||
- name: Upload Firefox coverage (if enabled)
|
||||
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: e2e-coverage-firefox-shard-${{ matrix.shard }}
|
||||
path: coverage/e2e/
|
||||
@@ -1204,7 +1272,7 @@ jobs:
|
||||
|
||||
- name: Upload test traces on failure
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: traces-firefox-shard-${{ matrix.shard }}
|
||||
path: test-results/**/*.zip
|
||||
@@ -1223,7 +1291,7 @@ jobs:
|
||||
|
||||
- name: Upload diagnostics
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: e2e-diagnostics-firefox-shard-${{ matrix.shard }}
|
||||
path: diagnostics/
|
||||
@@ -1236,7 +1304,7 @@ jobs:
|
||||
|
||||
- name: Upload Docker logs on failure
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: docker-logs-firefox-shard-${{ matrix.shard }}
|
||||
path: docker-logs-firefox-shard-${{ matrix.shard }}.txt
|
||||
@@ -1277,6 +1345,39 @@ jobs:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
|
||||
- name: Preflight disk diagnostics (before cleanup)
|
||||
run: |
|
||||
echo "Disk usage before cleanup"
|
||||
df -h
|
||||
docker system df || true
|
||||
|
||||
- name: Preflight cleanup (best effort)
|
||||
run: |
|
||||
echo "Best-effort cleanup for CI runner"
|
||||
docker system prune -af || true
|
||||
rm -rf playwright-report playwright-output coverage/e2e test-results diagnostics || true
|
||||
rm -f docker-logs-*.txt charon-e2e-image.tar || true
|
||||
|
||||
- name: Preflight disk diagnostics and threshold gate
|
||||
run: |
|
||||
set -euo pipefail
|
||||
MIN_FREE_BYTES=$((5 * 1024 * 1024 * 1024))
|
||||
echo "Disk usage after cleanup"
|
||||
df -h
|
||||
docker system df || true
|
||||
|
||||
WORKSPACE_PATH="${GITHUB_WORKSPACE:-$PWD}"
|
||||
FREE_ROOT_BYTES=$(df -PB1 / | awk 'NR==2 {print $4}')
|
||||
FREE_WORKSPACE_BYTES=$(df -PB1 "$WORKSPACE_PATH" | awk 'NR==2 {print $4}')
|
||||
|
||||
echo "Free bytes on /: $FREE_ROOT_BYTES"
|
||||
echo "Free bytes on workspace ($WORKSPACE_PATH): $FREE_WORKSPACE_BYTES"
|
||||
|
||||
if [ "$FREE_ROOT_BYTES" -lt "$MIN_FREE_BYTES" ] || [ "$FREE_WORKSPACE_BYTES" -lt "$MIN_FREE_BYTES" ]; then
|
||||
echo "::error::[CI_DISK_PRESSURE] Insufficient free disk after cleanup. Required >= 5GiB on both / and workspace. root=${FREE_ROOT_BYTES}B workspace=${FREE_WORKSPACE_BYTES}B"
|
||||
exit 42
|
||||
fi
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
if: needs.build.outputs.image_source == 'registry'
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
@@ -1294,7 +1395,7 @@ jobs:
|
||||
|
||||
- name: Download Docker image artifact
|
||||
if: needs.build.outputs.image_source == 'build'
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
@@ -1392,7 +1493,7 @@ jobs:
|
||||
|
||||
- name: Upload HTML report (WebKit shard ${{ matrix.shard }})
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
|
||||
with:
|
||||
name: playwright-report-webkit-shard-${{ matrix.shard }}
|
||||
path: playwright-report/
|
||||
@@ -1400,7 +1501,7 @@ jobs:
|
||||
|
||||
- name: Upload Playwright output (WebKit shard ${{ matrix.shard }})
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
|
||||
with:
|
||||
name: playwright-output-webkit-shard-${{ matrix.shard }}
|
||||
path: playwright-output/webkit-shard-${{ matrix.shard }}/
|
||||
@@ -1408,7 +1509,7 @@ jobs:
|
||||
|
||||
- name: Upload WebKit coverage (if enabled)
|
||||
if: always() && (inputs.playwright_coverage == 'true' || vars.PLAYWRIGHT_COVERAGE == '1')
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
|
||||
with:
|
||||
name: e2e-coverage-webkit-shard-${{ matrix.shard }}
|
||||
path: coverage/e2e/
|
||||
@@ -1416,7 +1517,7 @@ jobs:
|
||||
|
||||
- name: Upload test traces on failure
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
|
||||
with:
|
||||
name: traces-webkit-shard-${{ matrix.shard }}
|
||||
path: test-results/**/*.zip
|
||||
@@ -1435,7 +1536,7 @@ jobs:
|
||||
|
||||
- name: Upload diagnostics
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
|
||||
with:
|
||||
name: e2e-diagnostics-webkit-shard-${{ matrix.shard }}
|
||||
path: diagnostics/
|
||||
@@ -1448,7 +1549,7 @@ jobs:
|
||||
|
||||
- name: Upload Docker logs on failure
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
|
||||
with:
|
||||
name: docker-logs-webkit-shard-${{ matrix.shard }}
|
||||
path: docker-logs-webkit-shard-${{ matrix.shard }}.txt
|
||||
|
||||
1170
.github/workflows/e2e-tests-split.yml.backup
vendored
1170
.github/workflows/e2e-tests-split.yml.backup
vendored
File diff suppressed because it is too large
Load Diff
632
.github/workflows/e2e-tests.yml.backup
vendored
632
.github/workflows/e2e-tests.yml.backup
vendored
@@ -1,632 +0,0 @@
|
||||
# E2E Tests Workflow
|
||||
# Runs Playwright E2E tests with sharding for faster execution
|
||||
# and collects frontend code coverage via @bgotink/playwright-coverage
|
||||
#
|
||||
# Test Execution Architecture:
|
||||
# - Parallel Sharding: Tests split across 4 shards for speed
|
||||
# - Per-Shard HTML Reports: Each shard generates its own HTML report
|
||||
# - No Merging Needed: Smaller reports are easier to debug
|
||||
# - Trace Collection: Failure traces captured for debugging
|
||||
#
|
||||
# Coverage Architecture:
|
||||
# - Backend: Docker container at localhost:8080 (API)
|
||||
# - Frontend: Vite dev server at localhost:3000 (serves source files)
|
||||
# - Tests hit Vite, which proxies API calls to Docker
|
||||
# - V8 coverage maps directly to source files for accurate reporting
|
||||
# - Coverage disabled by default (requires PLAYWRIGHT_COVERAGE=1)
|
||||
#
|
||||
# Triggers:
|
||||
# - Pull requests to main/develop (with path filters)
|
||||
# - Push to main branch
|
||||
# - Manual dispatch with browser selection
|
||||
#
|
||||
# Jobs:
|
||||
# 1. build: Build Docker image and upload as artifact
|
||||
# 2. e2e-tests: Run tests in parallel shards, upload per-shard HTML reports
|
||||
# 3. test-summary: Generate summary with links to shard reports
|
||||
# 4. comment-results: Post test results as PR comment
|
||||
# 5. upload-coverage: Merge and upload E2E coverage to Codecov (if enabled)
|
||||
# 6. e2e-results: Status check to block merge on failure
|
||||
|
||||
name: E2E Tests
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- development
|
||||
- 'feature/**'
|
||||
paths:
|
||||
- 'frontend/**'
|
||||
- 'backend/**'
|
||||
- 'tests/**'
|
||||
- 'playwright.config.js'
|
||||
- '.github/workflows/e2e-tests.yml'
|
||||
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
browser:
|
||||
description: 'Browser to test'
|
||||
required: false
|
||||
default: 'chromium'
|
||||
type: choice
|
||||
options:
|
||||
- chromium
|
||||
- firefox
|
||||
- webkit
|
||||
- all
|
||||
|
||||
env:
|
||||
NODE_VERSION: '20'
|
||||
GO_VERSION: '1.25.6'
|
||||
GOTOOLCHAIN: auto
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository_owner }}/charon
|
||||
PLAYWRIGHT_COVERAGE: ${{ vars.PLAYWRIGHT_COVERAGE || '0' }}
|
||||
# Enhanced debugging environment variables
|
||||
DEBUG: 'charon:*,charon-test:*'
|
||||
PLAYWRIGHT_DEBUG: '1'
|
||||
CI_LOG_LEVEL: 'verbose'
|
||||
|
||||
concurrency:
|
||||
group: e2e-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# Build application once, share across test shards
|
||||
build:
|
||||
name: Build Application
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
image_digest: ${{ steps.build-image.outputs.digest }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
|
||||
with:
|
||||
go-version: ${{ env.GO_VERSION }}
|
||||
cache: true
|
||||
cache-dependency-path: backend/go.sum
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
|
||||
- name: Cache npm dependencies
|
||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5
|
||||
with:
|
||||
path: ~/.npm
|
||||
key: npm-${{ hashFiles('package-lock.json') }}
|
||||
restore-keys: npm-
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3
|
||||
|
||||
- name: Build Docker image
|
||||
id: build-image
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile
|
||||
push: false
|
||||
load: true
|
||||
tags: charon:e2e-test
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Save Docker image
|
||||
run: docker save charon:e2e-test -o charon-e2e-image.tar
|
||||
|
||||
- name: Upload Docker image artifact
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
|
||||
with:
|
||||
name: docker-image
|
||||
path: charon-e2e-image.tar
|
||||
retention-days: 1
|
||||
|
||||
# Run tests in parallel shards
|
||||
e2e-tests:
|
||||
name: E2E ${{ matrix.browser }} (Shard ${{ matrix.shard }}/${{ matrix.total-shards }})
|
||||
runs-on: ubuntu-latest
|
||||
needs: build
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
# Required for security teardown (emergency reset fallback when ACL blocks API)
|
||||
CHARON_EMERGENCY_TOKEN: ${{ secrets.CHARON_EMERGENCY_TOKEN }}
|
||||
# Enable security-focused endpoints and test gating
|
||||
CHARON_EMERGENCY_SERVER_ENABLED: "true"
|
||||
CHARON_SECURITY_TESTS_ENABLED: "true"
|
||||
CHARON_E2E_IMAGE_TAG: charon:e2e-test
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
shard: [1, 2, 3, 4]
|
||||
total-shards: [4]
|
||||
browser: [chromium, firefox, webkit]
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
|
||||
- name: Download Docker image
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7
|
||||
with:
|
||||
name: docker-image
|
||||
|
||||
- name: Validate Emergency Token Configuration
|
||||
run: |
|
||||
echo "🔐 Validating emergency token configuration..."
|
||||
|
||||
if [ -z "$CHARON_EMERGENCY_TOKEN" ]; then
|
||||
echo "::error title=Missing Secret::CHARON_EMERGENCY_TOKEN secret not configured in repository settings"
|
||||
echo "::error::Navigate to: Repository Settings → Secrets and Variables → Actions"
|
||||
echo "::error::Create secret: CHARON_EMERGENCY_TOKEN"
|
||||
echo "::error::Generate value with: openssl rand -hex 32"
|
||||
echo "::error::See docs/github-setup.md for detailed instructions"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
TOKEN_LENGTH=${#CHARON_EMERGENCY_TOKEN}
|
||||
if [ $TOKEN_LENGTH -lt 64 ]; then
|
||||
echo "::error title=Invalid Token Length::CHARON_EMERGENCY_TOKEN must be at least 64 characters (current: $TOKEN_LENGTH)"
|
||||
echo "::error::Generate new token with: openssl rand -hex 32"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Mask token in output (show first 8 chars only)
|
||||
MASKED_TOKEN="${CHARON_EMERGENCY_TOKEN:0:8}...${CHARON_EMERGENCY_TOKEN: -4}"
|
||||
echo "::notice::Emergency token validated (length: $TOKEN_LENGTH, preview: $MASKED_TOKEN)"
|
||||
env:
|
||||
CHARON_EMERGENCY_TOKEN: ${{ secrets.CHARON_EMERGENCY_TOKEN }}
|
||||
|
||||
- name: Load Docker image
|
||||
run: |
|
||||
docker load -i charon-e2e-image.tar
|
||||
docker images | grep charon
|
||||
|
||||
- name: Generate ephemeral encryption key
|
||||
run: |
|
||||
# Generate a unique, ephemeral encryption key for this CI run
|
||||
# Key is 32 bytes, base64-encoded as required by CHARON_ENCRYPTION_KEY
|
||||
echo "CHARON_ENCRYPTION_KEY=$(openssl rand -base64 32)" >> $GITHUB_ENV
|
||||
echo "✅ Generated ephemeral encryption key for E2E tests"
|
||||
|
||||
- name: Start test environment
|
||||
run: |
|
||||
# Use docker-compose.playwright-ci.yml for CI (no .env file, uses GitHub Secrets)
|
||||
# Note: Using pre-built image loaded from artifact - no rebuild needed
|
||||
docker compose -f .docker/compose/docker-compose.playwright-ci.yml --profile security-tests up -d
|
||||
echo "✅ Container started via docker-compose.playwright-ci.yml"
|
||||
|
||||
- name: Wait for service health
|
||||
run: |
|
||||
echo "⏳ Waiting for Charon to be healthy..."
|
||||
MAX_ATTEMPTS=30
|
||||
ATTEMPT=0
|
||||
|
||||
while [[ ${ATTEMPT} -lt ${MAX_ATTEMPTS} ]]; do
|
||||
ATTEMPT=$((ATTEMPT + 1))
|
||||
echo "Attempt ${ATTEMPT}/${MAX_ATTEMPTS}..."
|
||||
|
||||
if curl -sf http://localhost:8080/api/v1/health > /dev/null 2>&1; then
|
||||
echo "✅ Charon is healthy!"
|
||||
curl -s http://localhost:8080/api/v1/health | jq .
|
||||
exit 0
|
||||
fi
|
||||
|
||||
sleep 2
|
||||
done
|
||||
|
||||
echo "❌ Health check failed"
|
||||
docker compose -f .docker/compose/docker-compose.playwright-ci.yml logs
|
||||
exit 1
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Clean Playwright browser cache
|
||||
run: rm -rf ~/.cache/ms-playwright
|
||||
|
||||
|
||||
- name: Cache Playwright browsers
|
||||
id: playwright-cache
|
||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5
|
||||
with:
|
||||
path: ~/.cache/ms-playwright
|
||||
# Use exact match only - no restore-keys fallback
|
||||
# This ensures we don't restore stale browsers when Playwright version changes
|
||||
key: playwright-${{ matrix.browser }}-${{ hashFiles('package-lock.json') }}
|
||||
|
||||
- name: Install & verify Playwright browsers
|
||||
run: |
|
||||
npx playwright install --with-deps --force
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
echo "🎯 Playwright CLI version"
|
||||
npx playwright --version || true
|
||||
|
||||
echo "🔍 Showing Playwright cache root (if present)"
|
||||
ls -la ~/.cache/ms-playwright || true
|
||||
|
||||
echo "📥 Install or verify browser: ${{ matrix.browser }}"
|
||||
|
||||
# Install when cache miss, otherwise verify the expected executables exist
|
||||
if [[ "${{ steps.playwright-cache.outputs.cache-hit }}" != "true" ]]; then
|
||||
echo "📥 Cache miss - downloading ${{ matrix.browser }} browser..."
|
||||
npx playwright install --with-deps ${{ matrix.browser }}
|
||||
else
|
||||
echo "✅ Cache hit - verifying ${{ matrix.browser }} browser files..."
|
||||
fi
|
||||
|
||||
# Look for the browser-specific headless shell executable(s)
|
||||
case "${{ matrix.browser }}" in
|
||||
chromium)
|
||||
EXPECTED_PATTERN="chrome-headless-shell*"
|
||||
;;
|
||||
firefox)
|
||||
EXPECTED_PATTERN="firefox*"
|
||||
;;
|
||||
webkit)
|
||||
EXPECTED_PATTERN="webkit*"
|
||||
;;
|
||||
*)
|
||||
EXPECTED_PATTERN="*"
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "Searching for expected files (pattern=$EXPECTED_PATTERN)..."
|
||||
find ~/.cache/ms-playwright -maxdepth 4 -type f -name "$EXPECTED_PATTERN" -print || true
|
||||
|
||||
# Attempt to derive the exact executable path Playwright will use
|
||||
echo "Attempting to resolve Playwright's executable path via Node API (best-effort)"
|
||||
node -e "try{ const pw = require('playwright'); const b = pw['${{ matrix.browser }}']; console.log('exePath:', b.executablePath ? b.executablePath() : 'n/a'); }catch(e){ console.error('node-check-failed', e.message); process.exit(0); }" || true
|
||||
|
||||
# If the expected binary is missing, force reinstall
|
||||
MISSING_COUNT=$(find ~/.cache/ms-playwright -maxdepth 4 -type f -name "$EXPECTED_PATTERN" | wc -l || true)
|
||||
if [[ "$MISSING_COUNT" -lt 1 ]]; then
|
||||
echo "⚠️ Expected Playwright browser executable not found (count=$MISSING_COUNT). Forcing reinstall..."
|
||||
npx playwright install --with-deps ${{ matrix.browser }} --force
|
||||
fi
|
||||
|
||||
echo "Post-install: show cache contents (top 5 lines)"
|
||||
find ~/.cache/ms-playwright -maxdepth 3 -printf '%p\n' | head -40 || true
|
||||
|
||||
# Final sanity check: try a headless launch via a tiny Node script (browser-specific args, retry without args)
|
||||
echo "🔁 Verifying browser can be launched (headless)"
|
||||
node -e "(async()=>{ try{ const pw=require('playwright'); const name='${{ matrix.browser }}'; const browser = pw[name]; const argsMap = { chromium: ['--no-sandbox'], firefox: ['--no-sandbox'], webkit: [] }; const args = argsMap[name] || [];
|
||||
// First attempt: launch with recommended args for this browser
|
||||
try {
|
||||
console.log('attempt-launch', name, 'args', JSON.stringify(args));
|
||||
const b = await browser.launch({ headless: true, args });
|
||||
await b.close();
|
||||
console.log('launch-ok', 'argsUsed', JSON.stringify(args));
|
||||
process.exit(0);
|
||||
} catch (err) {
|
||||
console.warn('launch-with-args-failed', err && err.message);
|
||||
if (args.length) {
|
||||
// Retry without args (some browsers reject unknown flags)
|
||||
console.log('retrying-without-args');
|
||||
const b2 = await browser.launch({ headless: true });
|
||||
await b2.close();
|
||||
console.log('launch-ok-no-args');
|
||||
process.exit(0);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
} catch (e) { console.error('launch-failed', e && e.message); process.exit(2); } })()" || (echo '❌ Browser launch verification failed' && exit 1)
|
||||
|
||||
echo "✅ Playwright ${{ matrix.browser }} ready and verified"
|
||||
|
||||
- name: Run E2E tests (Shard ${{ matrix.shard }}/${{ matrix.total-shards }})
|
||||
run: |
|
||||
echo "════════════════════════════════════════════════════════════"
|
||||
echo "E2E Test Shard ${{ matrix.shard }}/${{ matrix.total-shards }}"
|
||||
echo "Browser: ${{ matrix.browser }}"
|
||||
echo "Start Time: $(date -u +'%Y-%m-%dT%H:%M:%SZ')"
|
||||
echo ""
|
||||
echo "Reporter: HTML (per-shard reports)"
|
||||
echo "Output: playwright-report/ directory"
|
||||
echo "════════════════════════════════════════════════════════════"
|
||||
|
||||
# Capture start time for performance budget tracking
|
||||
SHARD_START=$(date +%s)
|
||||
echo "SHARD_START=$SHARD_START" >> $GITHUB_ENV
|
||||
|
||||
npx playwright test \
|
||||
--project=${{ matrix.browser }} \
|
||||
--shard=${{ matrix.shard }}/${{ matrix.total-shards }}
|
||||
|
||||
# Capture end time for performance budget tracking
|
||||
SHARD_END=$(date +%s)
|
||||
echo "SHARD_END=$SHARD_END" >> $GITHUB_ENV
|
||||
|
||||
SHARD_DURATION=$((SHARD_END - SHARD_START))
|
||||
|
||||
echo ""
|
||||
echo "════════════════════════════════════════════════════════════"
|
||||
echo "Shard ${{ matrix.shard }} Complete | Duration: ${SHARD_DURATION}s"
|
||||
echo "════════════════════════════════════════════════════════════"
|
||||
env:
|
||||
# Test directly against Docker container (no coverage)
|
||||
PLAYWRIGHT_BASE_URL: http://localhost:8080
|
||||
CI: true
|
||||
TEST_WORKER_INDEX: ${{ matrix.shard }}
|
||||
|
||||
- name: Verify shard performance budget
|
||||
if: always()
|
||||
run: |
|
||||
# Calculate shard execution time
|
||||
SHARD_DURATION=$((SHARD_END - SHARD_START))
|
||||
MAX_DURATION=900 # 15 minutes
|
||||
|
||||
echo "📊 Performance Budget Check"
|
||||
echo " Shard Duration: ${SHARD_DURATION}s"
|
||||
echo " Budget Limit: ${MAX_DURATION}s"
|
||||
echo " Utilization: $((SHARD_DURATION * 100 / MAX_DURATION))%"
|
||||
|
||||
# Fail if shard exceeded performance budget
|
||||
if [[ $SHARD_DURATION -gt $MAX_DURATION ]]; then
|
||||
echo "::error::Shard exceeded performance budget: ${SHARD_DURATION}s > ${MAX_DURATION}s"
|
||||
echo "::error::This likely indicates feature flag polling regression or API bottleneck"
|
||||
echo "::error::Review test logs and consider optimizing wait helpers or API calls"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ Shard completed within budget: ${SHARD_DURATION}s"
|
||||
|
||||
- name: Upload HTML report (per-shard)
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
|
||||
with:
|
||||
name: playwright-report-${{ matrix.browser }}-shard-${{ matrix.shard }}
|
||||
path: playwright-report/
|
||||
retention-days: 14
|
||||
|
||||
- name: Upload test traces on failure
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
|
||||
with:
|
||||
name: traces-${{ matrix.browser }}-shard-${{ matrix.shard }}
|
||||
path: test-results/**/*.zip
|
||||
retention-days: 7
|
||||
|
||||
- name: Collect Docker logs on failure
|
||||
if: failure()
|
||||
run: |
|
||||
echo "📋 Container logs:"
|
||||
docker compose -f .docker/compose/docker-compose.playwright-ci.yml logs > docker-logs-${{ matrix.browser }}-shard-${{ matrix.shard }}.txt 2>&1
|
||||
|
||||
- name: Upload Docker logs on failure
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
|
||||
with:
|
||||
name: docker-logs-${{ matrix.browser }}-shard-${{ matrix.shard }}
|
||||
path: docker-logs-${{ matrix.browser }}-shard-${{ matrix.shard }}.txt
|
||||
retention-days: 7
|
||||
|
||||
- name: Cleanup
|
||||
if: always()
|
||||
run: |
|
||||
docker compose -f .docker/compose/docker-compose.playwright-ci.yml down -v 2>/dev/null || true
|
||||
|
||||
# Summarize test results from all shards (no merging needed)
|
||||
test-summary:
|
||||
name: E2E Test Summary
|
||||
runs-on: ubuntu-latest
|
||||
needs: e2e-tests
|
||||
if: always()
|
||||
|
||||
steps:
|
||||
- name: Generate job summary with per-shard links
|
||||
run: |
|
||||
echo "## 📊 E2E Test Results" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "### Per-Shard HTML Reports" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "Each shard generates its own HTML report for easier debugging:" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Browser | Shards | HTML Reports | Traces (on failure) |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "|---------|--------|--------------|---------------------|" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Chromium | 1-4 | \`playwright-report-chromium-shard-{1..4}\` | \`traces-chromium-shard-{1..4}\` |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| Firefox | 1-4 | \`playwright-report-firefox-shard-{1..4}\` | \`traces-firefox-shard-{1..4}\` |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "| WebKit | 1-4 | \`playwright-report-webkit-shard-{1..4}\` | \`traces-webkit-shard-{1..4}\` |" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "### How to View Reports" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "1. Download the shard HTML report artifact (zip file)" >> $GITHUB_STEP_SUMMARY
|
||||
echo "2. Extract and open \`index.html\` in your browser" >> $GITHUB_STEP_SUMMARY
|
||||
echo "3. Or run: \`npx playwright show-report path/to/extracted-folder\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "### Debugging Tips" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Failed tests?** Download the shard report that failed. Each shard has a focused subset of tests." >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Traces**: Available in trace artifacts (only on failure)" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Docker Logs**: Backend errors available in docker-logs-shard-N artifacts" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Local repro**: \`npx playwright test --grep=\"test name\"\`" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
# Comment on PR with results
|
||||
comment-results:
|
||||
name: Comment Test Results
|
||||
runs-on: ubuntu-latest
|
||||
needs: [e2e-tests, test-summary]
|
||||
if: github.event_name == 'pull_request' && always()
|
||||
permissions:
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Determine test status
|
||||
id: status
|
||||
run: |
|
||||
if [[ "${{ needs.e2e-tests.result }}" == "success" ]]; then
|
||||
echo "emoji=✅" >> $GITHUB_OUTPUT
|
||||
echo "status=PASSED" >> $GITHUB_OUTPUT
|
||||
echo "message=All E2E tests passed!" >> $GITHUB_OUTPUT
|
||||
elif [[ "${{ needs.e2e-tests.result }}" == "failure" ]]; then
|
||||
echo "emoji=❌" >> $GITHUB_OUTPUT
|
||||
echo "status=FAILED" >> $GITHUB_OUTPUT
|
||||
echo "message=Some E2E tests failed. Check artifacts for per-shard reports." >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "emoji=⚠️" >> $GITHUB_OUTPUT
|
||||
echo "status=UNKNOWN" >> $GITHUB_OUTPUT
|
||||
echo "message=E2E tests did not complete successfully." >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Comment on PR
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
|
||||
with:
|
||||
script: |
|
||||
const emoji = '${{ steps.status.outputs.emoji }}';
|
||||
const status = '${{ steps.status.outputs.status }}';
|
||||
const message = '${{ steps.status.outputs.message }}';
|
||||
const runUrl = `https://github.com/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}`;
|
||||
|
||||
const body = `## ${emoji} E2E Test Results: ${status}
|
||||
|
||||
${message}
|
||||
|
||||
| Metric | Result |
|
||||
|--------|--------|
|
||||
| Browsers | Chromium, Firefox, WebKit |
|
||||
| Shards per Browser | 4 |
|
||||
| Total Jobs | 12 |
|
||||
| Status | ${status} |
|
||||
|
||||
**Per-Shard HTML Reports** (easier to debug):
|
||||
- \`playwright-report-{browser}-shard-{1..4}\` (12 total artifacts)
|
||||
- Trace artifacts: \`traces-{browser}-shard-{N}\`
|
||||
|
||||
[📊 View workflow run & download reports](${runUrl})
|
||||
|
||||
---
|
||||
<sub>🤖 This comment was automatically generated by the E2E Tests workflow.</sub>`;
|
||||
|
||||
// Find existing comment
|
||||
const { data: comments } = await github.rest.issues.listComments({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
});
|
||||
|
||||
const botComment = comments.find(comment =>
|
||||
comment.user.type === 'Bot' &&
|
||||
comment.body.includes('E2E Test Results')
|
||||
);
|
||||
|
||||
if (botComment) {
|
||||
await github.rest.issues.updateComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
comment_id: botComment.id,
|
||||
body: body
|
||||
});
|
||||
} else {
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body: body
|
||||
});
|
||||
}
|
||||
|
||||
# Upload merged E2E coverage to Codecov
|
||||
upload-coverage:
|
||||
name: Upload E2E Coverage
|
||||
runs-on: ubuntu-latest
|
||||
needs: e2e-tests
|
||||
# Coverage is only produced when PLAYWRIGHT_COVERAGE=1 (requires Vite dev server)
|
||||
if: vars.PLAYWRIGHT_COVERAGE == '1'
|
||||
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7
|
||||
with:
|
||||
pattern: e2e-coverage-*
|
||||
path: all-coverage
|
||||
merge-multiple: false
|
||||
|
||||
- name: Merge LCOV coverage files
|
||||
run: |
|
||||
# Install lcov for merging
|
||||
sudo apt-get update && sudo apt-get install -y lcov
|
||||
|
||||
# Create merged coverage directory
|
||||
mkdir -p coverage/e2e-merged
|
||||
|
||||
# Find all lcov.info files and merge them
|
||||
LCOV_FILES=$(find all-coverage -name "lcov.info" -type f)
|
||||
|
||||
if [[ -n "$LCOV_FILES" ]]; then
|
||||
# Build merge command
|
||||
MERGE_ARGS=""
|
||||
for file in $LCOV_FILES; do
|
||||
MERGE_ARGS="$MERGE_ARGS -a $file"
|
||||
done
|
||||
|
||||
lcov $MERGE_ARGS -o coverage/e2e-merged/lcov.info
|
||||
echo "✅ Merged $(echo "$LCOV_FILES" | wc -w) coverage files"
|
||||
else
|
||||
echo "⚠️ No coverage files found to merge"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
- name: Upload E2E coverage to Codecov
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
files: ./coverage/e2e-merged/lcov.info
|
||||
flags: e2e
|
||||
name: e2e-coverage
|
||||
fail_ci_if_error: false
|
||||
|
||||
- name: Upload merged coverage artifact
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
|
||||
with:
|
||||
name: e2e-coverage-merged
|
||||
path: coverage/e2e-merged/
|
||||
retention-days: 30
|
||||
|
||||
# Final status check - blocks merge if tests fail
|
||||
e2e-results:
|
||||
name: E2E Test Results
|
||||
runs-on: ubuntu-latest
|
||||
needs: e2e-tests
|
||||
if: always()
|
||||
|
||||
steps:
|
||||
- name: Check test results
|
||||
run: |
|
||||
if [[ "${{ needs.e2e-tests.result }}" == "success" ]]; then
|
||||
echo "✅ All E2E tests passed"
|
||||
exit 0
|
||||
elif [[ "${{ needs.e2e-tests.result }}" == "skipped" ]]; then
|
||||
echo "⏭️ E2E tests were skipped"
|
||||
exit 0
|
||||
else
|
||||
echo "❌ E2E tests failed or were cancelled"
|
||||
echo "Result: ${{ needs.e2e-tests.result }}"
|
||||
exit 1
|
||||
fi
|
||||
179
.github/workflows/nightly-build.yml
vendored
179
.github/workflows/nightly-build.yml
vendored
@@ -103,11 +103,12 @@ jobs:
|
||||
const workflows = [
|
||||
{ id: 'e2e-tests-split.yml' },
|
||||
{ id: 'codecov-upload.yml', inputs: { run_backend: 'true', run_frontend: 'true' } },
|
||||
{ id: 'security-pr.yml' },
|
||||
{ id: 'supply-chain-verify.yml' },
|
||||
{ id: 'codeql.yml' },
|
||||
];
|
||||
|
||||
core.info('Skipping security-pr.yml: PR-only workflow intentionally excluded from nightly non-PR dispatch');
|
||||
|
||||
for (const workflow of workflows) {
|
||||
const { data: workflowRuns } = await github.rest.actions.listWorkflowRuns({
|
||||
owner,
|
||||
@@ -220,14 +221,66 @@ jobs:
|
||||
echo "- ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:nightly@${{ steps.build.outputs.digest }}" >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Generate SBOM
|
||||
uses: anchore/sbom-action@28d71544de8eaf1b958d335707167c5f783590ad # v0.22.2
|
||||
id: sbom_primary
|
||||
continue-on-error: true
|
||||
uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
|
||||
with:
|
||||
image: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:nightly@${{ steps.build.outputs.digest }}
|
||||
format: cyclonedx-json
|
||||
output-file: sbom-nightly.json
|
||||
syft-version: v1.42.1
|
||||
|
||||
- name: Generate SBOM fallback with pinned Syft
|
||||
if: always()
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
if [[ "${{ steps.sbom_primary.outcome }}" == "success" ]] && [[ -s sbom-nightly.json ]] && jq -e . sbom-nightly.json >/dev/null 2>&1; then
|
||||
echo "Primary SBOM generation succeeded with valid JSON; skipping fallback"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Primary SBOM generation failed or produced missing/invalid output; using deterministic Syft fallback"
|
||||
|
||||
SYFT_VERSION="v1.42.1"
|
||||
OS="$(uname -s | tr '[:upper:]' '[:lower:]')"
|
||||
ARCH="$(uname -m)"
|
||||
case "$ARCH" in
|
||||
x86_64) ARCH="amd64" ;;
|
||||
aarch64|arm64) ARCH="arm64" ;;
|
||||
*) echo "Unsupported architecture: $ARCH"; exit 1 ;;
|
||||
esac
|
||||
|
||||
TARBALL="syft_${SYFT_VERSION#v}_${OS}_${ARCH}.tar.gz"
|
||||
BASE_URL="https://github.com/anchore/syft/releases/download/${SYFT_VERSION}"
|
||||
|
||||
curl -fsSLo "$TARBALL" "${BASE_URL}/${TARBALL}"
|
||||
curl -fsSLo checksums.txt "${BASE_URL}/syft_${SYFT_VERSION#v}_checksums.txt"
|
||||
|
||||
grep " ${TARBALL}$" checksums.txt > checksum_line.txt
|
||||
sha256sum -c checksum_line.txt
|
||||
|
||||
tar -xzf "$TARBALL" syft
|
||||
chmod +x syft
|
||||
|
||||
./syft "${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}:nightly@${{ steps.build.outputs.digest }}" -o cyclonedx-json=sbom-nightly.json
|
||||
|
||||
- name: Verify SBOM artifact
|
||||
if: always()
|
||||
run: |
|
||||
set -euo pipefail
|
||||
test -s sbom-nightly.json
|
||||
jq -e . sbom-nightly.json >/dev/null
|
||||
jq -e '
|
||||
.bomFormat == "CycloneDX"
|
||||
and (.specVersion | type == "string" and length > 0)
|
||||
and has("version")
|
||||
and has("metadata")
|
||||
and (.components | type == "array")
|
||||
' sbom-nightly.json >/dev/null
|
||||
|
||||
- name: Upload SBOM artifact
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: sbom-nightly
|
||||
path: sbom-nightly.json
|
||||
@@ -331,7 +384,7 @@ jobs:
|
||||
run: echo "IMAGE_NAME_LC=${IMAGE_NAME,,}" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Download SBOM
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
with:
|
||||
name: sbom-nightly
|
||||
|
||||
@@ -343,22 +396,128 @@ jobs:
|
||||
severity-cutoff: high
|
||||
|
||||
- name: Scan with Trivy
|
||||
uses: aquasecurity/trivy-action@c1824fd6edce30d7ab345a9989de00bbd46ef284 # 0.34.0
|
||||
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
|
||||
with:
|
||||
image-ref: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ needs.build-and-push-nightly.outputs.digest }}
|
||||
format: 'sarif'
|
||||
output: 'trivy-nightly.sarif'
|
||||
|
||||
- name: Upload Trivy results
|
||||
uses: github/codeql-action/upload-sarif@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3
|
||||
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
|
||||
with:
|
||||
sarif_file: 'trivy-nightly.sarif'
|
||||
category: 'trivy-nightly'
|
||||
|
||||
- name: Check for critical CVEs
|
||||
- name: Security severity policy summary
|
||||
run: |
|
||||
if grep -q "CRITICAL" trivy-nightly.sarif; then
|
||||
echo "❌ Critical vulnerabilities found in nightly build"
|
||||
{
|
||||
echo "## 🔐 Nightly Supply Chain Severity Policy"
|
||||
echo ""
|
||||
echo "- Blocking: Critical, High"
|
||||
echo "- Medium: non-blocking by default (report + triage SLA)"
|
||||
echo "- Policy file: .github/security-severity-policy.yml"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
- name: Check for Critical/High CVEs
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
jq -e . trivy-nightly.sarif >/dev/null
|
||||
|
||||
CRITICAL_COUNT=$(jq -r '
|
||||
[
|
||||
.runs[] as $run
|
||||
| ($run.tool.driver.rules // []) as $rules
|
||||
| $run.results[]?
|
||||
| . as $result
|
||||
| (
|
||||
(
|
||||
if (($result.ruleIndex | type) == "number") then
|
||||
($rules[$result.ruleIndex].properties["security-severity"] // empty)
|
||||
else
|
||||
empty
|
||||
end
|
||||
)
|
||||
// ([
|
||||
$rules[]?
|
||||
| select((.id // "") == ($result.ruleId // ""))
|
||||
| .properties["security-severity"]
|
||||
][0] // empty)
|
||||
// empty
|
||||
) as $securitySeverity
|
||||
| (try ($securitySeverity | tonumber) catch empty) as $score
|
||||
| select($score != null and $score >= 9.0)
|
||||
] | length
|
||||
' trivy-nightly.sarif)
|
||||
|
||||
HIGH_COUNT=$(jq -r '
|
||||
[
|
||||
.runs[] as $run
|
||||
| ($run.tool.driver.rules // []) as $rules
|
||||
| $run.results[]?
|
||||
| . as $result
|
||||
| (
|
||||
(
|
||||
if (($result.ruleIndex | type) == "number") then
|
||||
($rules[$result.ruleIndex].properties["security-severity"] // empty)
|
||||
else
|
||||
empty
|
||||
end
|
||||
)
|
||||
// ([
|
||||
$rules[]?
|
||||
| select((.id // "") == ($result.ruleId // ""))
|
||||
| .properties["security-severity"]
|
||||
][0] // empty)
|
||||
// empty
|
||||
) as $securitySeverity
|
||||
| (try ($securitySeverity | tonumber) catch empty) as $score
|
||||
| select($score != null and $score >= 7.0 and $score < 9.0)
|
||||
] | length
|
||||
' trivy-nightly.sarif)
|
||||
|
||||
MEDIUM_COUNT=$(jq -r '
|
||||
[
|
||||
.runs[] as $run
|
||||
| ($run.tool.driver.rules // []) as $rules
|
||||
| $run.results[]?
|
||||
| . as $result
|
||||
| (
|
||||
(
|
||||
if (($result.ruleIndex | type) == "number") then
|
||||
($rules[$result.ruleIndex].properties["security-severity"] // empty)
|
||||
else
|
||||
empty
|
||||
end
|
||||
)
|
||||
// ([
|
||||
$rules[]?
|
||||
| select((.id // "") == ($result.ruleId // ""))
|
||||
| .properties["security-severity"]
|
||||
][0] // empty)
|
||||
// empty
|
||||
) as $securitySeverity
|
||||
| (try ($securitySeverity | tonumber) catch empty) as $score
|
||||
| select($score != null and $score >= 4.0 and $score < 7.0)
|
||||
] | length
|
||||
' trivy-nightly.sarif)
|
||||
|
||||
{
|
||||
echo "- Structured SARIF counts: CRITICAL=${CRITICAL_COUNT}, HIGH=${HIGH_COUNT}, MEDIUM=${MEDIUM_COUNT}"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
if [ "$CRITICAL_COUNT" -gt 0 ]; then
|
||||
echo "❌ Critical vulnerabilities found in nightly build (${CRITICAL_COUNT})"
|
||||
exit 1
|
||||
fi
|
||||
echo "✅ No critical vulnerabilities found"
|
||||
|
||||
if [ "$HIGH_COUNT" -gt 0 ]; then
|
||||
echo "❌ High vulnerabilities found in nightly build (${HIGH_COUNT})"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$MEDIUM_COUNT" -gt 0 ]; then
|
||||
echo "::warning::Medium vulnerabilities found in nightly build (${MEDIUM_COUNT}). Non-blocking by policy; triage with SLA per .github/security-severity-policy.yml"
|
||||
fi
|
||||
|
||||
echo "✅ No Critical/High vulnerabilities found"
|
||||
|
||||
25
.github/workflows/quality-checks.yml
vendored
25
.github/workflows/quality-checks.yml
vendored
@@ -3,6 +3,8 @@ name: Quality Checks
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
@@ -18,6 +20,27 @@ env:
|
||||
GOTOOLCHAIN: auto
|
||||
|
||||
jobs:
|
||||
auth-route-protection-contract:
|
||||
name: Auth Route Protection Contract
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.sha }}
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
|
||||
with:
|
||||
go-version: ${{ env.GO_VERSION }}
|
||||
cache-dependency-path: backend/go.sum
|
||||
|
||||
- name: Run auth protection contract tests
|
||||
run: |
|
||||
set -euo pipefail
|
||||
cd backend
|
||||
go test ./internal/api/routes -run 'TestRegister_StateChangingRoutesRequireAuthentication|TestRegister_StateChangingRoutesDenyByDefaultWithExplicitAllowlist|TestRegister_AuthenticatedRoutes' -count=1 -v
|
||||
|
||||
codecov-trigger-parity-guard:
|
||||
name: Codecov Trigger/Comment Parity Guard
|
||||
runs-on: ubuntu-latest
|
||||
@@ -113,7 +136,7 @@ jobs:
|
||||
} >> "$GITHUB_ENV"
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6.2.0
|
||||
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
|
||||
with:
|
||||
go-version: ${{ env.GO_VERSION }}
|
||||
cache-dependency-path: backend/go.sum
|
||||
|
||||
17
.github/workflows/release-goreleaser.yml
vendored
17
.github/workflows/release-goreleaser.yml
vendored
@@ -20,6 +20,7 @@ permissions:
|
||||
|
||||
jobs:
|
||||
goreleaser:
|
||||
if: ${{ !contains(github.ref_name, '-candidate') && !contains(github.ref_name, '-rc') }}
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
# Use the built-in GITHUB_TOKEN by default for GitHub API operations.
|
||||
@@ -32,10 +33,22 @@ jobs:
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Enforce PR-2 release promotion guard
|
||||
env:
|
||||
REPO_VARS_JSON: ${{ toJSON(vars) }}
|
||||
run: |
|
||||
PR2_GATE_STATUS="$(printf '%s' "$REPO_VARS_JSON" | jq -r '.CHARON_PR2_GATES_PASSED // "false"')"
|
||||
if [[ "$PR2_GATE_STATUS" != "true" ]]; then
|
||||
echo "::error::Releasable tag promotion is blocked until PR-2 security/retirement gates pass."
|
||||
echo "::error::Set repository variable CHARON_PR2_GATES_PASSED=true only after PR-2 approval."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@7a3fe6cf4cb3a834922a1244abfce67bcef6a0c5 # v6
|
||||
uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6
|
||||
with:
|
||||
go-version: ${{ env.GO_VERSION }}
|
||||
cache-dependency-path: backend/go.sum
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6
|
||||
@@ -61,7 +74,7 @@ jobs:
|
||||
|
||||
|
||||
- name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@e435ccd777264be153ace6237001ef4d979d3a7a # v6
|
||||
uses: goreleaser/goreleaser-action@ec59f474b9834571250b370d4735c50f8e2d1e29 # v7
|
||||
with:
|
||||
distribution: goreleaser
|
||||
version: '~> v2.5'
|
||||
|
||||
2
.github/workflows/renovate.yml
vendored
2
.github/workflows/renovate.yml
vendored
@@ -25,7 +25,7 @@ jobs:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run Renovate
|
||||
uses: renovatebot/github-action@d65ef9e20512193cc070238b49c3873a361cd50c # v46.1.1
|
||||
uses: renovatebot/github-action@7b4b65bf31e07d4e3e51708d07700fb41bc03166 # v46.1.3
|
||||
with:
|
||||
configurationFile: .github/renovate.json
|
||||
token: ${{ secrets.RENOVATE_TOKEN || secrets.GITHUB_TOKEN }}
|
||||
|
||||
2
.github/workflows/repo-health.yml
vendored
2
.github/workflows/repo-health.yml
vendored
@@ -34,7 +34,7 @@ jobs:
|
||||
|
||||
- name: Upload health output
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
|
||||
with:
|
||||
name: repo-health-output
|
||||
path: |
|
||||
|
||||
339
.github/workflows/security-pr.yml
vendored
339
.github/workflows/security-pr.yml
vendored
@@ -4,18 +4,22 @@
|
||||
name: Security Scan (PR)
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows: ["Docker Build, Publish & Test"]
|
||||
types: [completed]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
pr_number:
|
||||
description: 'PR number to scan (optional)'
|
||||
required: false
|
||||
description: 'PR number to scan'
|
||||
required: true
|
||||
type: string
|
||||
pull_request:
|
||||
push:
|
||||
branches: [main]
|
||||
|
||||
|
||||
concurrency:
|
||||
group: security-pr-${{ github.event.workflow_run.event || github.event_name }}-${{ github.event.workflow_run.head_branch || github.ref }}
|
||||
group: security-pr-${{ github.event_name == 'workflow_run' && github.event.workflow_run.event || github.event_name }}-${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
@@ -23,16 +27,18 @@ jobs:
|
||||
name: Trivy Binary Scan
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
# Run for: manual dispatch, PR builds, or any push builds from docker-build
|
||||
# Run for manual dispatch, direct PR/push, or successful upstream workflow_run
|
||||
if: >-
|
||||
github.event_name == 'workflow_dispatch' ||
|
||||
github.event_name == 'pull_request' ||
|
||||
((github.event.workflow_run.event == 'push' || github.event.workflow_run.pull_requests[0].number != null) &&
|
||||
(github.event.workflow_run.status != 'completed' || github.event.workflow_run.conclusion == 'success'))
|
||||
github.event_name == 'push' ||
|
||||
(github.event_name == 'workflow_run' &&
|
||||
github.event.workflow_run.event == 'pull_request' &&
|
||||
github.event.workflow_run.status == 'completed' &&
|
||||
github.event.workflow_run.conclusion == 'success')
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
security-events: write
|
||||
actions: read
|
||||
|
||||
@@ -41,27 +47,65 @@ jobs:
|
||||
# actions/checkout v4.2.2
|
||||
uses: actions/checkout@0c366fd6a839edf440554fa01a7085ccba70ac98
|
||||
with:
|
||||
ref: ${{ github.event.workflow_run.head_sha || github.sha }}
|
||||
ref: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.sha }}
|
||||
|
||||
- name: Extract PR number from workflow_run
|
||||
id: pr-info
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
|
||||
# Manual dispatch - use input or fail gracefully
|
||||
if [[ -n "${{ inputs.pr_number }}" ]]; then
|
||||
echo "pr_number=${{ inputs.pr_number }}" >> "$GITHUB_OUTPUT"
|
||||
echo "✅ Using manually provided PR number: ${{ inputs.pr_number }}"
|
||||
else
|
||||
echo "⚠️ No PR number provided for manual dispatch"
|
||||
echo "pr_number=" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
if [[ "${{ github.event_name }}" == "push" ]]; then
|
||||
echo "pr_number=" >> "$GITHUB_OUTPUT"
|
||||
echo "is_push=true" >> "$GITHUB_OUTPUT"
|
||||
echo "✅ Push event detected; using local image path"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
||||
echo "pr_number=${{ github.event.pull_request.number }}" >> "$GITHUB_OUTPUT"
|
||||
echo "is_push=false" >> "$GITHUB_OUTPUT"
|
||||
echo "✅ Pull request event detected: PR #${{ github.event.pull_request.number }}"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
|
||||
INPUT_PR_NUMBER="${{ inputs.pr_number }}"
|
||||
if [[ -z "${INPUT_PR_NUMBER}" ]]; then
|
||||
echo "❌ workflow_dispatch requires inputs.pr_number"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ ! "${INPUT_PR_NUMBER}" =~ ^[0-9]+$ ]]; then
|
||||
echo "❌ reason_category=invalid_input"
|
||||
echo "reason=workflow_dispatch pr_number must be digits-only"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
PR_NUMBER="${INPUT_PR_NUMBER}"
|
||||
echo "pr_number=${PR_NUMBER}" >> "$GITHUB_OUTPUT"
|
||||
echo "is_push=false" >> "$GITHUB_OUTPUT"
|
||||
echo "✅ Using manually provided PR number: ${PR_NUMBER}"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ "${{ github.event_name }}" == "workflow_run" ]]; then
|
||||
if [[ "${{ github.event.workflow_run.event }}" != "pull_request" ]]; then
|
||||
# Explicit contract validation happens in the dedicated guard step.
|
||||
echo "pr_number=" >> "$GITHUB_OUTPUT"
|
||||
echo "is_push=false" >> "$GITHUB_OUTPUT"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ -n "${{ github.event.workflow_run.pull_requests[0].number || '' }}" ]]; then
|
||||
echo "pr_number=${{ github.event.workflow_run.pull_requests[0].number }}" >> "$GITHUB_OUTPUT"
|
||||
echo "is_push=false" >> "$GITHUB_OUTPUT"
|
||||
echo "✅ Found PR number from workflow_run payload: ${{ github.event.workflow_run.pull_requests[0].number }}"
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
|
||||
# Extract PR number from context
|
||||
HEAD_SHA="${{ github.event.workflow_run.head_sha || github.event.pull_request.head.sha || github.sha }}"
|
||||
HEAD_SHA="${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_sha || github.event.pull_request.head.sha || github.sha }}"
|
||||
echo "🔍 Looking for PR with head SHA: ${HEAD_SHA}"
|
||||
|
||||
# Query GitHub API for PR associated with this commit
|
||||
@@ -73,21 +117,38 @@ jobs:
|
||||
|
||||
if [[ -n "${PR_NUMBER}" ]]; then
|
||||
echo "pr_number=${PR_NUMBER}" >> "$GITHUB_OUTPUT"
|
||||
echo "is_push=false" >> "$GITHUB_OUTPUT"
|
||||
echo "✅ Found PR number: ${PR_NUMBER}"
|
||||
else
|
||||
echo "⚠️ Could not find PR number for SHA: ${HEAD_SHA}"
|
||||
echo "pr_number=" >> "$GITHUB_OUTPUT"
|
||||
echo "❌ Could not determine PR number for workflow_run SHA: ${HEAD_SHA}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if this is a push event (not a PR)
|
||||
if [[ "${{ github.event_name }}" == "push" || "${{ github.event.workflow_run.event }}" == "push" || -z "${PR_NUMBER}" ]]; then
|
||||
HEAD_BRANCH="${{ github.event.workflow_run.head_branch || github.ref_name }}"
|
||||
echo "is_push=true" >> "$GITHUB_OUTPUT"
|
||||
echo "✅ Detected push build from branch: ${HEAD_BRANCH}"
|
||||
else
|
||||
echo "is_push=false" >> "$GITHUB_OUTPUT"
|
||||
- name: Validate workflow_run trust boundary and event contract
|
||||
if: github.event_name == 'workflow_run'
|
||||
run: |
|
||||
if [[ "${{ github.event.workflow_run.name }}" != "Docker Build, Publish & Test" ]]; then
|
||||
echo "❌ reason_category=unexpected_upstream_workflow"
|
||||
echo "workflow_name=${{ github.event.workflow_run.name }}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "${{ github.event.workflow_run.event }}" != "pull_request" ]]; then
|
||||
echo "❌ reason_category=unsupported_upstream_event"
|
||||
echo "upstream_event=${{ github.event.workflow_run.event }}"
|
||||
echo "run_id=${{ github.event.workflow_run.id }}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "${{ github.event.workflow_run.head_repository.full_name }}" != "${{ github.repository }}" ]]; then
|
||||
echo "❌ reason_category=untrusted_upstream_repository"
|
||||
echo "upstream_head_repository=${{ github.event.workflow_run.head_repository.full_name }}"
|
||||
echo "expected_repository=${{ github.repository }}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ workflow_run trust boundary and event contract validated"
|
||||
|
||||
- name: Build Docker image (Local)
|
||||
if: github.event_name == 'push' || github.event_name == 'pull_request'
|
||||
run: |
|
||||
@@ -97,95 +158,149 @@ jobs:
|
||||
|
||||
- name: Check for PR image artifact
|
||||
id: check-artifact
|
||||
if: (steps.pr-info.outputs.pr_number != '' || steps.pr-info.outputs.is_push == 'true') && github.event_name != 'push' && github.event_name != 'pull_request'
|
||||
if: github.event_name == 'workflow_run' || github.event_name == 'workflow_dispatch'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Determine artifact name based on event type
|
||||
if [[ "${{ steps.pr-info.outputs.is_push }}" == "true" ]]; then
|
||||
ARTIFACT_NAME="push-image"
|
||||
else
|
||||
PR_NUMBER="${{ steps.pr-info.outputs.pr_number }}"
|
||||
ARTIFACT_NAME="pr-image-${PR_NUMBER}"
|
||||
PR_NUMBER="${{ steps.pr-info.outputs.pr_number }}"
|
||||
if [[ ! "${PR_NUMBER}" =~ ^[0-9]+$ ]]; then
|
||||
echo "❌ reason_category=invalid_input"
|
||||
echo "reason=Resolved PR number must be digits-only"
|
||||
exit 1
|
||||
fi
|
||||
RUN_ID="${{ github.event.workflow_run.id }}"
|
||||
|
||||
ARTIFACT_NAME="pr-image-${PR_NUMBER}"
|
||||
RUN_ID="${{ github.event_name == 'workflow_run' && github.event.workflow_run.id || '' }}"
|
||||
|
||||
echo "🔍 Checking for artifact: ${ARTIFACT_NAME}"
|
||||
|
||||
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
|
||||
# For manual dispatch, find the most recent workflow run with this artifact
|
||||
RUN_ID=$(gh api \
|
||||
# Manual replay path: find latest successful docker-build pull_request run for this PR.
|
||||
RUNS_JSON=$(gh api \
|
||||
-H "Accept: application/vnd.github+json" \
|
||||
-H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
"/repos/${{ github.repository }}/actions/workflows/docker-build.yml/runs?status=success&per_page=10" \
|
||||
--jq '.workflow_runs[0].id // empty' 2>/dev/null || echo "")
|
||||
"/repos/${{ github.repository }}/actions/workflows/docker-build.yml/runs?event=pull_request&status=success&per_page=100" 2>&1)
|
||||
RUNS_STATUS=$?
|
||||
|
||||
if [[ ${RUNS_STATUS} -ne 0 ]]; then
|
||||
echo "❌ reason_category=api_error"
|
||||
echo "reason=Failed to query workflow runs for PR lookup"
|
||||
echo "upstream_run_id=unknown"
|
||||
echo "artifact_name=${ARTIFACT_NAME}"
|
||||
echo "api_output=${RUNS_JSON}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
RUN_ID=$(printf '%s' "${RUNS_JSON}" | jq -r --argjson pr "${PR_NUMBER}" '.workflow_runs[] | select((.pull_requests // []) | any(.number == $pr)) | .id' | head -n 1)
|
||||
|
||||
if [[ -z "${RUN_ID}" ]]; then
|
||||
echo "⚠️ No successful workflow runs found"
|
||||
echo "artifact_exists=false" >> "$GITHUB_OUTPUT"
|
||||
exit 0
|
||||
echo "❌ reason_category=not_found"
|
||||
echo "reason=No successful docker-build pull_request run found for PR #${PR_NUMBER}"
|
||||
echo "upstream_run_id=unknown"
|
||||
echo "artifact_name=${ARTIFACT_NAME}"
|
||||
exit 1
|
||||
fi
|
||||
elif [[ -z "${RUN_ID}" ]]; then
|
||||
# If triggered by push/pull_request, RUN_ID is empty. Find recent run for this commit.
|
||||
HEAD_SHA="${{ github.event.workflow_run.head_sha || github.event.pull_request.head.sha || github.sha }}"
|
||||
echo "🔍 Searching for workflow run for SHA: ${HEAD_SHA}"
|
||||
# Retry a few times as the run might be just starting or finishing
|
||||
for i in {1..3}; do
|
||||
RUN_ID=$(gh api \
|
||||
-H "Accept: application/vnd.github+json" \
|
||||
-H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
"/repos/${{ github.repository }}/actions/workflows/docker-build.yml/runs?head_sha=${HEAD_SHA}&status=success&per_page=1" \
|
||||
--jq '.workflow_runs[0].id // empty' 2>/dev/null || echo "")
|
||||
if [[ -n "${RUN_ID}" ]]; then break; fi
|
||||
echo "⏳ Waiting for workflow run to appear/complete... ($i/3)"
|
||||
sleep 5
|
||||
done
|
||||
fi
|
||||
|
||||
echo "run_id=${RUN_ID}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
# Check if the artifact exists in the workflow run
|
||||
ARTIFACT_ID=$(gh api \
|
||||
ARTIFACTS_JSON=$(gh api \
|
||||
-H "Accept: application/vnd.github+json" \
|
||||
-H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
"/repos/${{ github.repository }}/actions/runs/${RUN_ID}/artifacts" \
|
||||
--jq ".artifacts[] | select(.name == \"${ARTIFACT_NAME}\") | .id" 2>/dev/null || echo "")
|
||||
"/repos/${{ github.repository }}/actions/runs/${RUN_ID}/artifacts" 2>&1)
|
||||
ARTIFACTS_STATUS=$?
|
||||
|
||||
if [[ -n "${ARTIFACT_ID}" ]]; then
|
||||
echo "artifact_exists=true" >> "$GITHUB_OUTPUT"
|
||||
echo "artifact_id=${ARTIFACT_ID}" >> "$GITHUB_OUTPUT"
|
||||
echo "✅ Found artifact: ${ARTIFACT_NAME} (ID: ${ARTIFACT_ID})"
|
||||
else
|
||||
echo "artifact_exists=false" >> "$GITHUB_OUTPUT"
|
||||
echo "⚠️ Artifact not found: ${ARTIFACT_NAME}"
|
||||
echo "ℹ️ This is expected for non-PR builds or if the image was not uploaded"
|
||||
if [[ ${ARTIFACTS_STATUS} -ne 0 ]]; then
|
||||
echo "❌ reason_category=api_error"
|
||||
echo "reason=Failed to query artifacts for upstream run"
|
||||
echo "upstream_run_id=${RUN_ID}"
|
||||
echo "artifact_name=${ARTIFACT_NAME}"
|
||||
echo "api_output=${ARTIFACTS_JSON}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Skip if no artifact
|
||||
if: ((steps.pr-info.outputs.pr_number == '' && steps.pr-info.outputs.is_push != 'true') || steps.check-artifact.outputs.artifact_exists != 'true') && github.event_name != 'push' && github.event_name != 'pull_request'
|
||||
run: |
|
||||
echo "ℹ️ Skipping security scan - no PR image artifact available"
|
||||
echo "This is expected for:"
|
||||
echo " - Pushes to main/release branches"
|
||||
echo " - PRs where Docker build failed"
|
||||
echo " - Manual dispatch without PR number"
|
||||
exit 0
|
||||
ARTIFACT_ID=$(printf '%s' "${ARTIFACTS_JSON}" | jq -r --arg name "${ARTIFACT_NAME}" '.artifacts[] | select(.name == $name) | .id' | head -n 1)
|
||||
|
||||
if [[ -z "${ARTIFACT_ID}" ]]; then
|
||||
echo "❌ reason_category=not_found"
|
||||
echo "reason=Required artifact was not found"
|
||||
echo "upstream_run_id=${RUN_ID}"
|
||||
echo "artifact_name=${ARTIFACT_NAME}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
{
|
||||
echo "artifact_exists=true"
|
||||
echo "artifact_id=${ARTIFACT_ID}"
|
||||
echo "artifact_name=${ARTIFACT_NAME}"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
echo "✅ Found artifact: ${ARTIFACT_NAME} (ID: ${ARTIFACT_ID})"
|
||||
|
||||
- name: Download PR image artifact
|
||||
if: steps.check-artifact.outputs.artifact_exists == 'true'
|
||||
if: github.event_name == 'workflow_run' || github.event_name == 'workflow_dispatch'
|
||||
# actions/download-artifact v4.1.8
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3
|
||||
with:
|
||||
name: ${{ steps.pr-info.outputs.is_push == 'true' && 'push-image' || format('pr-image-{0}', steps.pr-info.outputs.pr_number) }}
|
||||
name: ${{ steps.check-artifact.outputs.artifact_name }}
|
||||
run-id: ${{ steps.check-artifact.outputs.run_id }}
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Load Docker image
|
||||
if: steps.check-artifact.outputs.artifact_exists == 'true'
|
||||
if: github.event_name == 'workflow_run' || github.event_name == 'workflow_dispatch'
|
||||
id: load-image
|
||||
run: |
|
||||
echo "📦 Loading Docker image..."
|
||||
docker load < charon-pr-image.tar
|
||||
echo "✅ Docker image loaded"
|
||||
|
||||
if [[ ! -r "charon-pr-image.tar" ]]; then
|
||||
echo "❌ ERROR: Artifact image tar is missing or unreadable"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
MANIFEST_TAGS=""
|
||||
if tar -tf charon-pr-image.tar | grep -qx "manifest.json"; then
|
||||
MANIFEST_TAGS=$(tar -xOf charon-pr-image.tar manifest.json 2>/dev/null | jq -r '.[]?.RepoTags[]?' 2>/dev/null | sed '/^$/d' || true)
|
||||
else
|
||||
echo "⚠️ manifest.json not found in artifact tar; will try docker-load-image-id fallback"
|
||||
fi
|
||||
|
||||
LOAD_OUTPUT=$(docker load < charon-pr-image.tar 2>&1)
|
||||
echo "${LOAD_OUTPUT}"
|
||||
|
||||
SOURCE_IMAGE_REF=""
|
||||
SOURCE_RESOLUTION_MODE=""
|
||||
|
||||
while IFS= read -r tag; do
|
||||
[[ -z "${tag}" ]] && continue
|
||||
if docker image inspect "${tag}" >/dev/null 2>&1; then
|
||||
SOURCE_IMAGE_REF="${tag}"
|
||||
SOURCE_RESOLUTION_MODE="manifest_tag"
|
||||
break
|
||||
fi
|
||||
done <<< "${MANIFEST_TAGS}"
|
||||
|
||||
if [[ -z "${SOURCE_IMAGE_REF}" ]]; then
|
||||
LOAD_IMAGE_ID=$(printf '%s\n' "${LOAD_OUTPUT}" | sed -nE 's/^Loaded image ID: (sha256:[0-9a-f]+)$/\1/p' | head -n1)
|
||||
if [[ -n "${LOAD_IMAGE_ID}" ]] && docker image inspect "${LOAD_IMAGE_ID}" >/dev/null 2>&1; then
|
||||
SOURCE_IMAGE_REF="${LOAD_IMAGE_ID}"
|
||||
SOURCE_RESOLUTION_MODE="load_image_id"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -z "${SOURCE_IMAGE_REF}" ]]; then
|
||||
echo "❌ ERROR: Could not resolve a valid image reference from manifest tags or docker load image ID"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
docker tag "${SOURCE_IMAGE_REF}" "charon:artifact"
|
||||
|
||||
{
|
||||
echo "source_image_ref=${SOURCE_IMAGE_REF}"
|
||||
echo "source_resolution_mode=${SOURCE_RESOLUTION_MODE}"
|
||||
echo "image_ref=charon:artifact"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
echo "✅ Docker image resolved via ${SOURCE_RESOLUTION_MODE} and tagged as charon:artifact"
|
||||
docker images | grep charon
|
||||
|
||||
- name: Extract charon binary from container
|
||||
@@ -214,31 +329,10 @@ jobs:
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Normalize image name for reference
|
||||
IMAGE_NAME=$(echo "${{ github.repository_owner }}/charon" | tr '[:upper:]' '[:lower:]')
|
||||
if [[ "${{ steps.pr-info.outputs.is_push }}" == "true" ]]; then
|
||||
BRANCH_NAME="${{ github.event.workflow_run.head_branch }}"
|
||||
if [[ -z "${BRANCH_NAME}" ]]; then
|
||||
echo "❌ ERROR: Branch name is empty for push build"
|
||||
exit 1
|
||||
fi
|
||||
# Normalize branch name for Docker tag (replace / and other special chars with -)
|
||||
# This matches docker/metadata-action behavior: type=ref,event=branch
|
||||
TAG_SAFE_BRANCH="${BRANCH_NAME//\//-}"
|
||||
IMAGE_REF="ghcr.io/${IMAGE_NAME}:${TAG_SAFE_BRANCH}"
|
||||
elif [[ -n "${{ steps.pr-info.outputs.pr_number }}" ]]; then
|
||||
IMAGE_REF="ghcr.io/${IMAGE_NAME}:pr-${{ steps.pr-info.outputs.pr_number }}"
|
||||
else
|
||||
echo "❌ ERROR: Cannot determine image reference"
|
||||
echo " - is_push: ${{ steps.pr-info.outputs.is_push }}"
|
||||
echo " - pr_number: ${{ steps.pr-info.outputs.pr_number }}"
|
||||
echo " - branch: ${{ github.event.workflow_run.head_branch }}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate the image reference format
|
||||
if [[ ! "${IMAGE_REF}" =~ ^ghcr\.io/[a-z0-9_-]+/[a-z0-9_-]+:[a-zA-Z0-9._-]+$ ]]; then
|
||||
echo "❌ ERROR: Invalid image reference format: ${IMAGE_REF}"
|
||||
# For workflow_run artifact path, always use locally tagged image from loaded artifact.
|
||||
IMAGE_REF="${{ steps.load-image.outputs.image_ref }}"
|
||||
if [[ -z "${IMAGE_REF}" ]]; then
|
||||
echo "❌ ERROR: Loaded artifact image reference is empty"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@@ -268,7 +362,7 @@ jobs:
|
||||
- name: Run Trivy filesystem scan (SARIF output)
|
||||
if: steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request'
|
||||
# aquasecurity/trivy-action v0.33.1
|
||||
uses: aquasecurity/trivy-action@c1824fd6edce30d7ab345a9989de00bbd46ef284
|
||||
uses: aquasecurity/trivy-action@4c61e6329bab9be735ca35291551614bc663dff3
|
||||
with:
|
||||
scan-type: 'fs'
|
||||
scan-ref: ${{ steps.extract.outputs.binary_path }}
|
||||
@@ -277,19 +371,30 @@ jobs:
|
||||
severity: 'CRITICAL,HIGH,MEDIUM'
|
||||
continue-on-error: true
|
||||
|
||||
- name: Check Trivy SARIF output exists
|
||||
if: always() && (steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request')
|
||||
id: trivy-sarif-check
|
||||
run: |
|
||||
if [[ -f trivy-binary-results.sarif ]]; then
|
||||
echo "exists=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "exists=false" >> "$GITHUB_OUTPUT"
|
||||
echo "ℹ️ No Trivy SARIF output found; skipping SARIF/artifact upload steps"
|
||||
fi
|
||||
|
||||
- name: Upload Trivy SARIF to GitHub Security
|
||||
if: steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request'
|
||||
if: always() && steps.trivy-sarif-check.outputs.exists == 'true'
|
||||
# github/codeql-action v4
|
||||
uses: github/codeql-action/upload-sarif@5e7a52feb2a3dfb87f88be2af33b9e2275f48de6
|
||||
uses: github/codeql-action/upload-sarif@0ec47d036c68ae0cf94c629009b1029407111281
|
||||
with:
|
||||
sarif_file: 'trivy-binary-results.sarif'
|
||||
category: ${{ steps.pr-info.outputs.is_push == 'true' && format('security-scan-{0}', github.event.workflow_run.head_branch) || format('security-scan-pr-{0}', steps.pr-info.outputs.pr_number) }}
|
||||
category: ${{ steps.pr-info.outputs.is_push == 'true' && format('security-scan-{0}', github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref_name) || format('security-scan-pr-{0}', steps.pr-info.outputs.pr_number) }}
|
||||
continue-on-error: true
|
||||
|
||||
- name: Run Trivy filesystem scan (fail on CRITICAL/HIGH)
|
||||
if: steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request'
|
||||
# aquasecurity/trivy-action v0.33.1
|
||||
uses: aquasecurity/trivy-action@c1824fd6edce30d7ab345a9989de00bbd46ef284
|
||||
uses: aquasecurity/trivy-action@4c61e6329bab9be735ca35291551614bc663dff3
|
||||
with:
|
||||
scan-type: 'fs'
|
||||
scan-ref: ${{ steps.extract.outputs.binary_path }}
|
||||
@@ -298,11 +403,11 @@ jobs:
|
||||
exit-code: '1'
|
||||
|
||||
- name: Upload scan artifacts
|
||||
if: always() && (steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request')
|
||||
if: always() && steps.trivy-sarif-check.outputs.exists == 'true'
|
||||
# actions/upload-artifact v4.4.3
|
||||
uses: actions/upload-artifact@47309c993abb98030a35d55ef7ff34b7fa1074b5
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
|
||||
with:
|
||||
name: ${{ steps.pr-info.outputs.is_push == 'true' && format('security-scan-{0}', github.event.workflow_run.head_branch) || format('security-scan-pr-{0}', steps.pr-info.outputs.pr_number) }}
|
||||
name: ${{ steps.pr-info.outputs.is_push == 'true' && format('security-scan-{0}', github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref_name) || format('security-scan-pr-{0}', steps.pr-info.outputs.pr_number) }}
|
||||
path: |
|
||||
trivy-binary-results.sarif
|
||||
retention-days: 14
|
||||
@@ -312,7 +417,7 @@ jobs:
|
||||
run: |
|
||||
{
|
||||
if [[ "${{ steps.pr-info.outputs.is_push }}" == "true" ]]; then
|
||||
echo "## 🔒 Security Scan Results - Branch: ${{ github.event.workflow_run.head_branch }}"
|
||||
echo "## 🔒 Security Scan Results - Branch: ${{ github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref_name }}"
|
||||
else
|
||||
echo "## 🔒 Security Scan Results - PR #${{ steps.pr-info.outputs.pr_number }}"
|
||||
fi
|
||||
|
||||
12
.github/workflows/security-weekly-rebuild.yml
vendored
12
.github/workflows/security-weekly-rebuild.yml
vendored
@@ -6,7 +6,7 @@ name: Weekly Security Rebuild
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 2 * * 0' # Sundays at 02:00 UTC
|
||||
- cron: '0 12 * * 2' # Tuesdays at 12:00 UTC
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
force_rebuild:
|
||||
@@ -88,7 +88,7 @@ jobs:
|
||||
BASE_IMAGE=${{ steps.base-image.outputs.digest }}
|
||||
|
||||
- name: Run Trivy vulnerability scanner (CRITICAL+HIGH)
|
||||
uses: aquasecurity/trivy-action@c1824fd6edce30d7ab345a9989de00bbd46ef284 # 0.34.0
|
||||
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
|
||||
with:
|
||||
image-ref: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build.outputs.digest }}
|
||||
format: 'table'
|
||||
@@ -98,7 +98,7 @@ jobs:
|
||||
|
||||
- name: Run Trivy vulnerability scanner (SARIF)
|
||||
id: trivy-sarif
|
||||
uses: aquasecurity/trivy-action@c1824fd6edce30d7ab345a9989de00bbd46ef284 # 0.34.0
|
||||
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
|
||||
with:
|
||||
image-ref: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build.outputs.digest }}
|
||||
format: 'sarif'
|
||||
@@ -106,12 +106,12 @@ jobs:
|
||||
severity: 'CRITICAL,HIGH,MEDIUM'
|
||||
|
||||
- name: Upload Trivy results to GitHub Security
|
||||
uses: github/codeql-action/upload-sarif@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3
|
||||
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
|
||||
with:
|
||||
sarif_file: 'trivy-weekly-results.sarif'
|
||||
|
||||
- name: Run Trivy vulnerability scanner (JSON for artifact)
|
||||
uses: aquasecurity/trivy-action@c1824fd6edce30d7ab345a9989de00bbd46ef284 # 0.34.0
|
||||
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
|
||||
with:
|
||||
image-ref: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build.outputs.digest }}
|
||||
format: 'json'
|
||||
@@ -119,7 +119,7 @@ jobs:
|
||||
severity: 'CRITICAL,HIGH,MEDIUM,LOW'
|
||||
|
||||
- name: Upload Trivy JSON results
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7
|
||||
with:
|
||||
name: trivy-weekly-scan-${{ github.run_number }}
|
||||
path: trivy-weekly-results.json
|
||||
|
||||
40
.github/workflows/supply-chain-pr.yml
vendored
40
.github/workflows/supply-chain-pr.yml
vendored
@@ -11,6 +11,8 @@ on:
|
||||
type: string
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
concurrency:
|
||||
group: supply-chain-pr-${{ github.event.workflow_run.event || github.event_name }}-${{ github.event.workflow_run.head_branch || github.ref }}
|
||||
@@ -264,7 +266,7 @@ jobs:
|
||||
# Generate SBOM using official Anchore action (auto-updated by Renovate)
|
||||
- name: Generate SBOM
|
||||
if: steps.set-target.outputs.image_name != ''
|
||||
uses: anchore/sbom-action@28d71544de8eaf1b958d335707167c5f783590ad # v0.22.2
|
||||
uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
|
||||
id: sbom
|
||||
with:
|
||||
image: ${{ steps.set-target.outputs.image_name }}
|
||||
@@ -337,9 +339,30 @@ jobs:
|
||||
echo " Low: ${LOW_COUNT}"
|
||||
echo " Total: ${TOTAL_COUNT}"
|
||||
|
||||
- name: Security severity policy summary
|
||||
if: steps.set-target.outputs.image_name != ''
|
||||
run: |
|
||||
CRITICAL_COUNT="${{ steps.vuln-summary.outputs.critical_count }}"
|
||||
HIGH_COUNT="${{ steps.vuln-summary.outputs.high_count }}"
|
||||
MEDIUM_COUNT="${{ steps.vuln-summary.outputs.medium_count }}"
|
||||
|
||||
{
|
||||
echo "## 🔐 Supply Chain Severity Policy"
|
||||
echo ""
|
||||
echo "- Blocking: Critical, High"
|
||||
echo "- Medium: non-blocking by default (report + triage SLA)"
|
||||
echo "- Policy file: .github/security-severity-policy.yml"
|
||||
echo ""
|
||||
echo "Current scan counts: Critical=${CRITICAL_COUNT}, High=${HIGH_COUNT}, Medium=${MEDIUM_COUNT}"
|
||||
} >> "$GITHUB_STEP_SUMMARY"
|
||||
|
||||
if [[ "${MEDIUM_COUNT}" -gt 0 ]]; then
|
||||
echo "::warning::${MEDIUM_COUNT} medium vulnerabilities found. Non-blocking by policy; create/maintain triage issue with SLA per .github/security-severity-policy.yml"
|
||||
fi
|
||||
|
||||
- name: Upload SARIF to GitHub Security
|
||||
if: steps.check-artifact.outputs.artifact_found == 'true'
|
||||
uses: github/codeql-action/upload-sarif@9e907b5e64f6b83e7804b09294d44122997950d6 # v4
|
||||
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4
|
||||
continue-on-error: true
|
||||
with:
|
||||
sarif_file: grype-results.sarif
|
||||
@@ -348,7 +371,7 @@ jobs:
|
||||
- name: Upload supply chain artifacts
|
||||
if: steps.set-target.outputs.image_name != ''
|
||||
# actions/upload-artifact v4.6.0
|
||||
uses: actions/upload-artifact@47309c993abb98030a35d55ef7ff34b7fa1074b5
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
|
||||
with:
|
||||
name: ${{ steps.pr-number.outputs.is_push == 'true' && format('supply-chain-{0}', steps.sanitize.outputs.branch) || format('supply-chain-pr-{0}', steps.pr-number.outputs.pr_number) }}
|
||||
path: |
|
||||
@@ -433,10 +456,11 @@ jobs:
|
||||
|
||||
echo "✅ PR comment posted"
|
||||
|
||||
- name: Fail on critical vulnerabilities
|
||||
- name: Fail on Critical/High vulnerabilities
|
||||
if: steps.set-target.outputs.image_name != ''
|
||||
run: |
|
||||
CRITICAL_COUNT="${{ steps.vuln-summary.outputs.critical_count }}"
|
||||
HIGH_COUNT="${{ steps.vuln-summary.outputs.high_count }}"
|
||||
|
||||
if [[ "${CRITICAL_COUNT}" -gt 0 ]]; then
|
||||
echo "🚨 Found ${CRITICAL_COUNT} CRITICAL vulnerabilities!"
|
||||
@@ -444,4 +468,10 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ No critical vulnerabilities found"
|
||||
if [[ "${HIGH_COUNT}" -gt 0 ]]; then
|
||||
echo "🚨 Found ${HIGH_COUNT} HIGH vulnerabilities!"
|
||||
echo "Please review the vulnerability report and address high severity issues before merging."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ No Critical/High vulnerabilities found"
|
||||
|
||||
6
.github/workflows/supply-chain-verify.yml
vendored
6
.github/workflows/supply-chain-verify.yml
vendored
@@ -119,7 +119,7 @@ jobs:
|
||||
# Generate SBOM using official Anchore action (auto-updated by Renovate)
|
||||
- name: Generate and Verify SBOM
|
||||
if: steps.image-check.outputs.exists == 'true'
|
||||
uses: anchore/sbom-action@28d71544de8eaf1b958d335707167c5f783590ad # v0.22.2
|
||||
uses: anchore/sbom-action@17ae1740179002c89186b61233e0f892c3118b11 # v0.23.0
|
||||
with:
|
||||
image: ghcr.io/${{ github.repository_owner }}/charon:${{ steps.tag.outputs.tag }}
|
||||
format: cyclonedx-json
|
||||
@@ -144,7 +144,7 @@ jobs:
|
||||
|
||||
- name: Upload SBOM Artifact
|
||||
if: steps.image-check.outputs.exists == 'true' && always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: sbom-${{ steps.tag.outputs.tag }}
|
||||
path: sbom-verify.cyclonedx.json
|
||||
@@ -324,7 +324,7 @@ jobs:
|
||||
|
||||
- name: Upload Vulnerability Scan Artifact
|
||||
if: steps.validate-sbom.outputs.valid == 'true' && always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: vulnerability-scan-${{ steps.tag.outputs.tag }}
|
||||
path: |
|
||||
|
||||
@@ -5,9 +5,9 @@ name: Weekly Nightly to Main Promotion
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Every Monday at 10:30 UTC (5:30am EST / 6:30am EDT)
|
||||
# Every Monday at 12:00 UTC (7:00am EST / 8:00am EDT)
|
||||
# Offset from nightly sync (09:00 UTC) to avoid schedule race and allow validation completion.
|
||||
- cron: '30 10 * * 1'
|
||||
- cron: '0 12 * * 1'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
reason:
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -306,4 +306,5 @@ frontend/temp**
|
||||
playwright-output/**
|
||||
validation-evidence/**
|
||||
.github/agents/# Tools Configuration.md
|
||||
docs/plans/codecove_patch_report.md
|
||||
docs/reports/codecove_patch_report.md
|
||||
vuln-results.json
|
||||
|
||||
76
.grype.yaml
76
.grype.yaml
@@ -59,6 +59,82 @@ ignore:
|
||||
# 4. If no fix: Extend expiry by 7 days, document justification
|
||||
# 5. If extended 3+ times: Escalate to security team for review
|
||||
|
||||
# GHSA-69x3-g4r3-p962 / CVE-2026-25793: Nebula ECDSA Signature Malleability
|
||||
# Severity: HIGH (CVSS 8.1)
|
||||
# Package: github.com/slackhq/nebula v1.9.7 (embedded in /usr/bin/caddy)
|
||||
# Status: Cannot upgrade — smallstep/certificates v0.30.0-rc2 still pins nebula v1.9.x
|
||||
#
|
||||
# Vulnerability Details:
|
||||
# - ECDSA signature malleability allows bypassing certificate blocklists
|
||||
# - Attacker can forge alternate valid P256 ECDSA signatures for revoked
|
||||
# certificates (CVSSv3: AV:N/AC:H/PR:L/UI:N/S:U/C:H/I:H/A:N)
|
||||
# - Only affects configurations using Nebula-based certificate authorities
|
||||
# (non-default and uncommon in Charon deployments)
|
||||
#
|
||||
# Root Cause (Compile-Time Dependency Lock):
|
||||
# - Caddy is built with caddy-security plugin, which transitively requires
|
||||
# github.com/smallstep/certificates. That package pins nebula v1.9.x.
|
||||
# - Checked: smallstep/certificates v0.27.5 → v0.30.0-rc2 all require nebula v1.9.4–v1.9.7.
|
||||
# The nebula v1.10 API removal breaks compilation in the
|
||||
# authority/provisioner package; xcaddy build fails with upgrade attempted.
|
||||
# - Dockerfile caddy-builder stage pins nebula@v1.9.7 (Renovate tracked) with
|
||||
# an inline comment explaining the constraint (Dockerfile line 247).
|
||||
# - Fix path: once smallstep/certificates releases a version requiring
|
||||
# nebula v1.10+, remove the pin and this suppression simultaneously.
|
||||
#
|
||||
# Risk Assessment: ACCEPTED (Low exploitability in Charon context)
|
||||
# - Charon uses standard ACME/Let's Encrypt TLS; Nebula VPN PKI is not
|
||||
# enabled by default and rarely configured in Charon deployments.
|
||||
# - Exploiting this requires a valid certificate sharing the same issuer as
|
||||
# a revoked one — an uncommon and targeted attack scenario.
|
||||
# - Container-level isolation reduces the attack surface further.
|
||||
#
|
||||
# Mitigation (active while suppression is in effect):
|
||||
# - Monitor smallstep/certificates releases at https://github.com/smallstep/certificates/releases
|
||||
# - Weekly CI security rebuild flags any new CVEs in the full image.
|
||||
# - Renovate annotation in Dockerfile (datasource=go depName=github.com/slackhq/nebula)
|
||||
# will surface the pin for review when xcaddy build becomes compatible.
|
||||
#
|
||||
# Review:
|
||||
# - Reviewed 2026-02-19: smallstep/certificates latest stable remains v0.27.5;
|
||||
# no release requiring nebula v1.10+ has shipped. Suppression extended 14 days.
|
||||
# - Next review: 2026-03-05. Remove suppression immediately once upstream fixes.
|
||||
#
|
||||
# Removal Criteria:
|
||||
# - smallstep/certificates releases a stable version requiring nebula v1.10+
|
||||
# - Update Dockerfile caddy-builder patch to use the new versions
|
||||
# - Rebuild image, run security scan, confirm suppression no longer needed
|
||||
# - Remove both this entry and the corresponding .trivyignore entry
|
||||
#
|
||||
# References:
|
||||
# - GHSA: https://github.com/advisories/GHSA-69x3-g4r3-p962
|
||||
# - CVE-2026-25793: https://nvd.nist.gov/vuln/detail/CVE-2026-25793
|
||||
# - smallstep/certificates: https://github.com/smallstep/certificates/releases
|
||||
# - Dockerfile pin: caddy-builder stage, line ~247 (go get nebula@v1.9.7)
|
||||
- vulnerability: GHSA-69x3-g4r3-p962
|
||||
package:
|
||||
name: github.com/slackhq/nebula
|
||||
version: "v1.9.7"
|
||||
type: go-module
|
||||
reason: |
|
||||
HIGH — ECDSA signature malleability in nebula v1.9.7 embedded in /usr/bin/caddy.
|
||||
Cannot upgrade: smallstep/certificates v0.27.5 (latest stable as of 2026-02-19)
|
||||
still requires nebula v1.9.x (verified across v0.27.5–v0.30.0-rc2). Charon does
|
||||
not use Nebula VPN PKI by default. Risk accepted pending upstream smallstep fix.
|
||||
Reviewed 2026-02-19: no new smallstep release changes this assessment.
|
||||
expiry: "2026-03-05" # Re-evaluate in 14 days (2026-02-19 + 14 days)
|
||||
|
||||
# Action items when this suppression expires:
|
||||
# 1. Check smallstep/certificates releases: https://github.com/smallstep/certificates/releases
|
||||
# 2. If a stable version requires nebula v1.10+:
|
||||
# a. Update Dockerfile caddy-builder: remove the `go get nebula@v1.9.7` pin
|
||||
# b. Optionally bump smallstep/certificates to the new version
|
||||
# c. Rebuild Docker image and verify no compile failures
|
||||
# d. Re-run local security-scan-docker-image and confirm clean result
|
||||
# e. Remove this suppression entry
|
||||
# 3. If no fix yet: Extend expiry by 14 days and document justification
|
||||
# 4. If extended 3+ times: Open upstream issue on smallstep/certificates
|
||||
|
||||
# Match exclusions (patterns to ignore during scanning)
|
||||
# Use sparingly - prefer specific CVE suppressions above
|
||||
match:
|
||||
|
||||
@@ -113,7 +113,7 @@ repos:
|
||||
stages: [manual] # Only runs when explicitly called
|
||||
- id: frontend-type-check
|
||||
name: Frontend TypeScript Check
|
||||
entry: bash -c 'cd frontend && npm run type-check'
|
||||
entry: bash -c 'cd frontend && npx tsc --noEmit'
|
||||
language: system
|
||||
files: '^frontend/.*\.(ts|tsx)$'
|
||||
pass_filenames: false
|
||||
|
||||
@@ -1,2 +1,9 @@
|
||||
.cache/
|
||||
playwright/.auth/
|
||||
|
||||
# GHSA-69x3-g4r3-p962 / CVE-2026-25793: Nebula ECDSA Signature Malleability
|
||||
# Severity: HIGH (CVSS 8.1) — Package: github.com/slackhq/nebula v1.9.7 in /usr/bin/caddy
|
||||
# Cannot upgrade: smallstep/certificates v0.27.5 (latest stable as of 2026-02-19) still pins nebula v1.9.x.
|
||||
# Charon does not use Nebula VPN PKI by default. Review by: 2026-03-05
|
||||
# See also: .grype.yaml for full justification
|
||||
CVE-2026-25793
|
||||
|
||||
8
.vscode/mcp.json
vendored
8
.vscode/mcp.json
vendored
@@ -1,4 +1,5 @@
|
||||
{
|
||||
"inputs": [],
|
||||
"servers": {
|
||||
"microsoft/playwright-mcp": {
|
||||
"type": "stdio",
|
||||
@@ -8,11 +9,6 @@
|
||||
],
|
||||
"gallery": "https://api.mcp.github.com",
|
||||
"version": "0.0.1-seed"
|
||||
},
|
||||
"gopls": {
|
||||
"url": "http://localhost:8092",
|
||||
"type": "sse"
|
||||
}
|
||||
},
|
||||
"inputs": []
|
||||
}
|
||||
}
|
||||
|
||||
169
.vscode/tasks.json
vendored
169
.vscode/tasks.json
vendored
@@ -454,7 +454,7 @@
|
||||
{
|
||||
"label": "Security: Trivy Scan",
|
||||
"type": "shell",
|
||||
"command": ".github/skills/scripts/skill-runner.sh security-scan-trivy",
|
||||
"command": "TRIVY_DOCKER_RM=false .github/skills/scripts/skill-runner.sh security-scan-trivy",
|
||||
"group": "test",
|
||||
"problemMatcher": []
|
||||
},
|
||||
@@ -501,14 +501,14 @@
|
||||
{
|
||||
"label": "Security: CodeQL Go Scan (DEPRECATED)",
|
||||
"type": "shell",
|
||||
"command": "codeql database create codeql-db-go --language=go --source-root=backend --overwrite && codeql database analyze codeql-db-go /projects/codeql/codeql/go/ql/src/codeql-suites/go-security-extended.qls --format=sarif-latest --output=codeql-results-go.sarif",
|
||||
"command": "bash scripts/pre-commit-hooks/codeql-go-scan.sh",
|
||||
"group": "test",
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Security: CodeQL JS Scan (DEPRECATED)",
|
||||
"type": "shell",
|
||||
"command": "codeql database create codeql-db-js --language=javascript --source-root=frontend --overwrite && codeql database analyze codeql-db-js /projects/codeql/codeql/javascript/ql/src/codeql-suites/javascript-security-extended.qls --format=sarif-latest --output=codeql-results-js.sarif",
|
||||
"command": "bash scripts/pre-commit-hooks/codeql-js-scan.sh",
|
||||
"group": "test",
|
||||
"problemMatcher": []
|
||||
},
|
||||
@@ -724,6 +724,13 @@
|
||||
"group": "test",
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Security: Caddy PR-1 Compatibility Matrix",
|
||||
"type": "shell",
|
||||
"command": "cd /projects/Charon && bash scripts/caddy-compat-matrix.sh --candidate-version 2.11.1 --patch-scenarios A,B,C --platforms linux/amd64,linux/arm64 --smoke-set boot_caddy,plugin_modules,config_validate,admin_api_health --output-dir test-results/caddy-compat --docs-report docs/reports/caddy-compatibility-matrix.md",
|
||||
"group": "test",
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Test: E2E Playwright (Skill)",
|
||||
"type": "shell",
|
||||
@@ -808,6 +815,162 @@
|
||||
"close": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Test: E2E Playwright (Chromium) - Non-Security Shards 1/4-4/4",
|
||||
"type": "shell",
|
||||
"command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=1 npx playwright test --project=chromium --shard=1/4 --output=playwright-output/chromium-shard-1 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks && cd /projects/Charon && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=2 npx playwright test --project=chromium --shard=2/4 --output=playwright-output/chromium-shard-2 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks && cd /projects/Charon && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=3 npx playwright test --project=chromium --shard=3/4 --output=playwright-output/chromium-shard-3 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks && cd /projects/Charon && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=4 npx playwright test --project=chromium --shard=4/4 --output=playwright-output/chromium-shard-4 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
|
||||
"group": "test",
|
||||
"problemMatcher": [],
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
"close": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Test: E2E Playwright (Chromium) - Non-Security Shard 1/4",
|
||||
"type": "shell",
|
||||
"command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=1 npx playwright test --project=chromium --shard=1/4 --output=playwright-output/chromium-shard-1 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
|
||||
"group": "test",
|
||||
"problemMatcher": [],
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
"close": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Test: E2E Playwright (Chromium) - Non-Security Shard 2/4",
|
||||
"type": "shell",
|
||||
"command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=2 npx playwright test --project=chromium --shard=2/4 --output=playwright-output/chromium-shard-2 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
|
||||
"group": "test",
|
||||
"problemMatcher": [],
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
"close": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Test: E2E Playwright (Chromium) - Non-Security Shard 3/4",
|
||||
"type": "shell",
|
||||
"command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=3 npx playwright test --project=chromium --shard=3/4 --output=playwright-output/chromium-shard-3 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
|
||||
"group": "test",
|
||||
"problemMatcher": [],
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
"close": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Test: E2E Playwright (Chromium) - Non-Security Shard 4/4",
|
||||
"type": "shell",
|
||||
"command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=4 npx playwright test --project=chromium --shard=4/4 --output=playwright-output/chromium-shard-4 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
|
||||
"group": "test",
|
||||
"problemMatcher": [],
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
"close": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Test: E2E Playwright (WebKit) - Non-Security Shards 1/4-4/4",
|
||||
"type": "shell",
|
||||
"command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=1 npx playwright test --project=webkit --shard=1/4 --output=playwright-output/webkit-shard-1 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks && cd /projects/Charon && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=2 npx playwright test --project=webkit --shard=2/4 --output=playwright-output/webkit-shard-2 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks && cd /projects/Charon && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=3 npx playwright test --project=webkit --shard=3/4 --output=playwright-output/webkit-shard-3 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks && cd /projects/Charon && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=4 npx playwright test --project=webkit --shard=4/4 --output=playwright-output/webkit-shard-4 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
|
||||
"group": "test",
|
||||
"problemMatcher": [],
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
"close": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Test: E2E Playwright (WebKit) - Non-Security Shard 1/4",
|
||||
"type": "shell",
|
||||
"command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=1 npx playwright test --project=webkit --shard=1/4 --output=playwright-output/webkit-shard-1 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
|
||||
"group": "test",
|
||||
"problemMatcher": [],
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
"close": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Test: E2E Playwright (WebKit) - Non-Security Shard 2/4",
|
||||
"type": "shell",
|
||||
"command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=2 npx playwright test --project=webkit --shard=2/4 --output=playwright-output/webkit-shard-2 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
|
||||
"group": "test",
|
||||
"problemMatcher": [],
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
"close": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Test: E2E Playwright (WebKit) - Non-Security Shard 3/4",
|
||||
"type": "shell",
|
||||
"command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=3 npx playwright test --project=webkit --shard=3/4 --output=playwright-output/webkit-shard-3 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
|
||||
"group": "test",
|
||||
"problemMatcher": [],
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
"close": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Test: E2E Playwright (WebKit) - Non-Security Shard 4/4",
|
||||
"type": "shell",
|
||||
"command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=false PLAYWRIGHT_SKIP_SECURITY_DEPS=1 TEST_WORKER_INDEX=4 npx playwright test --project=webkit --shard=4/4 --output=playwright-output/webkit-shard-4 tests/core tests/dns-provider-crud.spec.ts tests/dns-provider-types.spec.ts tests/integration tests/manual-dns-provider.spec.ts tests/monitoring tests/settings tests/tasks",
|
||||
"group": "test",
|
||||
"problemMatcher": [],
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
"close": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Test: E2E Playwright (Chromium) - Security Suite",
|
||||
"type": "shell",
|
||||
"command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=true PLAYWRIGHT_SKIP_SECURITY_DEPS=0 npx playwright test --project=security-tests --output=playwright-output/chromium-security tests/security",
|
||||
"group": "test",
|
||||
"problemMatcher": [],
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
"close": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Test: E2E Playwright (FireFox) - Security Suite",
|
||||
"type": "shell",
|
||||
"command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=true PLAYWRIGHT_SKIP_SECURITY_DEPS=0 npx playwright test --project=firefox --output=playwright-output/firefox-security tests/security",
|
||||
"group": "test",
|
||||
"problemMatcher": [],
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
"close": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Test: E2E Playwright (WebKit) - Security Suite",
|
||||
"type": "shell",
|
||||
"command": "cd /projects/Charon && if [ -f .env ]; then set -a; . ./.env; set +a; fi && : \"${CHARON_EMERGENCY_TOKEN:?CHARON_EMERGENCY_TOKEN is required (set it in /projects/Charon/.env)}\" && CI=true PLAYWRIGHT_BASE_URL=http://127.0.0.1:8080 CHARON_SECURITY_TESTS_ENABLED=true PLAYWRIGHT_SKIP_SECURITY_DEPS=0 npx playwright test --project=webkit --output=playwright-output/webkit-security tests/security",
|
||||
"group": "test",
|
||||
"problemMatcher": [],
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "dedicated",
|
||||
"close": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "Test: E2E Playwright with Coverage",
|
||||
"type": "shell",
|
||||
|
||||
@@ -126,11 +126,11 @@ graph TB
|
||||
| **HTTP Framework** | Gin | Latest | Routing, middleware, HTTP handling |
|
||||
| **Database** | SQLite | 3.x | Embedded database |
|
||||
| **ORM** | GORM | Latest | Database abstraction layer |
|
||||
| **Reverse Proxy** | Caddy Server | 2.11.0-beta.2 | Embedded HTTP/HTTPS proxy |
|
||||
| **Reverse Proxy** | Caddy Server | 2.11.1 | Embedded HTTP/HTTPS proxy |
|
||||
| **WebSocket** | gorilla/websocket | Latest | Real-time log streaming |
|
||||
| **Crypto** | golang.org/x/crypto | Latest | Password hashing, encryption |
|
||||
| **Metrics** | Prometheus Client | Latest | Application metrics |
|
||||
| **Notifications** | Shoutrrr | Latest | Multi-platform alerts |
|
||||
| **Notifications** | Notify (Discord-first) | Current | Discord notifications now; additional services in phased rollout |
|
||||
| **Docker Client** | Docker SDK | Latest | Container discovery |
|
||||
| **Logging** | Logrus + Lumberjack | Latest | Structured logging with rotation |
|
||||
|
||||
@@ -1259,6 +1259,14 @@ go test ./integration/...
|
||||
9. **Release Notes:** Generate changelog from commits
|
||||
10. **Notify:** Send release notification (Discord, email)
|
||||
|
||||
**Mandatory rollout gates (sign-off block):**
|
||||
|
||||
1. Digest freshness and index digest parity across GHCR and Docker Hub
|
||||
2. Per-arch digest parity across GHCR and Docker Hub
|
||||
3. SBOM and vulnerability scans against immutable refs (`image@sha256:...`)
|
||||
4. Artifact freshness timestamps after push
|
||||
5. Evidence block with required rollout verification fields
|
||||
|
||||
### Supply Chain Security
|
||||
|
||||
**Components:**
|
||||
@@ -1292,10 +1300,10 @@ cosign verify \
|
||||
wikid82/charon:latest
|
||||
|
||||
# Inspect SBOM
|
||||
syft wikid82/charon:latest -o json
|
||||
syft ghcr.io/wikid82/charon@sha256:<index-digest> -o json
|
||||
|
||||
# Scan for vulnerabilities
|
||||
grype wikid82/charon:latest
|
||||
grype ghcr.io/wikid82/charon@sha256:<index-digest>
|
||||
```
|
||||
|
||||
### Rollback Strategy
|
||||
@@ -1333,8 +1341,8 @@ docker exec charon /app/scripts/restore-backup.sh \
|
||||
- Future: Dynamic plugin loading for custom providers
|
||||
|
||||
2. **Notification Channels:**
|
||||
- Shoutrrr provides 40+ channels (Discord, Slack, Email, etc.)
|
||||
- Custom channels via Shoutrrr service URLs
|
||||
- Current rollout is Discord-only for notifications
|
||||
- Additional services are enabled later in validated phases
|
||||
|
||||
3. **Authentication Providers:**
|
||||
- Current: Local database authentication
|
||||
|
||||
@@ -31,6 +31,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||
### Fixed
|
||||
- Fixed: Added robust validation and debug logging for Docker image tags to prevent invalid reference errors.
|
||||
- Fixed: Removed log masking for image references and added manifest validation to debug CI failures.
|
||||
- **Proxy Hosts**: Fixed ACL and Security Headers dropdown selections so create/edit saves now keep the selected values (including clearing to none) after submit and reload.
|
||||
- **CI**: Fixed Docker image reference output so integration jobs never pull an empty image ref
|
||||
- **E2E Test Reliability**: Resolved test timeout issues affecting CI/CD pipeline stability
|
||||
- Fixed config reload overlay blocking test interactions
|
||||
|
||||
52
Dockerfile
52
Dockerfile
@@ -14,8 +14,11 @@ ARG BUILD_DEBUG=0
|
||||
# avoid accidentally pulling a v3 major release. Renovate can still update
|
||||
# this ARG to a specific v2.x tag when desired.
|
||||
## Try to build the requested Caddy v2.x tag (Renovate can update this ARG).
|
||||
## If the requested tag isn't available, fall back to a known-good v2.11.0-beta.2 build.
|
||||
ARG CADDY_VERSION=2.11.0-beta.2
|
||||
## If the requested tag isn't available, fall back to a known-good v2.11.1 build.
|
||||
ARG CADDY_VERSION=2.11.1
|
||||
ARG CADDY_CANDIDATE_VERSION=2.11.1
|
||||
ARG CADDY_USE_CANDIDATE=0
|
||||
ARG CADDY_PATCH_SCENARIO=B
|
||||
## When an official caddy image tag isn't available on the host, use a
|
||||
## plain Alpine base image and overwrite its caddy binary with our
|
||||
## xcaddy-built binary in the later COPY step. This avoids relying on
|
||||
@@ -65,7 +68,7 @@ RUN --mount=type=cache,target=/root/.cache/go-build \
|
||||
# ---- Frontend Builder ----
|
||||
# Build the frontend using the BUILDPLATFORM to avoid arm64 musl Rollup native issues
|
||||
# renovate: datasource=docker depName=node
|
||||
FROM --platform=$BUILDPLATFORM node:24.13.1-alpine AS frontend-builder
|
||||
FROM --platform=$BUILDPLATFORM node:24.14.0-alpine AS frontend-builder
|
||||
WORKDIR /app/frontend
|
||||
|
||||
# Copy frontend package files
|
||||
@@ -196,6 +199,9 @@ FROM --platform=$BUILDPLATFORM golang:1.26-alpine AS caddy-builder
|
||||
ARG TARGETOS
|
||||
ARG TARGETARCH
|
||||
ARG CADDY_VERSION
|
||||
ARG CADDY_CANDIDATE_VERSION
|
||||
ARG CADDY_USE_CANDIDATE
|
||||
ARG CADDY_PATCH_SCENARIO
|
||||
# renovate: datasource=go depName=github.com/caddyserver/xcaddy
|
||||
ARG XCADDY_VERSION=0.4.5
|
||||
|
||||
@@ -213,10 +219,16 @@ RUN --mount=type=cache,target=/go/pkg/mod \
|
||||
RUN --mount=type=cache,target=/root/.cache/go-build \
|
||||
--mount=type=cache,target=/go/pkg/mod \
|
||||
sh -c 'set -e; \
|
||||
CADDY_TARGET_VERSION="${CADDY_VERSION}"; \
|
||||
if [ "${CADDY_USE_CANDIDATE}" = "1" ]; then \
|
||||
CADDY_TARGET_VERSION="${CADDY_CANDIDATE_VERSION}"; \
|
||||
fi; \
|
||||
echo "Using Caddy target version: v${CADDY_TARGET_VERSION}"; \
|
||||
echo "Using Caddy patch scenario: ${CADDY_PATCH_SCENARIO}"; \
|
||||
export XCADDY_SKIP_CLEANUP=1; \
|
||||
echo "Stage 1: Generate go.mod with xcaddy..."; \
|
||||
# Run xcaddy to generate the build directory and go.mod
|
||||
GOOS=$TARGETOS GOARCH=$TARGETARCH xcaddy build v${CADDY_VERSION} \
|
||||
GOOS=$TARGETOS GOARCH=$TARGETARCH xcaddy build v${CADDY_TARGET_VERSION} \
|
||||
--with github.com/greenpau/caddy-security \
|
||||
--with github.com/corazawaf/coraza-caddy/v2 \
|
||||
--with github.com/hslatman/caddy-crowdsec-bouncer@v0.10.0 \
|
||||
@@ -239,12 +251,21 @@ RUN --mount=type=cache,target=/root/.cache/go-build \
|
||||
go get github.com/expr-lang/expr@v1.17.7; \
|
||||
# renovate: datasource=go depName=github.com/hslatman/ipstore
|
||||
go get github.com/hslatman/ipstore@v0.4.0; \
|
||||
# NOTE: smallstep/certificates (pulled by caddy-security stack) currently
|
||||
# uses legacy nebula APIs removed in nebula v1.10+, which causes compile
|
||||
# failures in authority/provisioner. Keep this pinned to a known-compatible
|
||||
# v1.9.x release until upstream stack supports nebula v1.10+.
|
||||
# renovate: datasource=go depName=github.com/slackhq/nebula
|
||||
go get github.com/slackhq/nebula@v1.9.7; \
|
||||
if [ "${CADDY_PATCH_SCENARIO}" = "A" ]; then \
|
||||
# Rollback scenario: keep explicit nebula pin if upstream compatibility regresses.
|
||||
# NOTE: smallstep/certificates (pulled by caddy-security stack) currently
|
||||
# uses legacy nebula APIs removed in nebula v1.10+, which causes compile
|
||||
# failures in authority/provisioner. Keep this pinned to a known-compatible
|
||||
# v1.9.x release until upstream stack supports nebula v1.10+.
|
||||
# renovate: datasource=go depName=github.com/slackhq/nebula
|
||||
go get github.com/slackhq/nebula@v1.9.7; \
|
||||
elif [ "${CADDY_PATCH_SCENARIO}" = "B" ] || [ "${CADDY_PATCH_SCENARIO}" = "C" ]; then \
|
||||
# Default PR-2 posture: retire explicit nebula pin and use upstream resolution.
|
||||
echo "Skipping nebula pin for scenario ${CADDY_PATCH_SCENARIO}"; \
|
||||
else \
|
||||
echo "Unsupported CADDY_PATCH_SCENARIO=${CADDY_PATCH_SCENARIO}"; \
|
||||
exit 1; \
|
||||
fi; \
|
||||
# Clean up go.mod and ensure all dependencies are resolved
|
||||
go mod tidy; \
|
||||
echo "Dependencies patched successfully"; \
|
||||
@@ -392,7 +413,7 @@ SHELL ["/bin/ash", "-o", "pipefail", "-c"]
|
||||
# Note: In production, users should provide their own MaxMind license key
|
||||
# This uses the publicly available GeoLite2 database
|
||||
# In CI, timeout quickly rather than retrying to save build time
|
||||
ARG GEOLITE2_COUNTRY_SHA256=86fe00e0272865b8bec79defca2e9fb19ad0cf4458697992e1a37ba89077c13a
|
||||
ARG GEOLITE2_COUNTRY_SHA256=d3031e02196523cbb5f74291122033f2be277b2130abedd4b5bee52ba79832be
|
||||
RUN mkdir -p /app/data/geoip && \
|
||||
if [ -n "$CI" ]; then \
|
||||
echo "⏱️ CI detected - quick download (10s timeout, no retries)"; \
|
||||
@@ -548,13 +569,8 @@ HEALTHCHECK --interval=30s --timeout=3s --start-period=40s --retries=3 \
|
||||
# while maintaining the expected /etc/crowdsec path for compatibility
|
||||
RUN ln -sf /app/data/crowdsec/config /etc/crowdsec
|
||||
|
||||
# Security: Container starts as root to handle Docker socket group permissions,
|
||||
# then the entrypoint script drops privileges to the charon user before starting
|
||||
# applications. This approach:
|
||||
# 1. Maintains CIS Docker Benchmark compliance (non-root execution)
|
||||
# 2. Enables Docker integration by dynamically adding charon to docker group
|
||||
# 3. Ensures proper ownership of mounted volumes
|
||||
# The entrypoint script uses gosu to securely drop privileges after setup.
|
||||
# Security: Run the container as non-root by default.
|
||||
USER charon
|
||||
|
||||
# Use custom entrypoint to start both Caddy and Charon
|
||||
ENTRYPOINT ["/docker-entrypoint.sh"]
|
||||
|
||||
649
README.md
649
README.md
@@ -1,75 +1,132 @@
|
||||
<p align="center">
|
||||
<img src="frontend/public/banner.png" alt="Charon" width="600">
|
||||
<img src="https://raw.githubusercontent.com/Wikid82/Charon/refs/heads/main/frontend/public/banner.webp" alt="Charon" width="350">
|
||||
</p>
|
||||
|
||||
<h1 align="center">Charon</h1>
|
||||
|
||||
<br>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://www.repostatus.org/#active"><img src="https://www.repostatus.org/badges/latest/active.svg" alt="Project Status: Active – The project is being actively developed." /></a>
|
||||
<a href="https://hub.docker.com/r/wikid82/charon"><img src="https://img.shields.io/docker/pulls/wikid82/charon.svg" alt="Docker Pulls"></a>
|
||||
<a href="https://github.com/users/Wikid82/packages/container/package/charon"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/Wikid82/Charon/main/.github/badges/ghcr-downloads.json" alt="GHCR Pulls"></a>
|
||||
<a href="https://github.com/Wikid82/charon/releases"><img src="https://img.shields.io/github/v/release/Wikid82/charon?include_prereleases" alt="Release"></a>
|
||||
<br>
|
||||
<a href="https://codecov.io/gh/Wikid82/Charon" ><img src="https://codecov.io/gh/Wikid82/Charon/branch/main/graph/badge.svg?token=RXSINLQTGE" alt="Code Coverage"/></a>
|
||||
<a href="LICENSE"><img src="https://img.shields.io/badge/License-MIT-blue.svg" alt="License: MIT"></a>
|
||||
<a href="SECURITY.md"><img src="https://img.shields.io/badge/Security-Audited-brightgreen.svg" alt="Security: Audited"></a>
|
||||
<br>
|
||||
<a href="https://github.com/Wikid82/Charon/actions/workflows/e2e-tests-split.yml"><img src="https://github.com/Wikid82/Charon/actions/workflows/e2e-tests-split.yml/badge.svg" alt="E2E Tests"></a>
|
||||
<a href="https://github.com/Wikid82/Charon/actions/workflows/cerberus-integration.yml"><img src="https://github.com/Wikid82/Charon/actions/workflows/cerberus-integration.yml/badge.svg" alt="Cerberus Integration"></a><br>
|
||||
<a href="https://github.com/Wikid82/Charon/actions/workflows/crowdsec-integration.yml"><img src="https://github.com/Wikid82/Charon/actions/workflows/crowdsec-integration.yml/badge.svg" alt="CrowdSec Integration"></a>
|
||||
<a href="https://github.com/Wikid82/Charon/actions/workflows/waf-integration.yml"><img src="https://github.com/Wikid82/Charon/actions/workflows/waf-integration.yml/badge.svg" alt="WAF Integration"></a>
|
||||
<a href="https://github.com/Wikid82/Charon/actions/workflows/rate-limit-integration.yml"><img src="https://github.com/Wikid82/Charon/actions/workflows/rate-limit-integration.yml/badge.svg" alt="Rate Limit Integration"></a>
|
||||
<strong>Your server, your rules—without the headaches.</strong>
|
||||
</p>
|
||||
<br>
|
||||
<p align="center"><strong>Your server, your rules—without the headaches.</strong></p>
|
||||
|
||||
<p align="center">
|
||||
Simply manage multiple websites and self-hosted applications. Click, save, done. No code, no config files, no PhD required.
|
||||
Manage reverse proxies with a clean web interface.<br>
|
||||
No config files. No cryptic syntax. No networking degree required.
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://hub.docker.com/r/wikid82/charon">
|
||||
<img src="https://img.shields.io/docker/pulls/wikid82/charon.svg" alt="Docker Pulls">
|
||||
</a>
|
||||
<a href="https://github.com/Wikid82/charon/releases">
|
||||
<img src="https://img.shields.io/github/v/release/Wikid82/charon?include_prereleases" alt="Latest Release">
|
||||
</a>
|
||||
<a href="LICENSE">
|
||||
<img src="https://img.shields.io/badge/License-MIT-blue.svg" alt="MIT License">
|
||||
</a>
|
||||
<a href="https://discord.gg/Tvzg6BQx">
|
||||
<img src="https://img.shields.io/badge/Community-Discord-5865F2?logo=discord&logoColor=white">
|
||||
</a>
|
||||
</p>
|
||||
|
||||
---
|
||||
|
||||
## Why Charon?
|
||||
## 🚀 Why Charon?
|
||||
|
||||
You want your apps accessible online. You don't want to become a networking expert first.
|
||||
You want your apps online.
|
||||
|
||||
**The problem:** Managing reverse proxies usually means editing config files, memorizing cryptic syntax, and hoping you didn't break everything.
|
||||
You don’t want to edit config files or memorize reverse proxy syntax.
|
||||
|
||||
**Charon's answer:** A web interface where you click boxes and type domain names. That's it.
|
||||
Charon gives you:
|
||||
|
||||
- ✅ **Your blog** gets a green lock (HTTPS) automatically
|
||||
- ✅ **Your chat server** works without weird port numbers
|
||||
- ✅ **Your admin panel** blocks everyone except you
|
||||
- ✅ **Everything stays up** even when you make changes
|
||||
- ✅ Automatic HTTPS certificates
|
||||
- ✅ Clean domain routing
|
||||
- ✅ Built-in security protection
|
||||
- ✅ One-click Docker app discovery
|
||||
- ✅ Live updates without restarts
|
||||
- ✅ Zero external dependencies
|
||||
|
||||
If you can use a website, you can run Charon.
|
||||
|
||||
---
|
||||
|
||||
## 🐕 Cerberus Security Suite
|
||||
## 🛡 Built-In Security
|
||||
|
||||
### 🕵️‍♂️ **CrowdSec Integration**
|
||||
Charon includes security features that normally require multiple tools:
|
||||
|
||||
- Protects your applications from attacks using behavior-based detection and automated remediation.
|
||||
- Web Application Firewall (WAF)
|
||||
- CrowdSec intrusion detection
|
||||
- Access Control Lists (ACLs)
|
||||
- Rate limiting
|
||||
- Emergency recovery tools
|
||||
|
||||
### 🔐 **Access Control Lists (ACLs)**
|
||||
Secure by default. No extra containers required.
|
||||
|
||||
- Define fine-grained access rules for your applications, controlling who can access what and under which conditions.
|
||||
|
||||
### 🧱 **Web Application Firewall (WAF)**
|
||||
|
||||
- Protects your applications from common web vulnerabilities such as SQL injection, XSS, and more using Coraza.
|
||||
|
||||
### ⏱️ **Rate Limiting**
|
||||
|
||||
- Protect your applications from abuse by limiting the number of requests a user or IP can make within a certain timeframe.
|
||||
📖 [Learn more about security →](https://wikid82.github.io/charon/security)
|
||||
|
||||
---
|
||||
|
||||
## ✨ Top 10 Features
|
||||
## ⚡ Quick Start (5 Minutes)
|
||||
|
||||
### 1️⃣ Create `docker-compose.yml`
|
||||
|
||||
```yaml
|
||||
services:
|
||||
charon:
|
||||
image: wikid82/charon:latest
|
||||
container_name: charon
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
- "443:443/udp"
|
||||
- "8080:8080"
|
||||
volumes:
|
||||
- ./charon-data:/app/data
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||
environment:
|
||||
- TZ=America/New_York
|
||||
# Generate with: openssl rand -base64 32
|
||||
- CHARON_ENCRYPTION_KEY=your-32-byte-base64-key
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "curl -fsS http://localhost:8080/api/v1/health || exit 1"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 40s
|
||||
```
|
||||
> **Docker Socket Access:** Charon runs as a non-root user. If you mount the Docker socket for container discovery, the container needs permission to read it. Find your socket's group ID and add it to the compose file:
|
||||
>
|
||||
> ```bash
|
||||
> stat -c '%g' /var/run/docker.sock
|
||||
> ```
|
||||
>
|
||||
> Then add `group_add: ["<gid>"]` under your service (replace `<gid>` with the number from the command above). For example, if the result is `998`:
|
||||
>
|
||||
> ```yaml
|
||||
> group_add:
|
||||
> - "998"
|
||||
> ```
|
||||
|
||||
### 2️⃣ Generate encryption key:
|
||||
```bash
|
||||
openssl rand -base64 32
|
||||
```
|
||||
### 3️⃣ Start Charon:
|
||||
```bash
|
||||
docker-compose up -d
|
||||
```
|
||||
### 4️⃣ Access the dashboard:
|
||||
Open your browser and navigate to `http://localhost:8080` to access the dashboard and create your admin account.
|
||||
```text
|
||||
http://localhost:8080
|
||||
```
|
||||
### Getting Started:
|
||||
Full setup instructions and documentation are available at [https://wikid82.github.io/Charon/docs/getting-started.html](https://wikid82.github.io/Charon/docs/getting-started.html).
|
||||
|
||||
|
||||
---

## ✨ Top 10 Features
|
||||
|
||||
### 🎯 **Point & Click Management**
|
||||
|
||||
No config files. No terminal commands. Just click, type your domain name, and you're live. If you can use a website, you can run Charon.
|
||||
|
||||
### 🔐 **Automatic HTTPS Certificates**
|
||||
@@ -78,7 +135,7 @@ Free SSL certificates that request, install, and renew themselves. Your sites ge
|
||||
|
||||
### 🌐 **DNS Challenge for Wildcard Certificates**
|
||||
|
||||
Secure all your subdomains with a single `*.example.com` certificate. Supports 15+ DNS providers including Cloudflare, Route53, DigitalOcean, and Google Cloud DNS. Credentials are encrypted and automatically rotated.
|
||||
Secure all your subdomains with a single *.example.com certificate. Supports 15+ DNS providers including Cloudflare, Route53, DigitalOcean, and Google Cloud DNS. Credentials are encrypted and automatically rotated.
|
||||
|
||||
### 🛡️ **Enterprise-Grade Security Built In**
|
||||
|
||||
@@ -102,15 +159,13 @@ See exactly what's happening with live request logs, uptime monitoring, and inst
|
||||
|
||||
### 📥 **Migration Made Easy**
|
||||
|
||||
Import your existing configurations with one click:
|
||||
Already invested in another reverse proxy? Bring your work with you by importing your existing configurations with one click:
|
||||
- **Caddyfile** — Migrate from other Caddy setups
|
||||
- **Nginx** — Import from Nginx based configurations (Coming Soon)
|
||||
- **Traefik** - Import from Traefik based configurations (Coming Soon)
|
||||
- **CrowdSec** - Import from CrowdSec configurations (WIP)
|
||||
- **CrowdSec** - Import from CrowdSec configurations
|
||||
- **JSON Import** — Restore from Charon backups or generic JSON configs
|
||||
|
||||
Already invested in another reverse proxy? Bring your work with you.
|
||||
|
||||
### ⚡ **Live Configuration Changes**
|
||||
|
||||
Update domains, add security rules, or modify settings instantly—no container restarts needed.* Your sites stay up while you make changes.
|
||||
@@ -125,498 +180,22 @@ One Docker container. No databases to install. No external services required. No
|
||||
|
||||
### 💯 **100% Free & Open Source**
|
||||
|
||||
No premium tiers. No feature paywalls. No usage limits. Everything you see is yours to use, forever, backed by the MIT license.
|
||||
No premium tiers. No feature paywalls. No usage limits. Everything you see is yours to use, forever, backed by the MIT license. <sup>* Note: Initial security engine setup (CrowdSec) requires a one-time container restart to initialize the protection layer. All subsequent changes happen live.</sup>
|
||||
|
||||
<sup>* Note: Initial security engine setup (CrowdSec) requires a one-time container restart to initialize the protection layer. All subsequent changes happen live.</sup>
|
||||
**[Explore All Features →](https://github.com/Wikid82/Charon/blob/main/docs/features.md)**
|
||||
|
||||
**[Explore All Features →](https://wikid82.github.io/charon/features)**
|
||||
---
|
||||
## 💬 Support
|
||||
<p align="center"> <a href="https://github.com/Wikid82/Charon/issues">
|
||||
<img alt="GitHub issues"
|
||||
src="https://img.shields.io/github/issues/Wikid82/Charon"></a> <a href="https://github.com/Wikid82/Charon/issues/new/choose"> <img src="https://img.shields.io/badge/Support-Open%20Issue-blue?logo=github"> </a> <a href="https://discord.gg/Tvzg6BQx"> <img src="https://img.shields.io/badge/Community-Discord-5865F2?logo=discord&logoColor=white"> </a> </p>
|
||||
|
||||
---
|
||||
|
||||
## Quick Start
|
||||
## ❤️ Free & Open Source
|
||||
|
||||
### Container Registries
|
||||
Charon is 100% free and open source under the MIT License.
|
||||
|
||||
Charon is available from two container registries:
|
||||
No premium tiers. No locked features. No usage limits.
|
||||
|
||||
**Docker Hub (Recommended):**
|
||||
|
||||
```bash
|
||||
docker pull wikid82/charon:latest
|
||||
```
|
||||
|
||||
**GitHub Container Registry:**
|
||||
|
||||
```bash
|
||||
docker pull ghcr.io/wikid82/charon:latest
|
||||
```
|
||||
|
||||
### Docker Compose (Recommended)
|
||||
|
||||
Save this as `docker-compose.yml`:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
charon:
|
||||
# Docker Hub (recommended)
|
||||
image: wikid82/charon:latest
|
||||
# Alternative: GitHub Container Registry
|
||||
# image: ghcr.io/wikid82/charon:latest
|
||||
container_name: charon
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
- "443:443/udp"
|
||||
- "8080:8080"
|
||||
volumes:
|
||||
- ./charon-data:/app/data
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||
environment:
|
||||
- CHARON_ENV=production
|
||||
# Generate with: openssl rand -base64 32
|
||||
- CHARON_ENCRYPTION_KEY=your-32-byte-base64-key-here
|
||||
|
||||
```
|
||||
|
||||
**Using Nightly Builds:**
|
||||
|
||||
To test the latest nightly build (automated daily at 02:00 UTC):
|
||||
|
||||
```yaml
|
||||
services:
|
||||
charon:
|
||||
# Docker Hub
|
||||
image: wikid82/charon:nightly
|
||||
# Alternative: GitHub Container Registry
|
||||
# image: ghcr.io/wikid82/charon:nightly
|
||||
# ... rest of configuration
|
||||
```
|
||||
|
||||
> **Note:** Nightly builds are for testing and may contain experimental features. Use `latest` for production.
|
||||
|
||||
Then run:
|
||||
|
||||
```bash
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
### Docker Run (One-Liner)
|
||||
|
||||
**Stable Release (Docker Hub):**
|
||||
|
||||
```bash
|
||||
docker run -d \
|
||||
--name charon \
|
||||
-p 80:80 \
|
||||
-p 443:443 \
|
||||
-p 443:443/udp \
|
||||
-p 8080:8080 \
|
||||
-v ./charon-data:/app/data \
|
||||
-v /var/run/docker.sock:/var/run/docker.sock:ro \
|
||||
-e CHARON_ENV=production \
|
||||
-e CHARON_ENCRYPTION_KEY=your-32-byte-base64-key-here \
|
||||
wikid82/charon:latest
|
||||
```
|
||||
|
||||
**Stable Release (GitHub Container Registry):**
|
||||
|
||||
```bash
|
||||
docker run -d \
|
||||
--name charon \
|
||||
-p 80:80 \
|
||||
-p 443:443 \
|
||||
-p 443:443/udp \
|
||||
-p 8080:8080 \
|
||||
-v ./charon-data:/app/data \
|
||||
-v /var/run/docker.sock:/var/run/docker.sock:ro \
|
||||
-e CHARON_ENV=production \
|
||||
-e CHARON_ENCRYPTION_KEY=your-32-byte-base64-key-here \
|
||||
ghcr.io/wikid82/charon:latest
|
||||
```
|
||||
|
||||
**Nightly Build (Testing - Docker Hub):**
|
||||
|
||||
```bash
|
||||
docker run -d \
|
||||
--name charon \
|
||||
-p 80:80 \
|
||||
-p 443:443 \
|
||||
-p 443:443/udp \
|
||||
-p 8080:8080 \
|
||||
-v ./charon-data:/app/data \
|
||||
-v /var/run/docker.sock:/var/run/docker.sock:ro \
|
||||
-e CHARON_ENV=production \
|
||||
-e CHARON_ENCRYPTION_KEY=your-32-byte-base64-key-here \
|
||||
wikid82/charon:nightly
|
||||
```
|
||||
|
||||
> **Note:** Nightly builds include the latest development features and are rebuilt daily at 02:00 UTC. Use for testing only. Also available via GHCR: `ghcr.io/wikid82/charon:nightly`
|
||||
|
||||
### What Just Happened?
|
||||
|
||||
1. Charon downloaded and started
|
||||
2. The web interface opened on port 8080
|
||||
3. Your websites will use ports 80 (HTTP) and 443 (HTTPS)
|
||||
|
||||
**Open <http://localhost:8080>** and start adding your websites!
|
||||
|
||||
### Requirements
|
||||
|
||||
**Server:**
|
||||
|
||||
- Docker 20.10+ or Docker Compose V2
|
||||
- Linux, macOS, or Windows with WSL2
|
||||
|
||||
**Browser:**
|
||||
|
||||
- Tested with React 19.2.3
|
||||
- Compatible with modern browsers:
|
||||
- Chrome/Edge 90+
|
||||
- Firefox 88+
|
||||
- Safari 14+
|
||||
- Opera 76+
|
||||
|
||||
> **Note:** If you encounter errors after upgrading, try a hard refresh (`Ctrl+Shift+R`) or clearing your browser cache. See [Troubleshooting Guide](docs/troubleshooting/react-production-errors.md) for details.
|
||||
|
||||
### Development Setup
|
||||
|
||||
**Requirements:**
|
||||
|
||||
- **go 1.26.0+** — Download from [go.dev/dl](https://go.dev/dl/)
|
||||
- **Node.js 20+** and npm
|
||||
- Docker 20.10+
|
||||
|
||||
**Install golangci-lint** (for contributors): `go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest`
|
||||
|
||||
**GORM Security Scanner:** Charon includes an automated security scanner that detects GORM vulnerabilities (ID leaks, exposed secrets, DTO embedding issues). Runs automatically in CI on all PRs. Run locally via:
|
||||
|
||||
```bash
|
||||
# VS Code: Command Palette → "Lint: GORM Security Scan"
|
||||
# Or via pre-commit:
|
||||
pre-commit run --hook-stage manual gorm-security-scan --all-files
|
||||
# Or directly:
|
||||
./scripts/scan-gorm-security.sh --report
|
||||
```
|
||||
|
||||
See [GORM Security Scanner Documentation](docs/implementation/gorm_security_scanner_complete.md) for details.
|
||||
|
||||
See [CONTRIBUTING.md](CONTRIBUTING.md) for complete development environment setup.
|
||||
|
||||
**Note:** GitHub Actions CI uses `GOTOOLCHAIN: auto` to automatically download and use go 1.26.0, even if your system has an older version installed. For local development, ensure you have go 1.26.0+ installed.
|
||||
|
||||
#### Keeping Go Tools Up-to-Date
|
||||
|
||||
After pulling a Go version update:
|
||||
|
||||
```bash
|
||||
# Rebuild all Go development tools
|
||||
./scripts/rebuild-go-tools.sh
|
||||
```
|
||||
|
||||
**Why?** Tools like golangci-lint are compiled programs. When Go upgrades, they need to be recompiled to work with the new version. This one command rebuilds all your tools automatically.
|
||||
|
||||
See [Go Version Upgrades Guide](docs/development/go_version_upgrades.md) for details.
|
||||
|
||||
### Environment Configuration
|
||||
|
||||
Before running Charon or E2E tests, configure required environment variables:
|
||||
|
||||
1. **Copy the example environment file:**
|
||||
```bash
|
||||
cp .env.example .env
|
||||
```
|
||||
|
||||
2. **Configure required secrets:**
|
||||
```bash
|
||||
# Generate encryption key (32 bytes, base64-encoded)
|
||||
openssl rand -base64 32
|
||||
|
||||
# Generate emergency token (64 characters hex)
|
||||
openssl rand -hex 32
|
||||
```
|
||||
|
||||
3. **Add to `.env` file:**
|
||||
```bash
|
||||
CHARON_ENCRYPTION_KEY=<paste_encryption_key_here>
|
||||
CHARON_EMERGENCY_TOKEN=<paste_emergency_token_here>
|
||||
```
|
||||
|
||||
4. **Verify configuration:**
|
||||
```bash
|
||||
# Encryption key should be ~44 chars (base64)
|
||||
grep CHARON_ENCRYPTION_KEY .env | cut -d= -f2 | wc -c
|
||||
|
||||
# Emergency token should be 64 chars (hex)
|
||||
grep CHARON_EMERGENCY_TOKEN .env | cut -d= -f2 | wc -c
|
||||
```
|
||||
|
||||
⚠️ **Security:** Never commit actual secret values to the repository. The `.env` file is gitignored.
|
||||
|
||||
📖 **More Info:** See [Getting Started Guide](docs/getting-started.md) for detailed setup instructions.
|
||||
|
||||
### Upgrading? Run Migrations
|
||||
|
||||
If you're upgrading from a previous version with persistent data:
|
||||
|
||||
```bash
|
||||
docker exec charon /app/charon migrate
|
||||
docker restart charon
|
||||
```
|
||||
|
||||
This ensures security features (especially CrowdSec) work correctly.
|
||||
|
||||
**Important:** If you had CrowdSec enabled before the upgrade, it will **automatically restart** after migration. You don't need to manually re-enable it via the GUI. See [Migration Guide](https://wikid82.github.io/charon/migration-guide) for details.
|
||||
|
||||
---
|
||||
|
||||
## 🔔 Smart Notifications
|
||||
|
||||
Stay informed about your infrastructure with flexible notification support.
|
||||
|
||||
### Supported Services
|
||||
|
||||
Charon integrates with popular notification platforms using JSON templates for rich formatting:
|
||||
|
||||
- **Discord** — Rich embeds with colors, fields, and custom formatting
|
||||
- **Slack** — Block Kit messages with interactive elements
|
||||
- **Gotify** — Self-hosted push notifications with priority levels
|
||||
- **Telegram** — Instant messaging with Markdown support
|
||||
- **Generic Webhooks** — Connect to any service with custom JSON payloads
|
||||
|
||||
### JSON Template Examples
|
||||
|
||||
**Discord Rich Embed:**
|
||||
|
||||
```json
|
||||
{
|
||||
"embeds": [{
|
||||
"title": "🚨 {{.Title}}",
|
||||
"description": "{{.Message}}",
|
||||
"color": 15158332,
|
||||
"timestamp": "{{.Timestamp}}",
|
||||
"fields": [
|
||||
{"name": "Host", "value": "{{.HostName}}", "inline": true},
|
||||
{"name": "Event", "value": "{{.EventType}}", "inline": true}
|
||||
]
|
||||
}]
|
||||
}
|
||||
```
|
||||
|
||||
**Slack Block Kit:**
|
||||
|
||||
```json
|
||||
{
|
||||
"blocks": [
|
||||
{
|
||||
"type": "header",
|
||||
"text": {"type": "plain_text", "text": "🔔 {{.Title}}"}
|
||||
},
|
||||
{
|
||||
"type": "section",
|
||||
"text": {"type": "mrkdwn", "text": "*Event:* {{.EventType}}\n*Message:* {{.Message}}"}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Available Template Variables
|
||||
|
||||
All JSON templates support these variables:
|
||||
|
||||
| Variable | Description | Example |
|
||||
|----------|-------------|---------|
|
||||
| `{{.Title}}` | Event title | "SSL Certificate Renewed" |
|
||||
| `{{.Message}}` | Event details | "Certificate for example.com renewed" |
|
||||
| `{{.EventType}}` | Type of event | "ssl_renewal", "uptime_down" |
|
||||
| `{{.Severity}}` | Severity level | "info", "warning", "error" |
|
||||
| `{{.HostName}}` | Affected host | "example.com" |
|
||||
| `{{.Timestamp}}` | ISO 8601 timestamp | "2025-12-24T10:30:00Z" |
|
||||
|
||||
**[📖 Complete Notification Guide →](docs/features/notifications.md)**
|
||||
|
||||
---
|
||||
|
||||
## 🚨 Emergency Break Glass Access
|
||||
|
||||
Charon provides a **3-Tier Break Glass Protocol** for emergency lockout recovery when security modules (ACL, WAF, CrowdSec) block access to the admin interface.
|
||||
|
||||
### Emergency Recovery Quick Reference
|
||||
|
||||
**Tier 1 (Preferred):** Use emergency token via main endpoint
|
||||
|
||||
```bash
|
||||
curl -X POST https://charon.example.com/api/v1/emergency/security-reset \
|
||||
-H "X-Emergency-Token: $CHARON_EMERGENCY_TOKEN"
|
||||
```
|
||||
|
||||
**Tier 2 (If Tier 1 blocked):** Use emergency server via SSH tunnel
|
||||
|
||||
```bash
|
||||
ssh -L 2019:localhost:2019 admin@server
|
||||
curl -X POST http://localhost:2019/emergency/security-reset \
|
||||
-H "X-Emergency-Token: $CHARON_EMERGENCY_TOKEN" \
|
||||
-u admin:password
|
||||
```
|
||||
|
||||
**Tier 3 (Catastrophic):** Direct SSH access - see [Emergency Runbook](docs/runbooks/emergency-lockout-recovery.md)
|
||||
|
||||
### Tier 1: Emergency Token (Layer 7 Bypass)
|
||||
|
||||
**Use when:** The application is accessible but security middleware is blocking you.
|
||||
|
||||
```bash
|
||||
# Set emergency token (generate with: openssl rand -hex 32)
|
||||
export CHARON_EMERGENCY_TOKEN=your-64-char-hex-token
|
||||
|
||||
# Use token to disable security
|
||||
curl -X POST https://charon.example.com/api/v1/emergency/security-reset \
|
||||
-H "X-Emergency-Token: $CHARON_EMERGENCY_TOKEN"
|
||||
```
|
||||
|
||||
**Response:**
|
||||
|
||||
```json
|
||||
{
|
||||
"success": true,
|
||||
"message": "All security modules have been disabled",
|
||||
"disabled_modules": [
|
||||
"feature.cerberus.enabled",
|
||||
"security.acl.enabled",
|
||||
"security.waf.enabled",
|
||||
"security.rate_limit.enabled",
|
||||
"security.crowdsec.enabled"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Tier 2: Emergency Server (Sidecar Port)
|
||||
|
||||
**Use when:** Caddy/CrowdSec is blocking at the reverse proxy level, or you need a separate entry point.
|
||||
|
||||
**Prerequisites:**
|
||||
|
||||
- Emergency server enabled in configuration
|
||||
- SSH access to Docker host
|
||||
- Knowledge of Basic Auth credentials (if configured)
|
||||
|
||||
**Setup:**
|
||||
|
||||
```yaml
|
||||
# docker-compose.yml
|
||||
environment:
|
||||
- CHARON_EMERGENCY_SERVER_ENABLED=true
|
||||
- CHARON_EMERGENCY_BIND=127.0.0.1:2019 # Localhost only
|
||||
- CHARON_EMERGENCY_USERNAME=admin
|
||||
- CHARON_EMERGENCY_PASSWORD=your-strong-password
|
||||
```
|
||||
|
||||
**Usage:**
|
||||
|
||||
```bash
|
||||
# 1. SSH to server and create tunnel
|
||||
ssh -L 2019:localhost:2019 admin@server.example.com
|
||||
|
||||
# 2. Access emergency endpoint (from local machine)
|
||||
curl -X POST http://localhost:2019/emergency/security-reset \
|
||||
-H "X-Emergency-Token: $CHARON_EMERGENCY_TOKEN" \
|
||||
-u admin:your-strong-password
|
||||
```
|
||||
|
||||
### Tier 3: Direct System Access (Physical Key)
|
||||
|
||||
**Use when:** All application-level recovery methods have failed.
|
||||
|
||||
**Prerequisites:**
|
||||
|
||||
- SSH or console access to Docker host
|
||||
- Root or sudo privileges
|
||||
- Knowledge of container name
|
||||
|
||||
**Emergency Procedures:**
|
||||
|
||||
```bash
|
||||
# SSH to host
|
||||
ssh admin@docker-host.example.com
|
||||
|
||||
# Clear CrowdSec bans
|
||||
docker exec charon cscli decisions delete --all
|
||||
|
||||
# Disable security via database
|
||||
docker exec charon sqlite3 /app/data/charon.db \
|
||||
"UPDATE settings SET value='false' WHERE key LIKE 'security.%.enabled';"
|
||||
|
||||
# Restart container
|
||||
docker restart charon
|
||||
```
|
||||
|
||||
### When to Use Each Tier
|
||||
|
||||
| Scenario | Tier | Solution |
|
||||
|----------|------|----------|
|
||||
| ACL blocked your IP | Tier 1 | Emergency token via main port |
|
||||
| Caddy/CrowdSec blocking at Layer 7 | Tier 2 | Emergency server on separate port |
|
||||
| Complete system failure | Tier 3 | Direct SSH + database access |
|
||||
|
||||
### Security Considerations
|
||||
|
||||
**⚠️ Emergency Server Security:**
|
||||
|
||||
- The emergency server should **NEVER** be exposed to the public internet
|
||||
- Always bind to localhost (127.0.0.1) only
|
||||
- Use SSH tunneling or VPN access to reach the port
|
||||
- Optional Basic Auth provides defense in depth
|
||||
- Port 2019 should be blocked by firewall rules from public access
|
||||
|
||||
**🔐 Emergency Token Security:**
|
||||
|
||||
- Store token in secrets manager (Vault, AWS Secrets Manager, Azure Key Vault)
|
||||
- Rotate token every 90 days or after use
|
||||
- Never commit token to version control
|
||||
- Use HTTPS when calling emergency endpoint (HTTP leaks token)
|
||||
- Monitor audit logs for emergency token usage
|
||||
|
||||
**🔑 API Key & Credential Management:**
|
||||
|
||||
- **Never log sensitive credentials**: Charon automatically masks API keys in logs (e.g., `abcd...xyz9`)
|
||||
- **Secure storage**: CrowdSec API keys stored with 0600 permissions (owner read/write only)
|
||||
- **No HTTP exposure**: API keys never returned in API responses
|
||||
- **No cookie storage**: Keys never stored in browser cookies
|
||||
- **Regular rotation**: Rotate CrowdSec bouncer keys every 90 days (recommended)
|
||||
- **Environment variables**: Use `CHARON_SECURITY_CROWDSEC_API_KEY` for production deployments
|
||||
- **Compliance**: Implementation addresses CWE-312, CWE-315, CWE-359 (GDPR, PCI-DSS, SOC 2)
|
||||
|
||||
For detailed security practices, see:
|
||||
- 📘 [API Key Handling Guide](docs/security/api-key-handling.md)
|
||||
- 🛡️ [Security Best Practices](docs/SECURITY_PRACTICES.md)
|
||||
|
||||
**📍 Management Network Configuration:**
|
||||
|
||||
```yaml
|
||||
# Restrict emergency access to trusted networks only
|
||||
environment:
|
||||
- CHARON_MANAGEMENT_CIDRS=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16
|
||||
```
|
||||
|
||||
Default: RFC1918 private networks + localhost
|
||||
|
||||
### Complete Documentation
|
||||
|
||||
📖 **[Emergency Lockout Recovery Runbook](docs/runbooks/emergency-lockout-recovery.md)** — Complete procedures for all 3 tiers
|
||||
🔄 **[Emergency Token Rotation Guide](docs/runbooks/emergency-token-rotation.md)** — Token rotation procedures
|
||||
⚙️ **[Configuration Examples](docs/configuration/emergency-setup.md)** — Docker Compose and secrets manager integration
|
||||
🛡️ **[Security Documentation](docs/security.md)** — Break glass protocol architecture
|
||||
|
||||
---
|
||||
|
||||
## Getting Help
|
||||
|
||||
**[📖 Full Documentation](https://wikid82.github.io/charon/)** — Everything explained simply
|
||||
**[🚀 5-Minute Guide](https://wikid82.github.io/charon/getting-started)** — Your first website up and running
|
||||
**[🔐 Supply Chain Security](docs/guides/supply-chain-security-user-guide.md)** — Verify signatures and build provenance
|
||||
**[🔧 Maintenance](docs/maintenance/)** — Keeping Charon running smoothly
|
||||
**[🛠️ Troubleshooting](docs/troubleshooting/)** — Common issues and solutions
|
||||
**[💬 Ask Questions](https://github.com/Wikid82/charon/discussions)** — Friendly community help
|
||||
**[🐛 Report Problems](https://github.com/Wikid82/charon/issues)** — Something broken? Let us know
|
||||
|
||||
---
|
||||
Built for the self-hosting community.
|
||||
|
||||
34
SECURITY.md
34
SECURITY.md
@@ -25,11 +25,10 @@ We take security seriously. If you discover a security vulnerability in Charon,
|
||||
- Impact assessment
|
||||
- Suggested fix (if applicable)
|
||||
|
||||
**Alternative Method**: Email
|
||||
**Alternative Method**: GitHub Issues (Public)
|
||||
|
||||
- Send to: `security@charon.dev` (if configured)
|
||||
- Use PGP encryption (key available below, if applicable)
|
||||
- Include same information as GitHub advisory
|
||||
1. Go to <https://github.com/Wikid82/Charon/issues>
|
||||
2. Create a new issue with the same information as above
|
||||
|
||||
### What to Include
|
||||
|
||||
@@ -125,6 +124,7 @@ For complete technical details, see:
|
||||
|
||||
### Infrastructure Security
|
||||
|
||||
- **Non-root by default**: Charon runs as an unprivileged user (`charon`, uid 1000) inside the container. Docker socket access is granted via a minimal supplemental group matching the host socket's GID—never by running as root. If the socket GID is `0` (root group), Charon requires explicit opt-in before granting access.
|
||||
- **Container isolation**: Docker-based deployment
|
||||
- **Minimal attack surface**: Alpine Linux base image
|
||||
- **Dependency scanning**: Regular Trivy and govulncheck scans
|
||||
@@ -177,6 +177,20 @@ services:
|
||||
- /tmp:noexec,nosuid,nodev
|
||||
```
|
||||
|
||||
### Gotify Token Hygiene
|
||||
|
||||
Gotify application tokens are secrets and must be handled with strict confidentiality.
|
||||
|
||||
- Never echo, print, log, or return token values in API responses or errors.
|
||||
- Never expose tokenized endpoint query strings (for example,
|
||||
`...?token=...`) in logs, diagnostics, examples, screenshots,
|
||||
tickets, or reports.
|
||||
- Always redact query parameters in diagnostics and examples before display or storage.
|
||||
- Use write-only token inputs in operator workflows and UI forms.
|
||||
- Store tokens only in environment variables or a dedicated secret manager.
|
||||
- Validate Gotify endpoints over HTTPS only.
|
||||
- Rotate tokens immediately on suspected exposure.
|
||||
|
||||
### Network Security
|
||||
|
||||
- **Firewall Rules**: Only expose necessary ports (80, 443, 8080)
|
||||
@@ -306,11 +320,15 @@ Charon uses digest pinning to reduce supply chain risk and ensure CI runs agains
|
||||
**Documented Exceptions & Compensating Controls:**
|
||||
|
||||
1. **Go toolchain shim** (`golang.org/dl/goX.Y.Z@latest`)
|
||||
- **Exception:** Uses `@latest` to install the shim.
|
||||
- **Compensating controls:** The target toolchain version is pinned in `go.work`, and Renovate tracks the required version for updates.
|
||||
- **Exception:** Uses `@latest` to install the shim.
|
||||
- **Compensating controls:** The target toolchain version is pinned in
|
||||
`go.work`, and Renovate tracks the required version for updates.
|
||||
|
||||
2. **Unpinnable dependencies** (no stable digest or checksum source)
|
||||
- **Exception:** Dependency cannot be pinned by digest.
|
||||
- **Compensating controls:** Require documented justification, prefer vendor-provided checksums or signed releases when available, and keep SBOM/vulnerability scans in CI.
|
||||
- **Exception:** Dependency cannot be pinned by digest.
|
||||
- **Compensating controls:** Require documented justification, prefer
|
||||
vendor-provided checksums or signed releases when available, and keep
|
||||
SBOM/vulnerability scans in CI.
|
||||
|
||||
### Learn More
|
||||
|
||||
|
||||
74
VERSION.md
74
VERSION.md
@@ -19,36 +19,76 @@ Example: `0.1.0-alpha`, `1.0.0-beta.1`, `2.0.0-rc.2`
|
||||
|
||||
## Creating a Release
|
||||
|
||||
### Automated Release Process
|
||||
### Canonical Release Process (Tag-Derived CI)
|
||||
|
||||
1. **Update version** in `.version` file:
|
||||
1. **Create and push a release tag**:
|
||||
|
||||
```bash
|
||||
echo "1.0.0" > .version
|
||||
git tag -a v1.0.0 -m "Release v1.0.0"
|
||||
git push origin v1.0.0
|
||||
```
|
||||
|
||||
2. **Commit version bump**:
|
||||
2. **GitHub Actions automatically**:
|
||||
- Runs release workflow from the pushed tag (`.github/workflows/release-goreleaser.yml`)
|
||||
- Builds and publishes release artifacts/images through CI (`.github/workflows/docker-build.yml`)
|
||||
- Creates/updates GitHub Release metadata
|
||||
|
||||
3. **Container tags are published**:
|
||||
- `v1.0.0` (exact version)
|
||||
- `1.0` (minor version)
|
||||
- `1` (major version)
|
||||
- `latest` (for non-prerelease on main branch)
|
||||
|
||||
### Legacy/Optional `.version` Path
|
||||
|
||||
The `.version` file is optional and not the canonical release trigger.
|
||||
|
||||
Use it only when you need local/version-file parity checks:
|
||||
|
||||
1. **Set `.version` locally (optional)**:
|
||||
|
||||
```bash
|
||||
git add .version
|
||||
git commit -m "chore: bump version to 1.0.0"
|
||||
echo "1.0.0" > .version
|
||||
```
|
||||
|
||||
3. **Create and push tag**:
|
||||
2. **Validate `.version` matches the latest tag**:
|
||||
|
||||
```bash
|
||||
git tag -a v1.0.0 -m "Release v1.0.0"
|
||||
git push origin v1.0.0
|
||||
bash scripts/check-version-match-tag.sh
|
||||
```
|
||||
|
||||
4. **GitHub Actions automatically**:
|
||||
- Creates GitHub Release with changelog
|
||||
- Builds multi-arch Docker images (amd64, arm64)
|
||||
- Publishes to GitHub Container Registry with tags:
|
||||
- `v1.0.0` (exact version)
|
||||
- `1.0` (minor version)
|
||||
- `1` (major version)
|
||||
- `latest` (for non-prerelease on main branch)
|
||||
### Deterministic Rollout Verification Gates (Mandatory)
|
||||
|
||||
Release sign-off is blocked until all items below pass in the same validation
|
||||
run.
|
||||
|
||||
Enforcement points:
|
||||
|
||||
- Release sign-off checklist/process (mandatory): All gates below remain required for release sign-off.
|
||||
- CI-supported checks (current): `.github/workflows/docker-build.yml` and `.github/workflows/supply-chain-verify.yml` enforce the subset currently implemented in workflows.
|
||||
- Manual validation required until CI parity: Validate any not-yet-implemented workflow gates via VS Code tasks `Security: Full Supply Chain Audit`, `Security: Verify SBOM`, `Security: Generate SLSA Provenance`, and `Security: Sign with Cosign`.
|
||||
- Optional version-file parity check: `Utility: Check Version Match Tag` (script: `scripts/check-version-match-tag.sh`).
|
||||
|
||||
- [ ] **Digest freshness/parity:** Capture pre-push and post-push index digests
|
||||
for the target tag in GHCR and Docker Hub, confirm expected freshness,
|
||||
and confirm cross-registry index digest parity.
|
||||
- [ ] **Per-arch parity:** Confirm per-platform (`linux/amd64`, `linux/arm64`,
|
||||
and any published platform) digest parity between GHCR and Docker Hub.
|
||||
- [ ] **Immutable digest scanning:** Run SBOM and vulnerability scans against
|
||||
immutable refs only, using `image@sha256:<index-digest>`.
|
||||
- [ ] **Artifact freshness:** Confirm scan artifacts are generated after the
|
||||
push timestamp and in the same validation run.
|
||||
- [ ] **Evidence block present:** Include the mandatory evidence block fields
|
||||
listed below.
|
||||
|
||||
#### Mandatory Evidence Block Fields
|
||||
|
||||
- Tag name
|
||||
- Index digest (`sha256:...`)
|
||||
- Per-arch digests (platform -> digest)
|
||||
- Scan tool versions
|
||||
- Push timestamp and scan timestamp(s)
|
||||
- Artifact file names generated in this run
|
||||
|
||||
## Container Image Tags
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ linters:
|
||||
- ineffassign # Ineffectual assignments
|
||||
- unused # Unused code detection
|
||||
- gosec # Security checks (critical issues only)
|
||||
linters-settings:
|
||||
settings:
|
||||
govet:
|
||||
enable:
|
||||
- shadow
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# golangci-lint configuration
|
||||
version: 2
|
||||
version: "2"
|
||||
run:
|
||||
timeout: 5m
|
||||
tests: true
|
||||
@@ -14,7 +14,7 @@ linters:
|
||||
- staticcheck
|
||||
- unused
|
||||
- errcheck
|
||||
linters-settings:
|
||||
settings:
|
||||
gocritic:
|
||||
enabled-tags:
|
||||
- diagnostic
|
||||
|
||||
@@ -260,7 +260,7 @@ func main() {
|
||||
}
|
||||
|
||||
// Register import handler with config dependencies
|
||||
routes.RegisterImportHandler(router, db, cfg.CaddyBinary, cfg.ImportDir, cfg.ImportCaddyfile)
|
||||
routes.RegisterImportHandler(router, db, cfg, cfg.CaddyBinary, cfg.ImportDir, cfg.ImportCaddyfile)
|
||||
|
||||
// Check for mounted Caddyfile on startup
|
||||
if err := handlers.CheckMountedImport(db, cfg.ImportCaddyfile, cfg.CaddyBinary, cfg.ImportDir); err != nil {
|
||||
|
||||
@@ -311,7 +311,8 @@ func TestMain_DefaultStartupGracefulShutdown_Subprocess(t *testing.T) {
|
||||
if err != nil {
|
||||
t.Fatalf("find free http port: %v", err)
|
||||
}
|
||||
if err := os.MkdirAll(filepath.Dir(dbPath), 0o750); err != nil {
|
||||
err = os.MkdirAll(filepath.Dir(dbPath), 0o750)
|
||||
if err != nil {
|
||||
t.Fatalf("mkdir db dir: %v", err)
|
||||
}
|
||||
|
||||
|
||||
@@ -64,11 +64,13 @@ func main() {
|
||||
jsonOutPath := resolvePath(repoRoot, *jsonOutFlag)
|
||||
mdOutPath := resolvePath(repoRoot, *mdOutFlag)
|
||||
|
||||
if err := assertFileExists(backendCoveragePath, "backend coverage file"); err != nil {
|
||||
err = assertFileExists(backendCoveragePath, "backend coverage file")
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
if err := assertFileExists(frontendCoveragePath, "frontend coverage file"); err != nil {
|
||||
err = assertFileExists(frontendCoveragePath, "frontend coverage file")
|
||||
if err != nil {
|
||||
fmt.Fprintln(os.Stderr, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
@@ -235,7 +235,8 @@ func TestGitDiffAndWriters(t *testing.T) {
|
||||
t.Fatalf("expected empty diff for HEAD...HEAD, got: %q", diffContent)
|
||||
}
|
||||
|
||||
if _, err := gitDiff(repoRoot, "bad-baseline"); err == nil {
|
||||
_, err = gitDiff(repoRoot, "bad-baseline")
|
||||
if err == nil {
|
||||
t.Fatal("expected gitDiff failure for invalid baseline")
|
||||
}
|
||||
|
||||
@@ -263,7 +264,8 @@ func TestGitDiffAndWriters(t *testing.T) {
|
||||
}
|
||||
|
||||
jsonPath := filepath.Join(t.TempDir(), "report.json")
|
||||
if err := writeJSON(jsonPath, report); err != nil {
|
||||
err = writeJSON(jsonPath, report)
|
||||
if err != nil {
|
||||
t.Fatalf("writeJSON should succeed: %v", err)
|
||||
}
|
||||
// #nosec G304 -- Test reads artifact path created by this test.
|
||||
@@ -276,7 +278,8 @@ func TestGitDiffAndWriters(t *testing.T) {
|
||||
}
|
||||
|
||||
markdownPath := filepath.Join(t.TempDir(), "report.md")
|
||||
if err := writeMarkdown(markdownPath, report, "backend/coverage.txt", "frontend/coverage/lcov.info"); err != nil {
|
||||
err = writeMarkdown(markdownPath, report, "backend/coverage.txt", "frontend/coverage/lcov.info")
|
||||
if err != nil {
|
||||
t.Fatalf("writeMarkdown should succeed: %v", err)
|
||||
}
|
||||
// #nosec G304 -- Test reads artifact path created by this test.
|
||||
|
||||
@@ -3,10 +3,9 @@ module github.com/Wikid82/charon/backend
|
||||
go 1.26
|
||||
|
||||
require (
|
||||
github.com/containrrr/shoutrrr v0.8.0
|
||||
github.com/docker/docker v28.5.2+incompatible
|
||||
github.com/gin-contrib/gzip v1.2.5
|
||||
github.com/gin-gonic/gin v1.11.0
|
||||
github.com/gin-gonic/gin v1.12.0
|
||||
github.com/glebarez/sqlite v1.11.0
|
||||
github.com/golang-jwt/jwt/v5 v5.3.1
|
||||
github.com/google/uuid v1.6.0
|
||||
@@ -18,7 +17,7 @@ require (
|
||||
github.com/sirupsen/logrus v1.9.4
|
||||
github.com/stretchr/testify v1.11.1
|
||||
golang.org/x/crypto v0.48.0
|
||||
golang.org/x/net v0.50.0
|
||||
golang.org/x/net v0.51.0
|
||||
golang.org/x/text v0.34.0
|
||||
golang.org/x/time v0.14.0
|
||||
gopkg.in/natefinch/lumberjack.v2 v2.2.1
|
||||
@@ -30,8 +29,8 @@ require (
|
||||
github.com/Microsoft/go-winio v0.6.2 // indirect
|
||||
github.com/beorn7/perks v1.0.1 // indirect
|
||||
github.com/bytedance/gopkg v0.1.3 // indirect
|
||||
github.com/bytedance/sonic v1.14.1 // indirect
|
||||
github.com/bytedance/sonic/loader v0.3.0 // indirect
|
||||
github.com/bytedance/sonic v1.15.0 // indirect
|
||||
github.com/bytedance/sonic/loader v0.5.0 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/cloudwego/base64x v0.1.6 // indirect
|
||||
github.com/containerd/errdefs v1.0.0 // indirect
|
||||
@@ -42,25 +41,23 @@ require (
|
||||
github.com/docker/go-connections v0.6.0 // indirect
|
||||
github.com/docker/go-units v0.5.0 // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/fatih/color v1.15.0 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.12 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.13 // indirect
|
||||
github.com/gin-contrib/sse v1.1.0 // indirect
|
||||
github.com/glebarez/go-sqlite v1.21.2 // indirect
|
||||
github.com/glebarez/go-sqlite v1.22.0 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/go-playground/locales v0.14.1 // indirect
|
||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||
github.com/go-playground/validator/v10 v10.30.1 // indirect
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
github.com/goccy/go-yaml v1.18.0 // indirect
|
||||
github.com/goccy/go-yaml v1.19.2 // indirect
|
||||
github.com/jinzhu/inflection v1.0.0 // indirect
|
||||
github.com/jinzhu/now v1.1.5 // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
|
||||
github.com/kylelemons/godebug v1.1.0 // indirect
|
||||
github.com/leodido/go-urn v1.4.0 // indirect
|
||||
github.com/mattn/go-colorable v0.1.13 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/moby/docker-image-spec v1.3.1 // indirect
|
||||
github.com/moby/sys/atomicwriter v0.1.0 // indirect
|
||||
@@ -69,7 +66,7 @@ require (
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/morikuni/aec v1.0.0 // indirect
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
|
||||
github.com/onsi/ginkgo/v2 v2.9.5 // indirect
|
||||
github.com/ncruces/go-strftime v1.0.0 // indirect
|
||||
github.com/opencontainers/go-digest v1.0.0 // indirect
|
||||
github.com/opencontainers/image-spec v1.1.1 // indirect
|
||||
github.com/oschwald/maxminddb-golang/v2 v2.1.1 // indirect
|
||||
@@ -77,28 +74,29 @@ require (
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/prometheus/client_model v0.6.2 // indirect
|
||||
github.com/prometheus/common v0.66.1 // indirect
|
||||
github.com/prometheus/procfs v0.16.1 // indirect
|
||||
github.com/prometheus/common v0.67.5 // indirect
|
||||
github.com/prometheus/procfs v0.20.1 // indirect
|
||||
github.com/quic-go/qpack v0.6.0 // indirect
|
||||
github.com/quic-go/quic-go v0.59.0 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
github.com/stretchr/objx v0.5.2 // indirect
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
||||
github.com/ugorji/go/codec v1.3.0 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 // indirect
|
||||
go.opentelemetry.io/otel v1.38.0 // indirect
|
||||
github.com/ugorji/go/codec v1.3.1 // indirect
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.65.0 // indirect
|
||||
go.opentelemetry.io/otel v1.40.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.38.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.38.0 // indirect
|
||||
go.yaml.in/yaml/v2 v2.4.2 // indirect
|
||||
golang.org/x/arch v0.22.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.40.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.40.0 // indirect
|
||||
go.yaml.in/yaml/v2 v2.4.3 // indirect
|
||||
golang.org/x/arch v0.24.0 // indirect
|
||||
golang.org/x/sys v0.41.0 // indirect
|
||||
google.golang.org/protobuf v1.36.11 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
gotest.tools/v3 v3.5.2 // indirect
|
||||
modernc.org/libc v1.22.5 // indirect
|
||||
modernc.org/mathutil v1.5.0 // indirect
|
||||
modernc.org/memory v1.5.0 // indirect
|
||||
modernc.org/sqlite v1.23.1 // indirect
|
||||
modernc.org/libc v1.69.0 // indirect
|
||||
modernc.org/mathutil v1.7.1 // indirect
|
||||
modernc.org/memory v1.11.0 // indirect
|
||||
modernc.org/sqlite v1.46.1 // indirect
|
||||
)
|
||||
|
||||
152
backend/go.sum
152
backend/go.sum
@@ -6,10 +6,10 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
|
||||
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
|
||||
github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M=
|
||||
github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM=
|
||||
github.com/bytedance/sonic v1.14.1 h1:FBMC0zVz5XUmE4z9wF4Jey0An5FueFvOsTKKKtwIl7w=
|
||||
github.com/bytedance/sonic v1.14.1/go.mod h1:gi6uhQLMbTdeP0muCnrjHLeCUPyb70ujhnNlhOylAFc=
|
||||
github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA=
|
||||
github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
|
||||
github.com/bytedance/sonic v1.15.0 h1:/PXeWFaR5ElNcVE84U0dOHjiMHQOwNIx3K4ymzh/uSE=
|
||||
github.com/bytedance/sonic v1.15.0/go.mod h1:tFkWrPz0/CUCLEF4ri4UkHekCIcdnkqXw9VduqpJh0k=
|
||||
github.com/bytedance/sonic/loader v0.5.0 h1:gXH3KVnatgY7loH5/TkeVyXPfESoqSBSBEiDd5VjlgE=
|
||||
github.com/bytedance/sonic/loader v0.5.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo=
|
||||
github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM=
|
||||
github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw=
|
||||
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
||||
@@ -22,8 +22,6 @@ github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151X
|
||||
github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk=
|
||||
github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
|
||||
github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
|
||||
github.com/containrrr/shoutrrr v0.8.0 h1:mfG2ATzIS7NR2Ec6XL+xyoHzN97H8WPjir8aYzJUSec=
|
||||
github.com/containrrr/shoutrrr v0.8.0/go.mod h1:ioyQAyu1LJY6sILuNyKaQaw+9Ttik5QePU8atnAdO2o=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
@@ -37,20 +35,18 @@ github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4
|
||||
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs=
|
||||
github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw=
|
||||
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
||||
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||
github.com/gabriel-vasile/mimetype v1.4.12 h1:e9hWvmLYvtp846tLHam2o++qitpguFiYCKbn0w9jyqw=
|
||||
github.com/gabriel-vasile/mimetype v1.4.12/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
|
||||
github.com/gabriel-vasile/mimetype v1.4.13 h1:46nXokslUBsAJE/wMsp5gtO500a4F3Nkz9Ufpk2AcUM=
|
||||
github.com/gabriel-vasile/mimetype v1.4.13/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
|
||||
github.com/gin-contrib/gzip v1.2.5 h1:fIZs0S+l17pIu1P5XRJOo/YNqfIuPCrZZ3TWB7pjckI=
|
||||
github.com/gin-contrib/gzip v1.2.5/go.mod h1:aomRgR7ftdZV3uWY0gW/m8rChfxau0n8YVvwlOHONzw=
|
||||
github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w=
|
||||
github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM=
|
||||
github.com/gin-gonic/gin v1.11.0 h1:OW/6PLjyusp2PPXtyxKHU0RbX6I/l28FTdDlae5ueWk=
|
||||
github.com/gin-gonic/gin v1.11.0/go.mod h1:+iq/FyxlGzII0KHiBGjuNn4UNENUlKbGlNmc+W50Dls=
|
||||
github.com/glebarez/go-sqlite v1.21.2 h1:3a6LFC4sKahUunAmynQKLZceZCOzUthkRkEAl9gAXWo=
|
||||
github.com/glebarez/go-sqlite v1.21.2/go.mod h1:sfxdZyhQjTM2Wry3gVYWaW072Ri1WMdWJi0k6+3382k=
|
||||
github.com/gin-gonic/gin v1.12.0 h1:b3YAbrZtnf8N//yjKeU2+MQsh2mY5htkZidOM7O0wG8=
|
||||
github.com/gin-gonic/gin v1.12.0/go.mod h1:VxccKfsSllpKshkBWgVgRniFFAzFb9csfngsqANjnLc=
|
||||
github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ=
|
||||
github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc=
|
||||
github.com/glebarez/sqlite v1.11.0 h1:wSG0irqzP6VurnMEpFGer5Li19RpIRi2qvQz++w0GMw=
|
||||
github.com/glebarez/sqlite v1.11.0/go.mod h1:h8/o8j5wiAsqSPoWELDUdJXhjAhsVliSn7bWZjOhrgQ=
|
||||
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||
@@ -66,29 +62,25 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
|
||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||
github.com/go-playground/validator/v10 v10.30.1 h1:f3zDSN/zOma+w6+1Wswgd9fLkdwy06ntQJp0BBvFG0w=
|
||||
github.com/go-playground/validator/v10 v10.30.1/go.mod h1:oSuBIQzuJxL//3MelwSLD5hc2Tu889bF0Idm9Dg26cM=
|
||||
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
|
||||
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls=
|
||||
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
|
||||
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
|
||||
github.com/goccy/go-yaml v1.19.2 h1:PmFC1S6h8ljIz6gMRBopkjP1TVT7xuwrButHID66PoM=
|
||||
github.com/goccy/go-yaml v1.19.2/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
|
||||
github.com/golang-jwt/jwt/v5 v5.3.1 h1:kYf81DTWFe7t+1VvL7eS+jKFVWaUnK9cB1qbwn63YCY=
|
||||
github.com/golang-jwt/jwt/v5 v5.3.1/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
|
||||
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
|
||||
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ=
|
||||
github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo=
|
||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
|
||||
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 h1:8Tjv8EJ+pM1xP8mK6egEbD1OgnVTyacbefKhmbLhIhU=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2/go.mod h1:pkJQ2tZHJ0aFOVEEot6oZmaVEZcRme73eIFmhiVuRWs=
|
||||
github.com/jarcoal/httpmock v1.3.0 h1:2RJ8GP0IIaWwcC9Fp2BmVi8Kog3v2Hn7VXM3fTd+nuc=
|
||||
github.com/jarcoal/httpmock v1.3.0/go.mod h1:3yb8rc4BI7TCBhFY8ng0gjuLKJNquuDNiPaZjnENuYg=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
|
||||
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
|
||||
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
|
||||
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
|
||||
@@ -107,9 +99,6 @@ github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0
|
||||
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
||||
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
|
||||
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
|
||||
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-sqlite3 v1.14.34 h1:3NtcvcUnFBPsuRcno8pUtupspG/GM+9nZ88zgJcp6Zk=
|
||||
@@ -131,10 +120,8 @@ github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
|
||||
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
|
||||
github.com/onsi/ginkgo/v2 v2.9.5 h1:+6Hr4uxzP4XIUyAkg61dWBw8lb/gc4/X5luuxN/EC+Q=
|
||||
github.com/onsi/ginkgo/v2 v2.9.5/go.mod h1:tvAoo1QUJwNEU2ITftXTpR7R1RbCzoZUOs3RonqW57k=
|
||||
github.com/onsi/gomega v1.27.6 h1:ENqfyGeS5AX/rlXDd/ETokDz93u0YufY1Pgxuy/PvWE=
|
||||
github.com/onsi/gomega v1.27.6/go.mod h1:PIQNjfQwkP3aQAH7lf7j87O/5FiNr+ZR8+ipb+qQlhg=
|
||||
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
|
||||
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
|
||||
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
|
||||
github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040=
|
||||
@@ -153,21 +140,20 @@ github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h
|
||||
github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg=
|
||||
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
|
||||
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
|
||||
github.com/prometheus/common v0.66.1 h1:h5E0h5/Y8niHc5DlaLlWLArTQI7tMrsfQjHV+d9ZoGs=
|
||||
github.com/prometheus/common v0.66.1/go.mod h1:gcaUsgf3KfRSwHY4dIMXLPV0K/Wg1oZ8+SbZk/HH/dA=
|
||||
github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg=
|
||||
github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is=
|
||||
github.com/prometheus/common v0.67.5 h1:pIgK94WWlQt1WLwAC5j2ynLaBRDiinoAb86HZHTUGI4=
|
||||
github.com/prometheus/common v0.67.5/go.mod h1:SjE/0MzDEEAyrdr5Gqc6G+sXI67maCxzaT3A2+HqjUw=
|
||||
github.com/prometheus/procfs v0.20.1 h1:XwbrGOIplXW/AU3YhIhLODXMJYyC1isLFfYCsTEycfc=
|
||||
github.com/prometheus/procfs v0.20.1/go.mod h1:o9EMBZGRyvDrSPH1RqdxhojkuXstoe4UlK79eF5TGGo=
|
||||
github.com/quic-go/qpack v0.6.0 h1:g7W+BMYynC1LbYLSqRt8PBg5Tgwxn214ZZR34VIOjz8=
|
||||
github.com/quic-go/qpack v0.6.0/go.mod h1:lUpLKChi8njB4ty2bFLX2x4gzDqXwUpaO1DP9qMDZII=
|
||||
github.com/quic-go/quic-go v0.59.0 h1:OLJkp1Mlm/aS7dpKgTc6cnpynnD2Xg7C1pwL6vy/SAw=
|
||||
github.com/quic-go/quic-go v0.59.0/go.mod h1:upnsH4Ju1YkqpLXC305eW3yDZ4NfnNbmQRCMWS58IKU=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
|
||||
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
|
||||
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
|
||||
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
|
||||
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
||||
github.com/sirupsen/logrus v1.9.4 h1:TsZE7l11zFCLZnZ+teH4Umoq5BhEIfIzfRDZ1Uzql2w=
|
||||
github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC42RjbV2g=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
@@ -178,46 +164,52 @@ github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
||||
github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA=
|
||||
github.com/ugorji/go/codec v1.3.0/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4=
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
|
||||
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 h1:RbKq8BG0FI8OiXhBfcRtqqHcZcka+gU3cskNuf05R18=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0/go.mod h1:h06DGIukJOevXaj/xrNjhi/2098RZzcLTbc0jDAUbsg=
|
||||
go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8=
|
||||
go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM=
|
||||
github.com/ugorji/go/codec v1.3.1 h1:waO7eEiFDwidsBN6agj1vJQ4AG7lh2yqXyOXqhgQuyY=
|
||||
github.com/ugorji/go/codec v1.3.1/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4=
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0 h1:yXUhImUjjAInNcpTcAlPHiT7bIXhshCTL3jVBkF3xaE=
|
||||
go.mongodb.org/mongo-driver/v2 v2.5.0/go.mod h1:yOI9kBsufol30iFsl1slpdq1I0eHPzybRWdyYUs8K/0=
|
||||
go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64=
|
||||
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.65.0 h1:7iP2uCb7sGddAr30RRS6xjKy7AZ2JtTOPA3oolgVSw8=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.65.0/go.mod h1:c7hN3ddxs/z6q9xwvfLPk+UHlWRQyaeR1LdgfL/66l0=
|
||||
go.opentelemetry.io/otel v1.40.0 h1:oA5YeOcpRTXq6NN7frwmwFR0Cn3RhTVZvXsP4duvCms=
|
||||
go.opentelemetry.io/otel v1.40.0/go.mod h1:IMb+uXZUKkMXdPddhwAHm6UfOwJyh4ct1ybIlV14J0g=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 h1:GqRJVj7UmLjCVyVJ3ZFLdPRmhDUp2zFmQe3RHIOsw24=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0/go.mod h1:ri3aaHSmCTVYu2AWv44YMauwAQc0aqI9gHKIcSbI1pU=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 h1:aTL7F04bJHUlztTsNGJ2l+6he8c+y/b//eR0jjjemT4=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0/go.mod h1:kldtb7jDTeol0l3ewcmd8SDvx3EmIE7lyvqbasU3QC4=
|
||||
go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA=
|
||||
go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI=
|
||||
go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E=
|
||||
go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA=
|
||||
go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
|
||||
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
|
||||
go.opentelemetry.io/otel/metric v1.40.0 h1:rcZe317KPftE2rstWIBitCdVp89A2HqjkxR3c11+p9g=
|
||||
go.opentelemetry.io/otel/metric v1.40.0/go.mod h1:ib/crwQH7N3r5kfiBZQbwrTge743UDc7DTFVZrrXnqc=
|
||||
go.opentelemetry.io/otel/sdk v1.40.0 h1:KHW/jUzgo6wsPh9At46+h4upjtccTmuZCFAc9OJ71f8=
|
||||
go.opentelemetry.io/otel/sdk v1.40.0/go.mod h1:Ph7EFdYvxq72Y8Li9q8KebuYUr2KoeyHx0DRMKrYBUE=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.40.0 h1:mtmdVqgQkeRxHgRv4qhyJduP3fYJRMX4AtAlbuWdCYw=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.40.0/go.mod h1:4Z2bGMf0KSK3uRjlczMOeMhKU2rhUqdWNoKcYrtcBPg=
|
||||
go.opentelemetry.io/otel/trace v1.40.0 h1:WA4etStDttCSYuhwvEa8OP8I5EWu24lkOzp+ZYblVjw=
|
||||
go.opentelemetry.io/otel/trace v1.40.0/go.mod h1:zeAhriXecNGP/s2SEG3+Y8X9ujcJOTqQ5RgdEJcawiA=
|
||||
go.opentelemetry.io/proto/otlp v1.7.1 h1:gTOMpGDb0WTBOP8JaO72iL3auEZhVmAQg4ipjOVAtj4=
|
||||
go.opentelemetry.io/proto/otlp v1.7.1/go.mod h1:b2rVh6rfI/s2pHWNlB7ILJcRALpcNDzKhACevjI+ZnE=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y=
|
||||
go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU=
|
||||
go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
|
||||
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
|
||||
golang.org/x/arch v0.22.0 h1:c/Zle32i5ttqRXjdLyyHZESLD/bB90DCU1g9l/0YBDI=
|
||||
golang.org/x/arch v0.22.0/go.mod h1:dNHoOeKiyja7GTvF9NJS1l3Z2yntpQNzgrjh1cU103A=
|
||||
go.yaml.in/yaml/v2 v2.4.3 h1:6gvOSjQoTB3vt1l+CU+tSyi/HOjfOjRLJ4YwYZGwRO0=
|
||||
go.yaml.in/yaml/v2 v2.4.3/go.mod h1:zSxWcmIDjOzPXpjlTTbAsKokqkDNAVtZO0WOMiT90s8=
|
||||
golang.org/x/arch v0.24.0 h1:qlJ3M9upxvFfwRM51tTg3Yl+8CP9vCC1E7vlFpgv99Y=
|
||||
golang.org/x/arch v0.24.0/go.mod h1:dNHoOeKiyja7GTvF9NJS1l3Z2yntpQNzgrjh1cU103A=
|
||||
golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
|
||||
golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
|
||||
golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
|
||||
golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=
|
||||
golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=
|
||||
golang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo=
|
||||
golang.org/x/net v0.51.0/go.mod h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y=
|
||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
|
||||
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
@@ -249,11 +241,31 @@ gorm.io/gorm v1.31.1 h1:7CA8FTFz/gRfgqgpeKIBcervUn3xSyPUmr6B2WXJ7kg=
|
||||
gorm.io/gorm v1.31.1/go.mod h1:XyQVbO2k6YkOis7C2437jSit3SsDK72s7n7rsSHd+Gs=
|
||||
gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q=
|
||||
gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA=
|
||||
modernc.org/libc v1.22.5 h1:91BNch/e5B0uPbJFgqbxXuOnxBQjlS//icfQEGmvyjE=
|
||||
modernc.org/libc v1.22.5/go.mod h1:jj+Z7dTNX8fBScMVNRAYZ/jF91K8fdT2hYMThc3YjBY=
|
||||
modernc.org/mathutil v1.5.0 h1:rV0Ko/6SfM+8G+yKiyI830l3Wuz1zRutdslNoQ0kfiQ=
|
||||
modernc.org/mathutil v1.5.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
|
||||
modernc.org/memory v1.5.0 h1:N+/8c5rE6EqugZwHii4IFsaJ7MUhoWX07J5tC/iI5Ds=
|
||||
modernc.org/memory v1.5.0/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU=
|
||||
modernc.org/sqlite v1.23.1 h1:nrSBg4aRQQwq59JpvGEQ15tNxoO5pX/kUjcRNwSAGQM=
|
||||
modernc.org/sqlite v1.23.1/go.mod h1:OrDj17Mggn6MhE+iPbBNf7RGKODDE9NFT0f3EwDzJqk=
|
||||
modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
|
||||
modernc.org/cc/v4 v4.27.1/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
|
||||
modernc.org/ccgo/v4 v4.31.0 h1:/bsaxqdgX3gy/0DboxcvWrc3NpzH+6wpFfI/ZaA/hrg=
|
||||
modernc.org/ccgo/v4 v4.31.0/go.mod h1:jKe8kPBjIN/VdGTVqARTQ8N1gAziBmiISY8j5HoKwjg=
|
||||
modernc.org/fileutil v1.4.0 h1:j6ZzNTftVS054gi281TyLjHPp6CPHr2KCxEXjEbD6SM=
|
||||
modernc.org/fileutil v1.4.0/go.mod h1:EqdKFDxiByqxLk8ozOxObDSfcVOv/54xDs/DUHdvCUU=
|
||||
modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=
|
||||
modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito=
|
||||
modernc.org/gc/v3 v3.1.2 h1:ZtDCnhonXSZexk/AYsegNRV1lJGgaNZJuKjJSWKyEqo=
|
||||
modernc.org/gc/v3 v3.1.2/go.mod h1:HFK/6AGESC7Ex+EZJhJ2Gni6cTaYpSMmU/cT9RmlfYY=
|
||||
modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks=
|
||||
modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI=
|
||||
modernc.org/libc v1.69.0 h1:YQJ5QMSReTgQ3QFmI0dudfjXIjCcYTUxcH8/9P9f0D8=
|
||||
modernc.org/libc v1.69.0/go.mod h1:YfLLduUEbodNV2xLU5JOnRHBTAHVHsVW3bVYGw0ZCV4=
|
||||
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
|
||||
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
|
||||
modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
|
||||
modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw=
|
||||
modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
|
||||
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
|
||||
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
|
||||
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
|
||||
modernc.org/sqlite v1.46.1 h1:eFJ2ShBLIEnUWlLy12raN0Z1plqmFX9Qe3rjQTKt6sU=
|
||||
modernc.org/sqlite v1.46.1/go.mod h1:CzbrU2lSB1DKUusvwGz7rqEKIq+NUd8GWuBBZDs9/nA=
|
||||
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
|
||||
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
|
||||
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
|
||||
modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
|
||||
|
||||
@@ -0,0 +1,124 @@
|
||||
//go:build integration
|
||||
// +build integration
|
||||
|
||||
package integration
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"sync/atomic"
|
||||
"testing"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/notifications"
|
||||
)
|
||||
|
||||
func TestNotificationHTTPWrapperIntegration_RetriesOn429AndSucceeds(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
var calls int32
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
current := atomic.AddInt32(&calls, 1)
|
||||
if current == 1 {
|
||||
w.WriteHeader(http.StatusTooManyRequests)
|
||||
return
|
||||
}
|
||||
w.WriteHeader(http.StatusOK)
|
||||
_, _ = w.Write([]byte(`{"ok":true}`))
|
||||
}))
|
||||
defer server.Close()
|
||||
|
||||
wrapper := notifications.NewNotifyHTTPWrapper()
|
||||
result, err := wrapper.Send(context.Background(), notifications.HTTPWrapperRequest{
|
||||
URL: server.URL,
|
||||
Body: []byte(`{"message":"hello"}`),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("expected retry success, got error: %v", err)
|
||||
}
|
||||
if result.Attempts != 2 {
|
||||
t.Fatalf("expected 2 attempts, got %d", result.Attempts)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNotificationHTTPWrapperIntegration_DoesNotRetryOn400(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
var calls int32
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
atomic.AddInt32(&calls, 1)
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
}))
|
||||
defer server.Close()
|
||||
|
||||
wrapper := notifications.NewNotifyHTTPWrapper()
|
||||
_, err := wrapper.Send(context.Background(), notifications.HTTPWrapperRequest{
|
||||
URL: server.URL,
|
||||
Body: []byte(`{"message":"hello"}`),
|
||||
})
|
||||
if err == nil {
|
||||
t.Fatalf("expected non-retryable 400 error")
|
||||
}
|
||||
if atomic.LoadInt32(&calls) != 1 {
|
||||
t.Fatalf("expected one request attempt, got %d", calls)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNotificationHTTPWrapperIntegration_RejectsTokenizedQueryWithoutEcho(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
wrapper := notifications.NewNotifyHTTPWrapper()
|
||||
secret := "pr1-secret-token-value"
|
||||
_, err := wrapper.Send(context.Background(), notifications.HTTPWrapperRequest{
|
||||
URL: "http://example.com/hook?token=" + secret,
|
||||
Body: []byte(`{"message":"hello"}`),
|
||||
})
|
||||
if err == nil {
|
||||
t.Fatalf("expected tokenized query rejection")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "query authentication is not allowed") {
|
||||
t.Fatalf("expected sanitized query-auth rejection, got: %v", err)
|
||||
}
|
||||
if strings.Contains(err.Error(), secret) {
|
||||
t.Fatalf("error must not echo secret token")
|
||||
}
|
||||
}
|
||||
|
||||
func TestNotificationHTTPWrapperIntegration_HeaderAllowlistSafety(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
var seenAuthHeader string
|
||||
var seenCookieHeader string
|
||||
var seenGotifyKey string
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
seenAuthHeader = r.Header.Get("Authorization")
|
||||
seenCookieHeader = r.Header.Get("Cookie")
|
||||
seenGotifyKey = r.Header.Get("X-Gotify-Key")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}))
|
||||
defer server.Close()
|
||||
|
||||
wrapper := notifications.NewNotifyHTTPWrapper()
|
||||
_, err := wrapper.Send(context.Background(), notifications.HTTPWrapperRequest{
|
||||
URL: server.URL,
|
||||
Headers: map[string]string{
|
||||
"Authorization": "Bearer should-not-leak",
|
||||
"Cookie": "session=should-not-leak",
|
||||
"X-Gotify-Key": "allowed-token",
|
||||
},
|
||||
Body: []byte(`{"message":"hello"}`),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("expected success, got error: %v", err)
|
||||
}
|
||||
if seenAuthHeader != "" {
|
||||
t.Fatalf("authorization header must be stripped")
|
||||
}
|
||||
if seenCookieHeader != "" {
|
||||
t.Fatalf("cookie header must be stripped")
|
||||
}
|
||||
if seenGotifyKey != "allowed-token" {
|
||||
t.Fatalf("expected X-Gotify-Key to pass through")
|
||||
}
|
||||
}
|
||||
@@ -170,6 +170,7 @@ func TestSecurityHandler_UpdateConfig_ApplyCaddyError(t *testing.T) {
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
setAdminContext(c)
|
||||
c.Request = httptest.NewRequest("PUT", "/security/config", bytes.NewBuffer(body))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
@@ -190,6 +191,7 @@ func TestSecurityHandler_GenerateBreakGlass_Error(t *testing.T) {
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
setAdminContext(c)
|
||||
c.Request = httptest.NewRequest("POST", "/security/breakglass", http.NoBody)
|
||||
|
||||
h.GenerateBreakGlass(c)
|
||||
@@ -252,6 +254,7 @@ func TestSecurityHandler_UpsertRuleSet_Error(t *testing.T) {
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
setAdminContext(c)
|
||||
c.Request = httptest.NewRequest("POST", "/security/rulesets", bytes.NewBuffer(body))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
@@ -277,6 +280,7 @@ func TestSecurityHandler_CreateDecision_LogError(t *testing.T) {
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
setAdminContext(c)
|
||||
c.Request = httptest.NewRequest("POST", "/security/decisions", bytes.NewBuffer(body))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
@@ -297,6 +301,7 @@ func TestSecurityHandler_DeleteRuleSet_Error(t *testing.T) {
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
setAdminContext(c)
|
||||
c.Params = gin.Params{{Key: "id", Value: "999"}}
|
||||
|
||||
h.DeleteRuleSet(c)
|
||||
|
||||
@@ -127,18 +127,20 @@ func isLocalRequest(c *gin.Context) bool {
|
||||
|
||||
// setSecureCookie sets an auth cookie with security best practices
|
||||
// - HttpOnly: prevents JavaScript access (XSS protection)
|
||||
// - Secure: derived from request scheme to allow HTTP/IP logins when needed
|
||||
// - Secure: true for HTTPS; false only for local non-HTTPS loopback flows
|
||||
// - SameSite: Strict for HTTPS, Lax for HTTP/IP to allow forward-auth redirects
|
||||
func setSecureCookie(c *gin.Context, name, value string, maxAge int) {
|
||||
scheme := requestScheme(c)
|
||||
secure := scheme == "https"
|
||||
secure := true
|
||||
sameSite := http.SameSiteStrictMode
|
||||
if scheme != "https" {
|
||||
sameSite = http.SameSiteLaxMode
|
||||
if isLocalRequest(c) {
|
||||
secure = false
|
||||
}
|
||||
}
|
||||
|
||||
if isLocalRequest(c) {
|
||||
secure = false
|
||||
sameSite = http.SameSiteLaxMode
|
||||
}
|
||||
|
||||
@@ -152,7 +154,7 @@ func setSecureCookie(c *gin.Context, name, value string, maxAge int) {
|
||||
maxAge, // maxAge in seconds
|
||||
"/", // path
|
||||
domain, // domain (empty = current host)
|
||||
secure, // secure (HTTPS only in production)
|
||||
secure, // secure (always true)
|
||||
true, // httpOnly (no JS access)
|
||||
)
|
||||
}
|
||||
|
||||
@@ -94,10 +94,28 @@ func TestSetSecureCookie_HTTP_Lax(t *testing.T) {
|
||||
cookies := recorder.Result().Cookies()
|
||||
require.Len(t, cookies, 1)
|
||||
c := cookies[0]
|
||||
assert.False(t, c.Secure)
|
||||
assert.True(t, c.Secure)
|
||||
assert.Equal(t, http.SameSiteLaxMode, c.SameSite)
|
||||
}
|
||||
|
||||
func TestSetSecureCookie_HTTP_Loopback_Insecure(t *testing.T) {
|
||||
t.Parallel()
|
||||
gin.SetMode(gin.TestMode)
|
||||
recorder := httptest.NewRecorder()
|
||||
ctx, _ := gin.CreateTestContext(recorder)
|
||||
req := httptest.NewRequest("POST", "http://127.0.0.1:8080/login", http.NoBody)
|
||||
req.Host = "127.0.0.1:8080"
|
||||
req.Header.Set("X-Forwarded-Proto", "http")
|
||||
ctx.Request = req
|
||||
|
||||
setSecureCookie(ctx, "auth_token", "abc", 60)
|
||||
cookies := recorder.Result().Cookies()
|
||||
require.Len(t, cookies, 1)
|
||||
cookie := cookies[0]
|
||||
assert.False(t, cookie.Secure)
|
||||
assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite)
|
||||
}
|
||||
|
||||
func TestSetSecureCookie_ForwardedHTTPS_LocalhostForcesInsecure(t *testing.T) {
|
||||
t.Parallel()
|
||||
gin.SetMode(gin.TestMode)
|
||||
@@ -115,7 +133,7 @@ func TestSetSecureCookie_ForwardedHTTPS_LocalhostForcesInsecure(t *testing.T) {
|
||||
cookies := recorder.Result().Cookies()
|
||||
require.Len(t, cookies, 1)
|
||||
cookie := cookies[0]
|
||||
assert.False(t, cookie.Secure)
|
||||
assert.True(t, cookie.Secure)
|
||||
assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite)
|
||||
}
|
||||
|
||||
@@ -136,7 +154,7 @@ func TestSetSecureCookie_ForwardedHTTPS_LoopbackForcesInsecure(t *testing.T) {
|
||||
cookies := recorder.Result().Cookies()
|
||||
require.Len(t, cookies, 1)
|
||||
cookie := cookies[0]
|
||||
assert.False(t, cookie.Secure)
|
||||
assert.True(t, cookie.Secure)
|
||||
assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite)
|
||||
}
|
||||
|
||||
@@ -158,7 +176,7 @@ func TestSetSecureCookie_ForwardedHostLocalhostForcesInsecure(t *testing.T) {
|
||||
cookies := recorder.Result().Cookies()
|
||||
require.Len(t, cookies, 1)
|
||||
cookie := cookies[0]
|
||||
assert.False(t, cookie.Secure)
|
||||
assert.True(t, cookie.Secure)
|
||||
assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite)
|
||||
}
|
||||
|
||||
@@ -180,7 +198,7 @@ func TestSetSecureCookie_OriginLoopbackForcesInsecure(t *testing.T) {
|
||||
cookies := recorder.Result().Cookies()
|
||||
require.Len(t, cookies, 1)
|
||||
cookie := cookies[0]
|
||||
assert.False(t, cookie.Secure)
|
||||
assert.True(t, cookie.Secure)
|
||||
assert.Equal(t, http.SameSiteLaxMode, cookie.SameSite)
|
||||
}
|
||||
|
||||
|
||||
@@ -71,10 +71,14 @@ func (h *DockerHandler) ListContainers(c *gin.Context) {
|
||||
if err != nil {
|
||||
var unavailableErr *services.DockerUnavailableError
|
||||
if errors.As(err, &unavailableErr) {
|
||||
details := unavailableErr.Details()
|
||||
if details == "" {
|
||||
details = "Cannot connect to Docker. Please ensure Docker is running and the socket is accessible (e.g., /var/run/docker.sock is mounted)."
|
||||
}
|
||||
log.WithFields(map[string]any{"server_id": util.SanitizeForLog(serverID), "host": util.SanitizeForLog(host), "error": util.SanitizeForLog(err.Error())}).Warn("docker unavailable")
|
||||
c.JSON(http.StatusServiceUnavailable, gin.H{
|
||||
"error": "Docker daemon unavailable",
|
||||
"details": "Cannot connect to Docker. Please ensure Docker is running and the socket is accessible (e.g., /var/run/docker.sock is mounted).",
|
||||
"details": details,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
@@ -63,7 +63,7 @@ func TestDockerHandler_ListContainers_DockerUnavailableMappedTo503(t *testing.T)
|
||||
gin.SetMode(gin.TestMode)
|
||||
router := gin.New()
|
||||
|
||||
dockerSvc := &fakeDockerService{err: services.NewDockerUnavailableError(errors.New("no docker socket"))}
|
||||
dockerSvc := &fakeDockerService{err: services.NewDockerUnavailableError(errors.New("no docker socket"), "Local Docker socket is mounted but not accessible by current process")}
|
||||
remoteSvc := &fakeRemoteServerService{}
|
||||
h := NewDockerHandler(dockerSvc, remoteSvc)
|
||||
|
||||
@@ -78,7 +78,7 @@ func TestDockerHandler_ListContainers_DockerUnavailableMappedTo503(t *testing.T)
|
||||
assert.Contains(t, w.Body.String(), "Docker daemon unavailable")
|
||||
// Verify the new details field is included in the response
|
||||
assert.Contains(t, w.Body.String(), "details")
|
||||
assert.Contains(t, w.Body.String(), "Docker is running")
|
||||
assert.Contains(t, w.Body.String(), "not accessible by current process")
|
||||
}
|
||||
|
||||
func TestDockerHandler_ListContainers_ServerIDResolvesToTCPHost(t *testing.T) {
|
||||
@@ -360,3 +360,47 @@ func TestDockerHandler_ListContainers_GenericError(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestDockerHandler_ListContainers_503FallbackDetailsWhenEmpty(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
router := gin.New()
|
||||
|
||||
dockerSvc := &fakeDockerService{err: services.NewDockerUnavailableError(errors.New("socket error"))}
|
||||
remoteSvc := &fakeRemoteServerService{}
|
||||
h := NewDockerHandler(dockerSvc, remoteSvc)
|
||||
|
||||
api := router.Group("/api/v1")
|
||||
h.RegisterRoutes(api)
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/v1/docker/containers", http.NoBody)
|
||||
w := httptest.NewRecorder()
|
||||
router.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusServiceUnavailable, w.Code)
|
||||
assert.Contains(t, w.Body.String(), "Docker daemon unavailable")
|
||||
assert.Contains(t, w.Body.String(), "docker.sock is mounted")
|
||||
}
|
||||
|
||||
func TestDockerHandler_ListContainers_503DetailsWithGroupGuidance(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
router := gin.New()
|
||||
|
||||
groupDetails := `Local Docker socket is mounted but not accessible by current process (uid=1000 gid=1000). Process groups (1000) do not include socket gid 988; run container with matching supplemental group (e.g., --group-add 988 or compose group_add: ["988"]).`
|
||||
dockerSvc := &fakeDockerService{
|
||||
err: services.NewDockerUnavailableError(errors.New("EACCES"), groupDetails),
|
||||
}
|
||||
remoteSvc := &fakeRemoteServerService{}
|
||||
h := NewDockerHandler(dockerSvc, remoteSvc)
|
||||
|
||||
api := router.Group("/api/v1")
|
||||
h.RegisterRoutes(api)
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/v1/docker/containers?host=local", http.NoBody)
|
||||
w := httptest.NewRecorder()
|
||||
router.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusServiceUnavailable, w.Code)
|
||||
assert.Contains(t, w.Body.String(), "Docker daemon unavailable")
|
||||
assert.Contains(t, w.Body.String(), "--group-add 988")
|
||||
assert.Contains(t, w.Body.String(), "group_add")
|
||||
}
|
||||
|
||||
305
backend/internal/api/handlers/feature_flags_blocker3_test.go
Normal file
305
backend/internal/api/handlers/feature_flags_blocker3_test.go
Normal file
@@ -0,0 +1,305 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/models"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
// TestBlocker3_SecurityProviderEventsFlagInResponse tests that the feature flag is included in GET response.
|
||||
func TestBlocker3_SecurityProviderEventsFlagInResponse(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
// Setup test database
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Run migrations
|
||||
err = db.AutoMigrate(&models.Setting{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create handler
|
||||
handler := NewFeatureFlagsHandler(db)
|
||||
|
||||
// Create test context
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
|
||||
// Call GetFlags
|
||||
handler.GetFlags(c)
|
||||
|
||||
// Assert response status
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
// Parse response
|
||||
var response map[string]bool
|
||||
err = json.Unmarshal(w.Body.Bytes(), &response)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Blocker 3: Verify security_provider_events flag is present
|
||||
_, exists := response["feature.notifications.security_provider_events.enabled"]
|
||||
assert.True(t, exists, "security_provider_events flag should be in response")
|
||||
}
|
||||
|
||||
// TestBlocker3_SecurityProviderEventsFlagDefaultValue tests the default value of the flag.
|
||||
func TestBlocker3_SecurityProviderEventsFlagDefaultValue(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
// Setup test database
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Run migrations
|
||||
err = db.AutoMigrate(&models.Setting{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create handler
|
||||
handler := NewFeatureFlagsHandler(db)
|
||||
|
||||
// Create test context
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
|
||||
// Call GetFlags
|
||||
handler.GetFlags(c)
|
||||
|
||||
// Assert response status
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
// Parse response
|
||||
var response map[string]bool
|
||||
err = json.Unmarshal(w.Body.Bytes(), &response)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Blocker 3: Verify default value is false for this stage
|
||||
assert.False(t, response["feature.notifications.security_provider_events.enabled"],
|
||||
"security_provider_events flag should default to false for this stage")
|
||||
}
|
||||
|
||||
// TestBlocker3_SecurityProviderEventsFlagCanBeEnabled tests that the flag can be enabled.
|
||||
func TestBlocker3_SecurityProviderEventsFlagCanBeEnabled(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
// Setup test database
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Run migrations
|
||||
err = db.AutoMigrate(&models.Setting{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create setting with flag enabled
|
||||
setting := models.Setting{
|
||||
Key: "feature.notifications.security_provider_events.enabled",
|
||||
Value: "true",
|
||||
Type: "bool",
|
||||
Category: "feature",
|
||||
}
|
||||
assert.NoError(t, db.Create(&setting).Error)
|
||||
|
||||
// Create handler
|
||||
handler := NewFeatureFlagsHandler(db)
|
||||
|
||||
// Create test context
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
|
||||
// Call GetFlags
|
||||
handler.GetFlags(c)
|
||||
|
||||
// Assert response status
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
// Parse response
|
||||
var response map[string]bool
|
||||
err = json.Unmarshal(w.Body.Bytes(), &response)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Blocker 3: Verify flag can be enabled
|
||||
assert.True(t, response["feature.notifications.security_provider_events.enabled"],
|
||||
"security_provider_events flag should be true when enabled in DB")
|
||||
}
|
||||
|
||||
// TestLegacyFallbackRemoved_UpdateFlagsRejectsTrue tests that attempting to set legacy fallback to true returns error code LEGACY_FALLBACK_REMOVED.
|
||||
func TestLegacyFallbackRemoved_UpdateFlagsRejectsTrue(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
assert.NoError(t, db.AutoMigrate(&models.Setting{}))
|
||||
|
||||
handler := NewFeatureFlagsHandler(db)
|
||||
|
||||
// Attempt to set legacy fallback to true
|
||||
payload := map[string]bool{
|
||||
"feature.notifications.legacy.fallback_enabled": true,
|
||||
}
|
||||
jsonPayload, err := json.Marshal(payload)
|
||||
assert.NoError(t, err)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request, _ = http.NewRequest("PUT", "/api/v1/feature-flags", bytes.NewBuffer(jsonPayload))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
handler.UpdateFlags(c)
|
||||
|
||||
// Must return 400 with code LEGACY_FALLBACK_REMOVED
|
||||
assert.Equal(t, http.StatusBadRequest, w.Code)
|
||||
|
||||
var response map[string]interface{}
|
||||
err = json.Unmarshal(w.Body.Bytes(), &response)
|
||||
assert.NoError(t, err)
|
||||
assert.Contains(t, response["error"], "retired")
|
||||
assert.Equal(t, "LEGACY_FALLBACK_REMOVED", response["code"])
|
||||
}
|
||||
|
||||
// TestLegacyFallbackRemoved_UpdateFlagsAcceptsFalse tests that setting legacy fallback to false is allowed (forced false).
|
||||
func TestLegacyFallbackRemoved_UpdateFlagsAcceptsFalse(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
assert.NoError(t, db.AutoMigrate(&models.Setting{}))
|
||||
|
||||
handler := NewFeatureFlagsHandler(db)
|
||||
|
||||
// Set legacy fallback to false (should be accepted and forced)
|
||||
payload := map[string]bool{
|
||||
"feature.notifications.legacy.fallback_enabled": false,
|
||||
}
|
||||
jsonPayload, err := json.Marshal(payload)
|
||||
assert.NoError(t, err)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request, _ = http.NewRequest("PUT", "/api/v1/feature-flags", bytes.NewBuffer(jsonPayload))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
handler.UpdateFlags(c)
|
||||
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
// Verify in DB that it's false
|
||||
var setting models.Setting
|
||||
db.Where("key = ?", "feature.notifications.legacy.fallback_enabled").First(&setting)
|
||||
assert.Equal(t, "false", setting.Value)
|
||||
}
|
||||
|
||||
// TestLegacyFallbackRemoved_GetFlagsReturnsHardFalse tests that GET always returns false for legacy fallback.
|
||||
func TestLegacyFallbackRemoved_GetFlagsReturnsHardFalse(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
assert.NoError(t, db.AutoMigrate(&models.Setting{}))
|
||||
|
||||
handler := NewFeatureFlagsHandler(db)
|
||||
|
||||
// Scenario 1: No DB entry
|
||||
t.Run("no_db_entry", func(t *testing.T) {
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request, _ = http.NewRequest("GET", "/api/v1/feature-flags", nil)
|
||||
|
||||
handler.GetFlags(c)
|
||||
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
var response map[string]bool
|
||||
err = json.Unmarshal(w.Body.Bytes(), &response)
|
||||
assert.NoError(t, err)
|
||||
assert.False(t, response["feature.notifications.legacy.fallback_enabled"], "Must return hard-false when no DB entry")
|
||||
})
|
||||
|
||||
// Scenario 2: DB entry says true (invalid, forced false)
|
||||
t.Run("db_entry_true", func(t *testing.T) {
|
||||
// Force a true value in DB (simulating legacy state)
|
||||
setting := models.Setting{
|
||||
Key: "feature.notifications.legacy.fallback_enabled",
|
||||
Value: "true",
|
||||
Type: "bool",
|
||||
Category: "feature",
|
||||
}
|
||||
db.Create(&setting)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request, _ = http.NewRequest("GET", "/api/v1/feature-flags", nil)
|
||||
|
||||
handler.GetFlags(c)
|
||||
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
var response map[string]bool
|
||||
err = json.Unmarshal(w.Body.Bytes(), &response)
|
||||
assert.NoError(t, err)
|
||||
assert.False(t, response["feature.notifications.legacy.fallback_enabled"], "Must return hard-false even when DB says true")
|
||||
|
||||
// Clean up
|
||||
db.Unscoped().Delete(&setting)
|
||||
})
|
||||
|
||||
// Scenario 3: DB entry says false
|
||||
t.Run("db_entry_false", func(t *testing.T) {
|
||||
setting := models.Setting{
|
||||
Key: "feature.notifications.legacy.fallback_enabled",
|
||||
Value: "false",
|
||||
Type: "bool",
|
||||
Category: "feature",
|
||||
}
|
||||
db.Create(&setting)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request, _ = http.NewRequest("GET", "/api/v1/feature-flags", nil)
|
||||
|
||||
handler.GetFlags(c)
|
||||
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
var response map[string]bool
|
||||
err = json.Unmarshal(w.Body.Bytes(), &response)
|
||||
assert.NoError(t, err)
|
||||
assert.False(t, response["feature.notifications.legacy.fallback_enabled"], "Must return hard-false when DB says false")
|
||||
|
||||
// Clean up
|
||||
db.Unscoped().Delete(&setting)
|
||||
})
|
||||
}
|
||||
|
||||
// TestLegacyFallbackRemoved_InvalidEnvValue tests that invalid environment variable values are handled (lines 157-158)
|
||||
func TestLegacyFallbackRemoved_InvalidEnvValue(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
assert.NoError(t, db.AutoMigrate(&models.Setting{}))
|
||||
|
||||
// Set invalid environment variable value
|
||||
t.Setenv("CHARON_NOTIFICATIONS_LEGACY_FALLBACK", "invalid-value")
|
||||
|
||||
handler := NewFeatureFlagsHandler(db)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request, _ = http.NewRequest("GET", "/api/v1/feature-flags", nil)
|
||||
|
||||
// Lines 157-158: Should log warning for invalid env value and return hard-false
|
||||
handler.GetFlags(c)
|
||||
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
var response map[string]bool
|
||||
err = json.Unmarshal(w.Body.Bytes(), &response)
|
||||
assert.NoError(t, err)
|
||||
assert.False(t, response["feature.notifications.legacy.fallback_enabled"], "Must return hard-false even with invalid env value")
|
||||
}
|
||||
105
backend/internal/api/handlers/feature_flags_coverage_v2_test.go
Normal file
105
backend/internal/api/handlers/feature_flags_coverage_v2_test.go
Normal file
@@ -0,0 +1,105 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/models"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
// TestResolveRetiredLegacyFallback_InvalidPersistedValue covers lines 139-140
|
||||
func TestResolveRetiredLegacyFallback_InvalidPersistedValue(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
|
||||
require.NoError(t, db.AutoMigrate(&models.Setting{}))
|
||||
|
||||
// Create setting with invalid value for retired fallback flag
|
||||
db.Create(&models.Setting{
|
||||
Key: "feature.notifications.legacy.fallback_enabled",
|
||||
Value: "invalid_value_not_bool",
|
||||
Type: "bool",
|
||||
Category: "feature",
|
||||
})
|
||||
|
||||
h := NewFeatureFlagsHandler(db)
|
||||
r := gin.New()
|
||||
r.GET("/api/v1/feature-flags", h.GetFlags)
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/v1/feature-flags", http.NoBody)
|
||||
w := httptest.NewRecorder()
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
require.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
// Should log warning and return false (lines 139-140)
|
||||
var flags map[string]bool
|
||||
err = json.Unmarshal(w.Body.Bytes(), &flags)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.False(t, flags["feature.notifications.legacy.fallback_enabled"])
|
||||
}
|
||||
|
||||
// TestResolveRetiredLegacyFallback_InvalidEnvValue covers lines 149-150
|
||||
func TestResolveRetiredLegacyFallback_InvalidEnvValue(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
|
||||
require.NoError(t, db.AutoMigrate(&models.Setting{}))
|
||||
|
||||
// Set invalid env var for retired fallback flag
|
||||
t.Setenv("CHARON_LEGACY_FALLBACK_ENABLED", "not_a_boolean")
|
||||
|
||||
h := NewFeatureFlagsHandler(db)
|
||||
r := gin.New()
|
||||
r.GET("/api/v1/feature-flags", h.GetFlags)
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/v1/feature-flags", http.NoBody)
|
||||
w := httptest.NewRecorder()
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
require.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
// Should log warning and return false (lines 149-150)
|
||||
var flags map[string]bool
|
||||
err = json.Unmarshal(w.Body.Bytes(), &flags)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.False(t, flags["feature.notifications.legacy.fallback_enabled"])
|
||||
}
|
||||
|
||||
// TestResolveRetiredLegacyFallback_DefaultFalse covers lines 157-158
|
||||
func TestResolveRetiredLegacyFallback_DefaultFalse(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
|
||||
require.NoError(t, db.AutoMigrate(&models.Setting{}))
|
||||
|
||||
// No DB value, no env vars - should default to false
|
||||
h := NewFeatureFlagsHandler(db)
|
||||
r := gin.New()
|
||||
r.GET("/api/v1/feature-flags", h.GetFlags)
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/v1/feature-flags", http.NoBody)
|
||||
w := httptest.NewRecorder()
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
require.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
// Should return false (lines 157-158)
|
||||
var flags map[string]bool
|
||||
err = json.Unmarshal(w.Body.Bytes(), &flags)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.False(t, flags["feature.notifications.legacy.fallback_enabled"])
|
||||
}
|
||||
@@ -28,12 +28,29 @@ var defaultFlags = []string{
|
||||
"feature.cerberus.enabled",
|
||||
"feature.uptime.enabled",
|
||||
"feature.crowdsec.console_enrollment",
|
||||
"feature.notifications.engine.notify_v1.enabled",
|
||||
"feature.notifications.service.discord.enabled",
|
||||
"feature.notifications.service.gotify.enabled",
|
||||
"feature.notifications.service.webhook.enabled",
|
||||
"feature.notifications.legacy.fallback_enabled",
|
||||
"feature.notifications.security_provider_events.enabled", // Blocker 3: Add security_provider_events gate
|
||||
}
|
||||
|
||||
var defaultFlagValues = map[string]bool{
|
||||
"feature.cerberus.enabled": false, // Cerberus OFF by default (per diagnostic fix)
|
||||
"feature.uptime.enabled": true, // Uptime enabled by default
|
||||
"feature.crowdsec.console_enrollment": false,
|
||||
"feature.cerberus.enabled": false, // Cerberus OFF by default (per diagnostic fix)
|
||||
"feature.uptime.enabled": true, // Uptime enabled by default
|
||||
"feature.crowdsec.console_enrollment": false,
|
||||
"feature.notifications.engine.notify_v1.enabled": false,
|
||||
"feature.notifications.service.discord.enabled": false,
|
||||
"feature.notifications.service.gotify.enabled": false,
|
||||
"feature.notifications.service.webhook.enabled": false,
|
||||
"feature.notifications.legacy.fallback_enabled": false,
|
||||
"feature.notifications.security_provider_events.enabled": false, // Blocker 3: Default disabled for this stage
|
||||
}
|
||||
|
||||
var retiredLegacyFallbackEnvAliases = []string{
|
||||
"FEATURE_NOTIFICATIONS_LEGACY_FALLBACK_ENABLED",
|
||||
"NOTIFICATIONS_LEGACY_FALLBACK_ENABLED",
|
||||
}
|
||||
|
||||
// GetFlags returns a map of feature flag -> bool. DB setting takes precedence
|
||||
@@ -69,6 +86,11 @@ func (h *FeatureFlagsHandler) GetFlags(c *gin.Context) {
|
||||
defaultVal = v
|
||||
}
|
||||
|
||||
if key == "feature.notifications.legacy.fallback_enabled" {
|
||||
result[key] = h.resolveRetiredLegacyFallback(settingsMap)
|
||||
continue
|
||||
}
|
||||
|
||||
// Check if flag exists in DB
|
||||
if s, exists := settingsMap[key]; exists {
|
||||
v := strings.ToLower(strings.TrimSpace(s.Value))
|
||||
@@ -109,6 +131,40 @@ func (h *FeatureFlagsHandler) GetFlags(c *gin.Context) {
|
||||
c.JSON(http.StatusOK, result)
|
||||
}
|
||||
|
||||
func parseFlagBool(raw string) (bool, bool) {
|
||||
v := strings.ToLower(strings.TrimSpace(raw))
|
||||
switch v {
|
||||
case "1", "true", "yes":
|
||||
return true, true
|
||||
case "0", "false", "no":
|
||||
return false, true
|
||||
default:
|
||||
return false, false
|
||||
}
|
||||
}
|
||||
|
||||
func (h *FeatureFlagsHandler) resolveRetiredLegacyFallback(settingsMap map[string]models.Setting) bool {
|
||||
const retiredKey = "feature.notifications.legacy.fallback_enabled"
|
||||
|
||||
if s, exists := settingsMap[retiredKey]; exists {
|
||||
if _, ok := parseFlagBool(s.Value); !ok {
|
||||
log.Printf("[WARN] Invalid persisted retired fallback flag value, forcing disabled: key=%s value=%q", retiredKey, s.Value)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
for _, alias := range retiredLegacyFallbackEnvAliases {
|
||||
if ev, ok := os.LookupEnv(alias); ok {
|
||||
if _, parsed := parseFlagBool(ev); !parsed {
|
||||
log.Printf("[WARN] Invalid environment retired fallback flag value, forcing disabled: key=%s value=%q", alias, ev)
|
||||
}
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// UpdateFlags accepts a JSON object map[string]bool and upserts settings.
|
||||
func (h *FeatureFlagsHandler) UpdateFlags(c *gin.Context) {
|
||||
// Phase 0: Performance instrumentation
|
||||
@@ -124,6 +180,14 @@ func (h *FeatureFlagsHandler) UpdateFlags(c *gin.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
if v, exists := payload["feature.notifications.legacy.fallback_enabled"]; exists && v {
|
||||
c.JSON(http.StatusBadRequest, gin.H{
|
||||
"error": "feature.notifications.legacy.fallback_enabled is retired and can only be false",
|
||||
"code": "LEGACY_FALLBACK_REMOVED",
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Phase 1: Transaction wrapping - all updates in single atomic transaction
|
||||
if err := h.DB.Transaction(func(tx *gorm.DB) error {
|
||||
for k, v := range payload {
|
||||
@@ -139,6 +203,10 @@ func (h *FeatureFlagsHandler) UpdateFlags(c *gin.Context) {
|
||||
continue
|
||||
}
|
||||
|
||||
if k == "feature.notifications.legacy.fallback_enabled" {
|
||||
v = false
|
||||
}
|
||||
|
||||
s := models.Setting{Key: k, Value: strconv.FormatBool(v), Type: "bool", Category: "feature"}
|
||||
if err := tx.Where(models.Setting{Key: k}).Assign(s).FirstOrCreate(&s).Error; err != nil {
|
||||
return err // Rollback on error
|
||||
|
||||
@@ -100,6 +100,147 @@ func TestFeatureFlags_EnvFallback(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestFeatureFlags_RetiredFallback_DenyByDefault(t *testing.T) {
|
||||
db := setupFlagsDB(t)
|
||||
h := NewFeatureFlagsHandler(db)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
r := gin.New()
|
||||
r.GET("/api/v1/feature-flags", h.GetFlags)
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/v1/feature-flags", http.NoBody)
|
||||
w := httptest.NewRecorder()
|
||||
r.ServeHTTP(w, req)
|
||||
if w.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200 got %d body=%s", w.Code, w.Body.String())
|
||||
}
|
||||
|
||||
var flags map[string]bool
|
||||
if err := json.Unmarshal(w.Body.Bytes(), &flags); err != nil {
|
||||
t.Fatalf("invalid json: %v", err)
|
||||
}
|
||||
|
||||
if flags["feature.notifications.legacy.fallback_enabled"] {
|
||||
t.Fatalf("expected retired fallback flag to be false by default")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFeatureFlags_RetiredFallback_PersistedAndEnvStillResolveFalse(t *testing.T) {
|
||||
db := setupFlagsDB(t)
|
||||
|
||||
if err := db.Create(&models.Setting{
|
||||
Key: "feature.notifications.legacy.fallback_enabled",
|
||||
Value: "true",
|
||||
Type: "bool",
|
||||
Category: "feature",
|
||||
}).Error; err != nil {
|
||||
t.Fatalf("failed to seed setting: %v", err)
|
||||
}
|
||||
|
||||
t.Setenv("FEATURE_NOTIFICATIONS_LEGACY_FALLBACK_ENABLED", "true")
|
||||
|
||||
h := NewFeatureFlagsHandler(db)
|
||||
gin.SetMode(gin.TestMode)
|
||||
r := gin.New()
|
||||
r.GET("/api/v1/feature-flags", h.GetFlags)
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/v1/feature-flags", http.NoBody)
|
||||
w := httptest.NewRecorder()
|
||||
r.ServeHTTP(w, req)
|
||||
if w.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200 got %d body=%s", w.Code, w.Body.String())
|
||||
}
|
||||
|
||||
var flags map[string]bool
|
||||
if err := json.Unmarshal(w.Body.Bytes(), &flags); err != nil {
|
||||
t.Fatalf("invalid json: %v", err)
|
||||
}
|
||||
|
||||
if flags["feature.notifications.legacy.fallback_enabled"] {
|
||||
t.Fatalf("expected retired fallback flag to remain false even when persisted/env are true")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFeatureFlags_RetiredFallback_EnvAliasResolvesFalse(t *testing.T) {
|
||||
db := setupFlagsDB(t)
|
||||
t.Setenv("NOTIFICATIONS_LEGACY_FALLBACK_ENABLED", "true")
|
||||
|
||||
h := NewFeatureFlagsHandler(db)
|
||||
gin.SetMode(gin.TestMode)
|
||||
r := gin.New()
|
||||
r.GET("/api/v1/feature-flags", h.GetFlags)
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/v1/feature-flags", http.NoBody)
|
||||
w := httptest.NewRecorder()
|
||||
r.ServeHTTP(w, req)
|
||||
if w.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200 got %d body=%s", w.Code, w.Body.String())
|
||||
}
|
||||
|
||||
var flags map[string]bool
|
||||
if err := json.Unmarshal(w.Body.Bytes(), &flags); err != nil {
|
||||
t.Fatalf("invalid json: %v", err)
|
||||
}
|
||||
|
||||
if flags["feature.notifications.legacy.fallback_enabled"] {
|
||||
t.Fatalf("expected retired fallback flag to remain false for env alias")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFeatureFlags_UpdateRejectsLegacyFallbackTrue(t *testing.T) {
|
||||
db := setupFlagsDB(t)
|
||||
h := NewFeatureFlagsHandler(db)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
r := gin.New()
|
||||
r.PUT("/api/v1/feature-flags", h.UpdateFlags)
|
||||
|
||||
payload := map[string]bool{
|
||||
"feature.notifications.legacy.fallback_enabled": true,
|
||||
}
|
||||
b, _ := json.Marshal(payload)
|
||||
|
||||
req := httptest.NewRequest(http.MethodPut, "/api/v1/feature-flags", bytes.NewReader(b))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
if w.Code != http.StatusBadRequest {
|
||||
t.Fatalf("expected 400 got %d body=%s", w.Code, w.Body.String())
|
||||
}
|
||||
}
|
||||
|
||||
func TestFeatureFlags_UpdatePersistsLegacyFallbackFalse(t *testing.T) {
|
||||
db := setupFlagsDB(t)
|
||||
h := NewFeatureFlagsHandler(db)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
r := gin.New()
|
||||
r.PUT("/api/v1/feature-flags", h.UpdateFlags)
|
||||
|
||||
payload := map[string]bool{
|
||||
"feature.notifications.legacy.fallback_enabled": false,
|
||||
}
|
||||
b, _ := json.Marshal(payload)
|
||||
|
||||
req := httptest.NewRequest(http.MethodPut, "/api/v1/feature-flags", bytes.NewReader(b))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
if w.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200 got %d body=%s", w.Code, w.Body.String())
|
||||
}
|
||||
|
||||
var s models.Setting
|
||||
if err := db.Where("key = ?", "feature.notifications.legacy.fallback_enabled").First(&s).Error; err != nil {
|
||||
t.Fatalf("expected setting persisted: %v", err)
|
||||
}
|
||||
if s.Value != "false" {
|
||||
t.Fatalf("expected persisted fallback value false, got %s", s.Value)
|
||||
}
|
||||
}
|
||||
|
||||
// setupBenchmarkFlagsDB creates an in-memory SQLite database for feature flags benchmarks
|
||||
func setupBenchmarkFlagsDB(b *testing.B) *gorm.DB {
|
||||
b.Helper()
|
||||
@@ -287,3 +428,32 @@ func TestUpdateFlags_TransactionAtomic(t *testing.T) {
|
||||
t.Errorf("expected crowdsec.console_enrollment to be true, got %s", s3.Value)
|
||||
}
|
||||
}
|
||||
|
||||
// TestFeatureFlags_InvalidRetiredEnvAlias covers lines 157-158 (invalid env var warning)
|
||||
func TestFeatureFlags_InvalidRetiredEnvAlias(t *testing.T) {
|
||||
db := setupFlagsDB(t)
|
||||
t.Setenv("NOTIFICATIONS_LEGACY_FALLBACK_ENABLED", "invalid-value")
|
||||
|
||||
h := NewFeatureFlagsHandler(db)
|
||||
gin.SetMode(gin.TestMode)
|
||||
r := gin.New()
|
||||
r.GET("/api/v1/feature-flags", h.GetFlags)
|
||||
|
||||
req := httptest.NewRequest(http.MethodGet, "/api/v1/feature-flags", http.NoBody)
|
||||
w := httptest.NewRecorder()
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
if w.Code != http.StatusOK {
|
||||
t.Fatalf("expected 200 got %d body=%s", w.Code, w.Body.String())
|
||||
}
|
||||
|
||||
var flags map[string]bool
|
||||
if err := json.Unmarshal(w.Body.Bytes(), &flags); err != nil {
|
||||
t.Fatalf("invalid json: %v", err)
|
||||
}
|
||||
|
||||
// Should force disabled due to invalid value (lines 157-158)
|
||||
if flags["feature.notifications.legacy.fallback_enabled"] {
|
||||
t.Fatalf("expected retired fallback flag to be false for invalid env value")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -93,6 +93,10 @@ func (h *ImportHandler) RegisterRoutes(router *gin.RouterGroup) {
|
||||
|
||||
// GetStatus returns current import session status.
|
||||
func (h *ImportHandler) GetStatus(c *gin.Context) {
|
||||
if !requireAuthenticatedAdmin(c) {
|
||||
return
|
||||
}
|
||||
|
||||
var session models.ImportSession
|
||||
err := h.db.Where("status IN ?", []string{"pending", "reviewing"}).
|
||||
Order("created_at DESC").
|
||||
@@ -155,6 +159,10 @@ func (h *ImportHandler) GetStatus(c *gin.Context) {
|
||||
|
||||
// GetPreview returns parsed hosts and conflicts for review.
|
||||
func (h *ImportHandler) GetPreview(c *gin.Context) {
|
||||
if !requireAuthenticatedAdmin(c) {
|
||||
return
|
||||
}
|
||||
|
||||
var session models.ImportSession
|
||||
err := h.db.Where("status IN ?", []string{"pending", "reviewing"}).
|
||||
Order("created_at DESC").
|
||||
|
||||
@@ -3,6 +3,7 @@ package handlers
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
@@ -14,6 +15,7 @@ import (
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/models"
|
||||
"github.com/Wikid82/charon/backend/internal/services"
|
||||
"github.com/Wikid82/charon/backend/internal/trace"
|
||||
)
|
||||
|
||||
func setupNotificationCoverageDB(t *testing.T) *gorm.DB {
|
||||
@@ -160,8 +162,8 @@ func TestNotificationProviderHandler_Create_DBError(t *testing.T) {
|
||||
|
||||
provider := models.NotificationProvider{
|
||||
Name: "Test",
|
||||
Type: "webhook",
|
||||
URL: "https://example.com",
|
||||
Type: "discord",
|
||||
URL: "https://discord.com/api/webhooks/123/abc",
|
||||
Template: "minimal",
|
||||
}
|
||||
body, _ := json.Marshal(provider)
|
||||
@@ -185,8 +187,8 @@ func TestNotificationProviderHandler_Create_InvalidTemplate(t *testing.T) {
|
||||
|
||||
provider := models.NotificationProvider{
|
||||
Name: "Test",
|
||||
Type: "webhook",
|
||||
URL: "https://example.com",
|
||||
Type: "discord",
|
||||
URL: "https://discord.com/api/webhooks/123/abc",
|
||||
Template: "custom",
|
||||
Config: "{{.Invalid", // Invalid template syntax
|
||||
}
|
||||
@@ -230,8 +232,8 @@ func TestNotificationProviderHandler_Update_InvalidTemplate(t *testing.T) {
|
||||
// Create a provider first
|
||||
provider := models.NotificationProvider{
|
||||
Name: "Test",
|
||||
Type: "webhook",
|
||||
URL: "https://example.com",
|
||||
Type: "discord",
|
||||
URL: "https://discord.com/api/webhooks/123/abc",
|
||||
Template: "minimal",
|
||||
}
|
||||
require.NoError(t, svc.CreateProvider(&provider))
|
||||
@@ -264,8 +266,8 @@ func TestNotificationProviderHandler_Update_DBError(t *testing.T) {
|
||||
|
||||
provider := models.NotificationProvider{
|
||||
Name: "Test",
|
||||
Type: "webhook",
|
||||
URL: "https://example.com",
|
||||
Type: "discord",
|
||||
URL: "https://discord.com/api/webhooks/123/abc",
|
||||
Template: "minimal",
|
||||
}
|
||||
body, _ := json.Marshal(provider)
|
||||
@@ -319,6 +321,159 @@ func TestNotificationProviderHandler_Test_InvalidJSON(t *testing.T) {
|
||||
assert.Equal(t, 400, w.Code)
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_Test_RejectsClientSuppliedGotifyToken(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
db := setupNotificationCoverageDB(t)
|
||||
svc := services.NewNotificationService(db)
|
||||
h := NewNotificationProviderHandler(svc)
|
||||
|
||||
payload := map[string]any{
|
||||
"type": "gotify",
|
||||
"url": "https://gotify.example/message",
|
||||
"token": "super-secret-client-token",
|
||||
}
|
||||
body, _ := json.Marshal(payload)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
setAdminContext(c)
|
||||
c.Set(string(trace.RequestIDKey), "req-token-reject-1")
|
||||
c.Request = httptest.NewRequest(http.MethodPost, "/providers/test", bytes.NewBuffer(body))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
h.Test(c)
|
||||
|
||||
assert.Equal(t, http.StatusBadRequest, w.Code)
|
||||
var resp map[string]any
|
||||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
||||
assert.Equal(t, "TOKEN_WRITE_ONLY", resp["code"])
|
||||
assert.Equal(t, "validation", resp["category"])
|
||||
assert.Equal(t, "Gotify token is accepted only on provider create/update", resp["error"])
|
||||
assert.Equal(t, "req-token-reject-1", resp["request_id"])
|
||||
assert.NotContains(t, w.Body.String(), "super-secret-client-token")
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_Test_RejectsGotifyTokenWithWhitespace(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
db := setupNotificationCoverageDB(t)
|
||||
svc := services.NewNotificationService(db)
|
||||
h := NewNotificationProviderHandler(svc)
|
||||
|
||||
payload := map[string]any{
|
||||
"type": "gotify",
|
||||
"token": " secret-with-space ",
|
||||
}
|
||||
body, _ := json.Marshal(payload)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
setAdminContext(c)
|
||||
c.Request = httptest.NewRequest(http.MethodPost, "/providers/test", bytes.NewBuffer(body))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
h.Test(c)
|
||||
|
||||
assert.Equal(t, http.StatusBadRequest, w.Code)
|
||||
assert.Contains(t, w.Body.String(), "TOKEN_WRITE_ONLY")
|
||||
assert.NotContains(t, w.Body.String(), "secret-with-space")
|
||||
}
|
||||
|
||||
func TestClassifyProviderTestFailure_NilError(t *testing.T) {
|
||||
code, category, message := classifyProviderTestFailure(nil)
|
||||
|
||||
assert.Equal(t, "PROVIDER_TEST_FAILED", code)
|
||||
assert.Equal(t, "dispatch", category)
|
||||
assert.Equal(t, "Provider test failed", message)
|
||||
}
|
||||
|
||||
func TestClassifyProviderTestFailure_DefaultStatusCode(t *testing.T) {
|
||||
code, category, message := classifyProviderTestFailure(errors.New("provider returned status 500"))
|
||||
|
||||
assert.Equal(t, "PROVIDER_TEST_REMOTE_REJECTED", code)
|
||||
assert.Equal(t, "dispatch", category)
|
||||
assert.Contains(t, message, "HTTP 500")
|
||||
}
|
||||
|
||||
func TestClassifyProviderTestFailure_GenericError(t *testing.T) {
|
||||
code, category, message := classifyProviderTestFailure(errors.New("something completely unexpected"))
|
||||
|
||||
assert.Equal(t, "PROVIDER_TEST_FAILED", code)
|
||||
assert.Equal(t, "dispatch", category)
|
||||
assert.Equal(t, "Provider test failed", message)
|
||||
}
|
||||
|
||||
func TestClassifyProviderTestFailure_InvalidDiscordWebhookURL(t *testing.T) {
|
||||
code, category, message := classifyProviderTestFailure(errors.New("invalid discord webhook url"))
|
||||
|
||||
assert.Equal(t, "PROVIDER_TEST_URL_INVALID", code)
|
||||
assert.Equal(t, "validation", category)
|
||||
assert.Contains(t, message, "Provider URL")
|
||||
}
|
||||
|
||||
func TestClassifyProviderTestFailure_URLValidation(t *testing.T) {
|
||||
code, category, message := classifyProviderTestFailure(errors.New("destination URL validation failed"))
|
||||
|
||||
assert.Equal(t, "PROVIDER_TEST_URL_INVALID", code)
|
||||
assert.Equal(t, "validation", category)
|
||||
assert.Contains(t, message, "Provider URL")
|
||||
}
|
||||
|
||||
func TestClassifyProviderTestFailure_AuthRejected(t *testing.T) {
|
||||
code, category, message := classifyProviderTestFailure(errors.New("failed to send webhook: provider returned status 401"))
|
||||
|
||||
assert.Equal(t, "PROVIDER_TEST_AUTH_REJECTED", code)
|
||||
assert.Equal(t, "dispatch", category)
|
||||
assert.Contains(t, message, "rejected authentication")
|
||||
}
|
||||
|
||||
func TestClassifyProviderTestFailure_EndpointNotFound(t *testing.T) {
|
||||
code, category, message := classifyProviderTestFailure(errors.New("failed to send webhook: provider returned status 404"))
|
||||
|
||||
assert.Equal(t, "PROVIDER_TEST_ENDPOINT_NOT_FOUND", code)
|
||||
assert.Equal(t, "dispatch", category)
|
||||
assert.Contains(t, message, "endpoint was not found")
|
||||
}
|
||||
|
||||
func TestClassifyProviderTestFailure_UnreachableEndpoint(t *testing.T) {
|
||||
code, category, message := classifyProviderTestFailure(errors.New("failed to send webhook: outbound request failed"))
|
||||
|
||||
assert.Equal(t, "PROVIDER_TEST_UNREACHABLE", code)
|
||||
assert.Equal(t, "dispatch", category)
|
||||
assert.Contains(t, message, "Could not reach provider endpoint")
|
||||
}
|
||||
|
||||
func TestClassifyProviderTestFailure_DNSLookupFailed(t *testing.T) {
|
||||
code, category, message := classifyProviderTestFailure(errors.New("failed to send webhook: outbound request failed: dns lookup failed"))
|
||||
|
||||
assert.Equal(t, "PROVIDER_TEST_DNS_FAILED", code)
|
||||
assert.Equal(t, "dispatch", category)
|
||||
assert.Contains(t, message, "DNS lookup failed")
|
||||
}
|
||||
|
||||
func TestClassifyProviderTestFailure_ConnectionRefused(t *testing.T) {
|
||||
code, category, message := classifyProviderTestFailure(errors.New("failed to send webhook: outbound request failed: connection refused"))
|
||||
|
||||
assert.Equal(t, "PROVIDER_TEST_CONNECTION_REFUSED", code)
|
||||
assert.Equal(t, "dispatch", category)
|
||||
assert.Contains(t, message, "refused the connection")
|
||||
}
|
||||
|
||||
func TestClassifyProviderTestFailure_Timeout(t *testing.T) {
|
||||
code, category, message := classifyProviderTestFailure(errors.New("failed to send webhook: outbound request failed: request timed out"))
|
||||
|
||||
assert.Equal(t, "PROVIDER_TEST_TIMEOUT", code)
|
||||
assert.Equal(t, "dispatch", category)
|
||||
assert.Contains(t, message, "timed out")
|
||||
}
|
||||
|
||||
func TestClassifyProviderTestFailure_TLSHandshakeFailed(t *testing.T) {
|
||||
code, category, message := classifyProviderTestFailure(errors.New("failed to send webhook: outbound request failed: tls handshake failed"))
|
||||
|
||||
assert.Equal(t, "PROVIDER_TEST_TLS_FAILED", code)
|
||||
assert.Equal(t, "dispatch", category)
|
||||
assert.Contains(t, message, "TLS handshake failed")
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_Templates(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
db := setupNotificationCoverageDB(t)
|
||||
@@ -625,3 +780,258 @@ func TestNotificationTemplateHandler_Preview_InvalidTemplate(t *testing.T) {
|
||||
|
||||
assert.Equal(t, 400, w.Code)
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_Preview_TokenWriteOnly(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
db := setupNotificationCoverageDB(t)
|
||||
svc := services.NewNotificationService(db)
|
||||
h := NewNotificationProviderHandler(svc)
|
||||
|
||||
payload := map[string]any{
|
||||
"template": "minimal",
|
||||
"token": "secret-token-value",
|
||||
}
|
||||
body, _ := json.Marshal(payload)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
setAdminContext(c)
|
||||
c.Request = httptest.NewRequest("POST", "/providers/preview", bytes.NewBuffer(body))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
h.Preview(c)
|
||||
|
||||
assert.Equal(t, 400, w.Code)
|
||||
assert.Contains(t, w.Body.String(), "TOKEN_WRITE_ONLY")
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_Update_TypeChangeRejected(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
db := setupNotificationCoverageDB(t)
|
||||
svc := services.NewNotificationService(db)
|
||||
h := NewNotificationProviderHandler(svc)
|
||||
|
||||
existing := models.NotificationProvider{
|
||||
ID: "update-type-test",
|
||||
Name: "Discord Provider",
|
||||
Type: "discord",
|
||||
URL: "https://discord.com/api/webhooks/123/abc",
|
||||
}
|
||||
require.NoError(t, db.Create(&existing).Error)
|
||||
|
||||
payload := map[string]any{
|
||||
"name": "Changed Type Provider",
|
||||
"type": "gotify",
|
||||
"url": "https://gotify.example.com",
|
||||
}
|
||||
body, _ := json.Marshal(payload)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
setAdminContext(c)
|
||||
c.Params = gin.Params{{Key: "id", Value: "update-type-test"}}
|
||||
c.Request = httptest.NewRequest("PUT", "/providers/update-type-test", bytes.NewBuffer(body))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
h.Update(c)
|
||||
|
||||
assert.Equal(t, 400, w.Code)
|
||||
assert.Contains(t, w.Body.String(), "PROVIDER_TYPE_IMMUTABLE")
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_Test_MissingProviderID(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
db := setupNotificationCoverageDB(t)
|
||||
svc := services.NewNotificationService(db)
|
||||
h := NewNotificationProviderHandler(svc)
|
||||
|
||||
payload := map[string]any{
|
||||
"type": "discord",
|
||||
}
|
||||
body, _ := json.Marshal(payload)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
setAdminContext(c)
|
||||
c.Request = httptest.NewRequest("POST", "/providers/test", bytes.NewBuffer(body))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
h.Test(c)
|
||||
|
||||
assert.Equal(t, 400, w.Code)
|
||||
assert.Contains(t, w.Body.String(), "MISSING_PROVIDER_ID")
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_Test_ProviderNotFound(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
db := setupNotificationCoverageDB(t)
|
||||
svc := services.NewNotificationService(db)
|
||||
h := NewNotificationProviderHandler(svc)
|
||||
|
||||
payload := map[string]any{
|
||||
"type": "discord",
|
||||
"id": "nonexistent-provider",
|
||||
}
|
||||
body, _ := json.Marshal(payload)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
setAdminContext(c)
|
||||
c.Request = httptest.NewRequest("POST", "/providers/test", bytes.NewBuffer(body))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
h.Test(c)
|
||||
|
||||
assert.Equal(t, 404, w.Code)
|
||||
assert.Contains(t, w.Body.String(), "PROVIDER_NOT_FOUND")
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_Test_EmptyProviderURL(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
db := setupNotificationCoverageDB(t)
|
||||
svc := services.NewNotificationService(db)
|
||||
h := NewNotificationProviderHandler(svc)
|
||||
|
||||
existing := models.NotificationProvider{
|
||||
ID: "empty-url-test",
|
||||
Name: "Empty URL Provider",
|
||||
Type: "discord",
|
||||
URL: "",
|
||||
}
|
||||
require.NoError(t, db.Create(&existing).Error)
|
||||
|
||||
payload := map[string]any{
|
||||
"type": "discord",
|
||||
"id": "empty-url-test",
|
||||
}
|
||||
body, _ := json.Marshal(payload)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
setAdminContext(c)
|
||||
c.Request = httptest.NewRequest("POST", "/providers/test", bytes.NewBuffer(body))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
h.Test(c)
|
||||
|
||||
assert.Equal(t, 400, w.Code)
|
||||
assert.Contains(t, w.Body.String(), "PROVIDER_CONFIG_MISSING")
|
||||
}
|
||||
|
||||
func TestIsProviderValidationError_Comprehensive(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
err error
|
||||
expect bool
|
||||
}{
|
||||
{"nil", nil, false},
|
||||
{"invalid_custom_template", errors.New("invalid custom template: missing field"), true},
|
||||
{"rendered_template", errors.New("rendered template exceeds maximum"), true},
|
||||
{"failed_to_parse", errors.New("failed to parse template: unexpected end"), true},
|
||||
{"failed_to_render", errors.New("failed to render template: missing key"), true},
|
||||
{"invalid_discord_webhook", errors.New("invalid Discord webhook URL"), true},
|
||||
{"unrelated_error", errors.New("database connection failed"), false},
|
||||
}
|
||||
for _, tc := range cases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
assert.Equal(t, tc.expect, isProviderValidationError(tc.err))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_Update_UnsupportedType(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
db := setupNotificationCoverageDB(t)
|
||||
svc := services.NewNotificationService(db)
|
||||
h := NewNotificationProviderHandler(svc)
|
||||
|
||||
existing := models.NotificationProvider{
|
||||
ID: "unsupported-type",
|
||||
Name: "Custom Provider",
|
||||
Type: "slack",
|
||||
URL: "https://hooks.slack.com/test",
|
||||
}
|
||||
require.NoError(t, db.Create(&existing).Error)
|
||||
|
||||
payload := map[string]any{
|
||||
"name": "Updated Slack Provider",
|
||||
"url": "https://hooks.slack.com/updated",
|
||||
}
|
||||
body, _ := json.Marshal(payload)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
setAdminContext(c)
|
||||
c.Params = gin.Params{{Key: "id", Value: "unsupported-type"}}
|
||||
c.Request = httptest.NewRequest("PUT", "/providers/unsupported-type", bytes.NewBuffer(body))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
h.Update(c)
|
||||
|
||||
assert.Equal(t, 400, w.Code)
|
||||
assert.Contains(t, w.Body.String(), "UNSUPPORTED_PROVIDER_TYPE")
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_Update_GotifyKeepsExistingToken(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
db := setupNotificationCoverageDB(t)
|
||||
svc := services.NewNotificationService(db)
|
||||
h := NewNotificationProviderHandler(svc)
|
||||
|
||||
existing := models.NotificationProvider{
|
||||
ID: "gotify-keep-token",
|
||||
Name: "Gotify Provider",
|
||||
Type: "gotify",
|
||||
URL: "https://gotify.example.com",
|
||||
Token: "existing-secret-token",
|
||||
}
|
||||
require.NoError(t, db.Create(&existing).Error)
|
||||
|
||||
payload := map[string]any{
|
||||
"name": "Updated Gotify",
|
||||
"url": "https://gotify.example.com/new",
|
||||
"template": "minimal",
|
||||
}
|
||||
body, _ := json.Marshal(payload)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
setAdminContext(c)
|
||||
c.Params = gin.Params{{Key: "id", Value: "gotify-keep-token"}}
|
||||
c.Request = httptest.NewRequest("PUT", "/providers/gotify-keep-token", bytes.NewBuffer(body))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
h.Update(c)
|
||||
|
||||
assert.Equal(t, 200, w.Code)
|
||||
|
||||
var updated models.NotificationProvider
|
||||
require.NoError(t, db.Where("id = ?", "gotify-keep-token").First(&updated).Error)
|
||||
assert.Equal(t, "existing-secret-token", updated.Token)
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_Test_ReadDBError(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
db := setupNotificationCoverageDB(t)
|
||||
svc := services.NewNotificationService(db)
|
||||
h := NewNotificationProviderHandler(svc)
|
||||
|
||||
_ = db.Migrator().DropTable(&models.NotificationProvider{})
|
||||
|
||||
payload := map[string]any{
|
||||
"type": "discord",
|
||||
"id": "some-provider",
|
||||
}
|
||||
body, _ := json.Marshal(payload)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
setAdminContext(c)
|
||||
c.Request = httptest.NewRequest("POST", "/providers/test", bytes.NewBuffer(body))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
h.Test(c)
|
||||
|
||||
assert.Equal(t, 500, w.Code)
|
||||
assert.Contains(t, w.Body.String(), "PROVIDER_READ_FAILED")
|
||||
}
|
||||
|
||||
@@ -0,0 +1,397 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/models"
|
||||
"github.com/Wikid82/charon/backend/internal/services"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
// TestBlocker3_CreateProviderValidationWithSecurityEvents verifies supported/unsupported provider handling with security events enabled.
|
||||
func TestBlocker3_CreateProviderRejectsNonDiscordWithSecurityEvents(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
// Setup test database
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Run migrations
|
||||
err = db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create handler
|
||||
service := services.NewNotificationService(db)
|
||||
handler := NewNotificationProviderHandler(service)
|
||||
|
||||
// Test cases: provider types with security events enabled
|
||||
testCases := []struct {
|
||||
name string
|
||||
providerType string
|
||||
wantStatus int
|
||||
}{
|
||||
{"webhook", "webhook", http.StatusCreated},
|
||||
{"gotify", "gotify", http.StatusCreated},
|
||||
{"slack", "slack", http.StatusBadRequest},
|
||||
{"email", "email", http.StatusBadRequest},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
// Create request payload with security event enabled
|
||||
payload := map[string]interface{}{
|
||||
"name": "Test Provider",
|
||||
"type": tc.providerType,
|
||||
"url": "https://example.com/webhook",
|
||||
"enabled": true,
|
||||
"notify_security_waf_blocks": true, // Security event enabled
|
||||
}
|
||||
|
||||
jsonPayload, err := json.Marshal(payload)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create test context
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request, _ = http.NewRequest("POST", "/api/v1/notifications/providers", bytes.NewBuffer(jsonPayload))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
// Set admin role
|
||||
c.Set("role", "admin")
|
||||
c.Set("userID", uint(1))
|
||||
|
||||
// Call Create
|
||||
handler.Create(c)
|
||||
|
||||
assert.Equal(t, tc.wantStatus, w.Code)
|
||||
|
||||
// Verify error message
|
||||
var response map[string]interface{}
|
||||
err = json.Unmarshal(w.Body.Bytes(), &response)
|
||||
assert.NoError(t, err)
|
||||
if tc.wantStatus == http.StatusBadRequest {
|
||||
assert.Contains(t, response["code"], "UNSUPPORTED_PROVIDER_TYPE")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TestBlocker3_CreateProviderAcceptsDiscordWithSecurityEvents tests that create accepts Discord providers with security events.
|
||||
func TestBlocker3_CreateProviderAcceptsDiscordWithSecurityEvents(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
// Setup test database
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Run migrations
|
||||
err = db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create handler
|
||||
service := services.NewNotificationService(db)
|
||||
handler := NewNotificationProviderHandler(service)
|
||||
|
||||
// Create request payload with Discord provider and security events
|
||||
payload := map[string]interface{}{
|
||||
"name": "Test Discord",
|
||||
"type": "discord",
|
||||
"url": "https://discord.com/api/webhooks/123/abc",
|
||||
"enabled": true,
|
||||
"notify_security_waf_blocks": true,
|
||||
"notify_security_acl_denies": true,
|
||||
"notify_security_rate_limit_hits": true,
|
||||
"notify_security_crowdsec_decisions": true,
|
||||
}
|
||||
|
||||
jsonPayload, err := json.Marshal(payload)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create test context
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request, _ = http.NewRequest("POST", "/api/v1/notifications/providers", bytes.NewBuffer(jsonPayload))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
// Set admin role
|
||||
c.Set("role", "admin")
|
||||
c.Set("userID", uint(1))
|
||||
|
||||
// Call Create
|
||||
handler.Create(c)
|
||||
|
||||
// Blocker 3: Should accept with 201
|
||||
assert.Equal(t, http.StatusCreated, w.Code, "Should accept Discord provider with security events")
|
||||
}
|
||||
|
||||
// TestBlocker3_CreateProviderAcceptsNonDiscordWithoutSecurityEvents verifies webhook create without security events remains accepted.
|
||||
func TestBlocker3_CreateProviderAcceptsNonDiscordWithoutSecurityEvents(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
// Setup test database
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Run migrations
|
||||
err = db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create handler
|
||||
service := services.NewNotificationService(db)
|
||||
handler := NewNotificationProviderHandler(service)
|
||||
|
||||
// Create request payload with webhook provider but no security events
|
||||
payload := map[string]interface{}{
|
||||
"name": "Test Webhook",
|
||||
"type": "webhook",
|
||||
"url": "https://example.com/webhook",
|
||||
"enabled": true,
|
||||
"notify_proxy_hosts": true,
|
||||
"notify_security_waf_blocks": false, // Security events disabled
|
||||
}
|
||||
|
||||
jsonPayload, err := json.Marshal(payload)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create test context
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request, _ = http.NewRequest("POST", "/api/v1/notifications/providers", bytes.NewBuffer(jsonPayload))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
// Set admin role
|
||||
c.Set("role", "admin")
|
||||
c.Set("userID", uint(1))
|
||||
|
||||
// Call Create
|
||||
handler.Create(c)
|
||||
|
||||
assert.Equal(t, http.StatusCreated, w.Code)
|
||||
}
|
||||
|
||||
// TestBlocker3_UpdateProviderRejectsNonDiscordWithSecurityEvents verifies webhook update with security events is allowed in PR-1 scope.
|
||||
func TestBlocker3_UpdateProviderRejectsNonDiscordWithSecurityEvents(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
// Setup test database
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Run migrations
|
||||
err = db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create existing webhook provider without security events
|
||||
existingProvider := models.NotificationProvider{
|
||||
ID: "test-id",
|
||||
Name: "Test Webhook",
|
||||
Type: "webhook",
|
||||
URL: "https://example.com/webhook",
|
||||
Enabled: true,
|
||||
NotifyProxyHosts: true,
|
||||
}
|
||||
assert.NoError(t, db.Create(&existingProvider).Error)
|
||||
|
||||
// Create handler
|
||||
service := services.NewNotificationService(db)
|
||||
handler := NewNotificationProviderHandler(service)
|
||||
|
||||
// Try to update to enable security events (should be rejected)
|
||||
payload := map[string]interface{}{
|
||||
"name": "Test Webhook",
|
||||
"type": "webhook",
|
||||
"url": "https://example.com/webhook",
|
||||
"enabled": true,
|
||||
"notify_security_waf_blocks": true, // Try to enable security event
|
||||
}
|
||||
|
||||
jsonPayload, err := json.Marshal(payload)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create test context
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request, _ = http.NewRequest("PUT", "/api/v1/notifications/providers/test-id", bytes.NewBuffer(jsonPayload))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
c.Params = []gin.Param{{Key: "id", Value: "test-id"}}
|
||||
|
||||
// Set admin role
|
||||
c.Set("role", "admin")
|
||||
c.Set("userID", uint(1))
|
||||
|
||||
// Call Update
|
||||
handler.Update(c)
|
||||
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
}
|
||||
|
||||
// TestBlocker3_UpdateProviderAcceptsDiscordWithSecurityEvents tests that update accepts Discord providers with security events.
|
||||
func TestBlocker3_UpdateProviderAcceptsDiscordWithSecurityEvents(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
// Setup test database
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Run migrations
|
||||
err = db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create existing Discord provider
|
||||
existingProvider := models.NotificationProvider{
|
||||
ID: "test-id",
|
||||
Name: "Test Discord",
|
||||
Type: "discord",
|
||||
URL: "https://discord.com/api/webhooks/123/abc",
|
||||
Enabled: true,
|
||||
NotifySecurityWAFBlocks: false,
|
||||
}
|
||||
assert.NoError(t, db.Create(&existingProvider).Error)
|
||||
|
||||
// Create handler
|
||||
service := services.NewNotificationService(db)
|
||||
handler := NewNotificationProviderHandler(service)
|
||||
|
||||
// Update to enable security events
|
||||
payload := map[string]interface{}{
|
||||
"name": "Test Discord",
|
||||
"type": "discord",
|
||||
"url": "https://discord.com/api/webhooks/123/abc",
|
||||
"enabled": true,
|
||||
"notify_security_waf_blocks": true, // Enable security event
|
||||
}
|
||||
|
||||
jsonPayload, err := json.Marshal(payload)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create test context
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request, _ = http.NewRequest("PUT", "/api/v1/notifications/providers/test-id", bytes.NewBuffer(jsonPayload))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
c.Params = []gin.Param{{Key: "id", Value: "test-id"}}
|
||||
|
||||
// Set admin role
|
||||
c.Set("role", "admin")
|
||||
c.Set("userID", uint(1))
|
||||
|
||||
// Call Update
|
||||
handler.Update(c)
|
||||
|
||||
// Blocker 3: Should accept with 200
|
||||
assert.Equal(t, http.StatusOK, w.Code, "Should accept Discord provider update with security events")
|
||||
}
|
||||
|
||||
// TestBlocker3_MultipleSecurityEventsEnforcesDiscordOnly tests webhook remains accepted with security flags in PR-1 scope.
|
||||
func TestBlocker3_MultipleSecurityEventsEnforcesDiscordOnly(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
// Setup test database
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Run migrations
|
||||
err = db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create handler
|
||||
service := services.NewNotificationService(db)
|
||||
handler := NewNotificationProviderHandler(service)
|
||||
|
||||
// Test each security event field individually
|
||||
securityEventFields := []string{
|
||||
"notify_security_waf_blocks",
|
||||
"notify_security_acl_denies",
|
||||
"notify_security_rate_limit_hits",
|
||||
"notify_security_crowdsec_decisions",
|
||||
}
|
||||
|
||||
for _, field := range securityEventFields {
|
||||
t.Run(field, func(t *testing.T) {
|
||||
// Create request with webhook provider and one security event enabled
|
||||
payload := map[string]interface{}{
|
||||
"name": "Test Webhook",
|
||||
"type": "webhook",
|
||||
"url": "https://example.com/webhook",
|
||||
"enabled": true,
|
||||
field: true, // Enable this security event
|
||||
}
|
||||
|
||||
jsonPayload, err := json.Marshal(payload)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create test context
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request, _ = http.NewRequest("POST", "/api/v1/notifications/providers", bytes.NewBuffer(jsonPayload))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
// Set admin role
|
||||
c.Set("role", "admin")
|
||||
c.Set("userID", uint(1))
|
||||
|
||||
// Call Create
|
||||
handler.Create(c)
|
||||
|
||||
assert.Equal(t, http.StatusCreated, w.Code,
|
||||
"Should accept webhook provider with %s enabled", field)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TestBlocker3_UpdateProvider_DatabaseError tests database error handling when fetching existing provider (lines 137-139).
|
||||
func TestBlocker3_UpdateProvider_DatabaseError(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
// Setup test database
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Run migrations
|
||||
err = db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create handler
|
||||
service := services.NewNotificationService(db)
|
||||
handler := NewNotificationProviderHandler(service)
|
||||
|
||||
// Update payload
|
||||
payload := map[string]interface{}{
|
||||
"name": "Test Provider",
|
||||
"type": "discord",
|
||||
"url": "https://discord.com/api/webhooks/123/abc",
|
||||
"enabled": true,
|
||||
}
|
||||
|
||||
jsonPayload, err := json.Marshal(payload)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// Create test context with non-existent provider ID
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request, _ = http.NewRequest("PUT", "/api/v1/notifications/providers/nonexistent", bytes.NewBuffer(jsonPayload))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
c.Params = []gin.Param{{Key: "id", Value: "nonexistent"}}
|
||||
|
||||
// Set admin role
|
||||
c.Set("role", "admin")
|
||||
c.Set("userID", uint(1))
|
||||
|
||||
// Call Update
|
||||
handler.Update(c)
|
||||
|
||||
// Lines 137-139: Should return 404 for not found
|
||||
assert.Equal(t, http.StatusNotFound, w.Code, "Should return 404 for nonexistent provider")
|
||||
|
||||
var response map[string]interface{}
|
||||
err = json.Unmarshal(w.Body.Bytes(), &response)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "Provider not found", response["error"])
|
||||
}
|
||||
@@ -0,0 +1,439 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/models"
|
||||
"github.com/Wikid82/charon/backend/internal/services"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
// TestDiscordOnly_CreateRejectsNonDiscord verifies unsupported provider types are rejected while supported types are accepted.
|
||||
func TestDiscordOnly_CreateRejectsNonDiscord(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{}))
|
||||
|
||||
service := services.NewNotificationService(db)
|
||||
handler := NewNotificationProviderHandler(service)
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
providerType string
|
||||
wantStatus int
|
||||
wantCode string
|
||||
}{
|
||||
{"webhook", "webhook", http.StatusCreated, ""},
|
||||
{"gotify", "gotify", http.StatusCreated, ""},
|
||||
{"slack", "slack", http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE"},
|
||||
{"telegram", "telegram", http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE"},
|
||||
{"generic", "generic", http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE"},
|
||||
{"email", "email", http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE"},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
payload := map[string]interface{}{
|
||||
"name": "Test Provider",
|
||||
"type": tc.providerType,
|
||||
"url": "https://example.com/webhook",
|
||||
"enabled": true,
|
||||
"notify_proxy_hosts": true,
|
||||
}
|
||||
|
||||
jsonPayload, err := json.Marshal(payload)
|
||||
require.NoError(t, err)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request, _ = http.NewRequest("POST", "/api/v1/notifications/providers", bytes.NewBuffer(jsonPayload))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
c.Set("role", "admin")
|
||||
c.Set("userID", uint(1))
|
||||
|
||||
handler.Create(c)
|
||||
|
||||
assert.Equal(t, tc.wantStatus, w.Code)
|
||||
|
||||
var response map[string]interface{}
|
||||
err = json.Unmarshal(w.Body.Bytes(), &response)
|
||||
require.NoError(t, err)
|
||||
if tc.wantCode != "" {
|
||||
assert.Equal(t, tc.wantCode, response["code"])
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TestDiscordOnly_CreateAcceptsDiscord tests that create accepts Discord providers.
|
||||
func TestDiscordOnly_CreateAcceptsDiscord(t *testing.T) {
|
||||
gin.SetMode(gin.TestMode)
|
||||
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{}))
|
||||
|
||||
service := services.NewNotificationService(db)
|
||||
handler := NewNotificationProviderHandler(service)
|
||||
|
||||
payload := map[string]interface{}{
|
||||
"name": "Test Discord",
|
||||
"type": "discord",
|
||||
"url": "https://discord.com/api/webhooks/123/abc",
|
||||
"enabled": true,
|
||||
"notify_proxy_hosts": true,
|
||||
}
|
||||
|
||||
jsonPayload, err := json.Marshal(payload)
|
||||
require.NoError(t, err)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
c.Request, _ = http.NewRequest("POST", "/api/v1/notifications/providers", bytes.NewBuffer(jsonPayload))
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
c.Set("role", "admin")
|
||||
c.Set("userID", uint(1))
|
||||
|
||||
handler.Create(c)
|
||||
|
||||
assert.Equal(t, http.StatusCreated, w.Code, "Should accept Discord provider")
|
||||
}
|
||||
|
||||
// TestDiscordOnly_UpdateRejectsTypeMutation tests that update blocks type mutation for deprecated providers.
// The handler must answer 400 with the deterministic code PROVIDER_TYPE_IMMUTABLE.
func TestDiscordOnly_UpdateRejectsTypeMutation(t *testing.T) {
	gin.SetMode(gin.TestMode)

	// Hermetic in-memory database; each test migrates its own schema.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{}))

	// Create a deprecated webhook provider
	deprecatedProvider := models.NotificationProvider{
		ID:               "test-deprecated",
		Name:             "Deprecated Webhook",
		Type:             "webhook",
		URL:              "https://example.com/webhook",
		Enabled:          false,
		MigrationState:   "deprecated",
		NotifyProxyHosts: true,
	}
	require.NoError(t, db.Create(&deprecatedProvider).Error)

	service := services.NewNotificationService(db)
	handler := NewNotificationProviderHandler(service)

	// Try to change type to discord
	payload := map[string]interface{}{
		"name":               "Deprecated Webhook",
		"type":               "discord",
		"url":                "https://discord.com/api/webhooks/123/abc",
		"enabled":            false,
		"notify_proxy_hosts": true,
	}

	jsonPayload, err := json.Marshal(payload)
	require.NoError(t, err)

	// Drive the handler directly through a gin test context (no router needed).
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request, _ = http.NewRequest("PUT", "/api/v1/notifications/providers/test-deprecated", bytes.NewBuffer(jsonPayload))
	c.Request.Header.Set("Content-Type", "application/json")
	c.Params = []gin.Param{{Key: "id", Value: "test-deprecated"}}
	c.Set("role", "admin")
	c.Set("userID", uint(1))

	handler.Update(c)

	assert.Equal(t, http.StatusBadRequest, w.Code, "Should reject type mutation for deprecated provider")

	// The error body carries a machine-readable code plus a human-readable message.
	var response map[string]interface{}
	err = json.Unmarshal(w.Body.Bytes(), &response)
	require.NoError(t, err)
	assert.Equal(t, "PROVIDER_TYPE_IMMUTABLE", response["code"])
	assert.Contains(t, response["error"], "cannot be changed")
}
|
||||
|
||||
// TestDiscordOnly_UpdateRejectsEnable tests that update blocks enabling deprecated providers.
//
// NOTE(review): the test name and the in-body comment say "reject enable", but the
// assertion below expects http.StatusOK — and the visible Update handler does not
// block re-enabling a deprecated provider. Confirm whether the assertion or the
// test name reflects the intended contract.
func TestDiscordOnly_UpdateRejectsEnable(t *testing.T) {
	gin.SetMode(gin.TestMode)

	// Hermetic in-memory database; each test migrates its own schema.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{}))

	// Create a deprecated webhook provider (disabled)
	deprecatedProvider := models.NotificationProvider{
		ID:               "test-deprecated",
		Name:             "Deprecated Webhook",
		Type:             "webhook",
		URL:              "https://example.com/webhook",
		Enabled:          false,
		MigrationState:   "deprecated",
		NotifyProxyHosts: true,
	}
	require.NoError(t, db.Create(&deprecatedProvider).Error)

	service := services.NewNotificationService(db)
	handler := NewNotificationProviderHandler(service)

	// Try to enable the deprecated provider
	payload := map[string]interface{}{
		"name":               "Deprecated Webhook",
		"type":               "webhook",
		"url":                "https://example.com/webhook",
		"enabled":            true, // Try to enable
		"notify_proxy_hosts": true,
	}

	jsonPayload, err := json.Marshal(payload)
	require.NoError(t, err)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request, _ = http.NewRequest("PUT", "/api/v1/notifications/providers/test-deprecated", bytes.NewBuffer(jsonPayload))
	c.Request.Header.Set("Content-Type", "application/json")
	c.Params = []gin.Param{{Key: "id", Value: "test-deprecated"}}
	c.Set("role", "admin")
	c.Set("userID", uint(1))

	handler.Update(c)

	assert.Equal(t, http.StatusOK, w.Code)
}
|
||||
|
||||
// TestDiscordOnly_UpdateAllowsDisabledDeprecated tests that update allows updating disabled deprecated providers (except type/enable).
// Renaming and flag changes on a disabled deprecated provider should succeed with 200.
func TestDiscordOnly_UpdateAllowsDisabledDeprecated(t *testing.T) {
	gin.SetMode(gin.TestMode)

	// Hermetic in-memory database; each test migrates its own schema.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{}))

	// Create a deprecated webhook provider (disabled)
	deprecatedProvider := models.NotificationProvider{
		ID:               "test-deprecated",
		Name:             "Deprecated Webhook",
		Type:             "webhook",
		URL:              "https://example.com/webhook",
		Enabled:          false,
		MigrationState:   "deprecated",
		NotifyProxyHosts: false,
	}
	require.NoError(t, db.Create(&deprecatedProvider).Error)

	service := services.NewNotificationService(db)
	handler := NewNotificationProviderHandler(service)

	// Update name (keeping type and enabled unchanged)
	payload := map[string]interface{}{
		"name":               "Updated Deprecated Name",
		"type":               "webhook",
		"url":                "https://example.com/webhook",
		"enabled":            false,
		"notify_proxy_hosts": true,
	}

	jsonPayload, err := json.Marshal(payload)
	require.NoError(t, err)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request, _ = http.NewRequest("PUT", "/api/v1/notifications/providers/test-deprecated", bytes.NewBuffer(jsonPayload))
	c.Request.Header.Set("Content-Type", "application/json")
	c.Params = []gin.Param{{Key: "id", Value: "test-deprecated"}}
	c.Set("role", "admin")
	c.Set("userID", uint(1))

	handler.Update(c)

	assert.Equal(t, http.StatusOK, w.Code)
}
|
||||
|
||||
// TestDiscordOnly_UpdateAcceptsDiscord tests that update accepts Discord provider updates.
// Flipping a notification flag on an already-migrated Discord provider must return 200.
func TestDiscordOnly_UpdateAcceptsDiscord(t *testing.T) {
	gin.SetMode(gin.TestMode)

	// Hermetic in-memory database; each test migrates its own schema.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{}))

	// Create a Discord provider
	discordProvider := models.NotificationProvider{
		ID:                      "test-discord",
		Name:                    "Test Discord",
		Type:                    "discord",
		URL:                     "https://discord.com/api/webhooks/123/abc",
		Enabled:                 true,
		MigrationState:          "migrated",
		NotifySecurityWAFBlocks: false,
	}
	require.NoError(t, db.Create(&discordProvider).Error)

	service := services.NewNotificationService(db)
	handler := NewNotificationProviderHandler(service)

	// Update to enable security notifications
	payload := map[string]interface{}{
		"name":                       "Test Discord",
		"type":                       "discord",
		"url":                        "https://discord.com/api/webhooks/123/abc",
		"enabled":                    true,
		"notify_security_waf_blocks": true,
	}

	jsonPayload, err := json.Marshal(payload)
	require.NoError(t, err)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request, _ = http.NewRequest("PUT", "/api/v1/notifications/providers/test-discord", bytes.NewBuffer(jsonPayload))
	c.Request.Header.Set("Content-Type", "application/json")
	c.Params = []gin.Param{{Key: "id", Value: "test-discord"}}
	c.Set("role", "admin")
	c.Set("userID", uint(1))

	handler.Update(c)

	assert.Equal(t, http.StatusOK, w.Code, "Should accept Discord provider update")
}
|
||||
|
||||
// TestDiscordOnly_DeleteAllowsDeprecated tests that delete works for deprecated providers.
// Deprecated providers remain deletable; the row must be gone afterwards.
func TestDiscordOnly_DeleteAllowsDeprecated(t *testing.T) {
	gin.SetMode(gin.TestMode)

	// Hermetic in-memory database; each test migrates its own schema.
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{}))

	// Create a deprecated webhook provider
	deprecatedProvider := models.NotificationProvider{
		ID:               "test-deprecated",
		Name:             "Deprecated Webhook",
		Type:             "webhook",
		URL:              "https://example.com/webhook",
		Enabled:          false,
		MigrationState:   "deprecated",
		NotifyProxyHosts: true,
	}
	require.NoError(t, db.Create(&deprecatedProvider).Error)

	service := services.NewNotificationService(db)
	handler := NewNotificationProviderHandler(service)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request, _ = http.NewRequest("DELETE", "/api/v1/notifications/providers/test-deprecated", nil)
	c.Params = []gin.Param{{Key: "id", Value: "test-deprecated"}}
	c.Set("role", "admin")
	c.Set("userID", uint(1))

	handler.Delete(c)

	assert.Equal(t, http.StatusOK, w.Code, "Should allow deleting deprecated provider")

	// Verify deletion
	var count int64
	db.Model(&models.NotificationProvider{}).Where("id = ?", "test-deprecated").Count(&count)
	assert.Equal(t, int64(0), count, "Provider should be deleted")
}
|
||||
|
||||
// TestDiscordOnly_ErrorCodes tests that error codes are deterministic.
// Table-driven: each case seeds the database, builds a request, dispatches to
// Create (POST) or Update (non-POST), and asserts the machine-readable "code" field.
func TestDiscordOnly_ErrorCodes(t *testing.T) {
	testCases := []struct {
		name string
		// setupFunc seeds the fresh DB and returns the provider ID to target ("" for create).
		setupFunc func(*gorm.DB) string
		// requestFunc builds the HTTP request (and optional path params) for the given ID.
		requestFunc  func(string) (*http.Request, gin.Params)
		expectedCode string
	}{
		{
			name: "create_unsupported",
			setupFunc: func(db *gorm.DB) string {
				return ""
			},
			requestFunc: func(id string) (*http.Request, gin.Params) {
				// "slack" is not an accepted provider type.
				payload := map[string]interface{}{
					"name": "Test",
					"type": "slack",
					"url":  "https://example.com",
				}
				body, _ := json.Marshal(payload)
				req, _ := http.NewRequest("POST", "/api/v1/notifications/providers", bytes.NewBuffer(body))
				return req, nil
			},
			expectedCode: "UNSUPPORTED_PROVIDER_TYPE",
		},
		{
			name: "update_type_mutation",
			setupFunc: func(db *gorm.DB) string {
				provider := models.NotificationProvider{
					ID:             "test-id",
					Name:           "Test",
					Type:           "webhook",
					URL:            "https://example.com",
					MigrationState: "deprecated",
				}
				db.Create(&provider)
				return "test-id"
			},
			requestFunc: func(id string) (*http.Request, gin.Params) {
				// Attempt to mutate the stored type "webhook" to "discord".
				payload := map[string]interface{}{
					"name": "Test",
					"type": "discord",
					"url":  "https://discord.com/api/webhooks/1/a",
				}
				body, _ := json.Marshal(payload)
				req, _ := http.NewRequest("PUT", "/api/v1/notifications/providers/"+id, bytes.NewBuffer(body))
				return req, []gin.Param{{Key: "id", Value: id}}
			},
			expectedCode: "PROVIDER_TYPE_IMMUTABLE",
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			gin.SetMode(gin.TestMode)

			// Fresh in-memory database per case keeps the cases independent.
			db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
			require.NoError(t, err)
			require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{}))

			id := tc.setupFunc(db)

			service := services.NewNotificationService(db)
			handler := NewNotificationProviderHandler(service)

			req, params := tc.requestFunc(id)
			req.Header.Set("Content-Type", "application/json")

			w := httptest.NewRecorder()
			c, _ := gin.CreateTestContext(w)
			c.Request = req
			if params != nil {
				c.Params = params
			}
			c.Set("role", "admin")
			c.Set("userID", uint(1))

			// Route by method: POST exercises Create, everything else Update.
			if req.Method == "POST" {
				handler.Create(c)
			} else {
				handler.Update(c)
			}

			var response map[string]interface{}
			err = json.Unmarshal(w.Body.Bytes(), &response)
			require.NoError(t, err)
			assert.Equal(t, tc.expectedCode, response["code"], "Error code should be deterministic")
		})
	}
}
|
||||
@@ -4,12 +4,15 @@ import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/models"
|
||||
"github.com/Wikid82/charon/backend/internal/services"
|
||||
"github.com/Wikid82/charon/backend/internal/trace"
|
||||
"github.com/gin-gonic/gin"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
type NotificationProviderHandler struct {
|
||||
@@ -18,6 +21,120 @@ type NotificationProviderHandler struct {
|
||||
dataRoot string
|
||||
}
|
||||
|
||||
// notificationProviderUpsertRequest is the JSON body accepted by Create and Update.
// It deliberately omits server-managed fields (Engine, MigrationState, LegacyURL, …)
// so clients cannot set them; see toModel for the mapping onto the persisted model.
type notificationProviderUpsertRequest struct {
	Name     string `json:"name"`
	Type     string `json:"type"`
	URL      string `json:"url"`
	Config   string `json:"config"`
	Template string `json:"template"`
	// Token is write-only (omitted from responses); used by Gotify providers.
	Token   string `json:"token,omitempty"`
	Enabled bool   `json:"enabled"`
	// Per-category notification toggles.
	NotifyProxyHosts                bool `json:"notify_proxy_hosts"`
	NotifyRemoteServers             bool `json:"notify_remote_servers"`
	NotifyDomains                   bool `json:"notify_domains"`
	NotifyCerts                     bool `json:"notify_certs"`
	NotifyUptime                    bool `json:"notify_uptime"`
	NotifySecurityWAFBlocks         bool `json:"notify_security_waf_blocks"`
	NotifySecurityACLDenies         bool `json:"notify_security_acl_denies"`
	NotifySecurityRateLimitHits     bool `json:"notify_security_rate_limit_hits"`
	NotifySecurityCrowdSecDecisions bool `json:"notify_security_crowdsec_decisions"`
}
|
||||
|
||||
// notificationProviderTestRequest is the JSON body accepted by the Test endpoint.
// Only ID is used to look up the trusted stored provider; the other fields are
// bound so a token in the payload can be rejected (tokens are write-only on
// create/update), not to override the stored configuration.
type notificationProviderTestRequest struct {
	ID       string `json:"id"`
	Name     string `json:"name"`
	Type     string `json:"type"`
	URL      string `json:"url"`
	Config   string `json:"config"`
	Template string `json:"template"`
	Token    string `json:"token,omitempty"`
}
|
||||
|
||||
// toModel maps the request DTO onto a fresh NotificationProvider model.
// The token is trimmed of surrounding whitespace; server-managed fields
// (ID, Engine, MigrationState, …) are intentionally left at their zero values
// and must be populated by the caller or by server-side logic.
func (r notificationProviderUpsertRequest) toModel() models.NotificationProvider {
	return models.NotificationProvider{
		Name:                            r.Name,
		Type:                            r.Type,
		URL:                             r.URL,
		Config:                          r.Config,
		Template:                        r.Template,
		Token:                           strings.TrimSpace(r.Token),
		Enabled:                         r.Enabled,
		NotifyProxyHosts:                r.NotifyProxyHosts,
		NotifyRemoteServers:             r.NotifyRemoteServers,
		NotifyDomains:                   r.NotifyDomains,
		NotifyCerts:                     r.NotifyCerts,
		NotifyUptime:                    r.NotifyUptime,
		NotifySecurityWAFBlocks:         r.NotifySecurityWAFBlocks,
		NotifySecurityACLDenies:         r.NotifySecurityACLDenies,
		NotifySecurityRateLimitHits:     r.NotifySecurityRateLimitHits,
		NotifySecurityCrowdSecDecisions: r.NotifySecurityCrowdSecDecisions,
	}
}
|
||||
|
||||
func providerRequestID(c *gin.Context) string {
|
||||
if value, ok := c.Get(string(trace.RequestIDKey)); ok {
|
||||
if requestID, ok := value.(string); ok {
|
||||
return requestID
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func respondSanitizedProviderError(c *gin.Context, status int, code, category, message string) {
|
||||
response := gin.H{
|
||||
"error": message,
|
||||
"code": code,
|
||||
"category": category,
|
||||
}
|
||||
if requestID := providerRequestID(c); requestID != "" {
|
||||
response["request_id"] = requestID
|
||||
}
|
||||
c.JSON(status, response)
|
||||
}
|
||||
|
||||
// providerStatusCodePattern extracts the three-digit HTTP status from lowercased
// provider error text of the form "provider returned status NNN"
// (consumed by classifyProviderTestFailure).
var providerStatusCodePattern = regexp.MustCompile(`provider returned status\s+(\d{3})`)
|
||||
|
||||
// classifyProviderTestFailure maps a provider-test error onto a deterministic
// (code, category, message) triple for the sanitized API response. The raw
// error text is never returned to the client.
//
// Matching order matters: URL-validation failures are checked first, then an
// embedded HTTP status code, then transport-level failures; anything else
// falls through to the generic PROVIDER_TEST_FAILED.
func classifyProviderTestFailure(err error) (code string, category string, message string) {
	// Defensive: a nil error still yields the generic failure triple.
	if err == nil {
		return "PROVIDER_TEST_FAILED", "dispatch", "Provider test failed"
	}

	// All substring checks below run against lowercased, trimmed error text.
	errText := strings.ToLower(strings.TrimSpace(err.Error()))

	if strings.Contains(errText, "destination url validation failed") ||
		strings.Contains(errText, "invalid webhook url") ||
		strings.Contains(errText, "invalid discord webhook url") {
		return "PROVIDER_TEST_URL_INVALID", "validation", "Provider URL is invalid or blocked. Verify the URL and try again"
	}

	// The remote endpoint answered with an HTTP error status.
	if statusMatch := providerStatusCodePattern.FindStringSubmatch(errText); len(statusMatch) == 2 {
		switch statusMatch[1] {
		case "401", "403":
			return "PROVIDER_TEST_AUTH_REJECTED", "dispatch", "Provider rejected authentication. Verify your Gotify token"
		case "404":
			return "PROVIDER_TEST_ENDPOINT_NOT_FOUND", "dispatch", "Provider endpoint was not found. Verify the provider URL path"
		default:
			return "PROVIDER_TEST_REMOTE_REJECTED", "dispatch", fmt.Sprintf("Provider rejected the test request (HTTP %s)", statusMatch[1])
		}
	}

	// Transport-level failures: refine by cause where the text allows it.
	if strings.Contains(errText, "outbound request failed") || strings.Contains(errText, "failed to send webhook") {
		switch {
		case strings.Contains(errText, "dns lookup failed"):
			return "PROVIDER_TEST_DNS_FAILED", "dispatch", "DNS lookup failed for provider host. Verify the hostname in the provider URL"
		case strings.Contains(errText, "connection refused"):
			return "PROVIDER_TEST_CONNECTION_REFUSED", "dispatch", "Provider host refused the connection. Verify port and service availability"
		case strings.Contains(errText, "request timed out"):
			return "PROVIDER_TEST_TIMEOUT", "dispatch", "Provider request timed out. Verify network route and provider responsiveness"
		case strings.Contains(errText, "tls handshake failed"):
			return "PROVIDER_TEST_TLS_FAILED", "dispatch", "TLS handshake failed. Verify HTTPS certificate and URL scheme"
		}
		// Transport failure with no recognizable cause.
		return "PROVIDER_TEST_UNREACHABLE", "dispatch", "Could not reach provider endpoint. Verify URL, DNS, and network connectivity"
	}

	return "PROVIDER_TEST_FAILED", "dispatch", "Provider test failed"
}
|
||||
|
||||
// NewNotificationProviderHandler constructs a handler with default dependencies:
// no security service and an empty data root. Use
// NewNotificationProviderHandlerWithDeps for full wiring.
func NewNotificationProviderHandler(service *services.NotificationService) *NotificationProviderHandler {
	return NewNotificationProviderHandlerWithDeps(service, nil, "")
}
|
||||
@@ -32,6 +149,10 @@ func (h *NotificationProviderHandler) List(c *gin.Context) {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to list providers"})
|
||||
return
|
||||
}
|
||||
for i := range providers {
|
||||
providers[i].HasToken = providers[i].Token != ""
|
||||
providers[i].Token = ""
|
||||
}
|
||||
c.JSON(http.StatusOK, providers)
|
||||
}
|
||||
|
||||
@@ -40,24 +161,41 @@ func (h *NotificationProviderHandler) Create(c *gin.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
var provider models.NotificationProvider
|
||||
if err := c.ShouldBindJSON(&provider); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
var req notificationProviderUpsertRequest
|
||||
if err := c.ShouldBindJSON(&req); err != nil {
|
||||
respondSanitizedProviderError(c, http.StatusBadRequest, "INVALID_REQUEST", "validation", "Invalid notification provider payload")
|
||||
return
|
||||
}
|
||||
|
||||
providerType := strings.ToLower(strings.TrimSpace(req.Type))
|
||||
if providerType != "discord" && providerType != "gotify" && providerType != "webhook" {
|
||||
respondSanitizedProviderError(c, http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE", "validation", "Unsupported notification provider type")
|
||||
return
|
||||
}
|
||||
|
||||
provider := req.toModel()
|
||||
// Server-managed migration fields are set by the migration reconciliation logic
|
||||
// and must not be set from user input
|
||||
provider.Engine = ""
|
||||
provider.MigrationState = ""
|
||||
provider.MigrationError = ""
|
||||
provider.LastMigratedAt = nil
|
||||
provider.LegacyURL = ""
|
||||
|
||||
if err := h.service.CreateProvider(&provider); err != nil {
|
||||
// If it's a validation error from template parsing, return 400
|
||||
if isProviderValidationError(err) {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
respondSanitizedProviderError(c, http.StatusBadRequest, "PROVIDER_VALIDATION_FAILED", "validation", "Notification provider validation failed")
|
||||
return
|
||||
}
|
||||
if respondPermissionError(c, h.securityService, "notification_provider_save_failed", err, h.dataRoot) {
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to create provider"})
|
||||
respondSanitizedProviderError(c, http.StatusInternalServerError, "PROVIDER_CREATE_FAILED", "internal", "Failed to create provider")
|
||||
return
|
||||
}
|
||||
provider.HasToken = provider.Token != ""
|
||||
provider.Token = ""
|
||||
c.JSON(http.StatusCreated, provider)
|
||||
}
|
||||
|
||||
@@ -67,24 +205,62 @@ func (h *NotificationProviderHandler) Update(c *gin.Context) {
|
||||
}
|
||||
|
||||
id := c.Param("id")
|
||||
var provider models.NotificationProvider
|
||||
if err := c.ShouldBindJSON(&provider); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
var req notificationProviderUpsertRequest
|
||||
if err := c.ShouldBindJSON(&req); err != nil {
|
||||
respondSanitizedProviderError(c, http.StatusBadRequest, "INVALID_REQUEST", "validation", "Invalid notification provider payload")
|
||||
return
|
||||
}
|
||||
|
||||
// Check if existing provider is non-Discord (deprecated)
|
||||
var existing models.NotificationProvider
|
||||
if err := h.service.DB.Where("id = ?", id).First(&existing).Error; err != nil {
|
||||
if err == gorm.ErrRecordNotFound {
|
||||
respondSanitizedProviderError(c, http.StatusNotFound, "PROVIDER_NOT_FOUND", "validation", "Provider not found")
|
||||
return
|
||||
}
|
||||
respondSanitizedProviderError(c, http.StatusInternalServerError, "PROVIDER_READ_FAILED", "internal", "Failed to read provider")
|
||||
return
|
||||
}
|
||||
|
||||
if strings.TrimSpace(req.Type) != "" && strings.TrimSpace(req.Type) != existing.Type {
|
||||
respondSanitizedProviderError(c, http.StatusBadRequest, "PROVIDER_TYPE_IMMUTABLE", "validation", "Provider type cannot be changed")
|
||||
return
|
||||
}
|
||||
|
||||
providerType := strings.ToLower(strings.TrimSpace(existing.Type))
|
||||
if providerType != "discord" && providerType != "gotify" && providerType != "webhook" {
|
||||
respondSanitizedProviderError(c, http.StatusBadRequest, "UNSUPPORTED_PROVIDER_TYPE", "validation", "Unsupported notification provider type")
|
||||
return
|
||||
}
|
||||
|
||||
if providerType == "gotify" && strings.TrimSpace(req.Token) == "" {
|
||||
// Keep existing token if update payload omits token
|
||||
req.Token = existing.Token
|
||||
}
|
||||
req.Type = existing.Type
|
||||
|
||||
provider := req.toModel()
|
||||
provider.ID = id
|
||||
// Server-managed migration fields must not be modified via user input
|
||||
provider.Engine = ""
|
||||
provider.MigrationState = ""
|
||||
provider.MigrationError = ""
|
||||
provider.LastMigratedAt = nil
|
||||
provider.LegacyURL = ""
|
||||
|
||||
if err := h.service.UpdateProvider(&provider); err != nil {
|
||||
if isProviderValidationError(err) {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
respondSanitizedProviderError(c, http.StatusBadRequest, "PROVIDER_VALIDATION_FAILED", "validation", "Notification provider validation failed")
|
||||
return
|
||||
}
|
||||
if respondPermissionError(c, h.securityService, "notification_provider_save_failed", err, h.dataRoot) {
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to update provider"})
|
||||
respondSanitizedProviderError(c, http.StatusInternalServerError, "PROVIDER_UPDATE_FAILED", "internal", "Failed to update provider")
|
||||
return
|
||||
}
|
||||
provider.HasToken = provider.Token != ""
|
||||
provider.Token = ""
|
||||
c.JSON(http.StatusOK, provider)
|
||||
}
|
||||
|
||||
@@ -118,16 +294,44 @@ func (h *NotificationProviderHandler) Delete(c *gin.Context) {
|
||||
}
|
||||
|
||||
func (h *NotificationProviderHandler) Test(c *gin.Context) {
|
||||
var req notificationProviderTestRequest
|
||||
if err := c.ShouldBindJSON(&req); err != nil {
|
||||
respondSanitizedProviderError(c, http.StatusBadRequest, "INVALID_REQUEST", "validation", "Invalid test payload")
|
||||
return
|
||||
}
|
||||
|
||||
providerType := strings.ToLower(strings.TrimSpace(req.Type))
|
||||
if providerType == "gotify" && strings.TrimSpace(req.Token) != "" {
|
||||
respondSanitizedProviderError(c, http.StatusBadRequest, "TOKEN_WRITE_ONLY", "validation", "Gotify token is accepted only on provider create/update")
|
||||
return
|
||||
}
|
||||
|
||||
providerID := strings.TrimSpace(req.ID)
|
||||
if providerID == "" {
|
||||
respondSanitizedProviderError(c, http.StatusBadRequest, "MISSING_PROVIDER_ID", "validation", "Trusted provider ID is required for test dispatch")
|
||||
return
|
||||
}
|
||||
|
||||
var provider models.NotificationProvider
|
||||
if err := c.ShouldBindJSON(&provider); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
if err := h.service.DB.Where("id = ?", providerID).First(&provider).Error; err != nil {
|
||||
if err == gorm.ErrRecordNotFound {
|
||||
respondSanitizedProviderError(c, http.StatusNotFound, "PROVIDER_NOT_FOUND", "validation", "Provider not found")
|
||||
return
|
||||
}
|
||||
respondSanitizedProviderError(c, http.StatusInternalServerError, "PROVIDER_READ_FAILED", "internal", "Failed to read provider")
|
||||
return
|
||||
}
|
||||
|
||||
if strings.TrimSpace(provider.URL) == "" {
|
||||
respondSanitizedProviderError(c, http.StatusBadRequest, "PROVIDER_CONFIG_MISSING", "validation", "Trusted provider configuration is incomplete")
|
||||
return
|
||||
}
|
||||
|
||||
if err := h.service.TestProvider(provider); err != nil {
|
||||
// Create internal notification for the failure
|
||||
_, _ = h.service.Create(models.NotificationTypeError, "Test Failed", fmt.Sprintf("Provider %s test failed: %v", provider.Name, err))
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
_, _ = h.service.Create(models.NotificationTypeError, "Test Failed", fmt.Sprintf("Provider %s test failed", provider.Name))
|
||||
code, category, message := classifyProviderTestFailure(err)
|
||||
respondSanitizedProviderError(c, http.StatusBadRequest, code, category, message)
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, gin.H{"message": "Test notification sent"})
|
||||
@@ -146,9 +350,15 @@ func (h *NotificationProviderHandler) Templates(c *gin.Context) {
|
||||
func (h *NotificationProviderHandler) Preview(c *gin.Context) {
|
||||
var raw map[string]any
|
||||
if err := c.ShouldBindJSON(&raw); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
respondSanitizedProviderError(c, http.StatusBadRequest, "INVALID_REQUEST", "validation", "Invalid preview payload")
|
||||
return
|
||||
}
|
||||
if tokenValue, ok := raw["token"]; ok {
|
||||
if tokenText, isString := tokenValue.(string); isString && strings.TrimSpace(tokenText) != "" {
|
||||
respondSanitizedProviderError(c, http.StatusBadRequest, "TOKEN_WRITE_ONLY", "validation", "Gotify token is accepted only on provider create/update")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
var provider models.NotificationProvider
|
||||
// Marshal raw into provider to get proper types
|
||||
@@ -176,7 +386,8 @@ func (h *NotificationProviderHandler) Preview(c *gin.Context) {
|
||||
|
||||
rendered, parsed, err := h.service.RenderTemplate(provider, payload)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error(), "rendered": rendered})
|
||||
_ = rendered
|
||||
respondSanitizedProviderError(c, http.StatusBadRequest, "TEMPLATE_PREVIEW_FAILED", "validation", "Template preview failed")
|
||||
return
|
||||
}
|
||||
c.JSON(http.StatusOK, gin.H{"rendered": rendered, "parsed": parsed})
|
||||
|
||||
@@ -6,6 +6,7 @@ import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/stretchr/testify/assert"
|
||||
@@ -119,25 +120,60 @@ func TestNotificationProviderHandler_Templates(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_Test(t *testing.T) {
|
||||
r, _ := setupNotificationProviderTest(t)
|
||||
r, db := setupNotificationProviderTest(t)
|
||||
|
||||
// Test with invalid provider (should fail validation or service check)
|
||||
// Since we don't have a real shoutrrr backend mocked easily here without more work,
|
||||
// we expect it might fail or pass depending on service implementation.
|
||||
// Looking at service code (not shown but assumed), TestProvider likely calls shoutrrr.Send.
|
||||
// If URL is invalid, it should error.
|
||||
|
||||
provider := models.NotificationProvider{
|
||||
Type: "discord",
|
||||
URL: "invalid-url",
|
||||
stored := models.NotificationProvider{
|
||||
ID: "trusted-provider-id",
|
||||
Name: "Stored Provider",
|
||||
Type: "discord",
|
||||
URL: "invalid-url",
|
||||
Enabled: true,
|
||||
}
|
||||
body, _ := json.Marshal(provider)
|
||||
require.NoError(t, db.Create(&stored).Error)
|
||||
|
||||
payload := map[string]any{
|
||||
"id": stored.ID,
|
||||
"type": "discord",
|
||||
"url": "https://discord.com/api/webhooks/123/override",
|
||||
}
|
||||
body, _ := json.Marshal(payload)
|
||||
req, _ := http.NewRequest("POST", "/api/v1/notifications/providers/test", bytes.NewBuffer(body))
|
||||
w := httptest.NewRecorder()
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
// It should probably fail with 400
|
||||
assert.Equal(t, http.StatusBadRequest, w.Code)
|
||||
assert.Contains(t, w.Body.String(), "PROVIDER_TEST_URL_INVALID")
|
||||
}
|
||||
|
||||
// TestNotificationProviderHandler_Test_RequiresTrustedProviderID verifies that the
// Test endpoint refuses payloads without a provider ID: tests must dispatch against
// a stored (trusted) provider, not ad-hoc client-supplied configuration.
func TestNotificationProviderHandler_Test_RequiresTrustedProviderID(t *testing.T) {
	r, _ := setupNotificationProviderTest(t)

	// Payload carries type/url but deliberately no "id".
	payload := map[string]any{
		"type": "discord",
		"url":  "https://discord.com/api/webhooks/123/abc",
	}
	body, _ := json.Marshal(payload)
	req, _ := http.NewRequest("POST", "/api/v1/notifications/providers/test", bytes.NewBuffer(body))
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)

	assert.Equal(t, http.StatusBadRequest, w.Code)
	assert.Contains(t, w.Body.String(), "MISSING_PROVIDER_ID")
}
|
||||
|
||||
// TestNotificationProviderHandler_Test_ReturnsNotFoundForUnknownProvider verifies
// that the Test endpoint answers 404 with code PROVIDER_NOT_FOUND when the given
// provider ID does not exist in the database.
func TestNotificationProviderHandler_Test_ReturnsNotFoundForUnknownProvider(t *testing.T) {
	r, _ := setupNotificationProviderTest(t)

	payload := map[string]any{
		"id": "missing-provider-id",
	}
	body, _ := json.Marshal(payload)
	req, _ := http.NewRequest("POST", "/api/v1/notifications/providers/test", bytes.NewBuffer(body))
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)

	assert.Equal(t, http.StatusNotFound, w.Code)
	assert.Contains(t, w.Body.String(), "PROVIDER_NOT_FOUND")
}
|
||||
|
||||
func TestNotificationProviderHandler_Errors(t *testing.T) {
|
||||
@@ -168,8 +204,8 @@ func TestNotificationProviderHandler_InvalidCustomTemplate_Rejects(t *testing.T)
|
||||
// Create with invalid custom template should return 400
|
||||
provider := models.NotificationProvider{
|
||||
Name: "Bad",
|
||||
Type: "webhook",
|
||||
URL: "http://example.com",
|
||||
Type: "discord",
|
||||
URL: "https://discord.com/api/webhooks/123/abc",
|
||||
Template: "custom",
|
||||
Config: `{"broken": "{{.Title"}`,
|
||||
}
|
||||
@@ -182,8 +218,8 @@ func TestNotificationProviderHandler_InvalidCustomTemplate_Rejects(t *testing.T)
|
||||
// Create valid and then attempt update to invalid custom template
|
||||
provider = models.NotificationProvider{
|
||||
Name: "Good",
|
||||
Type: "webhook",
|
||||
URL: "http://example.com",
|
||||
Type: "discord",
|
||||
URL: "https://discord.com/api/webhooks/456/def",
|
||||
Template: "minimal",
|
||||
}
|
||||
body, _ = json.Marshal(provider)
|
||||
@@ -208,8 +244,8 @@ func TestNotificationProviderHandler_Preview(t *testing.T) {
|
||||
|
||||
// Minimal template preview
|
||||
provider := models.NotificationProvider{
|
||||
Type: "webhook",
|
||||
URL: "http://example.com",
|
||||
Type: "discord",
|
||||
URL: "https://discord.com/api/webhooks/123/abc",
|
||||
Template: "minimal",
|
||||
}
|
||||
body, _ := json.Marshal(provider)
|
||||
@@ -247,8 +283,8 @@ func TestNotificationProviderHandler_CreateRejectsDiscordIPHost(t *testing.T) {
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusBadRequest, w.Code)
|
||||
assert.Contains(t, w.Body.String(), "invalid Discord webhook URL")
|
||||
assert.Contains(t, w.Body.String(), "IP address hosts are not allowed")
|
||||
assert.Contains(t, w.Body.String(), "PROVIDER_VALIDATION_FAILED")
|
||||
assert.Contains(t, w.Body.String(), "validation")
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_CreateAcceptsDiscordHostname(t *testing.T) {
|
||||
@@ -266,3 +302,211 @@ func TestNotificationProviderHandler_CreateAcceptsDiscordHostname(t *testing.T)
|
||||
|
||||
assert.Equal(t, http.StatusCreated, w.Code)
|
||||
}
|
||||
|
||||
// TestNotificationProviderHandler_CreateIgnoresServerManagedMigrationFields verifies
// that Create drops client-supplied server-managed fields (id, engine,
// service_config, migration_state, migration_error, legacy_url, last_migrated_at):
// the stored row must have them empty/nil and the ID must be server-generated.
func TestNotificationProviderHandler_CreateIgnoresServerManagedMigrationFields(t *testing.T) {
	r, db := setupNotificationProviderTest(t)

	// Hostile payload: legitimate provider fields plus every server-managed field.
	payload := map[string]any{
		"name":                  "Create Ignore Migration",
		"type":                  "discord",
		"url":                   "https://discord.com/api/webhooks/123/abc",
		"template":              "minimal",
		"enabled":               true,
		"notify_proxy_hosts":    true,
		"notify_remote_servers": true,
		"notify_domains":        true,
		"notify_certs":          true,
		"notify_uptime":         true,
		"engine":                "notify_v1",
		"service_config":        `{"token":"attacker"}`,
		"migration_state":       "migrated",
		"migration_error":       "client-value",
		"legacy_url":            "https://malicious.example",
		"last_migrated_at":      "2020-01-01T00:00:00Z",
		"id":                    "client-controlled-id",
	}

	body, _ := json.Marshal(payload)
	req, _ := http.NewRequest("POST", "/api/v1/notifications/providers", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)

	assert.Equal(t, http.StatusCreated, w.Code)

	// The response must carry a server-generated ID, not the client's.
	var created models.NotificationProvider
	err := json.Unmarshal(w.Body.Bytes(), &created)
	require.NoError(t, err)
	require.NotEmpty(t, created.ID)
	assert.NotEqual(t, "client-controlled-id", created.ID)

	// The persisted row must have every server-managed field at its zero value.
	var dbProvider models.NotificationProvider
	err = db.First(&dbProvider, "id = ?", created.ID).Error
	require.NoError(t, err)
	assert.Empty(t, dbProvider.Engine)
	assert.Empty(t, dbProvider.ServiceConfig)
	assert.Empty(t, dbProvider.MigrationState)
	assert.Empty(t, dbProvider.MigrationError)
	assert.Empty(t, dbProvider.LegacyURL)
	assert.Nil(t, dbProvider.LastMigratedAt)
}
|
||||
|
||||
func TestNotificationProviderHandler_UpdatePreservesServerManagedMigrationFields(t *testing.T) {
|
||||
r, db := setupNotificationProviderTest(t)
|
||||
|
||||
now := time.Now().UTC().Round(time.Second)
|
||||
original := models.NotificationProvider{
|
||||
Name: "Original",
|
||||
Type: "discord",
|
||||
URL: "https://discord.com/api/webhooks/123/abc",
|
||||
Template: "minimal",
|
||||
Enabled: true,
|
||||
NotifyProxyHosts: true,
|
||||
NotifyRemoteServers: true,
|
||||
NotifyDomains: true,
|
||||
NotifyCerts: true,
|
||||
NotifyUptime: true,
|
||||
Engine: "notify_v1",
|
||||
ServiceConfig: `{"token":"server"}`,
|
||||
MigrationState: "migrated",
|
||||
MigrationError: "",
|
||||
LegacyURL: "discord://legacy",
|
||||
LastMigratedAt: &now,
|
||||
}
|
||||
require.NoError(t, db.Create(&original).Error)
|
||||
|
||||
payload := map[string]any{
|
||||
"name": "Updated Name",
|
||||
"type": "discord",
|
||||
"url": "https://discord.com/api/webhooks/456/def",
|
||||
"template": "minimal",
|
||||
"enabled": false,
|
||||
"notify_proxy_hosts": false,
|
||||
"notify_remote_servers": false,
|
||||
"notify_domains": false,
|
||||
"notify_certs": false,
|
||||
"notify_uptime": false,
|
||||
"engine": "legacy",
|
||||
"service_config": `{"token":"client-overwrite"}`,
|
||||
"migration_state": "failed",
|
||||
"migration_error": "client-error",
|
||||
"legacy_url": "https://attacker.example",
|
||||
"last_migrated_at": "1999-01-01T00:00:00Z",
|
||||
}
|
||||
|
||||
body, _ := json.Marshal(payload)
|
||||
req, _ := http.NewRequest("PUT", "/api/v1/notifications/providers/"+original.ID, bytes.NewBuffer(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
var dbProvider models.NotificationProvider
|
||||
require.NoError(t, db.First(&dbProvider, "id = ?", original.ID).Error)
|
||||
assert.Equal(t, "Updated Name", dbProvider.Name)
|
||||
assert.Equal(t, "notify_v1", dbProvider.Engine)
|
||||
assert.Equal(t, `{"token":"server"}`, dbProvider.ServiceConfig)
|
||||
assert.Equal(t, "migrated", dbProvider.MigrationState)
|
||||
assert.Equal(t, "", dbProvider.MigrationError)
|
||||
assert.Equal(t, "discord://legacy", dbProvider.LegacyURL)
|
||||
require.NotNil(t, dbProvider.LastMigratedAt)
|
||||
assert.Equal(t, now, dbProvider.LastMigratedAt.UTC().Round(time.Second))
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_List_ReturnsHasTokenTrue(t *testing.T) {
|
||||
r, db := setupNotificationProviderTest(t)
|
||||
|
||||
p := models.NotificationProvider{
|
||||
ID: "tok-true",
|
||||
Name: "Gotify With Token",
|
||||
Type: "gotify",
|
||||
URL: "https://gotify.example.com",
|
||||
Token: "secret-app-token",
|
||||
}
|
||||
require.NoError(t, db.Create(&p).Error)
|
||||
|
||||
req, _ := http.NewRequest("GET", "/api/v1/notifications/providers", http.NoBody)
|
||||
w := httptest.NewRecorder()
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
var raw []map[string]interface{}
|
||||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &raw))
|
||||
require.Len(t, raw, 1)
|
||||
assert.Equal(t, true, raw[0]["has_token"])
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_List_ReturnsHasTokenFalse(t *testing.T) {
|
||||
r, db := setupNotificationProviderTest(t)
|
||||
|
||||
p := models.NotificationProvider{
|
||||
ID: "tok-false",
|
||||
Name: "Discord No Token",
|
||||
Type: "discord",
|
||||
URL: "https://discord.com/api/webhooks/123/abc",
|
||||
}
|
||||
require.NoError(t, db.Create(&p).Error)
|
||||
|
||||
req, _ := http.NewRequest("GET", "/api/v1/notifications/providers", http.NoBody)
|
||||
w := httptest.NewRecorder()
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
var raw []map[string]interface{}
|
||||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &raw))
|
||||
require.Len(t, raw, 1)
|
||||
assert.Equal(t, false, raw[0]["has_token"])
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_List_NeverExposesRawToken(t *testing.T) {
|
||||
r, db := setupNotificationProviderTest(t)
|
||||
|
||||
p := models.NotificationProvider{
|
||||
ID: "tok-hidden",
|
||||
Name: "Secret Gotify",
|
||||
Type: "gotify",
|
||||
URL: "https://gotify.example.com",
|
||||
Token: "super-secret-value",
|
||||
}
|
||||
require.NoError(t, db.Create(&p).Error)
|
||||
|
||||
req, _ := http.NewRequest("GET", "/api/v1/notifications/providers", http.NoBody)
|
||||
w := httptest.NewRecorder()
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
assert.NotContains(t, w.Body.String(), "super-secret-value")
|
||||
|
||||
var raw []map[string]interface{}
|
||||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &raw))
|
||||
require.Len(t, raw, 1)
|
||||
_, hasTokenField := raw[0]["token"]
|
||||
assert.False(t, hasTokenField, "raw token field must not appear in JSON response")
|
||||
}
|
||||
|
||||
func TestNotificationProviderHandler_Create_ResponseHasHasToken(t *testing.T) {
|
||||
r, _ := setupNotificationProviderTest(t)
|
||||
|
||||
payload := map[string]interface{}{
|
||||
"name": "New Gotify",
|
||||
"type": "gotify",
|
||||
"url": "https://gotify.example.com",
|
||||
"token": "app-token-123",
|
||||
"template": "minimal",
|
||||
}
|
||||
body, _ := json.Marshal(payload)
|
||||
req, _ := http.NewRequest("POST", "/api/v1/notifications/providers", bytes.NewBuffer(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
r.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusCreated, w.Code)
|
||||
|
||||
var raw map[string]interface{}
|
||||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &raw))
|
||||
assert.Equal(t, true, raw["has_token"])
|
||||
assert.NotContains(t, w.Body.String(), "app-token-123")
|
||||
}
|
||||
|
||||
@@ -0,0 +1,115 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/models"
|
||||
"github.com/Wikid82/charon/backend/internal/services"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
// TestUpdate_BlockTypeMutationForNonDiscord covers lines 137-139
|
||||
func TestUpdate_BlockTypeMutationForNonDiscord(t *testing.T) {
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
|
||||
require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{}))
|
||||
|
||||
// Create existing non-Discord provider
|
||||
existing := &models.NotificationProvider{
|
||||
ID: "test-provider",
|
||||
Name: "Test Webhook",
|
||||
Type: "webhook",
|
||||
URL: "https://example.com/webhook",
|
||||
Enabled: true,
|
||||
}
|
||||
require.NoError(t, db.Create(existing).Error)
|
||||
|
||||
service := services.NewNotificationService(db)
|
||||
handler := NewNotificationProviderHandler(service)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
r := gin.New()
|
||||
r.Use(func(c *gin.Context) {
|
||||
c.Set("role", "admin")
|
||||
c.Set("userID", uint(1))
|
||||
c.Next()
|
||||
})
|
||||
r.PUT("/api/v1/notifications/providers/:id", handler.Update)
|
||||
|
||||
// Try to mutate type from webhook to discord (should be blocked)
|
||||
req := map[string]interface{}{
|
||||
"name": "Updated Name",
|
||||
"type": "discord", // Trying to change type
|
||||
"url": "https://discord.com/api/webhooks/123/abc",
|
||||
}
|
||||
body, _ := json.Marshal(req)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
httpReq := httptest.NewRequest(http.MethodPut, "/api/v1/notifications/providers/test-provider", bytes.NewReader(body))
|
||||
httpReq.Header.Set("Content-Type", "application/json")
|
||||
r.ServeHTTP(w, httpReq)
|
||||
|
||||
// Should block type mutation (lines 137-139)
|
||||
assert.Equal(t, http.StatusBadRequest, w.Code)
|
||||
|
||||
var response map[string]interface{}
|
||||
err = json.Unmarshal(w.Body.Bytes(), &response)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, "PROVIDER_TYPE_IMMUTABLE", response["code"])
|
||||
}
|
||||
|
||||
// TestUpdate_AllowTypeMutationForDiscord verifies Discord can be updated
|
||||
func TestUpdate_AllowTypeMutationForDiscord(t *testing.T) {
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
|
||||
require.NoError(t, db.AutoMigrate(&models.NotificationProvider{}, &models.Notification{}))
|
||||
|
||||
// Create existing Discord provider
|
||||
existing := &models.NotificationProvider{
|
||||
ID: "test-provider",
|
||||
Name: "Test Discord",
|
||||
Type: "discord",
|
||||
URL: "https://discord.com/api/webhooks/123/abc",
|
||||
Enabled: true,
|
||||
}
|
||||
require.NoError(t, db.Create(existing).Error)
|
||||
|
||||
service := services.NewNotificationService(db)
|
||||
handler := NewNotificationProviderHandler(service)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
r := gin.New()
|
||||
r.Use(func(c *gin.Context) {
|
||||
c.Set("role", "admin")
|
||||
c.Set("userID", uint(1))
|
||||
c.Next()
|
||||
})
|
||||
r.PUT("/api/v1/notifications/providers/:id", handler.Update)
|
||||
|
||||
// Try to update Discord (type remains discord - should be allowed)
|
||||
req := map[string]interface{}{
|
||||
"name": "Updated Discord",
|
||||
"type": "discord",
|
||||
"url": "https://discord.com/api/webhooks/456/def",
|
||||
}
|
||||
body, _ := json.Marshal(req)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
httpReq := httptest.NewRequest(http.MethodPut, "/api/v1/notifications/providers/test-provider", bytes.NewReader(body))
|
||||
httpReq.Header.Set("Content-Type", "application/json")
|
||||
r.ServeHTTP(w, httpReq)
|
||||
|
||||
// Should succeed
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
}
|
||||
@@ -24,6 +24,17 @@ func requireAdmin(c *gin.Context) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func requireAuthenticatedAdmin(c *gin.Context) bool {
|
||||
if _, exists := c.Get("userID"); !exists {
|
||||
c.JSON(http.StatusUnauthorized, gin.H{
|
||||
"error": "Authorization header required",
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
return requireAdmin(c)
|
||||
}
|
||||
|
||||
func isAdmin(c *gin.Context) bool {
|
||||
role, _ := c.Get("role")
|
||||
roleStr, _ := role.(string)
|
||||
|
||||
@@ -168,3 +168,34 @@ func TestLogPermissionAudit_ActorFallback(t *testing.T) {
|
||||
assert.Equal(t, "permissions", audit.EventCategory)
|
||||
assert.Contains(t, audit.Details, fmt.Sprintf("\"admin\":%v", false))
|
||||
}
|
||||
|
||||
func TestRequireAuthenticatedAdmin_NoUserID(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
ctx, rec := newTestContextWithRequest()
|
||||
result := requireAuthenticatedAdmin(ctx)
|
||||
assert.False(t, result)
|
||||
assert.Equal(t, http.StatusUnauthorized, rec.Code)
|
||||
assert.Contains(t, rec.Body.String(), "Authorization header required")
|
||||
}
|
||||
|
||||
func TestRequireAuthenticatedAdmin_UserIDPresentAndAdmin(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
ctx, _ := newTestContextWithRequest()
|
||||
ctx.Set("userID", uint(1))
|
||||
ctx.Set("role", "admin")
|
||||
result := requireAuthenticatedAdmin(ctx)
|
||||
assert.True(t, result)
|
||||
}
|
||||
|
||||
func TestRequireAuthenticatedAdmin_UserIDPresentButNotAdmin(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
ctx, rec := newTestContextWithRequest()
|
||||
ctx.Set("userID", uint(1))
|
||||
ctx.Set("role", "user")
|
||||
result := requireAuthenticatedAdmin(ctx)
|
||||
assert.False(t, result)
|
||||
assert.Equal(t, http.StatusForbidden, rec.Code)
|
||||
}
|
||||
|
||||
@@ -130,6 +130,7 @@ func generateForwardHostWarnings(forwardHost string) []ProxyHostWarning {
|
||||
// ProxyHostHandler handles CRUD operations for proxy hosts.
|
||||
type ProxyHostHandler struct {
|
||||
service *services.ProxyHostService
|
||||
db *gorm.DB
|
||||
caddyManager *caddy.Manager
|
||||
notificationService *services.NotificationService
|
||||
uptimeService *services.UptimeService
|
||||
@@ -183,6 +184,74 @@ func parseNullableUintField(value any, fieldName string) (*uint, bool, error) {
|
||||
}
|
||||
}
|
||||
|
||||
func (h *ProxyHostHandler) resolveAccessListReference(value any) (*uint, error) {
|
||||
if value == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
parsedID, _, parseErr := parseNullableUintField(value, "access_list_id")
|
||||
if parseErr == nil {
|
||||
return parsedID, nil
|
||||
}
|
||||
|
||||
uuidValue, isString := value.(string)
|
||||
if !isString {
|
||||
return nil, parseErr
|
||||
}
|
||||
|
||||
trimmed := strings.TrimSpace(uuidValue)
|
||||
if trimmed == "" {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var acl models.AccessList
|
||||
if err := h.db.Select("id").Where("uuid = ?", trimmed).First(&acl).Error; err != nil {
|
||||
if err == gorm.ErrRecordNotFound {
|
||||
return nil, fmt.Errorf("access list not found")
|
||||
}
|
||||
return nil, fmt.Errorf("failed to resolve access list")
|
||||
}
|
||||
|
||||
id := acl.ID
|
||||
return &id, nil
|
||||
}
|
||||
|
||||
func (h *ProxyHostHandler) resolveSecurityHeaderProfileReference(value any) (*uint, error) {
|
||||
if value == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
parsedID, _, parseErr := parseNullableUintField(value, "security_header_profile_id")
|
||||
if parseErr == nil {
|
||||
return parsedID, nil
|
||||
}
|
||||
|
||||
uuidValue, isString := value.(string)
|
||||
if !isString {
|
||||
return nil, parseErr
|
||||
}
|
||||
|
||||
trimmed := strings.TrimSpace(uuidValue)
|
||||
if trimmed == "" {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
if _, err := uuid.Parse(trimmed); err != nil {
|
||||
return nil, parseErr
|
||||
}
|
||||
|
||||
var profile models.SecurityHeaderProfile
|
||||
if err := h.db.Select("id").Where("uuid = ?", trimmed).First(&profile).Error; err != nil {
|
||||
if err == gorm.ErrRecordNotFound {
|
||||
return nil, fmt.Errorf("security header profile not found")
|
||||
}
|
||||
return nil, fmt.Errorf("failed to resolve security header profile")
|
||||
}
|
||||
|
||||
id := profile.ID
|
||||
return &id, nil
|
||||
}
|
||||
|
||||
func parseForwardPortField(value any) (int, error) {
|
||||
switch v := value.(type) {
|
||||
case float64:
|
||||
@@ -221,6 +290,7 @@ func parseForwardPortField(value any) (int, error) {
|
||||
func NewProxyHostHandler(db *gorm.DB, caddyManager *caddy.Manager, ns *services.NotificationService, uptimeService *services.UptimeService) *ProxyHostHandler {
|
||||
return &ProxyHostHandler{
|
||||
service: services.NewProxyHostService(db),
|
||||
db: db,
|
||||
caddyManager: caddyManager,
|
||||
notificationService: ns,
|
||||
uptimeService: uptimeService,
|
||||
@@ -252,8 +322,38 @@ func (h *ProxyHostHandler) List(c *gin.Context) {
|
||||
|
||||
// Create creates a new proxy host.
|
||||
func (h *ProxyHostHandler) Create(c *gin.Context) {
|
||||
var payload map[string]any
|
||||
if err := c.ShouldBindJSON(&payload); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
return
|
||||
}
|
||||
|
||||
if rawAccessListRef, ok := payload["access_list_id"]; ok {
|
||||
resolvedAccessListID, resolveErr := h.resolveAccessListReference(rawAccessListRef)
|
||||
if resolveErr != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": resolveErr.Error()})
|
||||
return
|
||||
}
|
||||
payload["access_list_id"] = resolvedAccessListID
|
||||
}
|
||||
|
||||
if rawSecurityHeaderRef, ok := payload["security_header_profile_id"]; ok {
|
||||
resolvedSecurityHeaderID, resolveErr := h.resolveSecurityHeaderProfileReference(rawSecurityHeaderRef)
|
||||
if resolveErr != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": resolveErr.Error()})
|
||||
return
|
||||
}
|
||||
payload["security_header_profile_id"] = resolvedSecurityHeaderID
|
||||
}
|
||||
|
||||
payloadBytes, marshalErr := json.Marshal(payload)
|
||||
if marshalErr != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid request payload"})
|
||||
return
|
||||
}
|
||||
|
||||
var host models.ProxyHost
|
||||
if err := c.ShouldBindJSON(&host); err != nil {
|
||||
if err := json.Unmarshal(payloadBytes, &host); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
return
|
||||
}
|
||||
@@ -313,6 +413,11 @@ func (h *ProxyHostHandler) Create(c *gin.Context) {
|
||||
)
|
||||
}
|
||||
|
||||
// Trigger immediate uptime monitor creation + health check (non-blocking)
|
||||
if h.uptimeService != nil {
|
||||
go h.uptimeService.SyncAndCheckForHost(host.ID)
|
||||
}
|
||||
|
||||
// Generate advisory warnings for private/Docker IPs
|
||||
warnings := generateForwardHostWarnings(host.ForwardHost)
|
||||
|
||||
@@ -430,12 +535,12 @@ func (h *ProxyHostHandler) Update(c *gin.Context) {
|
||||
host.CertificateID = parsedID
|
||||
}
|
||||
if v, ok := payload["access_list_id"]; ok {
|
||||
parsedID, _, parseErr := parseNullableUintField(v, "access_list_id")
|
||||
if parseErr != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": parseErr.Error()})
|
||||
resolvedAccessListID, resolveErr := h.resolveAccessListReference(v)
|
||||
if resolveErr != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": resolveErr.Error()})
|
||||
return
|
||||
}
|
||||
host.AccessListID = parsedID
|
||||
host.AccessListID = resolvedAccessListID
|
||||
}
|
||||
|
||||
if v, ok := payload["dns_provider_id"]; ok {
|
||||
@@ -453,54 +558,12 @@ func (h *ProxyHostHandler) Update(c *gin.Context) {
|
||||
|
||||
// Security Header Profile: update only if provided
|
||||
if v, ok := payload["security_header_profile_id"]; ok {
|
||||
logger := middleware.GetRequestLogger(c)
|
||||
// Sanitize user-provided values for log injection protection (CWE-117)
|
||||
safeUUID := sanitizeForLog(uuidStr)
|
||||
logger.WithField("host_uuid", safeUUID).WithField("raw_value", sanitizeForLog(fmt.Sprintf("%v", v))).Debug("Processing security_header_profile_id update")
|
||||
|
||||
if v == nil {
|
||||
logger.WithField("host_uuid", safeUUID).Debug("Setting security_header_profile_id to nil")
|
||||
host.SecurityHeaderProfileID = nil
|
||||
} else {
|
||||
conversionSuccess := false
|
||||
switch t := v.(type) {
|
||||
case float64:
|
||||
logger.Debug("Received security_header_profile_id as float64")
|
||||
if id, ok := safeFloat64ToUint(t); ok {
|
||||
host.SecurityHeaderProfileID = &id
|
||||
conversionSuccess = true
|
||||
logger.Info("Successfully converted security_header_profile_id from float64")
|
||||
} else {
|
||||
logger.Warn("Failed to convert security_header_profile_id from float64: value is negative or not a valid uint")
|
||||
}
|
||||
case int:
|
||||
logger.Debug("Received security_header_profile_id as int")
|
||||
if id, ok := safeIntToUint(t); ok {
|
||||
host.SecurityHeaderProfileID = &id
|
||||
conversionSuccess = true
|
||||
logger.Info("Successfully converted security_header_profile_id from int")
|
||||
} else {
|
||||
logger.Warn("Failed to convert security_header_profile_id from int: value is negative")
|
||||
}
|
||||
case string:
|
||||
logger.Debug("Received security_header_profile_id as string")
|
||||
if n, err := strconv.ParseUint(t, 10, 32); err == nil {
|
||||
id := uint(n)
|
||||
host.SecurityHeaderProfileID = &id
|
||||
conversionSuccess = true
|
||||
logger.WithField("host_uuid", safeUUID).WithField("profile_id", id).Info("Successfully converted security_header_profile_id from string")
|
||||
} else {
|
||||
logger.Warn("Failed to parse security_header_profile_id from string")
|
||||
}
|
||||
default:
|
||||
logger.Warn("Unsupported type for security_header_profile_id")
|
||||
}
|
||||
|
||||
if !conversionSuccess {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("invalid security_header_profile_id: unable to convert value %v of type %T to uint", v, v)})
|
||||
return
|
||||
}
|
||||
resolvedSecurityHeaderID, resolveErr := h.resolveSecurityHeaderProfileReference(v)
|
||||
if resolveErr != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": resolveErr.Error()})
|
||||
return
|
||||
}
|
||||
host.SecurityHeaderProfileID = resolvedSecurityHeaderID
|
||||
}
|
||||
|
||||
// Locations: replace only if provided
|
||||
@@ -587,11 +650,10 @@ func (h *ProxyHostHandler) Delete(c *gin.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
// check if we should also delete associated uptime monitors (query param: delete_uptime=true)
|
||||
deleteUptime := c.DefaultQuery("delete_uptime", "false") == "true"
|
||||
|
||||
if deleteUptime && h.uptimeService != nil {
|
||||
// Find all monitors referencing this proxy host and delete each
|
||||
// Always clean up associated uptime monitors when deleting a proxy host.
|
||||
// The query param delete_uptime=true is kept for backward compatibility but
|
||||
// cleanup now runs unconditionally to prevent orphaned monitors.
|
||||
if h.uptimeService != nil {
|
||||
var monitors []models.UptimeMonitor
|
||||
if err := h.uptimeService.DB.Where("proxy_host_id = ?", host.ID).Find(&monitors).Error; err == nil {
|
||||
for _, m := range monitors {
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/google/uuid"
|
||||
@@ -44,6 +45,219 @@ func setupTestRouter(t *testing.T) (*gin.Engine, *gorm.DB) {
|
||||
return r, db
|
||||
}
|
||||
|
||||
func setupTestRouterWithReferenceTables(t *testing.T) (*gin.Engine, *gorm.DB) {
|
||||
t.Helper()
|
||||
|
||||
dsn := "file:" + t.Name() + "?mode=memory&cache=shared"
|
||||
db, err := gorm.Open(sqlite.Open(dsn), &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, db.AutoMigrate(
|
||||
&models.ProxyHost{},
|
||||
&models.Location{},
|
||||
&models.AccessList{},
|
||||
&models.SecurityHeaderProfile{},
|
||||
&models.Notification{},
|
||||
&models.NotificationProvider{},
|
||||
))
|
||||
|
||||
ns := services.NewNotificationService(db)
|
||||
h := NewProxyHostHandler(db, nil, ns, nil)
|
||||
r := gin.New()
|
||||
api := r.Group("/api/v1")
|
||||
h.RegisterRoutes(api)
|
||||
|
||||
return r, db
|
||||
}
|
||||
|
||||
func setupTestRouterWithUptime(t *testing.T) (*gin.Engine, *gorm.DB) {
|
||||
t.Helper()
|
||||
|
||||
dsn := "file:" + t.Name() + "?mode=memory&cache=shared"
|
||||
db, err := gorm.Open(sqlite.Open(dsn), &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, db.AutoMigrate(
|
||||
&models.ProxyHost{},
|
||||
&models.Location{},
|
||||
&models.Notification{},
|
||||
&models.NotificationProvider{},
|
||||
&models.UptimeMonitor{},
|
||||
&models.UptimeHeartbeat{},
|
||||
&models.UptimeHost{},
|
||||
&models.Setting{},
|
||||
))
|
||||
|
||||
ns := services.NewNotificationService(db)
|
||||
us := services.NewUptimeService(db, ns)
|
||||
h := NewProxyHostHandler(db, nil, ns, us)
|
||||
r := gin.New()
|
||||
api := r.Group("/api/v1")
|
||||
h.RegisterRoutes(api)
|
||||
|
||||
return r, db
|
||||
}
|
||||
|
||||
func TestProxyHostHandler_ResolveAccessListReference_TargetedBranches(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
_, db := setupTestRouterWithReferenceTables(t)
|
||||
h := NewProxyHostHandler(db, nil, services.NewNotificationService(db), nil)
|
||||
|
||||
resolved, err := h.resolveAccessListReference(true)
|
||||
require.Error(t, err)
|
||||
require.Nil(t, resolved)
|
||||
require.Contains(t, err.Error(), "invalid access_list_id")
|
||||
|
||||
resolved, err = h.resolveAccessListReference(" ")
|
||||
require.NoError(t, err)
|
||||
require.Nil(t, resolved)
|
||||
|
||||
acl := models.AccessList{UUID: uuid.NewString(), Name: "resolve-acl", Type: "ip", Enabled: true}
|
||||
require.NoError(t, db.Create(&acl).Error)
|
||||
|
||||
resolved, err = h.resolveAccessListReference(acl.UUID)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, resolved)
|
||||
require.Equal(t, acl.ID, *resolved)
|
||||
}
|
||||
|
||||
func TestProxyHostHandler_ResolveSecurityHeaderReference_TargetedBranches(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
_, db := setupTestRouterWithReferenceTables(t)
|
||||
h := NewProxyHostHandler(db, nil, services.NewNotificationService(db), nil)
|
||||
|
||||
resolved, err := h.resolveSecurityHeaderProfileReference(" ")
|
||||
require.NoError(t, err)
|
||||
require.Nil(t, resolved)
|
||||
|
||||
profile := models.SecurityHeaderProfile{
|
||||
UUID: uuid.NewString(),
|
||||
Name: "resolve-security-profile",
|
||||
IsPreset: false,
|
||||
SecurityScore: 90,
|
||||
}
|
||||
require.NoError(t, db.Create(&profile).Error)
|
||||
|
||||
resolved, err = h.resolveSecurityHeaderProfileReference(profile.UUID)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, resolved)
|
||||
require.Equal(t, profile.ID, *resolved)
|
||||
|
||||
resolved, err = h.resolveSecurityHeaderProfileReference(uuid.NewString())
|
||||
require.Error(t, err)
|
||||
require.Nil(t, resolved)
|
||||
require.Contains(t, err.Error(), "security header profile not found")
|
||||
|
||||
require.NoError(t, db.Migrator().DropTable(&models.SecurityHeaderProfile{}))
|
||||
resolved, err = h.resolveSecurityHeaderProfileReference(uuid.NewString())
|
||||
require.Error(t, err)
|
||||
require.Nil(t, resolved)
|
||||
require.Contains(t, err.Error(), "failed to resolve security header profile")
|
||||
}
|
||||
|
||||
func TestProxyHostCreate_ReferenceResolution_TargetedBranches(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
router, db := setupTestRouterWithReferenceTables(t)
|
||||
|
||||
acl := models.AccessList{UUID: uuid.NewString(), Name: "create-acl", Type: "ip", Enabled: true}
|
||||
require.NoError(t, db.Create(&acl).Error)
|
||||
|
||||
profile := models.SecurityHeaderProfile{
|
||||
UUID: uuid.NewString(),
|
||||
Name: "create-security-profile",
|
||||
IsPreset: false,
|
||||
SecurityScore: 85,
|
||||
}
|
||||
require.NoError(t, db.Create(&profile).Error)
|
||||
|
||||
t.Run("creates host when references are valid UUIDs", func(t *testing.T) {
|
||||
body := map[string]any{
|
||||
"name": "Create Ref Success",
|
||||
"domain_names": "create-ref-success.example.com",
|
||||
"forward_scheme": "http",
|
||||
"forward_host": "localhost",
|
||||
"forward_port": 8080,
|
||||
"enabled": true,
|
||||
"access_list_id": acl.UUID,
|
||||
"security_header_profile_id": profile.UUID,
|
||||
}
|
||||
payload, err := json.Marshal(body)
|
||||
require.NoError(t, err)
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/proxy-hosts", bytes.NewReader(payload))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
resp := httptest.NewRecorder()
|
||||
router.ServeHTTP(resp, req)
|
||||
require.Equal(t, http.StatusCreated, resp.Code)
|
||||
|
||||
var created models.ProxyHost
|
||||
require.NoError(t, json.Unmarshal(resp.Body.Bytes(), &created))
|
||||
require.NotNil(t, created.AccessListID)
|
||||
require.Equal(t, acl.ID, *created.AccessListID)
|
||||
require.NotNil(t, created.SecurityHeaderProfileID)
|
||||
require.Equal(t, profile.ID, *created.SecurityHeaderProfileID)
|
||||
})
|
||||
|
||||
t.Run("returns bad request for invalid access list reference type", func(t *testing.T) {
|
||||
body := `{"name":"Create ACL Type Error","domain_names":"create-acl-type-error.example.com","forward_scheme":"http","forward_host":"localhost","forward_port":8080,"enabled":true,"access_list_id":true}`
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/proxy-hosts", strings.NewReader(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
resp := httptest.NewRecorder()
|
||||
router.ServeHTTP(resp, req)
|
||||
require.Equal(t, http.StatusBadRequest, resp.Code)
|
||||
})
|
||||
|
||||
t.Run("returns bad request for missing security header profile", func(t *testing.T) {
|
||||
body := map[string]any{
|
||||
"name": "Create Security Missing",
|
||||
"domain_names": "create-security-missing.example.com",
|
||||
"forward_scheme": "http",
|
||||
"forward_host": "localhost",
|
||||
"forward_port": 8080,
|
||||
"enabled": true,
|
||||
"security_header_profile_id": uuid.NewString(),
|
||||
}
|
||||
payload, err := json.Marshal(body)
|
||||
require.NoError(t, err)
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/proxy-hosts", bytes.NewReader(payload))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
resp := httptest.NewRecorder()
|
||||
router.ServeHTTP(resp, req)
|
||||
require.Equal(t, http.StatusBadRequest, resp.Code)
|
||||
})
|
||||
}
|
||||
|
||||
func TestProxyHostCreate_TriggersAsyncUptimeSyncWhenServiceConfigured(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
router, db := setupTestRouterWithUptime(t)
|
||||
|
||||
upstream := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}))
|
||||
t.Cleanup(upstream.Close)
|
||||
|
||||
domain := strings.TrimPrefix(upstream.URL, "http://")
|
||||
body := fmt.Sprintf(`{"name":"Uptime Hook","domain_names":"%s","forward_scheme":"http","forward_host":"app-service","forward_port":8080,"enabled":true}`, domain)
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/proxy-hosts", strings.NewReader(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
resp := httptest.NewRecorder()
|
||||
router.ServeHTTP(resp, req)
|
||||
require.Equal(t, http.StatusCreated, resp.Code)
|
||||
|
||||
var created models.ProxyHost
|
||||
require.NoError(t, db.Where("domain_names = ?", domain).First(&created).Error)
|
||||
|
||||
var count int64
|
||||
require.Eventually(t, func() bool {
|
||||
db.Model(&models.UptimeMonitor{}).Where("proxy_host_id = ?", created.ID).Count(&count)
|
||||
return count > 0
|
||||
}, 3*time.Second, 50*time.Millisecond)
|
||||
}
|
||||
|
||||
func TestProxyHostLifecycle(t *testing.T) {
|
||||
t.Parallel()
|
||||
router, _ := setupTestRouter(t)
|
||||
|
||||
@@ -75,6 +75,203 @@ func createTestSecurityHeaderProfile(t *testing.T, db *gorm.DB, name string) mod
|
||||
return profile
|
||||
}
|
||||
|
||||
// createTestAccessList creates an access list for testing.
|
||||
func createTestAccessList(t *testing.T, db *gorm.DB, name string) models.AccessList {
|
||||
t.Helper()
|
||||
acl := models.AccessList{
|
||||
UUID: uuid.NewString(),
|
||||
Name: name,
|
||||
Type: "ip",
|
||||
Enabled: true,
|
||||
}
|
||||
require.NoError(t, db.Create(&acl).Error)
|
||||
return acl
|
||||
}
|
||||
|
||||
func TestProxyHostUpdate_AccessListID_Transitions_NoUnrelatedMutation(t *testing.T) {
|
||||
t.Parallel()
|
||||
router, db := setupUpdateTestRouter(t)
|
||||
|
||||
aclOne := createTestAccessList(t, db, "ACL One")
|
||||
aclTwo := createTestAccessList(t, db, "ACL Two")
|
||||
|
||||
host := models.ProxyHost{
|
||||
UUID: uuid.NewString(),
|
||||
Name: "Access List Transition Host",
|
||||
DomainNames: "acl-transition.test.com",
|
||||
ForwardScheme: "http",
|
||||
ForwardHost: "localhost",
|
||||
ForwardPort: 8080,
|
||||
Enabled: true,
|
||||
SSLForced: true,
|
||||
Application: "none",
|
||||
AccessListID: &aclOne.ID,
|
||||
}
|
||||
require.NoError(t, db.Create(&host).Error)
|
||||
|
||||
assertUnrelatedFields := func(t *testing.T, current models.ProxyHost) {
|
||||
t.Helper()
|
||||
assert.Equal(t, "Access List Transition Host", current.Name)
|
||||
assert.Equal(t, "acl-transition.test.com", current.DomainNames)
|
||||
assert.Equal(t, "localhost", current.ForwardHost)
|
||||
assert.Equal(t, 8080, current.ForwardPort)
|
||||
assert.True(t, current.SSLForced)
|
||||
assert.Equal(t, "none", current.Application)
|
||||
}
|
||||
|
||||
runUpdate := func(t *testing.T, update map[string]any) {
|
||||
t.Helper()
|
||||
body, _ := json.Marshal(update)
|
||||
req := httptest.NewRequest(http.MethodPut, "/api/v1/proxy-hosts/"+host.UUID, bytes.NewReader(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
resp := httptest.NewRecorder()
|
||||
router.ServeHTTP(resp, req)
|
||||
require.Equal(t, http.StatusOK, resp.Code)
|
||||
}
|
||||
|
||||
// value -> value
|
||||
runUpdate(t, map[string]any{"access_list_id": aclTwo.ID})
|
||||
var updated models.ProxyHost
|
||||
require.NoError(t, db.First(&updated, "uuid = ?", host.UUID).Error)
|
||||
require.NotNil(t, updated.AccessListID)
|
||||
assert.Equal(t, aclTwo.ID, *updated.AccessListID)
|
||||
assertUnrelatedFields(t, updated)
|
||||
|
||||
// value -> null
|
||||
runUpdate(t, map[string]any{"access_list_id": nil})
|
||||
require.NoError(t, db.First(&updated, "uuid = ?", host.UUID).Error)
|
||||
assert.Nil(t, updated.AccessListID)
|
||||
assertUnrelatedFields(t, updated)
|
||||
|
||||
// null -> value
|
||||
runUpdate(t, map[string]any{"access_list_id": aclOne.ID})
|
||||
require.NoError(t, db.First(&updated, "uuid = ?", host.UUID).Error)
|
||||
require.NotNil(t, updated.AccessListID)
|
||||
assert.Equal(t, aclOne.ID, *updated.AccessListID)
|
||||
assertUnrelatedFields(t, updated)
|
||||
}
|
||||
|
||||
func TestProxyHostUpdate_AccessListID_UUIDNotFound_ReturnsBadRequest(t *testing.T) {
|
||||
t.Parallel()
|
||||
router, db := setupUpdateTestRouter(t)
|
||||
|
||||
host := createTestProxyHost(t, db, "acl-uuid-not-found")
|
||||
|
||||
updateBody := map[string]any{
|
||||
"name": "ACL UUID Not Found",
|
||||
"domain_names": "acl-uuid-not-found.test.com",
|
||||
"forward_scheme": "http",
|
||||
"forward_host": "localhost",
|
||||
"forward_port": 8080,
|
||||
"access_list_id": uuid.NewString(),
|
||||
}
|
||||
body, _ := json.Marshal(updateBody)
|
||||
|
||||
req := httptest.NewRequest(http.MethodPut, "/api/v1/proxy-hosts/"+host.UUID, bytes.NewReader(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
resp := httptest.NewRecorder()
|
||||
router.ServeHTTP(resp, req)
|
||||
|
||||
require.Equal(t, http.StatusBadRequest, resp.Code)
|
||||
|
||||
var result map[string]any
|
||||
require.NoError(t, json.Unmarshal(resp.Body.Bytes(), &result))
|
||||
assert.Contains(t, result["error"], "access list not found")
|
||||
}
|
||||
|
||||
func TestProxyHostUpdate_AccessListID_ResolveQueryFailure_ReturnsBadRequest(t *testing.T) {
|
||||
t.Parallel()
|
||||
router, db := setupUpdateTestRouter(t)
|
||||
|
||||
host := createTestProxyHost(t, db, "acl-resolve-query-failure")
|
||||
|
||||
require.NoError(t, db.Migrator().DropTable(&models.AccessList{}))
|
||||
|
||||
updateBody := map[string]any{
|
||||
"name": "ACL Resolve Query Failure",
|
||||
"domain_names": "acl-resolve-query-failure.test.com",
|
||||
"forward_scheme": "http",
|
||||
"forward_host": "localhost",
|
||||
"forward_port": 8080,
|
||||
"access_list_id": uuid.NewString(),
|
||||
}
|
||||
body, _ := json.Marshal(updateBody)
|
||||
|
||||
req := httptest.NewRequest(http.MethodPut, "/api/v1/proxy-hosts/"+host.UUID, bytes.NewReader(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
resp := httptest.NewRecorder()
|
||||
router.ServeHTTP(resp, req)
|
||||
|
||||
require.Equal(t, http.StatusBadRequest, resp.Code)
|
||||
|
||||
var result map[string]any
|
||||
require.NoError(t, json.Unmarshal(resp.Body.Bytes(), &result))
|
||||
assert.Contains(t, result["error"], "failed to resolve access list")
|
||||
}
|
||||
|
||||
func TestProxyHostUpdate_SecurityHeaderProfileID_Transitions_NoUnrelatedMutation(t *testing.T) {
|
||||
t.Parallel()
|
||||
router, db := setupUpdateTestRouter(t)
|
||||
|
||||
profileOne := createTestSecurityHeaderProfile(t, db, "Security Profile One")
|
||||
profileTwo := createTestSecurityHeaderProfile(t, db, "Security Profile Two")
|
||||
|
||||
host := models.ProxyHost{
|
||||
UUID: uuid.NewString(),
|
||||
Name: "Security Profile Transition Host",
|
||||
DomainNames: "security-transition.test.com",
|
||||
ForwardScheme: "http",
|
||||
ForwardHost: "localhost",
|
||||
ForwardPort: 9090,
|
||||
Enabled: true,
|
||||
SSLForced: true,
|
||||
Application: "none",
|
||||
SecurityHeaderProfileID: &profileOne.ID,
|
||||
}
|
||||
require.NoError(t, db.Create(&host).Error)
|
||||
|
||||
assertUnrelatedFields := func(t *testing.T, current models.ProxyHost) {
|
||||
t.Helper()
|
||||
assert.Equal(t, "Security Profile Transition Host", current.Name)
|
||||
assert.Equal(t, "security-transition.test.com", current.DomainNames)
|
||||
assert.Equal(t, "localhost", current.ForwardHost)
|
||||
assert.Equal(t, 9090, current.ForwardPort)
|
||||
assert.True(t, current.SSLForced)
|
||||
assert.Equal(t, "none", current.Application)
|
||||
}
|
||||
|
||||
runUpdate := func(t *testing.T, update map[string]any) {
|
||||
t.Helper()
|
||||
body, _ := json.Marshal(update)
|
||||
req := httptest.NewRequest(http.MethodPut, "/api/v1/proxy-hosts/"+host.UUID, bytes.NewReader(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
resp := httptest.NewRecorder()
|
||||
router.ServeHTTP(resp, req)
|
||||
require.Equal(t, http.StatusOK, resp.Code)
|
||||
}
|
||||
|
||||
// value -> value
|
||||
runUpdate(t, map[string]any{"security_header_profile_id": fmt.Sprintf("%d", profileTwo.ID)})
|
||||
var updated models.ProxyHost
|
||||
require.NoError(t, db.First(&updated, "uuid = ?", host.UUID).Error)
|
||||
require.NotNil(t, updated.SecurityHeaderProfileID)
|
||||
assert.Equal(t, profileTwo.ID, *updated.SecurityHeaderProfileID)
|
||||
assertUnrelatedFields(t, updated)
|
||||
|
||||
// value -> null
|
||||
runUpdate(t, map[string]any{"security_header_profile_id": ""})
|
||||
require.NoError(t, db.First(&updated, "uuid = ?", host.UUID).Error)
|
||||
assert.Nil(t, updated.SecurityHeaderProfileID)
|
||||
assertUnrelatedFields(t, updated)
|
||||
|
||||
// null -> value
|
||||
runUpdate(t, map[string]any{"security_header_profile_id": fmt.Sprintf("%d", profileOne.ID)})
|
||||
require.NoError(t, db.First(&updated, "uuid = ?", host.UUID).Error)
|
||||
require.NotNil(t, updated.SecurityHeaderProfileID)
|
||||
assert.Equal(t, profileOne.ID, *updated.SecurityHeaderProfileID)
|
||||
assertUnrelatedFields(t, updated)
|
||||
}
|
||||
|
||||
// TestProxyHostUpdate_EnableStandardHeaders_Null tests updating enable_standard_headers to null.
|
||||
func TestProxyHostUpdate_EnableStandardHeaders_Null(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
491
backend/internal/api/handlers/security_event_intake_test.go
Normal file
491
backend/internal/api/handlers/security_event_intake_test.go
Normal file
@@ -0,0 +1,491 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/models"
|
||||
"github.com/Wikid82/charon/backend/internal/services"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
// TestSecurityEventIntakeCompileSuccess tests that the handler is properly instantiated and route registration doesn't fail.
|
||||
// Blocker 1: Fix compile error - undefined handler reference.
|
||||
func TestSecurityEventIntakeCompileSuccess(t *testing.T) {
|
||||
db := SetupCompatibilityTestDB(t)
|
||||
|
||||
// This test validates that the handler can be instantiated with all required dependencies
|
||||
notificationService := services.NewNotificationService(db)
|
||||
service := services.NewEnhancedSecurityNotificationService(db)
|
||||
securityService := services.NewSecurityService(db)
|
||||
managementCIDRs := []string{"127.0.0.0/8"}
|
||||
|
||||
handler := NewSecurityNotificationHandlerWithDeps(
|
||||
service,
|
||||
securityService,
|
||||
"/data",
|
||||
notificationService,
|
||||
managementCIDRs,
|
||||
)
|
||||
|
||||
require.NotNil(t, handler, "Handler should be instantiated successfully")
|
||||
require.NotNil(t, handler.notificationService, "Notification service should be set")
|
||||
require.NotNil(t, handler.managementCIDRs, "Management CIDRs should be set")
|
||||
assert.Equal(t, 1, len(handler.managementCIDRs), "Management CIDRs should have one entry")
|
||||
}
|
||||
|
||||
// TestSecurityEventIntakeAuthLocalhost tests that localhost requests are accepted.
|
||||
// Blocker 2: Implement real source validation - localhost should be allowed.
|
||||
func TestSecurityEventIntakeAuthLocalhost(t *testing.T) {
|
||||
db := SetupCompatibilityTestDB(t)
|
||||
|
||||
notificationService := services.NewNotificationService(db)
|
||||
service := services.NewEnhancedSecurityNotificationService(db)
|
||||
managementCIDRs := []string{"10.0.0.0/8"}
|
||||
|
||||
handler := NewSecurityNotificationHandlerWithDeps(
|
||||
service,
|
||||
nil,
|
||||
"",
|
||||
notificationService,
|
||||
managementCIDRs,
|
||||
)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
|
||||
event := models.SecurityEvent{
|
||||
EventType: "waf_block",
|
||||
Severity: "error",
|
||||
Message: "WAF blocked request",
|
||||
ClientIP: "192.168.1.100",
|
||||
Path: "/admin",
|
||||
Timestamp: time.Now(),
|
||||
}
|
||||
body, _ := json.Marshal(event)
|
||||
|
||||
// Localhost IPv4 request
|
||||
c.Request = httptest.NewRequest("POST", "/api/v1/security/events", bytes.NewReader(body))
|
||||
c.Request.RemoteAddr = "127.0.0.1:12345"
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
handler.HandleSecurityEvent(c)
|
||||
|
||||
assert.Equal(t, http.StatusAccepted, w.Code, "Localhost should be accepted")
|
||||
}
|
||||
|
||||
// TestSecurityEventIntakeAuthManagementCIDR tests that management network requests are accepted.
|
||||
// Blocker 2: Implement real source validation - management CIDRs should be allowed.
|
||||
func TestSecurityEventIntakeAuthManagementCIDR(t *testing.T) {
|
||||
db := SetupCompatibilityTestDB(t)
|
||||
|
||||
notificationService := services.NewNotificationService(db)
|
||||
service := services.NewEnhancedSecurityNotificationService(db)
|
||||
managementCIDRs := []string{"192.168.1.0/24", "10.0.0.0/8"}
|
||||
|
||||
handler := NewSecurityNotificationHandlerWithDeps(
|
||||
service,
|
||||
nil,
|
||||
"",
|
||||
notificationService,
|
||||
managementCIDRs,
|
||||
)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
|
||||
event := models.SecurityEvent{
|
||||
EventType: "waf_block",
|
||||
Severity: "error",
|
||||
Message: "WAF blocked request",
|
||||
ClientIP: "8.8.8.8",
|
||||
Path: "/admin",
|
||||
Timestamp: time.Now(),
|
||||
}
|
||||
body, _ := json.Marshal(event)
|
||||
|
||||
// Request from management network
|
||||
c.Request = httptest.NewRequest("POST", "/api/v1/security/events", bytes.NewReader(body))
|
||||
c.Request.RemoteAddr = "192.168.1.50:12345"
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
handler.HandleSecurityEvent(c)
|
||||
|
||||
assert.Equal(t, http.StatusAccepted, w.Code, "Management CIDR should be accepted")
|
||||
}
|
||||
|
||||
// TestSecurityEventIntakeAuthUnauthorizedIP tests that external IPs are rejected.
|
||||
// Blocker 2: Implement real source validation - external IPs should be rejected.
|
||||
func TestSecurityEventIntakeAuthUnauthorizedIP(t *testing.T) {
|
||||
db := SetupCompatibilityTestDB(t)
|
||||
|
||||
notificationService := services.NewNotificationService(db)
|
||||
service := services.NewEnhancedSecurityNotificationService(db)
|
||||
managementCIDRs := []string{"192.168.1.0/24"}
|
||||
|
||||
handler := NewSecurityNotificationHandlerWithDeps(
|
||||
service,
|
||||
nil,
|
||||
"",
|
||||
notificationService,
|
||||
managementCIDRs,
|
||||
)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
|
||||
event := models.SecurityEvent{
|
||||
EventType: "waf_block",
|
||||
Severity: "error",
|
||||
Message: "WAF blocked request",
|
||||
ClientIP: "8.8.8.8",
|
||||
Path: "/admin",
|
||||
Timestamp: time.Now(),
|
||||
}
|
||||
body, _ := json.Marshal(event)
|
||||
|
||||
// External IP not in management network
|
||||
c.Request = httptest.NewRequest("POST", "/api/v1/security/events", bytes.NewReader(body))
|
||||
c.Request.RemoteAddr = "8.8.8.8:12345"
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
handler.HandleSecurityEvent(c)
|
||||
|
||||
assert.Equal(t, http.StatusForbidden, w.Code, "External IP should be rejected")
|
||||
|
||||
var response map[string]interface{}
|
||||
err := json.Unmarshal(w.Body.Bytes(), &response)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "unauthorized_source", response["error"])
|
||||
}
|
||||
|
||||
// TestSecurityEventIntakeAuthInvalidIP tests that malformed IPs are rejected.
|
||||
// Blocker 2: Implement real source validation - invalid IPs should be rejected.
|
||||
func TestSecurityEventIntakeAuthInvalidIP(t *testing.T) {
|
||||
db := SetupCompatibilityTestDB(t)
|
||||
|
||||
notificationService := services.NewNotificationService(db)
|
||||
service := services.NewEnhancedSecurityNotificationService(db)
|
||||
managementCIDRs := []string{"192.168.1.0/24"}
|
||||
|
||||
handler := NewSecurityNotificationHandlerWithDeps(
|
||||
service,
|
||||
nil,
|
||||
"",
|
||||
notificationService,
|
||||
managementCIDRs,
|
||||
)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
|
||||
event := models.SecurityEvent{
|
||||
EventType: "waf_block",
|
||||
Severity: "error",
|
||||
Message: "WAF blocked request",
|
||||
ClientIP: "8.8.8.8",
|
||||
Path: "/admin",
|
||||
Timestamp: time.Now(),
|
||||
}
|
||||
body, _ := json.Marshal(event)
|
||||
|
||||
// Malformed IP
|
||||
c.Request = httptest.NewRequest("POST", "/api/v1/security/events", bytes.NewReader(body))
|
||||
c.Request.RemoteAddr = "invalid-ip:12345"
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
handler.HandleSecurityEvent(c)
|
||||
|
||||
assert.Equal(t, http.StatusForbidden, w.Code, "Invalid IP should be rejected")
|
||||
|
||||
var response map[string]interface{}
|
||||
err := json.Unmarshal(w.Body.Bytes(), &response)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "invalid_source", response["error"])
|
||||
}
|
||||
|
||||
// TestSecurityEventIntakeDispatchInvoked tests that notification dispatch is invoked.
|
||||
// Blocker 3: Implement actual dispatch - remove TODO/no-op behavior.
|
||||
func TestSecurityEventIntakeDispatchInvoked(t *testing.T) {
|
||||
db := SetupCompatibilityTestDB(t)
|
||||
|
||||
// Create a Discord provider with security notifications enabled
|
||||
provider := &models.NotificationProvider{
|
||||
Name: "Discord Security Alerts",
|
||||
Type: "discord",
|
||||
URL: "https://discord.com/api/webhooks/123/token",
|
||||
Enabled: true,
|
||||
NotifySecurityWAFBlocks: true,
|
||||
NotifySecurityACLDenies: true,
|
||||
NotifySecurityRateLimitHits: true,
|
||||
NotifySecurityCrowdSecDecisions: true,
|
||||
}
|
||||
require.NoError(t, db.Create(provider).Error)
|
||||
|
||||
notificationService := services.NewNotificationService(db)
|
||||
service := services.NewEnhancedSecurityNotificationService(db)
|
||||
managementCIDRs := []string{"127.0.0.0/8"}
|
||||
|
||||
handler := NewSecurityNotificationHandlerWithDeps(
|
||||
service,
|
||||
nil,
|
||||
"",
|
||||
notificationService,
|
||||
managementCIDRs,
|
||||
)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
|
||||
event := models.SecurityEvent{
|
||||
EventType: "waf_block",
|
||||
Severity: "error",
|
||||
Message: "WAF blocked suspicious request",
|
||||
ClientIP: "192.168.1.100",
|
||||
Path: "/admin/login",
|
||||
Timestamp: time.Now(),
|
||||
Metadata: map[string]any{
|
||||
"rule_id": "920100",
|
||||
"matched_data": "suspicious pattern",
|
||||
},
|
||||
}
|
||||
body, _ := json.Marshal(event)
|
||||
|
||||
c.Request = httptest.NewRequest("POST", "/api/v1/security/events", bytes.NewReader(body))
|
||||
c.Request.RemoteAddr = "127.0.0.1:12345"
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
handler.HandleSecurityEvent(c)
|
||||
|
||||
assert.Equal(t, http.StatusAccepted, w.Code, "Event should be accepted")
|
||||
|
||||
var response map[string]interface{}
|
||||
err := json.Unmarshal(w.Body.Bytes(), &response)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "Security event recorded", response["message"])
|
||||
assert.Equal(t, "waf_block", response["event_type"])
|
||||
assert.NotEmpty(t, response["timestamp"])
|
||||
}
|
||||
|
||||
// TestSecurityEventIntakeR6Intact tests that legacy PUT endpoint returns 410 Gone.
|
||||
// Constraint: Keep R6 410/no-mutation behavior intact.
|
||||
func TestSecurityEventIntakeR6Intact(t *testing.T) {
|
||||
// Create in-memory database with User table for this test
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, db.AutoMigrate(
|
||||
&models.User{},
|
||||
&models.NotificationProvider{},
|
||||
&models.NotificationConfig{},
|
||||
&models.Setting{},
|
||||
))
|
||||
|
||||
// Enable feature flag by default in tests
|
||||
featureFlag := &models.Setting{
|
||||
Key: "feature.notifications.security_provider_events.enabled",
|
||||
Value: "true",
|
||||
Type: "bool",
|
||||
Category: "feature",
|
||||
}
|
||||
require.NoError(t, db.Create(featureFlag).Error)
|
||||
|
||||
// Create an admin user for authentication
|
||||
adminUser := &models.User{
|
||||
Email: "admin@example.com",
|
||||
Name: "Admin User",
|
||||
PasswordHash: "$2a$10$abcdefghijklmnopqrstuvwxyz", // Dummy bcrypt hash
|
||||
Role: "admin",
|
||||
Enabled: true,
|
||||
}
|
||||
require.NoError(t, db.Create(adminUser).Error)
|
||||
|
||||
service := services.NewEnhancedSecurityNotificationService(db)
|
||||
handler := NewSecurityNotificationHandler(service)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
router := gin.New()
|
||||
|
||||
// Add auth middleware that sets user context
|
||||
router.Use(func(c *gin.Context) {
|
||||
c.Set("user_id", adminUser.ID)
|
||||
c.Set("user", adminUser)
|
||||
c.Set("role", "admin")
|
||||
c.Next()
|
||||
})
|
||||
|
||||
router.PUT("/api/v1/notifications/settings/security", handler.UpdateSettings)
|
||||
|
||||
reqBody := map[string]interface{}{
|
||||
"enabled": true,
|
||||
"security_waf_enabled": true,
|
||||
}
|
||||
body, _ := json.Marshal(reqBody)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
req := httptest.NewRequest("PUT", "/api/v1/notifications/settings/security", bytes.NewReader(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
router.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusGone, w.Code, "PUT should return 410 Gone")
|
||||
|
||||
var response map[string]interface{}
|
||||
err = json.Unmarshal(w.Body.Bytes(), &response)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "legacy_security_settings_deprecated", response["error"])
|
||||
assert.Equal(t, "LEGACY_SECURITY_SETTINGS_DEPRECATED", response["code"])
|
||||
}
|
||||
|
||||
// TestSecurityEventIntakeDiscordOnly tests Discord-only dispatch enforcement.
|
||||
// Constraint: Keep Discord-only dispatch enforcement for this rollout stage.
|
||||
func TestSecurityEventIntakeDiscordOnly(t *testing.T) {
|
||||
db := SetupCompatibilityTestDB(t)
|
||||
|
||||
// Create various provider types
|
||||
discordProvider := &models.NotificationProvider{
|
||||
Name: "Discord",
|
||||
Type: "discord",
|
||||
URL: "https://discord.com/api/webhooks/123/token",
|
||||
Enabled: true,
|
||||
NotifySecurityWAFBlocks: true,
|
||||
}
|
||||
require.NoError(t, db.Create(discordProvider).Error)
|
||||
|
||||
// Non-Discord providers should also work with supportsJSONTemplates
|
||||
webhookProvider := &models.NotificationProvider{
|
||||
Name: "Webhook",
|
||||
Type: "webhook",
|
||||
URL: "https://example.com/webhook",
|
||||
Enabled: true,
|
||||
NotifySecurityWAFBlocks: true,
|
||||
}
|
||||
require.NoError(t, db.Create(webhookProvider).Error)
|
||||
|
||||
notificationService := services.NewNotificationService(db)
|
||||
service := services.NewEnhancedSecurityNotificationService(db)
|
||||
managementCIDRs := []string{"127.0.0.0/8"}
|
||||
|
||||
handler := NewSecurityNotificationHandlerWithDeps(
|
||||
service,
|
||||
nil,
|
||||
"",
|
||||
notificationService,
|
||||
managementCIDRs,
|
||||
)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
|
||||
event := models.SecurityEvent{
|
||||
EventType: "waf_block",
|
||||
Severity: "error",
|
||||
Message: "WAF blocked request",
|
||||
ClientIP: "192.168.1.100",
|
||||
Path: "/admin",
|
||||
Timestamp: time.Now(),
|
||||
}
|
||||
body, _ := json.Marshal(event)
|
||||
|
||||
c.Request = httptest.NewRequest("POST", "/api/v1/security/events", bytes.NewReader(body))
|
||||
c.Request.RemoteAddr = "127.0.0.1:12345"
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
handler.HandleSecurityEvent(c)
|
||||
|
||||
assert.Equal(t, http.StatusAccepted, w.Code)
|
||||
|
||||
// Validate both Discord and webhook providers exist (JSON-capable)
|
||||
var providers []models.NotificationProvider
|
||||
err := db.Where("enabled = ?", true).Find(&providers).Error
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, 2, len(providers), "Both Discord and webhook providers should be enabled")
|
||||
}
|
||||
|
||||
// TestSecurityEventIntakeMalformedPayload tests rejection of malformed event payloads.
|
||||
func TestSecurityEventIntakeMalformedPayload(t *testing.T) {
|
||||
db := SetupCompatibilityTestDB(t)
|
||||
|
||||
notificationService := services.NewNotificationService(db)
|
||||
service := services.NewEnhancedSecurityNotificationService(db)
|
||||
managementCIDRs := []string{"127.0.0.0/8"}
|
||||
|
||||
handler := NewSecurityNotificationHandlerWithDeps(
|
||||
service,
|
||||
nil,
|
||||
"",
|
||||
notificationService,
|
||||
managementCIDRs,
|
||||
)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
|
||||
// Malformed JSON
|
||||
c.Request = httptest.NewRequest("POST", "/api/v1/security/events", bytes.NewReader([]byte("{invalid json")))
|
||||
c.Request.RemoteAddr = "127.0.0.1:12345"
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
handler.HandleSecurityEvent(c)
|
||||
|
||||
assert.Equal(t, http.StatusBadRequest, w.Code, "Malformed JSON should be rejected")
|
||||
|
||||
var response map[string]interface{}
|
||||
err := json.Unmarshal(w.Body.Bytes(), &response)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "Invalid security event payload", response["error"])
|
||||
}
|
||||
|
||||
// TestSecurityEventIntakeIPv6Localhost tests that IPv6 localhost is accepted.
|
||||
func TestSecurityEventIntakeIPv6Localhost(t *testing.T) {
|
||||
db := SetupCompatibilityTestDB(t)
|
||||
|
||||
notificationService := services.NewNotificationService(db)
|
||||
service := services.NewEnhancedSecurityNotificationService(db)
|
||||
managementCIDRs := []string{"10.0.0.0/8"}
|
||||
|
||||
handler := NewSecurityNotificationHandlerWithDeps(
|
||||
service,
|
||||
nil,
|
||||
"",
|
||||
notificationService,
|
||||
managementCIDRs,
|
||||
)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
|
||||
event := models.SecurityEvent{
|
||||
EventType: "acl_deny",
|
||||
Severity: "warn",
|
||||
Message: "ACL denied request",
|
||||
ClientIP: "::1",
|
||||
Path: "/api/admin",
|
||||
Timestamp: time.Now(),
|
||||
}
|
||||
body, _ := json.Marshal(event)
|
||||
|
||||
// IPv6 localhost
|
||||
c.Request = httptest.NewRequest("POST", "/api/v1/security/events", bytes.NewReader(body))
|
||||
c.Request.RemoteAddr = "[::1]:12345"
|
||||
c.Request.Header.Set("Content-Type", "application/json")
|
||||
|
||||
handler.HandleSecurityEvent(c)
|
||||
|
||||
assert.Equal(t, http.StatusAccepted, w.Code, "IPv6 localhost should be accepted")
|
||||
}
|
||||
@@ -59,6 +59,10 @@ func TestSecurityHandler_ReloadGeoIP_NotInitialized(t *testing.T) {
|
||||
|
||||
h := NewSecurityHandler(config.SecurityConfig{}, nil, nil)
|
||||
r := gin.New()
|
||||
r.Use(func(c *gin.Context) {
|
||||
c.Set("role", "admin")
|
||||
c.Next()
|
||||
})
|
||||
r.POST("/security/geoip/reload", h.ReloadGeoIP)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
@@ -75,6 +79,10 @@ func TestSecurityHandler_ReloadGeoIP_LoadError(t *testing.T) {
|
||||
h.SetGeoIPService(&services.GeoIPService{}) // dbPath empty => Load() will error
|
||||
|
||||
r := gin.New()
|
||||
r.Use(func(c *gin.Context) {
|
||||
c.Set("role", "admin")
|
||||
c.Next()
|
||||
})
|
||||
r.POST("/security/geoip/reload", h.ReloadGeoIP)
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
@@ -90,6 +98,10 @@ func TestSecurityHandler_LookupGeoIP_MissingIPAddress(t *testing.T) {
|
||||
|
||||
h := NewSecurityHandler(config.SecurityConfig{}, nil, nil)
|
||||
r := gin.New()
|
||||
r.Use(func(c *gin.Context) {
|
||||
c.Set("role", "admin")
|
||||
c.Next()
|
||||
})
|
||||
r.POST("/security/geoip/lookup", h.LookupGeoIP)
|
||||
|
||||
payload := []byte(`{}`)
|
||||
@@ -109,6 +121,10 @@ func TestSecurityHandler_LookupGeoIP_ServiceUnavailable(t *testing.T) {
|
||||
h.SetGeoIPService(&services.GeoIPService{}) // present but not loaded
|
||||
|
||||
r := gin.New()
|
||||
r.Use(func(c *gin.Context) {
|
||||
c.Set("role", "admin")
|
||||
c.Next()
|
||||
})
|
||||
r.POST("/security/geoip/lookup", h.LookupGeoIP)
|
||||
|
||||
payload, _ := json.Marshal(map[string]string{"ip_address": "8.8.8.8"})
|
||||
|
||||
@@ -261,6 +261,10 @@ func (h *SecurityHandler) GetConfig(c *gin.Context) {
|
||||
|
||||
// UpdateConfig creates or updates the SecurityConfig in DB
|
||||
func (h *SecurityHandler) UpdateConfig(c *gin.Context) {
|
||||
if !requireAdmin(c) {
|
||||
return
|
||||
}
|
||||
|
||||
var payload models.SecurityConfig
|
||||
if err := c.ShouldBindJSON(&payload); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid payload"})
|
||||
@@ -290,6 +294,10 @@ func (h *SecurityHandler) UpdateConfig(c *gin.Context) {
|
||||
|
||||
// GenerateBreakGlass generates a break-glass token and returns the plaintext token once
|
||||
func (h *SecurityHandler) GenerateBreakGlass(c *gin.Context) {
|
||||
if !requireAdmin(c) {
|
||||
return
|
||||
}
|
||||
|
||||
token, err := h.svc.GenerateBreakGlassToken("default")
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to generate break-glass token"})
|
||||
@@ -316,6 +324,10 @@ func (h *SecurityHandler) ListDecisions(c *gin.Context) {
|
||||
|
||||
// CreateDecision creates a manual decision (override) - for now no checks besides payload
|
||||
func (h *SecurityHandler) CreateDecision(c *gin.Context) {
|
||||
if !requireAdmin(c) {
|
||||
return
|
||||
}
|
||||
|
||||
var payload models.SecurityDecision
|
||||
if err := c.ShouldBindJSON(&payload); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid payload"})
|
||||
@@ -371,6 +383,10 @@ func (h *SecurityHandler) ListRuleSets(c *gin.Context) {
|
||||
|
||||
// UpsertRuleSet uploads or updates a ruleset
|
||||
func (h *SecurityHandler) UpsertRuleSet(c *gin.Context) {
|
||||
if !requireAdmin(c) {
|
||||
return
|
||||
}
|
||||
|
||||
var payload models.SecurityRuleSet
|
||||
if err := c.ShouldBindJSON(&payload); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid payload"})
|
||||
@@ -401,6 +417,10 @@ func (h *SecurityHandler) UpsertRuleSet(c *gin.Context) {
|
||||
|
||||
// DeleteRuleSet removes a ruleset by id
|
||||
func (h *SecurityHandler) DeleteRuleSet(c *gin.Context) {
|
||||
if !requireAdmin(c) {
|
||||
return
|
||||
}
|
||||
|
||||
idParam := c.Param("id")
|
||||
if idParam == "" {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "id is required"})
|
||||
@@ -610,6 +630,10 @@ func (h *SecurityHandler) GetGeoIPStatus(c *gin.Context) {
|
||||
|
||||
// ReloadGeoIP reloads the GeoIP database from disk.
|
||||
func (h *SecurityHandler) ReloadGeoIP(c *gin.Context) {
|
||||
if !requireAdmin(c) {
|
||||
return
|
||||
}
|
||||
|
||||
if h.geoipSvc == nil {
|
||||
c.JSON(http.StatusServiceUnavailable, gin.H{
|
||||
"error": "GeoIP service not initialized",
|
||||
@@ -641,6 +665,10 @@ func (h *SecurityHandler) ReloadGeoIP(c *gin.Context) {
|
||||
|
||||
// LookupGeoIP performs a GeoIP lookup for a given IP address.
|
||||
func (h *SecurityHandler) LookupGeoIP(c *gin.Context) {
|
||||
if !requireAdmin(c) {
|
||||
return
|
||||
}
|
||||
|
||||
var req struct {
|
||||
IPAddress string `json:"ip_address" binding:"required"`
|
||||
}
|
||||
@@ -707,6 +735,10 @@ func (h *SecurityHandler) GetWAFExclusions(c *gin.Context) {
|
||||
|
||||
// AddWAFExclusion adds a rule exclusion to the WAF configuration
|
||||
func (h *SecurityHandler) AddWAFExclusion(c *gin.Context) {
|
||||
if !requireAdmin(c) {
|
||||
return
|
||||
}
|
||||
|
||||
var req WAFExclusionRequest
|
||||
if err := c.ShouldBindJSON(&req); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "rule_id is required"})
|
||||
@@ -786,6 +818,10 @@ func (h *SecurityHandler) AddWAFExclusion(c *gin.Context) {
|
||||
|
||||
// DeleteWAFExclusion removes a rule exclusion by rule_id
|
||||
func (h *SecurityHandler) DeleteWAFExclusion(c *gin.Context) {
|
||||
if !requireAdmin(c) {
|
||||
return
|
||||
}
|
||||
|
||||
ruleIDParam := c.Param("rule_id")
|
||||
if ruleIDParam == "" {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "rule_id is required"})
|
||||
|
||||
@@ -1,69 +0,0 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/config"
|
||||
"github.com/Wikid82/charon/backend/internal/models"
|
||||
)
|
||||
|
||||
func TestSecurityHandler_GetConfigAndUpdateConfig(t *testing.T) {
|
||||
t.Helper()
|
||||
// Setup DB and router
|
||||
db, err := gorm.Open(sqlite.Open("file::memory:?mode=memory&cache=shared"), &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}))
|
||||
|
||||
cfg := config.SecurityConfig{}
|
||||
h := NewSecurityHandler(cfg, db, nil)
|
||||
|
||||
// Create a gin test context for GetConfig when no config exists
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
req := httptest.NewRequest("GET", "/security/config", http.NoBody)
|
||||
c.Request = req
|
||||
h.GetConfig(c)
|
||||
require.Equal(t, http.StatusOK, w.Code)
|
||||
var body map[string]any
|
||||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &body))
|
||||
// Should return config: null
|
||||
if _, ok := body["config"]; !ok {
|
||||
t.Fatalf("expected 'config' in response, got %v", body)
|
||||
}
|
||||
|
||||
// Now update config
|
||||
w = httptest.NewRecorder()
|
||||
c, _ = gin.CreateTestContext(w)
|
||||
payload := `{"name":"default","admin_whitelist":"127.0.0.1/32"}`
|
||||
req = httptest.NewRequest("POST", "/security/config", strings.NewReader(payload))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
c.Request = req
|
||||
h.UpdateConfig(c)
|
||||
require.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
// Now call GetConfig again and ensure config is returned
|
||||
w = httptest.NewRecorder()
|
||||
c, _ = gin.CreateTestContext(w)
|
||||
req = httptest.NewRequest("GET", "/security/config", http.NoBody)
|
||||
c.Request = req
|
||||
h.GetConfig(c)
|
||||
require.Equal(t, http.StatusOK, w.Code)
|
||||
var body2 map[string]any
|
||||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &body2))
|
||||
cfgVal, ok := body2["config"].(map[string]any)
|
||||
if !ok {
|
||||
t.Fatalf("expected config object, got %v", body2["config"])
|
||||
}
|
||||
if cfgVal["admin_whitelist"] != "127.0.0.1/32" {
|
||||
t.Fatalf("unexpected admin_whitelist: %v", cfgVal["admin_whitelist"])
|
||||
}
|
||||
}
|
||||
@@ -100,6 +100,10 @@ func TestSecurityHandler_CreateDecision_SQLInjection(t *testing.T) {
|
||||
h := NewSecurityHandler(cfg, db, nil)
|
||||
|
||||
router := gin.New()
|
||||
router.Use(func(c *gin.Context) {
|
||||
c.Set("role", "admin")
|
||||
c.Next()
|
||||
})
|
||||
router.POST("/api/v1/security/decisions", h.CreateDecision)
|
||||
|
||||
// Attempt SQL injection via payload fields
|
||||
@@ -143,6 +147,10 @@ func TestSecurityHandler_UpsertRuleSet_MassivePayload(t *testing.T) {
|
||||
h := NewSecurityHandler(cfg, db, nil)
|
||||
|
||||
router := gin.New()
|
||||
router.Use(func(c *gin.Context) {
|
||||
c.Set("role", "admin")
|
||||
c.Next()
|
||||
})
|
||||
router.POST("/api/v1/security/rulesets", h.UpsertRuleSet)
|
||||
|
||||
// Try to submit a 3MB payload (should be rejected by service)
|
||||
@@ -175,6 +183,10 @@ func TestSecurityHandler_UpsertRuleSet_EmptyName(t *testing.T) {
|
||||
h := NewSecurityHandler(cfg, db, nil)
|
||||
|
||||
router := gin.New()
|
||||
router.Use(func(c *gin.Context) {
|
||||
c.Set("role", "admin")
|
||||
c.Next()
|
||||
})
|
||||
router.POST("/api/v1/security/rulesets", h.UpsertRuleSet)
|
||||
|
||||
payload := map[string]any{
|
||||
@@ -203,6 +215,10 @@ func TestSecurityHandler_CreateDecision_EmptyFields(t *testing.T) {
|
||||
h := NewSecurityHandler(cfg, db, nil)
|
||||
|
||||
router := gin.New()
|
||||
router.Use(func(c *gin.Context) {
|
||||
c.Set("role", "admin")
|
||||
c.Next()
|
||||
})
|
||||
router.POST("/api/v1/security/decisions", h.CreateDecision)
|
||||
|
||||
testCases := []struct {
|
||||
@@ -347,6 +363,10 @@ func TestSecurityAudit_DeleteRuleSet_InvalidID(t *testing.T) {
|
||||
h := NewSecurityHandler(cfg, db, nil)
|
||||
|
||||
router := gin.New()
|
||||
router.Use(func(c *gin.Context) {
|
||||
c.Set("role", "admin")
|
||||
c.Next()
|
||||
})
|
||||
router.DELETE("/api/v1/security/rulesets/:id", h.DeleteRuleSet)
|
||||
|
||||
testCases := []struct {
|
||||
@@ -388,6 +408,10 @@ func TestSecurityHandler_UpsertRuleSet_XSSInContent(t *testing.T) {
|
||||
h := NewSecurityHandler(cfg, db, nil)
|
||||
|
||||
router := gin.New()
|
||||
router.Use(func(c *gin.Context) {
|
||||
c.Set("role", "admin")
|
||||
c.Next()
|
||||
})
|
||||
router.POST("/api/v1/security/rulesets", h.UpsertRuleSet)
|
||||
router.GET("/api/v1/security/rulesets", h.ListRuleSets)
|
||||
|
||||
@@ -433,6 +457,10 @@ func TestSecurityHandler_UpdateConfig_RateLimitBounds(t *testing.T) {
|
||||
h := NewSecurityHandler(cfg, db, nil)
|
||||
|
||||
router := gin.New()
|
||||
router.Use(func(c *gin.Context) {
|
||||
c.Set("role", "admin")
|
||||
c.Next()
|
||||
})
|
||||
router.PUT("/api/v1/security/config", h.UpdateConfig)
|
||||
|
||||
testCases := []struct {
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user