chore: Add tests for backup service, crowdsec startup, log service, and security headers

- Implement tests for BackupService to handle database extraction from backup archives with SHM and WAL entries.
- Add tests for BackupService to validate behavior when creating backups for non-SQLite databases and handling oversized database entries.
- Introduce tests for CrowdSec startup to ensure proper error handling during configuration creation.
- Enhance LogService tests to cover scenarios for skipping dot and empty directories and handling read directory errors.
- Add tests for SecurityHeadersService to ensure proper error handling during preset creation and updates.
- Update ProxyHostForm tests to include HSTS subdomains toggle and validation for port input handling.
- Enhance DNSProviders tests to validate manual challenge completion and error handling when no providers are available.
- Extend UsersPage tests to ensure fallback mechanisms for clipboard operations when the clipboard API fails.
This commit is contained in:
GitHub Actions
2026-02-17 19:13:28 +00:00
parent 9713908887
commit 2cad49de85
41 changed files with 4071 additions and 4 deletions

View File

@@ -399,6 +399,45 @@ func TestCertificateHandler_Upload_MissingKeyFile(t *testing.T) {
}
}
// TestCertificateHandler_Upload_MissingKeyFile_MultipartWithCert posts a
// multipart form that carries a certificate file but no key file and expects
// a 400 response whose body mentions the missing "key_file" field.
func TestCertificateHandler_Upload_MissingKeyFile_MultipartWithCert(t *testing.T) {
	// Per-test shared-cache in-memory DB, keyed by the test name to avoid
	// colliding with other tests' in-memory databases.
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	if err != nil {
		t.Fatalf("failed to open db: %v", err)
	}
	if err = db.AutoMigrate(&models.SSLCertificate{}, &models.ProxyHost{}); err != nil {
		t.Fatalf("failed to migrate: %v", err)
	}
	gin.SetMode(gin.TestMode)
	r := gin.New()
	r.Use(mockAuthMiddleware())
	svc := services.NewCertificateService("/tmp", db)
	h := NewCertificateHandler(svc, nil, nil)
	r.POST("/api/certificates", h.Upload)
	// Build a multipart body that deliberately omits the key_file part.
	var body bytes.Buffer
	writer := multipart.NewWriter(&body)
	_ = writer.WriteField("name", "testcert")
	part, createErr := writer.CreateFormFile("certificate_file", "cert.pem")
	if createErr != nil {
		t.Fatalf("failed to create form file: %v", createErr)
	}
	_, _ = part.Write([]byte("-----BEGIN CERTIFICATE-----\nMIIB\n-----END CERTIFICATE-----"))
	// Close finalizes the multipart boundary before the request is sent.
	_ = writer.Close()
	req := httptest.NewRequest(http.MethodPost, "/api/certificates", &body)
	req.Header.Set("Content-Type", writer.FormDataContentType())
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	if w.Code != http.StatusBadRequest {
		t.Fatalf("expected 400 Bad Request, got %d, body=%s", w.Code, w.Body.String())
	}
	if !strings.Contains(w.Body.String(), "key_file") {
		t.Fatalf("expected error message about key_file, got: %s", w.Body.String())
	}
}
// Test Upload handler success path using a mock CertificateService
func TestCertificateHandler_Upload_Success(t *testing.T) {
db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})

View File

@@ -0,0 +1,87 @@
package handlers
import (
"bytes"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"testing"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestResolveAcquisitionConfigPath_Validation exercises the env-var override:
// an empty value falls back to the default path, relative and dot-segment
// paths are rejected, and a clean absolute path is accepted verbatim.
func TestResolveAcquisitionConfigPath_Validation(t *testing.T) {
	// Empty override -> built-in default path.
	t.Setenv("CHARON_CROWDSEC_ACQUIS_PATH", "")
	resolved, err := resolveAcquisitionConfigPath()
	require.NoError(t, err)
	require.Equal(t, "/etc/crowdsec/acquis.yaml", resolved)
	// Relative path -> rejected.
	t.Setenv("CHARON_CROWDSEC_ACQUIS_PATH", "relative/acquis.yaml")
	_, err = resolveAcquisitionConfigPath()
	require.Error(t, err)
	// Absolute path containing ".." -> rejected.
	t.Setenv("CHARON_CROWDSEC_ACQUIS_PATH", "/tmp/../etc/acquis.yaml")
	_, err = resolveAcquisitionConfigPath()
	require.Error(t, err)
	// Clean absolute path -> returned unchanged.
	t.Setenv("CHARON_CROWDSEC_ACQUIS_PATH", "/tmp/acquis.yaml")
	resolved, err = resolveAcquisitionConfigPath()
	require.NoError(t, err)
	require.Equal(t, "/tmp/acquis.yaml", resolved)
}
// TestReadAcquisitionConfig_ErrorsAndSuccess verifies that an existing
// acquisition file is read back intact and that a missing file produces an
// error.
func TestReadAcquisitionConfig_ErrorsAndSuccess(t *testing.T) {
	dir := t.TempDir()
	cfgPath := filepath.Join(dir, "acquis.yaml")
	require.NoError(t, os.WriteFile(cfgPath, []byte("source: file\n"), 0o600))

	// Happy path: content round-trips through the reader.
	data, readErr := readAcquisitionConfig(cfgPath)
	require.NoError(t, readErr)
	assert.Contains(t, string(data), "source: file")

	// Error path: a path that does not exist must fail.
	_, readErr = readAcquisitionConfig(filepath.Join(dir, "missing.yaml"))
	require.Error(t, readErr)
}
// TestCrowdsec_AcquisitionEndpoints_InvalidConfiguredPath configures a
// relative (hence invalid) acquisition path and expects both the GET and PUT
// acquisition endpoints to answer 500.
func TestCrowdsec_AcquisitionEndpoints_InvalidConfiguredPath(t *testing.T) {
	gin.SetMode(gin.TestMode)
	// Relative path is rejected by resolveAcquisitionConfigPath.
	t.Setenv("CHARON_CROWDSEC_ACQUIS_PATH", "relative/path.yaml")
	h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
	r := gin.New()
	g := r.Group("/api/v1")
	h.RegisterRoutes(g)
	// GET must fail with 500.
	wGet := httptest.NewRecorder()
	reqGet := httptest.NewRequest(http.MethodGet, "/api/v1/admin/crowdsec/acquisition", http.NoBody)
	r.ServeHTTP(wGet, reqGet)
	require.Equal(t, http.StatusInternalServerError, wGet.Code)
	// PUT must fail with 500 as well.
	wPut := httptest.NewRecorder()
	reqPut := httptest.NewRequest(http.MethodPut, "/api/v1/admin/crowdsec/acquisition", bytes.NewBufferString(`{"content":"source: file"}`))
	reqPut.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(wPut, reqPut)
	require.Equal(t, http.StatusInternalServerError, wPut.Code)
}
// TestCrowdsec_GetBouncerKey_NotConfigured clears every env var the handler
// may read a bouncer API key from and expects the key endpoint to return 404.
func TestCrowdsec_GetBouncerKey_NotConfigured(t *testing.T) {
	gin.SetMode(gin.TestMode)
	// Blank out all known key sources so no key can be resolved.
	t.Setenv("CROWDSEC_API_KEY", "")
	t.Setenv("CROWDSEC_BOUNCER_API_KEY", "")
	t.Setenv("CERBERUS_SECURITY_CROWDSEC_API_KEY", "")
	t.Setenv("CHARON_SECURITY_CROWDSEC_API_KEY", "")
	t.Setenv("CPM_SECURITY_CROWDSEC_API_KEY", "")
	h := newTestCrowdsecHandler(t, OpenTestDB(t), &fakeExec{}, "/bin/false", t.TempDir())
	r := gin.New()
	g := r.Group("/api/v1")
	h.RegisterRoutes(g)
	w := httptest.NewRecorder()
	req := httptest.NewRequest(http.MethodGet, "/api/v1/admin/crowdsec/bouncer/key", http.NoBody)
	r.ServeHTTP(w, req)
	require.Equal(t, http.StatusNotFound, w.Code)
}

View File

@@ -0,0 +1,127 @@
package handlers
import (
"net/http"
"net/http/httptest"
"net/url"
"os"
"path/filepath"
"testing"
"github.com/Wikid82/charon/backend/internal/models"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/require"
)
// TestCrowdsecWave5_ResolveAcquisitionConfigPath_RelativeRejected checks that
// a relative CHARON_CROWDSEC_ACQUIS_PATH override is refused with an
// "must be absolute" error.
func TestCrowdsecWave5_ResolveAcquisitionConfigPath_RelativeRejected(t *testing.T) {
	t.Setenv("CHARON_CROWDSEC_ACQUIS_PATH", "relative/acquis.yaml")
	_, pathErr := resolveAcquisitionConfigPath()
	require.Error(t, pathErr)
	require.Contains(t, pathErr.Error(), "must be absolute")
}
// TestCrowdsecWave5_ReadAcquisitionConfig_InvalidFilenameBranch covers the
// branch where the supplied path is not a usable filename ("/").
func TestCrowdsecWave5_ReadAcquisitionConfig_InvalidFilenameBranch(t *testing.T) {
	_, readErr := readAcquisitionConfig("/")
	require.Error(t, readErr)
	require.Contains(t, readErr.Error(), "filename is invalid")
}
// TestCrowdsecWave5_GetLAPIDecisions_Unauthorized points the handler at a stub
// LAPI that always answers 401 and expects the decisions endpoint to surface
// 401 with an "authentication failed" message.
func TestCrowdsecWave5_GetLAPIDecisions_Unauthorized(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupCrowdDB(t)
	tmpDir := t.TempDir()
	// Stub LAPI server that rejects every request.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusUnauthorized)
	}))
	t.Cleanup(server.Close)
	// Bypass base-URL validation so the httptest server URL is accepted;
	// the original validator is restored when the test finishes.
	original := validateCrowdsecLAPIBaseURLFunc
	validateCrowdsecLAPIBaseURLFunc = func(raw string) (*url.URL, error) {
		return url.Parse(raw)
	}
	t.Cleanup(func() {
		validateCrowdsecLAPIBaseURLFunc = original
	})
	require.NoError(t, db.Create(&models.SecurityConfig{UUID: "default", CrowdSecAPIURL: server.URL}).Error)
	h := newTestCrowdsecHandler(t, db, &fakeExec{}, "/bin/false", tmpDir)
	r := gin.New()
	g := r.Group("/api/v1")
	h.RegisterRoutes(g)
	w := httptest.NewRecorder()
	req := httptest.NewRequest(http.MethodGet, "/api/v1/admin/crowdsec/decisions/lapi", http.NoBody)
	r.ServeHTTP(w, req)
	require.Equal(t, http.StatusUnauthorized, w.Code)
	require.Contains(t, w.Body.String(), "authentication failed")
}
// TestCrowdsecWave5_GetLAPIDecisions_NonJSONContentTypeFallsBack serves a 200
// response with a text/html body from the stub LAPI; the handler is expected
// to fall back to the cscli path (mocked to return "[]") and still answer 200
// with a decisions payload.
func TestCrowdsecWave5_GetLAPIDecisions_NonJSONContentTypeFallsBack(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupCrowdDB(t)
	tmpDir := t.TempDir()
	// Stub LAPI returning a non-JSON content type.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/html")
		w.WriteHeader(http.StatusOK)
		_, _ = w.Write([]byte("<html>not-json</html>"))
	}))
	t.Cleanup(server.Close)
	// Accept the httptest server URL; restore the validator afterwards.
	original := validateCrowdsecLAPIBaseURLFunc
	validateCrowdsecLAPIBaseURLFunc = func(raw string) (*url.URL, error) {
		return url.Parse(raw)
	}
	t.Cleanup(func() {
		validateCrowdsecLAPIBaseURLFunc = original
	})
	require.NoError(t, db.Create(&models.SecurityConfig{UUID: "default", CrowdSecAPIURL: server.URL}).Error)
	h := newTestCrowdsecHandler(t, db, &fakeExec{}, "/bin/false", tmpDir)
	// Fallback executor returns an empty decision list.
	h.CmdExec = &mockCmdExecutor{output: []byte("[]"), err: nil}
	r := gin.New()
	g := r.Group("/api/v1")
	h.RegisterRoutes(g)
	w := httptest.NewRecorder()
	req := httptest.NewRequest(http.MethodGet, "/api/v1/admin/crowdsec/decisions/lapi", http.NoBody)
	r.ServeHTTP(w, req)
	require.Equal(t, http.StatusOK, w.Code)
	require.Contains(t, w.Body.String(), "decisions")
}
// TestCrowdsecWave5_GetBouncerInfo_And_GetBouncerKey_FileSource writes a key
// file at the handler's bouncer key path, clears the env-based key sources,
// and expects both the info and key endpoints to report the key as coming
// from "file".
func TestCrowdsecWave5_GetBouncerInfo_And_GetBouncerKey_FileSource(t *testing.T) {
	gin.SetMode(gin.TestMode)
	// Clear env key sources so only the file can supply the key.
	t.Setenv("CROWDSEC_BOUNCER_API_KEY", "")
	t.Setenv("CERBERUS_SECURITY_CROWDSEC_API_KEY", "")
	t.Setenv("CHARON_SECURITY_CROWDSEC_API_KEY", "")
	t.Setenv("CPM_SECURITY_CROWDSEC_API_KEY", "")
	db := setupCrowdDB(t)
	tmpDir := t.TempDir()
	h := newTestCrowdsecHandler(t, db, &fakeExec{}, "/bin/false", tmpDir)
	// Seed the key file the handler reads from.
	keyPath := h.bouncerKeyPath()
	require.NoError(t, os.MkdirAll(filepath.Dir(keyPath), 0o750))
	require.NoError(t, os.WriteFile(keyPath, []byte("abcdefghijklmnop1234567890"), 0o600))
	r := gin.New()
	g := r.Group("/api/v1")
	h.RegisterRoutes(g)
	// Info endpoint reports the file source.
	wInfo := httptest.NewRecorder()
	reqInfo := httptest.NewRequest(http.MethodGet, "/api/v1/admin/crowdsec/bouncer", http.NoBody)
	r.ServeHTTP(wInfo, reqInfo)
	require.Equal(t, http.StatusOK, wInfo.Code)
	require.Contains(t, wInfo.Body.String(), "file")
	// Key endpoint also reports "source":"file".
	wKey := httptest.NewRecorder()
	reqKey := httptest.NewRequest(http.MethodGet, "/api/v1/admin/crowdsec/bouncer/key", http.NoBody)
	r.ServeHTTP(wKey, reqKey)
	require.Equal(t, http.StatusOK, wKey.Code)
	require.Contains(t, wKey.Body.String(), "\"source\":\"file\"")
}

View File

@@ -0,0 +1,65 @@
package handlers
import (
"encoding/json"
"net/http"
"net/http/httptest"
"testing"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/require"
)
// TestCrowdsecWave6_BouncerKeyPath_UsesEnvFallback verifies that the env
// override for the bouncer key path takes effect on a zero-value handler.
func TestCrowdsecWave6_BouncerKeyPath_UsesEnvFallback(t *testing.T) {
	const overridePath = "/tmp/test-bouncer-key"
	t.Setenv("CHARON_CROWDSEC_BOUNCER_KEY_PATH", overridePath)
	handler := &CrowdsecHandler{}
	require.Equal(t, overridePath, handler.bouncerKeyPath())
}
// TestCrowdsecWave6_GetBouncerInfo_NoneSource clears every env key source and
// points the key file path at a non-existent file, then expects GetBouncerInfo
// to report key_source == "none".
func TestCrowdsecWave6_GetBouncerInfo_NoneSource(t *testing.T) {
	gin.SetMode(gin.TestMode)
	// No env key and no key file -> source must be "none".
	t.Setenv("CROWDSEC_API_KEY", "")
	t.Setenv("CROWDSEC_BOUNCER_API_KEY", "")
	t.Setenv("CERBERUS_SECURITY_CROWDSEC_API_KEY", "")
	t.Setenv("CHARON_SECURITY_CROWDSEC_API_KEY", "")
	t.Setenv("CPM_SECURITY_CROWDSEC_API_KEY", "")
	t.Setenv("CHARON_CROWDSEC_BOUNCER_KEY_PATH", "/tmp/non-existent-wave6-key")
	h := &CrowdsecHandler{CmdExec: &mockCmdExecutor{output: []byte(`[]`)}}
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request = httptest.NewRequest(http.MethodGet, "/api/v1/admin/crowdsec/bouncer", nil)
	h.GetBouncerInfo(c)
	require.Equal(t, http.StatusOK, w.Code)
	var payload map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &payload))
	require.Equal(t, "none", payload["key_source"])
}
// TestCrowdsecWave6_GetKeyStatus_NoKeyConfiguredMessage verifies that with no
// key available from env or file, GetKeyStatus reports source "none",
// valid=false, and a "No CrowdSec API key configured" message.
func TestCrowdsecWave6_GetKeyStatus_NoKeyConfiguredMessage(t *testing.T) {
	gin.SetMode(gin.TestMode)
	// Exhaust every key source.
	t.Setenv("CROWDSEC_API_KEY", "")
	t.Setenv("CROWDSEC_BOUNCER_API_KEY", "")
	t.Setenv("CERBERUS_SECURITY_CROWDSEC_API_KEY", "")
	t.Setenv("CHARON_SECURITY_CROWDSEC_API_KEY", "")
	t.Setenv("CPM_SECURITY_CROWDSEC_API_KEY", "")
	t.Setenv("CHARON_CROWDSEC_BOUNCER_KEY_PATH", "/tmp/non-existent-wave6-key")
	h := &CrowdsecHandler{}
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request = httptest.NewRequest(http.MethodGet, "/api/v1/admin/crowdsec/key-status", nil)
	h.GetKeyStatus(c)
	require.Equal(t, http.StatusOK, w.Code)
	var payload map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &payload))
	require.Equal(t, "none", payload["key_source"])
	require.Equal(t, false, payload["valid"])
	require.Contains(t, payload["message"], "No CrowdSec API key configured")
}

View File

@@ -0,0 +1,94 @@
package handlers
import (
"context"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"testing"
"github.com/Wikid82/charon/backend/internal/models"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
)
// TestCrowdsecWave7_ReadAcquisitionConfig_ReadErrorOnDirectory ensures that
// pointing readAcquisitionConfig at a directory surfaces a read error.
func TestCrowdsecWave7_ReadAcquisitionConfig_ReadErrorOnDirectory(t *testing.T) {
	dirPath := filepath.Join(t.TempDir(), "acq")
	require.NoError(t, os.MkdirAll(dirPath, 0o750))
	_, readErr := readAcquisitionConfig(dirPath)
	require.Error(t, readErr)
	require.Contains(t, readErr.Error(), "read acquisition config")
}
// TestCrowdsecWave7_Start_CreateSecurityConfigFailsOnReadOnlyDB migrates a
// file-backed SQLite DB, reopens it read-only, and expects Start to fail with
// 500 and a "Failed to persist configuration" message when it cannot write
// the security config.
func TestCrowdsecWave7_Start_CreateSecurityConfigFailsOnReadOnlyDB(t *testing.T) {
	gin.SetMode(gin.TestMode)
	tmpDir := t.TempDir()
	dbPath := filepath.Join(tmpDir, "crowdsec-readonly.db")
	// Migrate with a read-write handle, then close it so the schema exists.
	rwDB, err := gorm.Open(sqlite.Open(dbPath), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, rwDB.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
	sqlDB, err := rwDB.DB()
	require.NoError(t, err)
	require.NoError(t, sqlDB.Close())
	// Reopen the same file read-only; writes will now fail.
	roDB, err := gorm.Open(sqlite.Open("file:"+dbPath+"?mode=ro"), &gorm.Config{})
	require.NoError(t, err)
	h := newTestCrowdsecHandler(t, roDB, &fakeExec{}, "/bin/false", t.TempDir())
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request = httptest.NewRequest(http.MethodPost, "/api/v1/admin/crowdsec/start", nil)
	h.Start(c)
	require.Equal(t, http.StatusInternalServerError, w.Code)
	require.Contains(t, w.Body.String(), "Failed to persist configuration")
}
// TestCrowdsecWave7_EnsureBouncerRegistration_InvalidFileKeyReRegisters seeds
// a key file with a key the stub LAPI rejects (403) and verifies the handler
// deletes and re-adds the bouncer via cscli, persisting the fresh key back to
// the key file.
func TestCrowdsecWave7_EnsureBouncerRegistration_InvalidFileKeyReRegisters(t *testing.T) {
	tmpDir := t.TempDir()
	keyPath := tmpDir + "/bouncer_key"
	// Existing key on disk that the LAPI will reject.
	require.NoError(t, saveKeyToFile(keyPath, "invalid-file-key"))
	// Stub LAPI that refuses the stored key.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusForbidden)
	}))
	defer server.Close()
	db := setupCrowdDB(t)
	handler := newTestCrowdsecHandler(t, db, &fakeExec{}, "/bin/false", tmpDir)
	t.Setenv("CHARON_CROWDSEC_BOUNCER_KEY_PATH", keyPath)
	cfg := models.SecurityConfig{
		UUID:           uuid.New().String(),
		Name:           "default",
		CrowdSecAPIURL: server.URL,
	}
	require.NoError(t, db.Create(&cfg).Error)
	// Expect a "bouncers delete" followed by a "bouncers add" that yields the
	// replacement key.
	mockCmdExec := new(MockCommandExecutor)
	mockCmdExec.On("Execute", mock.Anything, "cscli", mock.MatchedBy(func(args []string) bool {
		return len(args) >= 2 && args[0] == "bouncers" && args[1] == "delete"
	})).Return([]byte("deleted"), nil)
	mockCmdExec.On("Execute", mock.Anything, "cscli", mock.MatchedBy(func(args []string) bool {
		return len(args) >= 2 && args[0] == "bouncers" && args[1] == "add"
	})).Return([]byte("new-file-key-1234567890"), nil)
	handler.CmdExec = mockCmdExec
	key, err := handler.ensureBouncerRegistration(context.Background())
	require.NoError(t, err)
	require.Equal(t, "new-file-key-1234567890", key)
	// The re-registered key must also have been written back to disk.
	require.Equal(t, "new-file-key-1234567890", readKeyFromFile(keyPath))
	mockCmdExec.AssertExpectations(t)
}

View File

@@ -5,17 +5,56 @@ import (
"encoding/json"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"testing"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"github.com/Wikid82/charon/backend/internal/caddy"
"github.com/Wikid82/charon/backend/internal/models"
)
// importCoverageProxyHostSvcStub is a no-op proxy-host service used by tests
// that never need real persistence.
type importCoverageProxyHostSvcStub struct{}

// Create accepts any host and reports success.
func (importCoverageProxyHostSvcStub) Create(*models.ProxyHost) error { return nil }

// Update accepts any host and reports success.
func (importCoverageProxyHostSvcStub) Update(*models.ProxyHost) error { return nil }

// List returns an empty, non-nil host slice.
func (importCoverageProxyHostSvcStub) List() ([]models.ProxyHost, error) {
	return []models.ProxyHost{}, nil
}
// setupReadOnlyImportDB creates a file-backed SQLite DB with the
// ImportSession schema, then reopens it read-only (and chmods the file 0400)
// so write attempts in the test under it fail. The read-only handle is closed
// via t.Cleanup.
func setupReadOnlyImportDB(t *testing.T) *gorm.DB {
	t.Helper()
	tmp := t.TempDir()
	dbPath := filepath.Join(tmp, "import_ro.db")
	// Migrate with a writable handle, then close it.
	rwDB, err := gorm.Open(sqlite.Open(dbPath), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, rwDB.AutoMigrate(&models.ImportSession{}))
	sqlDB, err := rwDB.DB()
	require.NoError(t, err)
	require.NoError(t, sqlDB.Close())
	// Belt and braces: make the file itself read-only too.
	require.NoError(t, os.Chmod(dbPath, 0o400))
	roDB, err := gorm.Open(sqlite.Open("file:"+dbPath+"?mode=ro"), &gorm.Config{})
	require.NoError(t, err)
	t.Cleanup(func() {
		if roSQLDB, dbErr := roDB.DB(); dbErr == nil {
			_ = roSQLDB.Close()
		}
	})
	return roDB
}
func setupImportCoverageTestDB(t *testing.T) *gorm.DB {
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
if err != nil {
@@ -186,3 +225,292 @@ func TestUploadMulti_NoSitesParsed(t *testing.T) {
assert.Equal(t, http.StatusBadRequest, w.Code)
assert.Contains(t, w.Body.String(), "no sites parsed")
}
// TestUpload_ImportsDetectedNoImportableHosts uploads a Caddyfile that only
// contains import directives (the mocked importer returns zero hosts) and
// expects a 400 whose body mentions "imports".
func TestUpload_ImportsDetectedNoImportableHosts(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupImportCoverageTestDB(t)
	// Importer yields no hosts for the imports-only Caddyfile.
	mockSvc := new(MockImporterService)
	mockSvc.On("NormalizeCaddyfile", mock.AnythingOfType("string")).Return("import sites/*.caddy # include\n", nil)
	mockSvc.On("ImportFile", mock.AnythingOfType("string")).Return(&caddy.ImportResult{
		Hosts: []caddy.ParsedHost{},
	}, nil)
	tmpImport := t.TempDir()
	h := NewImportHandler(db, "caddy", tmpImport, "")
	h.importerservice = mockSvc
	w := httptest.NewRecorder()
	_, r := gin.CreateTestContext(w)
	r.Use(func(c *gin.Context) {
		setAdminContext(c)
		c.Next()
	})
	r.POST("/upload", h.Upload)
	req := map[string]interface{}{
		"filename": "Caddyfile",
		"content":  "import sites/*.caddy # include\n",
	}
	body, _ := json.Marshal(req)
	request, _ := http.NewRequest("POST", "/upload", bytes.NewBuffer(body))
	request.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, request)
	assert.Equal(t, http.StatusBadRequest, w.Code)
	assert.Contains(t, w.Body.String(), "imports")
	mockSvc.AssertExpectations(t)
}
// TestUploadMulti_RequiresMainCaddyfile posts a multi-file upload that lacks a
// top-level Caddyfile and expects a 400 explaining a main Caddyfile is
// required.
func TestUploadMulti_RequiresMainCaddyfile(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupImportCoverageTestDB(t)
	h := NewImportHandler(db, "caddy", t.TempDir(), "")
	w := httptest.NewRecorder()
	_, r := gin.CreateTestContext(w)
	r.Use(func(c *gin.Context) {
		setAdminContext(c)
		c.Next()
	})
	r.POST("/upload-multi", h.UploadMulti)
	// Only a site fragment, no main Caddyfile.
	req := map[string]interface{}{
		"files": []interface{}{
			map[string]string{"filename": "sites/site1.caddy", "content": "example.com { reverse_proxy localhost:8080 }"},
		},
	}
	body, _ := json.Marshal(req)
	request, _ := http.NewRequest("POST", "/upload-multi", bytes.NewBuffer(body))
	request.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, request)
	assert.Equal(t, http.StatusBadRequest, w.Code)
	assert.Contains(t, w.Body.String(), "must include a main Caddyfile")
}
// TestUploadMulti_RejectsEmptyFileContent verifies that a main Caddyfile whose
// content is only whitespace is rejected with 400 and an "is empty" message.
func TestUploadMulti_RejectsEmptyFileContent(t *testing.T) {
	gin.SetMode(gin.TestMode)
	handler := NewImportHandler(setupImportCoverageTestDB(t), "caddy", t.TempDir(), "")

	recorder := httptest.NewRecorder()
	_, router := gin.CreateTestContext(recorder)
	router.Use(func(c *gin.Context) {
		setAdminContext(c)
		c.Next()
	})
	router.POST("/upload-multi", handler.UploadMulti)

	// A main Caddyfile whose content is a single space.
	payload, _ := json.Marshal(map[string]interface{}{
		"files": []interface{}{
			map[string]string{"filename": "Caddyfile", "content": " "},
		},
	})
	httpReq, _ := http.NewRequest("POST", "/upload-multi", bytes.NewBuffer(payload))
	httpReq.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(recorder, httpReq)

	assert.Equal(t, http.StatusBadRequest, recorder.Code)
	assert.Contains(t, recorder.Body.String(), "is empty")
}
// TestCommitAndCancel_InvalidSessionUUID sends "." as the session UUID to both
// the commit and cancel endpoints and expects each to reject it with 400.
func TestCommitAndCancel_InvalidSessionUUID(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupImportCoverageTestDB(t)
	tmpImport := t.TempDir()
	h := NewImportHandler(db, "caddy", tmpImport, "")
	r := gin.New()
	r.Use(func(c *gin.Context) {
		setAdminContext(c)
		c.Next()
	})
	h.RegisterRoutes(r.Group("/api/v1"))
	// Commit with an invalid session UUID.
	commitBody := map[string]interface{}{"session_uuid": ".", "resolutions": map[string]string{}}
	commitBytes, _ := json.Marshal(commitBody)
	wCommit := httptest.NewRecorder()
	reqCommit, _ := http.NewRequest(http.MethodPost, "/api/v1/import/commit", bytes.NewBuffer(commitBytes))
	reqCommit.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(wCommit, reqCommit)
	assert.Equal(t, http.StatusBadRequest, wCommit.Code)
	// Cancel with the same invalid UUID.
	wCancel := httptest.NewRecorder()
	reqCancel, _ := http.NewRequest(http.MethodDelete, "/api/v1/import/cancel?session_uuid=.", http.NoBody)
	r.ServeHTTP(wCancel, reqCancel)
	assert.Equal(t, http.StatusBadRequest, wCancel.Code)
}
// TestCancel_RemovesTransientUpload seeds an uploaded Caddyfile under the
// handler's uploads directory and verifies that cancelling the session both
// succeeds (200) and deletes the file.
func TestCancel_RemovesTransientUpload(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupImportCoverageTestDB(t)
	tmpImport := t.TempDir()
	h := NewImportHandler(db, "caddy", tmpImport, "")
	// Pre-create the transient upload file keyed by session id.
	uploadsDir := filepath.Join(tmpImport, "uploads")
	require.NoError(t, os.MkdirAll(uploadsDir, 0o750))
	sid := "test-sid"
	uploadPath := filepath.Join(uploadsDir, sid+".caddyfile")
	require.NoError(t, os.WriteFile(uploadPath, []byte("example.com { reverse_proxy localhost:8080 }"), 0o600))
	r := gin.New()
	r.Use(func(c *gin.Context) {
		setAdminContext(c)
		c.Next()
	})
	h.RegisterRoutes(r.Group("/api/v1"))
	w := httptest.NewRecorder()
	req, _ := http.NewRequest(http.MethodDelete, "/api/v1/import/cancel?session_uuid="+sid, http.NoBody)
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusOK, w.Code)
	// The transient upload must be gone after cancel.
	_, statErr := os.Stat(uploadPath)
	assert.True(t, os.IsNotExist(statErr))
}
// TestUpload_ReadOnlyDBRespondsWithPermissionError runs Upload against a
// read-only DB and expects a 500 carrying the "permissions_db_readonly" error
// code.
func TestUpload_ReadOnlyDBRespondsWithPermissionError(t *testing.T) {
	gin.SetMode(gin.TestMode)
	roDB := setupReadOnlyImportDB(t)
	// Importer succeeds; only the DB write should fail.
	mockSvc := new(MockImporterService)
	mockSvc.On("NormalizeCaddyfile", mock.AnythingOfType("string")).Return("example.com { reverse_proxy localhost:8080 }", nil)
	mockSvc.On("ImportFile", mock.AnythingOfType("string")).Return(&caddy.ImportResult{
		Hosts: []caddy.ParsedHost{{DomainNames: "example.com", ForwardHost: "localhost", ForwardPort: 8080}},
	}, nil)
	h := NewImportHandler(roDB, "caddy", t.TempDir(), "")
	h.importerservice = mockSvc
	w := httptest.NewRecorder()
	_, r := gin.CreateTestContext(w)
	r.Use(func(c *gin.Context) {
		setAdminContext(c)
		c.Next()
	})
	r.POST("/upload", h.Upload)
	body, _ := json.Marshal(map[string]any{
		"filename": "Caddyfile",
		"content":  "example.com { reverse_proxy localhost:8080 }",
	})
	req, _ := http.NewRequest(http.MethodPost, "/upload", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
	assert.Contains(t, w.Body.String(), "permissions_db_readonly")
}
// TestUploadMulti_ReadOnlyDBRespondsWithPermissionError mirrors the single
// upload test for the multi-file endpoint: a read-only DB must surface 500
// with "permissions_db_readonly".
func TestUploadMulti_ReadOnlyDBRespondsWithPermissionError(t *testing.T) {
	gin.SetMode(gin.TestMode)
	roDB := setupReadOnlyImportDB(t)
	// Importer succeeds; only the DB write should fail.
	mockSvc := new(MockImporterService)
	mockSvc.On("ImportFile", mock.AnythingOfType("string")).Return(&caddy.ImportResult{
		Hosts: []caddy.ParsedHost{{DomainNames: "multi.example.com", ForwardHost: "localhost", ForwardPort: 8081}},
	}, nil)
	h := NewImportHandler(roDB, "caddy", t.TempDir(), "")
	h.importerservice = mockSvc
	w := httptest.NewRecorder()
	_, r := gin.CreateTestContext(w)
	r.Use(func(c *gin.Context) {
		setAdminContext(c)
		c.Next()
	})
	r.POST("/upload-multi", h.UploadMulti)
	body, _ := json.Marshal(map[string]any{
		"files": []map[string]string{{
			"filename": "Caddyfile",
			"content":  "multi.example.com { reverse_proxy localhost:8081 }",
		}},
	})
	req, _ := http.NewRequest(http.MethodPost, "/upload-multi", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
	assert.Contains(t, w.Body.String(), "permissions_db_readonly")
}
// TestCommit_ReadOnlyDBSaveRespondsWithPermissionError commits a seeded
// session against a read-only DB and expects a 500 with the
// "permissions_db_readonly" error code.
func TestCommit_ReadOnlyDBSaveRespondsWithPermissionError(t *testing.T) {
	gin.SetMode(gin.TestMode)
	roDB := setupReadOnlyImportDB(t)
	mockSvc := new(MockImporterService)
	mockSvc.On("ImportFile", mock.AnythingOfType("string")).Return(&caddy.ImportResult{
		Hosts: []caddy.ParsedHost{{DomainNames: "commit.example.com", ForwardHost: "localhost", ForwardPort: 8080}},
	}, nil)
	// Seed the transient upload so Commit finds a session file on disk.
	importDir := t.TempDir()
	uploadsDir := filepath.Join(importDir, "uploads")
	require.NoError(t, os.MkdirAll(uploadsDir, 0o750))
	sid := "readonly-commit-session"
	require.NoError(t, os.WriteFile(filepath.Join(uploadsDir, sid+".caddyfile"), []byte("commit.example.com { reverse_proxy localhost:8080 }"), 0o600))
	// Stub proxy-host service so only the DB save can fail.
	h := NewImportHandlerWithService(roDB, importCoverageProxyHostSvcStub{}, "caddy", importDir, "", nil)
	h.importerservice = mockSvc
	r := gin.New()
	r.Use(func(c *gin.Context) {
		setAdminContext(c)
		c.Next()
	})
	r.POST("/commit", h.Commit)
	body, _ := json.Marshal(map[string]any{"session_uuid": sid, "resolutions": map[string]string{}})
	w := httptest.NewRecorder()
	req, _ := http.NewRequest(http.MethodPost, "/commit", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
	assert.Contains(t, w.Body.String(), "permissions_db_readonly")
}
// TestCancel_ReadOnlyDBSaveRespondsWithPermissionError builds its own
// read-only DB (unlike setupReadOnlyImportDB it must first insert a pending
// session), then cancels that session and expects a 500 with
// "permissions_db_readonly".
func TestCancel_ReadOnlyDBSaveRespondsWithPermissionError(t *testing.T) {
	gin.SetMode(gin.TestMode)
	tmp := t.TempDir()
	dbPath := filepath.Join(tmp, "cancel_ro.db")
	// Migrate and seed the session with a writable handle.
	rwDB, err := gorm.Open(sqlite.Open(dbPath), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, rwDB.AutoMigrate(&models.ImportSession{}))
	require.NoError(t, rwDB.Create(&models.ImportSession{UUID: "readonly-cancel", Status: "pending"}).Error)
	rwSQLDB, err := rwDB.DB()
	require.NoError(t, err)
	require.NoError(t, rwSQLDB.Close())
	// Reopen read-only so the cancel's status update fails.
	require.NoError(t, os.Chmod(dbPath, 0o400))
	roDB, err := gorm.Open(sqlite.Open("file:"+dbPath+"?mode=ro"), &gorm.Config{})
	require.NoError(t, err)
	if roSQLDB, dbErr := roDB.DB(); dbErr == nil {
		t.Cleanup(func() { _ = roSQLDB.Close() })
	}
	h := NewImportHandler(roDB, "caddy", t.TempDir(), "")
	r := gin.New()
	r.Use(func(c *gin.Context) {
		setAdminContext(c)
		c.Next()
	})
	r.DELETE("/cancel", h.Cancel)
	w := httptest.NewRecorder()
	req, _ := http.NewRequest(http.MethodDelete, "/cancel?session_uuid=readonly-cancel", http.NoBody)
	r.ServeHTTP(w, req)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
	assert.Contains(t, w.Body.String(), "permissions_db_readonly")
}

View File

@@ -14,6 +14,7 @@ import (
"github.com/Wikid82/charon/backend/internal/caddy"
"github.com/Wikid82/charon/backend/internal/models"
"github.com/Wikid82/charon/backend/internal/services"
"github.com/Wikid82/charon/backend/internal/testutil"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
@@ -870,6 +871,117 @@ func TestImportHandler_Commit_InvalidSessionUUID_BranchCoverage(t *testing.T) {
})
}
// TestImportHandler_Upload_NoImportableHosts_WithImportsDetected uploads a
// Caddyfile containing an import directive whose only parsed host carries a
// "file_server detected" warning, and expects a 400 mentioning
// "imports detected".
func TestImportHandler_Upload_NoImportableHosts_WithImportsDetected(t *testing.T) {
	testutil.WithTx(t, setupImportTestDB(t), func(tx *gorm.DB) {
		handler, _, mockImport := setupTestHandler(t, tx)
		// The single parsed host is not importable (file_server warning).
		mockImport.importResult = &caddy.ImportResult{
			Hosts: []caddy.ParsedHost{{
				DomainNames: "file.example.com",
				Warnings:    []string{"file_server detected"},
			}},
		}
		handler.importerservice = &mockImporterAdapter{mockImport}
		reqBody := map[string]string{
			"content":  "import sites/*.caddyfile",
			"filename": "Caddyfile",
		}
		body, _ := json.Marshal(reqBody)
		req := httptest.NewRequest(http.MethodPost, "/api/v1/import/upload", bytes.NewBuffer(body))
		req.Header.Set("Content-Type", "application/json")
		w := httptest.NewRecorder()
		gin.SetMode(gin.TestMode)
		router := gin.New()
		addAdminMiddleware(router)
		handler.RegisterRoutes(router.Group("/api/v1"))
		router.ServeHTTP(w, req)
		require.Equal(t, http.StatusBadRequest, w.Code)
		assert.Contains(t, w.Body.String(), "imports detected")
	})
}
// TestImportHandler_Upload_NoImportableHosts_NoImportsNoFileServer covers the
// branch where no host is importable and there are neither imports nor a
// file_server warning: the error must read "no sites found in uploaded
// Caddyfile".
func TestImportHandler_Upload_NoImportableHosts_NoImportsNoFileServer(t *testing.T) {
	testutil.WithTx(t, setupImportTestDB(t), func(tx *gorm.DB) {
		handler, _, mockImport := setupTestHandler(t, tx)
		// Host without forward target or warnings -> nothing importable.
		mockImport.importResult = &caddy.ImportResult{
			Hosts: []caddy.ParsedHost{{
				DomainNames: "noop.example.com",
			}},
		}
		handler.importerservice = &mockImporterAdapter{mockImport}
		reqBody := map[string]string{
			"content":  "noop.example.com { respond \"ok\" }",
			"filename": "Caddyfile",
		}
		body, _ := json.Marshal(reqBody)
		req := httptest.NewRequest(http.MethodPost, "/api/v1/import/upload", bytes.NewBuffer(body))
		req.Header.Set("Content-Type", "application/json")
		w := httptest.NewRecorder()
		gin.SetMode(gin.TestMode)
		router := gin.New()
		addAdminMiddleware(router)
		handler.RegisterRoutes(router.Group("/api/v1"))
		router.ServeHTTP(w, req)
		require.Equal(t, http.StatusBadRequest, w.Code)
		assert.Contains(t, w.Body.String(), "no sites found in uploaded Caddyfile")
	})
}
// TestImportHandler_Commit_OverwriteAndRenameFlows commits a session whose
// conflict resolution is "rename" and verifies the host is created under the
// "-imported" domain suffix with the caller-supplied display name.
func TestImportHandler_Commit_OverwriteAndRenameFlows(t *testing.T) {
	testutil.WithTx(t, setupImportTestDB(t), func(tx *gorm.DB) {
		handler, _, mockImport := setupTestHandler(t, tx)
		// Use a real proxy-host service so the created row is queryable below.
		handler.proxyHostSvc = services.NewProxyHostService(tx)
		mockImport.importResult = &caddy.ImportResult{
			Hosts: []caddy.ParsedHost{
				{DomainNames: "rename.example.com", ForwardScheme: "http", ForwardHost: "rename-host", ForwardPort: 9000},
			},
		}
		handler.importerservice = &mockImporterAdapter{mockImport}
		// Seed the transient upload file matching the session UUID.
		uploadPath := filepath.Join(handler.importDir, "uploads", "overwrite-rename.caddyfile")
		require.NoError(t, os.MkdirAll(filepath.Dir(uploadPath), 0o700))
		require.NoError(t, os.WriteFile(uploadPath, []byte("placeholder"), 0o600))
		commitBody := map[string]any{
			"session_uuid": "overwrite-rename",
			"resolutions": map[string]string{
				"rename.example.com": "rename",
			},
			"names": map[string]string{
				"rename.example.com": "Renamed Host",
			},
		}
		body, _ := json.Marshal(commitBody)
		req := httptest.NewRequest(http.MethodPost, "/api/v1/import/commit", bytes.NewBuffer(body))
		req.Header.Set("Content-Type", "application/json")
		w := httptest.NewRecorder()
		gin.SetMode(gin.TestMode)
		router := gin.New()
		addAdminMiddleware(router)
		handler.RegisterRoutes(router.Group("/api/v1"))
		router.ServeHTTP(w, req)
		require.Equal(t, http.StatusOK, w.Code)
		assert.Contains(t, w.Body.String(), "\"created\":1")
		// Rename resolution stores the host under the "-imported" suffix.
		var renamed models.ProxyHost
		require.NoError(t, tx.Where("domain_names = ?", "rename.example.com-imported").First(&renamed).Error)
		assert.Equal(t, "Renamed Host", renamed.Name)
	})
}
func TestImportHandler_Cancel_ValidationAndNotFound_BranchCoverage(t *testing.T) {
testutil.WithTx(t, setupImportTestDB(t), func(tx *gorm.DB) {
handler, _, _ := setupTestHandler(t, tx)

View File

@@ -2,6 +2,7 @@ package handlers
import (
"encoding/json"
"fmt"
"net/http"
"net/http/httptest"
"strings"
@@ -149,3 +150,150 @@ func TestNotificationTemplateHandler_Preview_InvalidJSON(t *testing.T) {
r.ServeHTTP(w, req)
require.Equal(t, http.StatusBadRequest, w.Code)
}
// TestNotificationTemplateHandler_AdminRequired verifies that Create, Update
// and Delete all answer 403 when no admin role is present in the request
// context (no auth middleware is installed, so the handler must enforce the
// check itself).
func TestNotificationTemplateHandler_AdminRequired(t *testing.T) {
	// Key the shared-cache in-memory DSN by t.Name() so this DB is private to
	// the test; the previous literal "file::memory:?mode=memory&cache=shared"
	// was the same database in every notification test, leaking state.
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.NotificationTemplate{}))
	svc := services.NewNotificationService(db)
	h := NewNotificationTemplateHandler(svc)
	r := gin.New()
	r.POST("/api/templates", h.Create)
	r.PUT("/api/templates/:id", h.Update)
	r.DELETE("/api/templates/:id", h.Delete)
	// Create without admin role -> 403.
	createReq := httptest.NewRequest(http.MethodPost, "/api/templates", strings.NewReader(`{"name":"x","config":"{}"}`))
	createReq.Header.Set("Content-Type", "application/json")
	createW := httptest.NewRecorder()
	r.ServeHTTP(createW, createReq)
	require.Equal(t, http.StatusForbidden, createW.Code)
	// Update without admin role -> 403.
	updateReq := httptest.NewRequest(http.MethodPut, "/api/templates/test-id", strings.NewReader(`{"name":"x","config":"{}"}`))
	updateReq.Header.Set("Content-Type", "application/json")
	updateW := httptest.NewRecorder()
	r.ServeHTTP(updateW, updateReq)
	require.Equal(t, http.StatusForbidden, updateW.Code)
	// Delete without admin role -> 403.
	deleteReq := httptest.NewRequest(http.MethodDelete, "/api/templates/test-id", http.NoBody)
	deleteW := httptest.NewRecorder()
	r.ServeHTTP(deleteW, deleteReq)
	require.Equal(t, http.StatusForbidden, deleteW.Code)
}
// TestNotificationTemplateHandler_List_DBError closes the underlying sql.DB
// before calling List and expects a 500 response.
func TestNotificationTemplateHandler_List_DBError(t *testing.T) {
	// Per-test DSN keyed by t.Name(): the previous shared literal DSN was the
	// same in-memory database across all notification tests.
	db, err := gorm.Open(sqlite.Open(fmt.Sprintf("file:%s?mode=memory&cache=shared", t.Name())), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.NotificationTemplate{}))
	svc := services.NewNotificationService(db)
	h := NewNotificationTemplateHandler(svc)
	r := gin.New()
	r.GET("/api/templates", h.List)
	// Closing the connection pool makes every subsequent query fail.
	sqlDB, err := db.DB()
	require.NoError(t, err)
	require.NoError(t, sqlDB.Close())
	req := httptest.NewRequest(http.MethodGet, "/api/templates", http.NoBody)
	w := httptest.NewRecorder()
	r.ServeHTTP(w, req)
	require.Equal(t, http.StatusInternalServerError, w.Code)
}
func TestNotificationTemplateHandler_WriteOps_DBError(t *testing.T) {
db, err := gorm.Open(sqlite.Open("file::memory:?mode=memory&cache=shared"), &gorm.Config{})
require.NoError(t, err)
require.NoError(t, db.AutoMigrate(&models.NotificationTemplate{}))
svc := services.NewNotificationService(db)
h := NewNotificationTemplateHandler(svc)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("role", "admin")
c.Set("userID", uint(1))
c.Next()
})
r.POST("/api/templates", h.Create)
r.PUT("/api/templates/:id", h.Update)
r.DELETE("/api/templates/:id", h.Delete)
sqlDB, err := db.DB()
require.NoError(t, err)
require.NoError(t, sqlDB.Close())
createReq := httptest.NewRequest(http.MethodPost, "/api/templates", strings.NewReader(`{"name":"x","config":"{}"}`))
createReq.Header.Set("Content-Type", "application/json")
createW := httptest.NewRecorder()
r.ServeHTTP(createW, createReq)
require.Equal(t, http.StatusInternalServerError, createW.Code)
updateReq := httptest.NewRequest(http.MethodPut, "/api/templates/test-id", strings.NewReader(`{"id":"test-id","name":"x","config":"{}"}`))
updateReq.Header.Set("Content-Type", "application/json")
updateW := httptest.NewRecorder()
r.ServeHTTP(updateW, updateReq)
require.Equal(t, http.StatusInternalServerError, updateW.Code)
deleteReq := httptest.NewRequest(http.MethodDelete, "/api/templates/test-id", http.NoBody)
deleteW := httptest.NewRecorder()
r.ServeHTTP(deleteW, deleteReq)
require.Equal(t, http.StatusInternalServerError, deleteW.Code)
}
func TestNotificationTemplateHandler_WriteOps_PermissionErrorResponse(t *testing.T) {
db, err := gorm.Open(sqlite.Open("file::memory:?mode=memory&cache=shared"), &gorm.Config{})
require.NoError(t, err)
require.NoError(t, db.AutoMigrate(&models.NotificationTemplate{}))
createHook := "test_notification_template_permission_create"
updateHook := "test_notification_template_permission_update"
deleteHook := "test_notification_template_permission_delete"
require.NoError(t, db.Callback().Create().Before("gorm:create").Register(createHook, func(tx *gorm.DB) {
_ = tx.AddError(fmt.Errorf("permission denied"))
}))
require.NoError(t, db.Callback().Update().Before("gorm:update").Register(updateHook, func(tx *gorm.DB) {
_ = tx.AddError(fmt.Errorf("permission denied"))
}))
require.NoError(t, db.Callback().Delete().Before("gorm:delete").Register(deleteHook, func(tx *gorm.DB) {
_ = tx.AddError(fmt.Errorf("permission denied"))
}))
t.Cleanup(func() {
_ = db.Callback().Create().Remove(createHook)
_ = db.Callback().Update().Remove(updateHook)
_ = db.Callback().Delete().Remove(deleteHook)
})
svc := services.NewNotificationService(db)
h := NewNotificationTemplateHandler(svc)
r := gin.New()
r.Use(func(c *gin.Context) {
c.Set("role", "admin")
c.Set("userID", uint(1))
c.Next()
})
r.POST("/api/templates", h.Create)
r.PUT("/api/templates/:id", h.Update)
r.DELETE("/api/templates/:id", h.Delete)
createReq := httptest.NewRequest(http.MethodPost, "/api/templates", strings.NewReader(`{"name":"x","config":"{}"}`))
createReq.Header.Set("Content-Type", "application/json")
createW := httptest.NewRecorder()
r.ServeHTTP(createW, createReq)
require.Equal(t, http.StatusInternalServerError, createW.Code)
require.Contains(t, createW.Body.String(), "permissions_write_denied")
updateReq := httptest.NewRequest(http.MethodPut, "/api/templates/test-id", strings.NewReader(`{"id":"test-id","name":"x","config":"{}"}`))
updateReq.Header.Set("Content-Type", "application/json")
updateW := httptest.NewRecorder()
r.ServeHTTP(updateW, updateReq)
require.Equal(t, http.StatusInternalServerError, updateW.Code)
require.Contains(t, updateW.Body.String(), "permissions_write_denied")
deleteReq := httptest.NewRequest(http.MethodDelete, "/api/templates/test-id", http.NoBody)
deleteW := httptest.NewRecorder()
r.ServeHTTP(deleteW, deleteReq)
require.Equal(t, http.StatusInternalServerError, deleteW.Code)
require.Contains(t, deleteW.Body.String(), "permissions_write_denied")
}

View File

@@ -16,6 +16,7 @@ import (
"github.com/Wikid82/charon/backend/internal/config"
"github.com/Wikid82/charon/backend/internal/models"
"gorm.io/gorm"
)
// Tests for UpdateConfig handler to improve coverage (currently 46%)
@@ -772,3 +773,205 @@ func TestSecurityHandler_Enable_WithExactIPWhitelist(t *testing.T) {
assert.Equal(t, http.StatusOK, w.Code)
}
// TestSecurityHandler_GetStatus_BackwardCompatibilityOverrides verifies that
// settings-table values override the SecurityConfig row in the status payload:
// cerberus is reported disabled and crowdsec reports mode "disabled" even
// though the row says enabled/local.
// NOTE(review): the seeded crowdsec mode is "external" but the response is
// asserted as "disabled" — presumably an override/normalization path; confirm
// against the handler if this ever regresses.
func TestSecurityHandler_GetStatus_BackwardCompatibilityOverrides(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupTestDB(t)
	require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}, &models.CaddyConfig{}))
	require.NoError(t, db.Create(&models.SecurityConfig{
		Name:          "default",
		Enabled:       true,
		WAFMode:       "block",
		RateLimitMode: "enabled",
		CrowdSecMode:  "local",
	}).Error)
	// Legacy settings keys that should win over the SecurityConfig row.
	seed := []models.Setting{
		{Key: "security.cerberus.enabled", Value: "false", Category: "security", Type: "bool"},
		{Key: "security.crowdsec.mode", Value: "external", Category: "security", Type: "string"},
		{Key: "security.waf.enabled", Value: "true", Category: "security", Type: "bool"},
		{Key: "security.rate_limit.enabled", Value: "true", Category: "security", Type: "bool"},
		{Key: "security.acl.enabled", Value: "true", Category: "security", Type: "bool"},
	}
	for _, setting := range seed {
		require.NoError(t, db.Create(&setting).Error)
	}
	handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
	router := gin.New()
	router.GET("/security/status", handler.GetStatus)
	w := httptest.NewRecorder()
	req, _ := http.NewRequest(http.MethodGet, "/security/status", http.NoBody)
	router.ServeHTTP(w, req)
	require.Equal(t, http.StatusOK, w.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	cerberus := resp["cerberus"].(map[string]any)
	require.Equal(t, false, cerberus["enabled"])
	crowdsec := resp["crowdsec"].(map[string]any)
	require.Equal(t, "disabled", crowdsec["mode"])
	require.Equal(t, false, crowdsec["enabled"])
}

// TestSecurityHandler_AddWAFExclusion_InvalidExistingJSONStillAdds verifies
// that adding a WAF exclusion succeeds even when the stored exclusions field
// contains invalid JSON ("{"), i.e. corrupt state does not block new writes.
func TestSecurityHandler_AddWAFExclusion_InvalidExistingJSONStillAdds(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupTestDB(t)
	require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.SecurityAudit{}))
	require.NoError(t, db.Create(&models.SecurityConfig{Name: "default", WAFExclusions: "{"}).Error)
	handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
	router := gin.New()
	router.Use(func(c *gin.Context) {
		c.Set("role", "admin")
		c.Next()
	})
	router.POST("/security/waf/exclusions", handler.AddWAFExclusion)
	body := `{"rule_id":942100,"target":"ARGS:user","description":"test"}`
	w := httptest.NewRecorder()
	req, _ := http.NewRequest(http.MethodPost, "/security/waf/exclusions", strings.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(w, req)
	require.Equal(t, http.StatusOK, w.Code)
}

// TestSecurityHandler_ToggleSecurityModule_SnapshotSettingsError verifies that
// EnableWAF fails with 500 when the settings snapshot cannot be read because
// the database connection was closed.
func TestSecurityHandler_ToggleSecurityModule_SnapshotSettingsError(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupTestDB(t)
	require.NoError(t, db.AutoMigrate(&models.Setting{}, &models.SecurityConfig{}))
	sqlDB, err := db.DB()
	require.NoError(t, err)
	require.NoError(t, sqlDB.Close())
	handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
	router := gin.New()
	router.Use(func(c *gin.Context) {
		c.Set("role", "admin")
		c.Next()
	})
	router.POST("/security/waf/enable", handler.EnableWAF)
	w := httptest.NewRecorder()
	req, _ := http.NewRequest(http.MethodPost, "/security/waf/enable", http.NoBody)
	router.ServeHTTP(w, req)
	require.Equal(t, http.StatusInternalServerError, w.Code)
	require.Contains(t, w.Body.String(), "Failed to update security module")
}

// TestSecurityHandler_ToggleSecurityModule_SnapshotSecurityConfigError
// verifies that EnableWAF fails with 500 when only the security_configs table
// is missing (dropped), while the settings table is still intact.
func TestSecurityHandler_ToggleSecurityModule_SnapshotSecurityConfigError(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupTestDB(t)
	require.NoError(t, db.AutoMigrate(&models.Setting{}, &models.SecurityConfig{}))
	require.NoError(t, db.Exec("DROP TABLE security_configs").Error)
	handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
	router := gin.New()
	router.Use(func(c *gin.Context) {
		c.Set("role", "admin")
		c.Next()
	})
	router.POST("/security/waf/enable", handler.EnableWAF)
	w := httptest.NewRecorder()
	req, _ := http.NewRequest(http.MethodPost, "/security/waf/enable", http.NoBody)
	router.ServeHTTP(w, req)
	require.Equal(t, http.StatusInternalServerError, w.Code)
	require.Contains(t, w.Body.String(), "Failed to update security module")
}

// TestSecurityHandler_SnapshotAndRestoreHelpers exercises snapshotSettings
// (deduplicates keys, records existence per key) and restoreSettings (succeeds
// normally, errors once the settings table is dropped).
func TestSecurityHandler_SnapshotAndRestoreHelpers(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupTestDB(t)
	require.NoError(t, db.AutoMigrate(&models.Setting{}, &models.SecurityConfig{}))
	handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
	require.NoError(t, db.Create(&models.Setting{Key: "k1", Value: "v1", Category: "security", Type: "string"}).Error)
	// "k1" appears twice in the request but must be captured once.
	snapshots, err := handler.snapshotSettings([]string{"k1", "k1", "k2"})
	require.NoError(t, err)
	require.Len(t, snapshots, 2)
	require.True(t, snapshots["k1"].exists)
	require.False(t, snapshots["k2"].exists)
	require.NoError(t, handler.restoreSettings(map[string]settingSnapshot{
		"k1": snapshots["k1"],
		"k2": snapshots["k2"],
	}))
	require.NoError(t, db.Exec("DROP TABLE settings").Error)
	err = handler.restoreSettings(map[string]settingSnapshot{
		"k1": snapshots["k1"],
	})
	require.Error(t, err)
}

// TestSecurityHandler_DefaultSecurityConfigStateHelpers exercises snapshotting
// the "default" SecurityConfig row (absent then present/enabled) and restoring
// it to disabled and to deleted.
func TestSecurityHandler_DefaultSecurityConfigStateHelpers(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupTestDB(t)
	require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}))
	handler := NewSecurityHandler(config.SecurityConfig{}, db, nil)
	exists, enabled, err := handler.snapshotDefaultSecurityConfigState()
	require.NoError(t, err)
	require.False(t, exists)
	require.False(t, enabled)
	require.NoError(t, db.Create(&models.SecurityConfig{Name: "default", Enabled: true}).Error)
	exists, enabled, err = handler.snapshotDefaultSecurityConfigState()
	require.NoError(t, err)
	require.True(t, exists)
	require.True(t, enabled)
	// restore(exists=true, enabled=false) keeps the row but flips Enabled off.
	require.NoError(t, handler.restoreDefaultSecurityConfigState(true, false))
	var cfg models.SecurityConfig
	require.NoError(t, db.Where("name = ?", "default").First(&cfg).Error)
	require.False(t, cfg.Enabled)
	// restore(exists=false, ...) removes the row entirely.
	require.NoError(t, handler.restoreDefaultSecurityConfigState(false, false))
	err = db.Where("name = ?", "default").First(&cfg).Error
	require.ErrorIs(t, err, gorm.ErrRecordNotFound)
}

// TestSecurityHandler_EnsureSecurityConfigEnabled_Helper verifies the helper
// errors with "database not configured" on a nil DB, and otherwise flips the
// default SecurityConfig row to enabled.
func TestSecurityHandler_EnsureSecurityConfigEnabled_Helper(t *testing.T) {
	handler := &SecurityHandler{db: nil}
	err := handler.ensureSecurityConfigEnabled()
	require.Error(t, err)
	require.Contains(t, err.Error(), "database not configured")
	db := setupTestDB(t)
	require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}))
	require.NoError(t, db.Create(&models.SecurityConfig{Name: "default", Enabled: false}).Error)
	handler = NewSecurityHandler(config.SecurityConfig{}, db, nil)
	require.NoError(t, handler.ensureSecurityConfigEnabled())
	var cfg models.SecurityConfig
	require.NoError(t, db.Where("name = ?", "default").First(&cfg).Error)
	require.True(t, cfg.Enabled)
}

// TestLatestConfigApplyState_Helper verifies latestConfigApplyState reports
// unavailable for a nil DB and for an empty caddy_configs table, and reports
// "applied" once a successful CaddyConfig row exists.
func TestLatestConfigApplyState_Helper(t *testing.T) {
	state := latestConfigApplyState(nil)
	require.Equal(t, false, state["available"])
	db := setupTestDB(t)
	require.NoError(t, db.AutoMigrate(&models.CaddyConfig{}))
	state = latestConfigApplyState(db)
	require.Equal(t, false, state["available"])
	require.NoError(t, db.Create(&models.CaddyConfig{Success: true}).Error)
	state = latestConfigApplyState(db)
	require.Equal(t, true, state["available"])
	require.Equal(t, "applied", state["status"])
}

View File

@@ -28,6 +28,14 @@ type mockCaddyConfigManager struct {
calls int
}
// mockCacheInvalidator counts InvalidateCache invocations so tests can assert
// that settings changes trigger a cache invalidation.
type mockCacheInvalidator struct {
	calls int
}

// InvalidateCache records the call; it performs no real invalidation.
func (m *mockCacheInvalidator) InvalidateCache() {
	m.calls++
}
func (m *mockCaddyConfigManager) ApplyConfig(ctx context.Context) error {
m.calls++
if m.applyFunc != nil {
@@ -359,6 +367,132 @@ func TestSettingsHandler_UpdateSetting_SecurityKeyApplyFailureReturnsError(t *te
assert.Equal(t, 1, mgr.calls)
}
// TestSettingsHandler_UpdateSetting_NonAdminForbidden verifies UpdateSetting
// rejects a caller whose context role is "user" with 403.
func TestSettingsHandler_UpdateSetting_NonAdminForbidden(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupSettingsTestDB(t)
	handler := handlers.NewSettingsHandler(db)
	router := gin.New()
	router.Use(func(c *gin.Context) {
		c.Set("role", "user")
		c.Next()
	})
	router.POST("/settings", handler.UpdateSetting)
	payload := map[string]string{"key": "security.waf.enabled", "value": "true"}
	body, _ := json.Marshal(payload)
	w := httptest.NewRecorder()
	req, _ := http.NewRequest("POST", "/settings", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(w, req)
	assert.Equal(t, http.StatusForbidden, w.Code)
}

// TestSettingsHandler_UpdateSetting_InvalidAdminWhitelist verifies that a
// whitelist value lacking a CIDR prefix is rejected with 400 and an
// "Invalid admin_whitelist" message.
func TestSettingsHandler_UpdateSetting_InvalidAdminWhitelist(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupSettingsTestDB(t)
	handler := handlers.NewSettingsHandler(db)
	router := newAdminRouter()
	router.POST("/settings", handler.UpdateSetting)
	payload := map[string]string{
		"key":   "security.admin_whitelist",
		"value": "invalid-cidr-without-prefix",
	}
	body, _ := json.Marshal(payload)
	w := httptest.NewRecorder()
	req, _ := http.NewRequest("POST", "/settings", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(w, req)
	assert.Equal(t, http.StatusBadRequest, w.Code)
	assert.Contains(t, w.Body.String(), "Invalid admin_whitelist")
}

// TestSettingsHandler_UpdateSetting_SecurityKeyInvalidatesCache verifies that
// updating a security.* setting both invalidates the cache and applies the
// Caddy configuration exactly once each.
func TestSettingsHandler_UpdateSetting_SecurityKeyInvalidatesCache(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupSettingsTestDB(t)
	mgr := &mockCaddyConfigManager{}
	inv := &mockCacheInvalidator{}
	handler := handlers.NewSettingsHandlerWithDeps(db, mgr, inv, nil, "")
	router := newAdminRouter()
	router.POST("/settings", handler.UpdateSetting)
	payload := map[string]string{
		"key":   "security.rate_limit.enabled",
		"value": "true",
	}
	body, _ := json.Marshal(payload)
	w := httptest.NewRecorder()
	req, _ := http.NewRequest("POST", "/settings", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(w, req)
	assert.Equal(t, http.StatusOK, w.Code)
	assert.Equal(t, 1, inv.calls)
	assert.Equal(t, 1, mgr.calls)
}

// TestSettingsHandler_PatchConfig_InvalidAdminWhitelist verifies PatchConfig
// rejects a malformed security.admin_whitelist value with 400.
func TestSettingsHandler_PatchConfig_InvalidAdminWhitelist(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupSettingsTestDB(t)
	handler := handlers.NewSettingsHandler(db)
	router := newAdminRouter()
	router.PATCH("/config", handler.PatchConfig)
	payload := map[string]any{
		"security": map[string]any{
			"admin_whitelist": "bad-cidr",
		},
	}
	body, _ := json.Marshal(payload)
	w := httptest.NewRecorder()
	req, _ := http.NewRequest(http.MethodPatch, "/config", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(w, req)
	assert.Equal(t, http.StatusBadRequest, w.Code)
	assert.Contains(t, w.Body.String(), "Invalid admin_whitelist")
}

// TestSettingsHandler_PatchConfig_ReloadFailureReturns500 verifies that when
// the Caddy reload fails after a security patch, the handler returns 500 —
// but the cache invalidation and the (failed) apply each still ran once.
func TestSettingsHandler_PatchConfig_ReloadFailureReturns500(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db := setupSettingsTestDB(t)
	mgr := &mockCaddyConfigManager{applyFunc: func(context.Context) error {
		return fmt.Errorf("reload failed")
	}}
	inv := &mockCacheInvalidator{}
	handler := handlers.NewSettingsHandlerWithDeps(db, mgr, inv, nil, "")
	router := newAdminRouter()
	router.PATCH("/config", handler.PatchConfig)
	payload := map[string]any{
		"security": map[string]any{
			"waf": map[string]any{"enabled": true},
		},
	}
	body, _ := json.Marshal(payload)
	w := httptest.NewRecorder()
	req, _ := http.NewRequest(http.MethodPatch, "/config", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(w, req)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
	assert.Equal(t, 1, inv.calls)
	assert.Equal(t, 1, mgr.calls)
	assert.Contains(t, w.Body.String(), "Failed to reload configuration")
}
func TestSettingsHandler_PatchConfig_SyncsAdminWhitelist(t *testing.T) {
gin.SetMode(gin.TestMode)
db := setupSettingsTestDB(t)

View File

@@ -0,0 +1,65 @@
package handlers
import (
"testing"
"github.com/Wikid82/charon/backend/internal/models"
"github.com/stretchr/testify/require"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
)
// setupSettingsWave3DB opens a fresh in-memory SQLite database with the
// SecurityConfig and Setting schemas migrated, for use by the wave-3 tests.
func setupSettingsWave3DB(t *testing.T) *gorm.DB {
	t.Helper()
	db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SecurityConfig{}, &models.Setting{}))
	return db
}

// TestSettingsHandler_EnsureSecurityConfigEnabledWithDB_Branches covers all
// three branches of ensureSecurityConfigEnabledWithDB: the default row is
// missing (created enabled), present but disabled (flipped to enabled), and
// already enabled (no-op).
func TestSettingsHandler_EnsureSecurityConfigEnabledWithDB_Branches(t *testing.T) {
	db := setupSettingsWave3DB(t)
	h := &SettingsHandler{DB: db}
	// Record missing -> create enabled
	require.NoError(t, h.ensureSecurityConfigEnabledWithDB(db))
	var cfg models.SecurityConfig
	require.NoError(t, db.Where("name = ?", "default").First(&cfg).Error)
	require.True(t, cfg.Enabled)
	// Record exists enabled=false -> update to true
	require.NoError(t, db.Model(&cfg).Update("enabled", false).Error)
	require.NoError(t, h.ensureSecurityConfigEnabledWithDB(db))
	require.NoError(t, db.Where("name = ?", "default").First(&cfg).Error)
	require.True(t, cfg.Enabled)
	// Record exists enabled=true -> no-op success
	require.NoError(t, h.ensureSecurityConfigEnabledWithDB(db))
}
// TestFlattenConfig_MixedTypes verifies that flattenConfig collapses nested
// maps into dot-separated keys and renders bool, int, and string leaves as
// their string forms.
func TestFlattenConfig_MixedTypes(t *testing.T) {
	nested := map[string]interface{}{
		"name": "charon",
		"security": map[string]interface{}{
			"acl":        map[string]interface{}{"enabled": true},
			"rate_limit": map[string]interface{}{"requests": 100},
		},
	}
	flat := map[string]string{}
	flattenConfig(nested, "", flat)
	require.Equal(t, "charon", flat["name"])
	require.Equal(t, "true", flat["security.acl.enabled"])
	require.Equal(t, "100", flat["security.rate_limit.requests"])
}
// TestValidateAdminWhitelist_Strictness verifies that an empty whitelist and
// a comma-separated CIDR list are accepted, while a bare IP without a prefix
// is rejected.
func TestValidateAdminWhitelist_Strictness(t *testing.T) {
	for _, valid := range []string{"", "192.0.2.0/24, 198.51.100.10/32"} {
		require.NoError(t, validateAdminWhitelist(valid))
	}
	require.Error(t, validateAdminWhitelist("192.0.2.1"))
}

View File

@@ -0,0 +1,200 @@
package handlers
import (
"bytes"
"context"
"encoding/json"
"fmt"
"net/http"
"net/http/httptest"
"testing"
"github.com/Wikid82/charon/backend/internal/models"
"github.com/Wikid82/charon/backend/internal/services"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/require"
"gorm.io/gorm"
)
// wave4CaddyManager stubs the Caddy config manager: it counts ApplyConfig
// calls and returns a preconfigured error (nil by default).
type wave4CaddyManager struct {
	calls int
	err   error
}

// ApplyConfig records the call and returns the configured error.
func (m *wave4CaddyManager) ApplyConfig(context.Context) error {
	m.calls++
	return m.err
}

// wave4CacheInvalidator counts cache-invalidation calls for assertions.
type wave4CacheInvalidator struct {
	calls int
}

// InvalidateCache records the call; it performs no real invalidation.
func (i *wave4CacheInvalidator) InvalidateCache() {
	i.calls++
}

// registerCreatePermissionDeniedHook installs a before-create GORM callback
// that injects a "permission denied" error whenever shouldFail reports true
// for the transaction. The hook is removed automatically via t.Cleanup.
func registerCreatePermissionDeniedHook(t *testing.T, db *gorm.DB, name string, shouldFail func(*gorm.DB) bool) {
	t.Helper()
	require.NoError(t, db.Callback().Create().Before("gorm:create").Register(name, func(tx *gorm.DB) {
		if shouldFail(tx) {
			_ = tx.AddError(fmt.Errorf("permission denied"))
		}
	}))
	t.Cleanup(func() {
		_ = db.Callback().Create().Remove(name)
	})
}
// settingKeyFromCreateCallback extracts the Setting.Key from a GORM create
// callback's destination. It returns "" when the transaction or destination
// is absent, or when the destination is not a models.Setting value/pointer.
func settingKeyFromCreateCallback(tx *gorm.DB) string {
	if tx == nil || tx.Statement == nil || tx.Statement.Dest == nil {
		return ""
	}
	if ptr, ok := tx.Statement.Dest.(*models.Setting); ok {
		return ptr.Key
	}
	if val, ok := tx.Statement.Dest.(models.Setting); ok {
		return val.Key
	}
	return ""
}
// performUpdateSettingRequest POSTs the JSON-encoded payload to /settings as
// an admin user and returns the recorded response.
func performUpdateSettingRequest(t *testing.T, h *SettingsHandler, payload map[string]any) *httptest.ResponseRecorder {
	t.Helper()
	g := gin.New()
	// Pre-authorize every request as admin user 1.
	g.Use(func(c *gin.Context) {
		c.Set("role", "admin")
		c.Set("userID", uint(1))
		c.Next()
	})
	g.POST("/settings", h.UpdateSetting)
	body, err := json.Marshal(payload)
	require.NoError(t, err)
	w := httptest.NewRecorder()
	req := httptest.NewRequest(http.MethodPost, "/settings", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	g.ServeHTTP(w, req)
	return w
}

// performPatchConfigRequest PATCHes the JSON-encoded payload to /config as an
// admin user and returns the recorded response.
func performPatchConfigRequest(t *testing.T, h *SettingsHandler, payload map[string]any) *httptest.ResponseRecorder {
	t.Helper()
	g := gin.New()
	// Pre-authorize every request as admin user 1.
	g.Use(func(c *gin.Context) {
		c.Set("role", "admin")
		c.Set("userID", uint(1))
		c.Next()
	})
	g.PATCH("/config", h.PatchConfig)
	body, err := json.Marshal(payload)
	require.NoError(t, err)
	w := httptest.NewRecorder()
	req := httptest.NewRequest(http.MethodPatch, "/config", bytes.NewBuffer(body))
	req.Header.Set("Content-Type", "application/json")
	g.ServeHTTP(w, req)
	return w
}
// TestSettingsHandlerWave4_UpdateSetting_ACLPathsPermissionErrors verifies
// that a permission-denied failure while upserting the feature.cerberus
// setting (a side effect of enabling the ACL) surfaces as a 500 with the
// permissions_write_denied error code.
func TestSettingsHandlerWave4_UpdateSetting_ACLPathsPermissionErrors(t *testing.T) {
	t.Run("feature cerberus upsert permission denied", func(t *testing.T) {
		db := setupSettingsWave3DB(t)
		// Fail only the create of the feature.cerberus.enabled setting.
		registerCreatePermissionDeniedHook(t, db, "wave4-deny-feature-cerberus", func(tx *gorm.DB) bool {
			return settingKeyFromCreateCallback(tx) == "feature.cerberus.enabled"
		})
		h := NewSettingsHandler(db)
		h.SecuritySvc = services.NewSecurityService(db)
		h.DataRoot = "/app/data"
		w := performUpdateSettingRequest(t, h, map[string]any{
			"key":   "security.acl.enabled",
			"value": "true",
		})
		require.Equal(t, http.StatusInternalServerError, w.Code)
		require.Contains(t, w.Body.String(), "permissions_write_denied")
	})
}

// TestSettingsHandlerWave4_PatchConfig_SecurityReloadSuccessLogsPath verifies
// the happy path: a security patch returns 200 and triggers exactly one Caddy
// apply and one cache invalidation.
func TestSettingsHandlerWave4_PatchConfig_SecurityReloadSuccessLogsPath(t *testing.T) {
	db := setupSettingsWave3DB(t)
	mgr := &wave4CaddyManager{}
	inv := &wave4CacheInvalidator{}
	h := NewSettingsHandlerWithDeps(db, mgr, inv, nil, "")
	w := performPatchConfigRequest(t, h, map[string]any{
		"security": map[string]any{
			"waf": map[string]any{"enabled": true},
		},
	})
	require.Equal(t, http.StatusOK, w.Code)
	require.Equal(t, 1, mgr.calls)
	require.Equal(t, 1, inv.calls)
}

// TestSettingsHandlerWave4_UpdateSetting_GenericSaveError verifies that a
// non-permission save failure (generic "boom") yields a 500 with the generic
// "Failed to save setting" message rather than the permission error code.
func TestSettingsHandlerWave4_UpdateSetting_GenericSaveError(t *testing.T) {
	db := setupSettingsWave3DB(t)
	require.NoError(t, db.Callback().Create().Before("gorm:create").Register("wave4-generic-save-error", func(tx *gorm.DB) {
		if settingKeyFromCreateCallback(tx) == "security.waf.enabled" {
			_ = tx.AddError(fmt.Errorf("boom"))
		}
	}))
	t.Cleanup(func() {
		_ = db.Callback().Create().Remove("wave4-generic-save-error")
	})
	h := NewSettingsHandler(db)
	h.SecuritySvc = services.NewSecurityService(db)
	h.DataRoot = "/app/data"
	w := performUpdateSettingRequest(t, h, map[string]any{
		"key":   "security.waf.enabled",
		"value": "true",
	})
	require.Equal(t, http.StatusInternalServerError, w.Code)
	require.Contains(t, w.Body.String(), "Failed to save setting")
}

// TestSettingsHandlerWave4_PatchConfig_InvalidAdminWhitelistFromSync verifies
// that a CIDR with a trailing slash but no prefix length ("10.10.10.10/") is
// rejected with 400 during the whitelist sync path of PatchConfig.
func TestSettingsHandlerWave4_PatchConfig_InvalidAdminWhitelistFromSync(t *testing.T) {
	db := setupSettingsWave3DB(t)
	h := NewSettingsHandler(db)
	h.SecuritySvc = services.NewSecurityService(db)
	h.DataRoot = "/app/data"
	w := performPatchConfigRequest(t, h, map[string]any{
		"security": map[string]any{
			"admin_whitelist": "10.10.10.10/",
		},
	})
	require.Equal(t, http.StatusBadRequest, w.Code)
	require.Contains(t, w.Body.String(), "Invalid admin_whitelist")
}

// TestSettingsHandlerWave4_TestPublicURL_BindError verifies TestPublicURL
// returns 400 when the request body is truncated/invalid JSON.
func TestSettingsHandlerWave4_TestPublicURL_BindError(t *testing.T) {
	db := setupSettingsWave3DB(t)
	h := NewSettingsHandler(db)
	g := gin.New()
	g.Use(func(c *gin.Context) {
		c.Set("role", "admin")
		c.Set("userID", uint(1))
		c.Next()
	})
	g.POST("/settings/test-public-url", h.TestPublicURL)
	w := httptest.NewRecorder()
	// "{" is deliberately malformed JSON to trigger the bind error path.
	req := httptest.NewRequest(http.MethodPost, "/settings/test-public-url", bytes.NewBufferString("{"))
	req.Header.Set("Content-Type", "application/json")
	g.ServeHTTP(w, req)
	require.Equal(t, http.StatusBadRequest, w.Code)
}

View File

@@ -10,16 +10,30 @@ import (
"path/filepath"
"syscall"
"testing"
"time"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/require"
"github.com/Wikid82/charon/backend/internal/config"
"github.com/Wikid82/charon/backend/internal/models"
"github.com/Wikid82/charon/backend/internal/services"
"github.com/Wikid82/charon/backend/internal/util"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
)
// stubPermissionChecker is a no-op permission checker used to bypass real
// filesystem permission inspection in handler tests.
type stubPermissionChecker struct{}

// fakeNoStatFileInfo is an os.FileInfo whose Sys() is nil (not a *syscall.Stat_t),
// used to exercise the non-stat branch of ownership checks.
type fakeNoStatFileInfo struct{}

func (fakeNoStatFileInfo) Name() string       { return "fake" }
func (fakeNoStatFileInfo) Size() int64        { return 0 }
func (fakeNoStatFileInfo) Mode() os.FileMode  { return 0 }
func (fakeNoStatFileInfo) ModTime() time.Time { return time.Time{} }
func (fakeNoStatFileInfo) IsDir() bool        { return false }
func (fakeNoStatFileInfo) Sys() any           { return nil }
func (stubPermissionChecker) Check(path, required string) util.PermissionCheck {
return util.PermissionCheck{
Path: path,
@@ -192,6 +206,12 @@ func TestSystemPermissionsHandler_PathHasSymlink(t *testing.T) {
require.Error(t, err)
}
// TestSystemPermissionsHandler_NewDefaultsCheckerToOSChecker verifies that
// constructing the handler with a nil checker falls back to a non-nil default
// (the OS checker).
func TestSystemPermissionsHandler_NewDefaultsCheckerToOSChecker(t *testing.T) {
	h := NewSystemPermissionsHandler(config.Config{}, nil, nil)
	require.NotNil(t, h)
	require.NotNil(t, h.checker)
}
func TestSystemPermissionsHandler_RepairPermissions_DisabledWhenNotSingleContainer(t *testing.T) {
gin.SetMode(gin.TestMode)
@@ -289,6 +309,132 @@ func TestSystemPermissionsHandler_RepairPermissions_Success(t *testing.T) {
require.NotEqual(t, "error", payload.Paths[0].Status)
}
// TestSystemPermissionsHandler_RepairPermissions_NonAdmin verifies that a
// caller with role "user" is rejected with 403 before any repair is attempted.
func TestSystemPermissionsHandler_RepairPermissions_NonAdmin(t *testing.T) {
	gin.SetMode(gin.TestMode)
	h := NewSystemPermissionsHandler(config.Config{SingleContainer: true}, nil, stubPermissionChecker{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Set("role", "user")
	c.Request = httptest.NewRequest(http.MethodPost, "/system/permissions/repair", bytes.NewBufferString(`{"paths":["/tmp"]}`))
	c.Request.Header.Set("Content-Type", "application/json")
	h.RepairPermissions(c)
	require.Equal(t, http.StatusForbidden, w.Code)
}

// TestSystemPermissionsHandler_RepairPermissions_InvalidJSONWhenRoot verifies
// that, when running as root, a truncated JSON body is rejected with 400.
// Skipped unless the test process has euid 0.
func TestSystemPermissionsHandler_RepairPermissions_InvalidJSONWhenRoot(t *testing.T) {
	if os.Geteuid() != 0 {
		t.Skip("test requires root execution")
	}
	gin.SetMode(gin.TestMode)
	root := t.TempDir()
	dataDir := filepath.Join(root, "data")
	require.NoError(t, os.MkdirAll(dataDir, 0o750))
	h := NewSystemPermissionsHandler(config.Config{
		SingleContainer: true,
		DatabasePath:    filepath.Join(dataDir, "charon.db"),
		ConfigRoot:      dataDir,
		CaddyLogDir:     dataDir,
		CrowdSecLogDir:  dataDir,
	}, nil, stubPermissionChecker{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Set("role", "admin")
	// Deliberately truncated JSON body.
	c.Request = httptest.NewRequest(http.MethodPost, "/system/permissions/repair", bytes.NewBufferString(`{"paths":`))
	c.Request.Header.Set("Content-Type", "application/json")
	h.RepairPermissions(c)
	require.Equal(t, http.StatusBadRequest, w.Code)
}

// TestSystemPermissionsHandler_DefaultPathsAndAllowlistRoots verifies the set
// of default diagnostic paths (11 entries, data dir first, plugins dir last)
// and the derived allowlist roots.
func TestSystemPermissionsHandler_DefaultPathsAndAllowlistRoots(t *testing.T) {
	h := NewSystemPermissionsHandler(config.Config{
		DatabasePath:   "/app/data/charon.db",
		ConfigRoot:     "/app/config",
		CaddyLogDir:    "/var/log/caddy",
		CrowdSecLogDir: "/var/log/crowdsec",
		PluginsDir:     "/app/plugins",
	}, nil, stubPermissionChecker{})
	paths := h.defaultPaths()
	require.Len(t, paths, 11)
	require.Equal(t, "/app/data", paths[0].Path)
	require.Equal(t, "/app/plugins", paths[len(paths)-1].Path)
	roots := h.allowlistRoots()
	require.Equal(t, []string{"/app/data", "/app/config", "/var/log/caddy", "/var/log/crowdsec"}, roots)
}

// TestSystemPermissionsHandler_IsOwnedByFalseWhenSysNotStat verifies that
// isOwnedBy reports false when FileInfo.Sys() is not a *syscall.Stat_t.
func TestSystemPermissionsHandler_IsOwnedByFalseWhenSysNotStat(t *testing.T) {
	owned := isOwnedBy(fakeNoStatFileInfo{}, os.Geteuid(), os.Getegid())
	require.False(t, owned)
}

// TestSystemPermissionsHandler_IsWithinAllowlist_RelErrorBranch verifies that
// a root containing a NUL byte (which makes filepath.Rel fail) is skipped and
// the remaining valid root still matches.
func TestSystemPermissionsHandler_IsWithinAllowlist_RelErrorBranch(t *testing.T) {
	tmp := t.TempDir()
	inAllow := filepath.Join(tmp, "a", "b")
	require.NoError(t, os.MkdirAll(inAllow, 0o750))
	badRoot := string([]byte{'/', 0, 'x'})
	allowed := isWithinAllowlist(inAllow, []string{badRoot, tmp})
	require.True(t, allowed)
}

// TestSystemPermissionsHandler_IsWithinAllowlist_AllRelErrorsReturnFalse
// verifies that when every allowlist root is invalid, the check denies.
func TestSystemPermissionsHandler_IsWithinAllowlist_AllRelErrorsReturnFalse(t *testing.T) {
	badRoot1 := string([]byte{'/', 0, 'x'})
	badRoot2 := string([]byte{'/', 0, 'y'})
	allowed := isWithinAllowlist("/tmp/some/path", []string{badRoot1, badRoot2})
	require.False(t, allowed)
}

// TestSystemPermissionsHandler_LogAudit_PersistsAuditWithUserID verifies that
// logAudit does not panic when a userID is present on the context.
// NOTE(review): userID is set as int (42) rather than uint — presumably
// exercising the type-coercion path; confirm against logAudit if changed.
func TestSystemPermissionsHandler_LogAudit_PersistsAuditWithUserID(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SecurityAudit{}))
	securitySvc := services.NewSecurityService(db)
	h := NewSystemPermissionsHandler(config.Config{}, securitySvc, stubPermissionChecker{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Set("role", "admin")
	c.Set("userID", 42)
	c.Request = httptest.NewRequest(http.MethodGet, "/system/permissions", http.NoBody)
	require.NotPanics(t, func() {
		h.logAudit(c, "permissions_diagnostics", "ok", "", 2)
	})
}

// TestSystemPermissionsHandler_LogAudit_PersistsAuditWithUnknownActor verifies
// that logAudit does not panic when no userID is set on the context.
func TestSystemPermissionsHandler_LogAudit_PersistsAuditWithUnknownActor(t *testing.T) {
	gin.SetMode(gin.TestMode)
	db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, db.AutoMigrate(&models.SecurityAudit{}))
	securitySvc := services.NewSecurityService(db)
	h := NewSystemPermissionsHandler(config.Config{}, securitySvc, stubPermissionChecker{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Set("role", "admin")
	c.Request = httptest.NewRequest(http.MethodGet, "/system/permissions", http.NoBody)
	require.NotPanics(t, func() {
		h.logAudit(c, "permissions_diagnostics", "ok", "", 1)
	})
}
func TestSystemPermissionsHandler_RepairPath_Branches(t *testing.T) {
h := NewSystemPermissionsHandler(config.Config{}, nil, stubPermissionChecker{})
allowRoot := t.TempDir()
@@ -360,3 +506,87 @@ func TestSystemPermissionsHandler_RepairPath_Branches(t *testing.T) {
require.Equal(t, "0600", result.ModeAfter)
})
}
// TestSystemPermissionsHandler_OSChecker_Check verifies the real OSChecker
// reports path, required mode, and existence for a file it can stat.
// Skipped unless running as root (CI-owned temp paths).
func TestSystemPermissionsHandler_OSChecker_Check(t *testing.T) {
	if os.Geteuid() != 0 {
		t.Skip("test expects root-owned temp paths in CI")
	}
	tmp := t.TempDir()
	filePath := filepath.Join(tmp, "check.txt")
	require.NoError(t, os.WriteFile(filePath, []byte("ok"), 0o600))
	checker := OSChecker{}
	result := checker.Check(filePath, "rw")
	require.Equal(t, filePath, result.Path)
	require.Equal(t, "rw", result.Required)
	require.True(t, result.Exists)
}

// TestSystemPermissionsHandler_RepairPermissions_InvalidRequestBody_Root
// verifies that a JSON body missing the required "paths" field is rejected
// with 400 when running as root.
func TestSystemPermissionsHandler_RepairPermissions_InvalidRequestBody_Root(t *testing.T) {
	if os.Geteuid() != 0 {
		t.Skip("test requires root execution")
	}
	gin.SetMode(gin.TestMode)
	tmp := t.TempDir()
	dataDir := filepath.Join(tmp, "data")
	require.NoError(t, os.MkdirAll(dataDir, 0o750))
	h := NewSystemPermissionsHandler(config.Config{
		SingleContainer: true,
		DatabasePath:    filepath.Join(dataDir, "charon.db"),
		ConfigRoot:      dataDir,
		CaddyLogDir:     dataDir,
		CrowdSecLogDir:  dataDir,
		PluginsDir:      filepath.Join(tmp, "plugins"),
	}, nil, stubPermissionChecker{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Set("role", "admin")
	// Valid JSON, but no "paths" field.
	c.Request = httptest.NewRequest(http.MethodPost, "/system/permissions/repair", bytes.NewBufferString(`{"group_mode":true}`))
	c.Request.Header.Set("Content-Type", "application/json")
	h.RepairPermissions(c)
	require.Equal(t, http.StatusBadRequest, w.Code)
}

// TestSystemPermissionsHandler_RepairPath_LstatInvalidArgument verifies that a
// path containing a NUL byte (lstat fails with EINVAL) yields an "error"
// status with the permissions_repair_failed code.
func TestSystemPermissionsHandler_RepairPath_LstatInvalidArgument(t *testing.T) {
	h := NewSystemPermissionsHandler(config.Config{}, nil, stubPermissionChecker{})
	allowRoot := t.TempDir()
	result := h.repairPath("/tmp/\x00invalid", false, []string{allowRoot})
	require.Equal(t, "error", result.Status)
	require.Equal(t, "permissions_repair_failed", result.ErrorCode)
}

// TestSystemPermissionsHandler_RepairPath_RepairedBranch verifies that repair
// in group mode rewrites a 0600 file to 0660 and reports status "repaired".
// Skipped unless running as root (chown/chmod rights).
func TestSystemPermissionsHandler_RepairPath_RepairedBranch(t *testing.T) {
	if os.Geteuid() != 0 {
		t.Skip("test requires root execution")
	}
	h := NewSystemPermissionsHandler(config.Config{}, nil, stubPermissionChecker{})
	allowRoot := t.TempDir()
	targetFile := filepath.Join(allowRoot, "needs-repair.txt")
	require.NoError(t, os.WriteFile(targetFile, []byte("ok"), 0o600))
	result := h.repairPath(targetFile, true, []string{allowRoot})
	require.Equal(t, "repaired", result.Status)
	require.Equal(t, "0660", result.ModeAfter)
	info, err := os.Stat(targetFile)
	require.NoError(t, err)
	require.Equal(t, os.FileMode(0o660), info.Mode().Perm())
}

// TestSystemPermissionsHandler_NormalizePath_ParentRefBranches verifies that
// normalizePath resolves ".." components (including at the root) without
// flagging an error code.
func TestSystemPermissionsHandler_NormalizePath_ParentRefBranches(t *testing.T) {
	clean, code := normalizePath("/../etc")
	require.Equal(t, "/etc", clean)
	require.Empty(t, code)
	clean, code = normalizePath("/var/../etc")
	require.Equal(t, "/etc", clean)
	require.Empty(t, code)
}

View File

@@ -0,0 +1,57 @@
package handlers
import (
"bytes"
"encoding/json"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"syscall"
"testing"
"github.com/Wikid82/charon/backend/internal/config"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/require"
)
// TestSystemPermissionsWave6_RepairPermissions_NonRootBranchViaSeteuid
// verifies that RepairPermissions refuses to run as a non-root effective user:
// the test starts as root, drops euid to 65534 (nobody), and expects 403 with
// the permissions_non_root error code. The euid is restored on exit.
// Skipped when not started as root or when Seteuid is unavailable.
func TestSystemPermissionsWave6_RepairPermissions_NonRootBranchViaSeteuid(t *testing.T) {
	if os.Geteuid() != 0 {
		t.Skip("test requires root execution")
	}
	if err := syscall.Seteuid(65534); err != nil {
		t.Skip("unable to drop euid for test")
	}
	defer func() {
		// Restore root euid so later tests are unaffected.
		restoreErr := syscall.Seteuid(0)
		require.NoError(t, restoreErr)
	}()
	gin.SetMode(gin.TestMode)
	root := t.TempDir()
	dataDir := filepath.Join(root, "data")
	require.NoError(t, os.MkdirAll(dataDir, 0o750))
	h := NewSystemPermissionsHandler(config.Config{
		SingleContainer: true,
		DatabasePath:    filepath.Join(dataDir, "charon.db"),
		ConfigRoot:      dataDir,
		CaddyLogDir:     dataDir,
		CrowdSecLogDir:  dataDir,
	}, nil, stubPermissionChecker{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Set("role", "admin")
	c.Request = httptest.NewRequest(http.MethodPost, "/system/permissions/repair", bytes.NewBufferString(`{"paths":["/tmp"]}`))
	c.Request.Header.Set("Content-Type", "application/json")
	h.RepairPermissions(c)
	require.Equal(t, http.StatusForbidden, w.Code)
	var payload map[string]string
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &payload))
	require.Equal(t, "permissions_non_root", payload["error_code"])
}

View File

@@ -3,6 +3,8 @@ package routes
import (
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"strings"
"testing"
@@ -1164,3 +1166,20 @@ func TestEmergencyBypass_UnauthorizedIP(t *testing.T) {
// Should not activate bypass (unauthorized IP)
assert.NotEqual(t, http.StatusNotFound, w.Code)
}
// TestRegister_CreatesAccessLogFileForLogWatcher verifies that Register
// creates the Caddy access log file pointed to by CHARON_CADDY_ACCESS_LOG.
func TestRegister_CreatesAccessLogFileForLogWatcher(t *testing.T) {
	gin.SetMode(gin.TestMode)
	accessLog := filepath.Join(t.TempDir(), "logs", "access.log")
	t.Setenv("CHARON_CADDY_ACCESS_LOG", accessLog)
	db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared&_test_access_log_create"), &gorm.Config{})
	require.NoError(t, err)
	router := gin.New()
	require.NoError(t, Register(router, db, config.Config{JWTSecret: "test-secret"}))
	_, statErr := os.Stat(accessLog)
	assert.NoError(t, statErr)
}

View File

@@ -130,6 +130,28 @@ func TestGetEnvAny(t *testing.T) {
assert.Equal(t, "fallback", result) // Empty strings are treated as not set
}
// TestGetEnvIntAny covers the fallback, first-key, second-key, and
// parse-failure branches of getEnvIntAny.
func TestGetEnvIntAny(t *testing.T) {
	const fallback = 42
	t.Run("returns fallback when unset", func(t *testing.T) {
		assert.Equal(t, fallback, getEnvIntAny(fallback, "MISSING_INT_A", "MISSING_INT_B"))
	})
	t.Run("returns parsed value from first key", func(t *testing.T) {
		t.Setenv("TEST_INT_A", "123")
		assert.Equal(t, 123, getEnvIntAny(fallback, "TEST_INT_A", "TEST_INT_B"))
	})
	t.Run("returns parsed value from second key", func(t *testing.T) {
		t.Setenv("TEST_INT_A", "")
		t.Setenv("TEST_INT_B", "77")
		assert.Equal(t, 77, getEnvIntAny(fallback, "TEST_INT_A", "TEST_INT_B"))
	})
	t.Run("returns fallback when parse fails", func(t *testing.T) {
		t.Setenv("TEST_INT_BAD", "not-a-number")
		assert.Equal(t, fallback, getEnvIntAny(fallback, "TEST_INT_BAD"))
	})
}
func TestLoad_SecurityConfig(t *testing.T) {
tempDir := t.TempDir()
t.Setenv("CHARON_DB_PATH", filepath.Join(tempDir, "test.db"))

View File

@@ -531,3 +531,34 @@ func TestRotationServiceZeroDowntime(t *testing.T) {
assert.Equal(t, "secret", credentials["api_key"])
})
}
// TestRotateProviderCredentials_InvalidJSONAfterDecrypt verifies that
// rotateProviderCredentials rejects credentials that decrypt successfully
// but do not contain valid JSON.
func TestRotateProviderCredentials_InvalidJSONAfterDecrypt(t *testing.T) {
	db := setupTestDB(t)
	currentKey, nextKey, _ := setupTestKeys(t)
	currentService, err := NewEncryptionService(currentKey)
	require.NoError(t, err)
	// Encrypt a payload that is valid ciphertext but not valid JSON.
	invalidJSONPlaintext := []byte("not-json")
	encrypted, err := currentService.Encrypt(invalidJSONPlaintext)
	require.NoError(t, err)
	provider := models.DNSProvider{
		UUID:                 "test-invalid-json",
		Name:                 "Invalid JSON Provider",
		ProviderType:         "cloudflare",
		CredentialsEncrypted: encrypted,
		KeyVersion:           1,
	}
	require.NoError(t, db.Create(&provider).Error)
	// t.Setenv restores the previous value (or unsets) automatically at
	// cleanup, unlike the os.Setenv/Unsetenv pair, which would clobber any
	// pre-existing CHARON_ENCRYPTION_KEY_NEXT value.
	t.Setenv("CHARON_ENCRYPTION_KEY_NEXT", nextKey)
	rs, err := NewRotationService(db)
	require.NoError(t, err)
	err = rs.rotateProviderCredentials(context.Background(), &provider)
	require.Error(t, err)
	assert.Contains(t, err.Error(), "invalid credential format after decryption")
}

View File

@@ -86,6 +86,18 @@ func TestResolveThreshold(t *testing.T) {
}
}
// TestResolveThreshold_WithNilLookupUsesOSLookupEnv checks that a nil lookup
// function falls back to os.LookupEnv and reports "env" as the source.
func TestResolveThreshold_WithNilLookupUsesOSLookupEnv(t *testing.T) {
	t.Setenv("PATCH_THRESHOLD_TEST", "91.2")
	got := ResolveThreshold("PATCH_THRESHOLD_TEST", 85.0, nil)
	if got.Value != 91.2 {
		t.Fatalf("expected env value 91.2, got %.1f", got.Value)
	}
	if got.Source != "env" {
		t.Fatalf("expected source env, got %s", got.Source)
	}
}
func TestParseUnifiedDiffChangedLines(t *testing.T) {
t.Parallel()
@@ -116,6 +128,26 @@ index 3333333..4444444 100644
assertHasLines(t, frontendChanged, "frontend/src/App.tsx", []int{21, 22})
}
// TestParseUnifiedDiffChangedLines_InvalidHunkStartReturnsError confirms that
// a hunk header with a malformed start coordinate is handled gracefully
// (no error, no changed lines) rather than failing the whole parse.
func TestParseUnifiedDiffChangedLines_InvalidHunkStartReturnsError(t *testing.T) {
	t.Parallel()
	diff := `diff --git a/backend/internal/app.go b/backend/internal/app.go
index 1111111..2222222 100644
--- a/backend/internal/app.go
+++ b/backend/internal/app.go
@@ -1,1 +abc,2 @@
+line
`
	backend, frontend, err := ParseUnifiedDiffChangedLines(diff)
	if err != nil {
		t.Fatalf("expected graceful handling for invalid hunk, got error: %v", err)
	}
	if len(backend) != 0 || len(frontend) != 0 {
		t.Fatalf("expected no changed lines for invalid hunk, got backend=%v frontend=%v", backend, frontend)
	}
}
func TestBackendChangedLineCoverageComputation(t *testing.T) {
t.Parallel()
@@ -347,6 +379,30 @@ func TestComputeFilesNeedingCoverage_IncludesUncoveredAndSortsDeterministically(
}
}
// TestComputeFilesNeedingCoverage_IncludesFullyCoveredWhenThresholdAbove100
// ensures a 100%-covered file is still reported when the threshold exceeds 100.
func TestComputeFilesNeedingCoverage_IncludesFullyCoveredWhenThresholdAbove100(t *testing.T) {
	t.Parallel()
	changed := FileLineSet{
		"backend/internal/fully.go": {10: {}, 11: {}},
	}
	cov := CoverageData{
		Executable: FileLineSet{"backend/internal/fully.go": {10: {}, 11: {}}},
		Covered:    FileLineSet{"backend/internal/fully.go": {10: {}, 11: {}}},
	}
	details := ComputeFilesNeedingCoverage(changed, cov, 101)
	if len(details) != 1 {
		t.Fatalf("expected 1 file detail when threshold is 101, got %d", len(details))
	}
	if details[0].PatchCoveragePct != 100.0 {
		t.Fatalf("expected 100%% patch coverage detail, got %.1f", details[0].PatchCoveragePct)
	}
}
func TestMergeFileCoverageDetails_SortsWorstCoverageThenPath(t *testing.T) {
t.Parallel()
@@ -371,3 +427,113 @@ func TestMergeFileCoverageDetails_SortsWorstCoverageThenPath(t *testing.T) {
t.Fatalf("unexpected merged order: got %s want %s", got, want)
}
}
// TestParseCoverageRange_ErrorBranches walks the malformed-input branches of
// parseCoverageRange.
func TestParseCoverageRange_ErrorBranches(t *testing.T) {
	t.Parallel()
	cases := []struct{ input, msg string }{
		{"missing-colon", "expected error for missing colon"},
		{"file.go:10.1", "expected error for missing end coordinate"},
		{"file.go:bad.1,10.1", "expected error for bad start line"},
		{"file.go:10.1,9.1", "expected error for reversed range"},
	}
	for _, tc := range cases {
		if _, _, _, err := parseCoverageRange(tc.input); err == nil {
			t.Fatal(tc.msg)
		}
	}
}
// TestSortedWarnings_FiltersBlanksAndSorts checks that blank and
// whitespace-only warnings are dropped and the remainder sorted.
func TestSortedWarnings_FiltersBlanksAndSorts(t *testing.T) {
	t.Parallel()
	input := []string{"z warning", "", " ", "a warning"}
	joined := strings.Join(SortedWarnings(input), ",")
	if want := "a warning,z warning"; joined != want {
		t.Fatalf("unexpected warnings ordering: got %q want %q", joined, want)
	}
}
// TestNormalizePathsAndRanges spot-checks the coverage path normalizers and
// the line-range formatter.
func TestNormalizePathsAndRanges(t *testing.T) {
	t.Parallel()
	if got := normalizeGoCoveragePath("internal/service.go"); got != "backend/internal/service.go" {
		t.Fatalf("unexpected normalized go path: %s", got)
	}
	if got := normalizeGoCoveragePath("/tmp/work/backend/internal/service.go"); got != "backend/internal/service.go" {
		t.Fatalf("unexpected backend extraction path: %s", got)
	}
	if candidates := normalizeFrontendCoveragePaths("/tmp/work/frontend/src/App.tsx"); len(candidates) == 0 {
		t.Fatal("expected frontend normalized paths")
	}
	gotRanges := strings.Join(formatLineRanges([]int{1, 2, 3, 7, 9, 10}), ",")
	if wantRanges := "1-3,7,9-10"; gotRanges != wantRanges {
		t.Fatalf("unexpected ranges: got %q want %q", gotRanges, wantRanges)
	}
}
// TestScopeCoverageMergeAndStatus verifies merged scope arithmetic and the
// pass/fail status derivation.
func TestScopeCoverageMergeAndStatus(t *testing.T) {
	t.Parallel()
	combined := MergeScopeCoverage(
		ScopeCoverage{ChangedLines: 4, CoveredLines: 3},
		ScopeCoverage{ChangedLines: 0, CoveredLines: 0},
	)
	if combined.ChangedLines != 4 || combined.CoveredLines != 3 || combined.PatchCoveragePct != 75.0 {
		t.Fatalf("unexpected merged scope: %+v", combined)
	}
	if status := ApplyStatus(combined, 70); status.Status != "pass" {
		t.Fatalf("expected pass status, got %s", status.Status)
	}
}
// TestParseCoverageProfiles_InvalidPath checks that blank profile paths are
// rejected by both profile parsers.
func TestParseCoverageProfiles_InvalidPath(t *testing.T) {
	t.Parallel()
	if _, err := ParseGoCoverageProfile(" "); err == nil {
		t.Fatal("expected go profile path validation error")
	}
	if _, err := ParseLCOVProfile("\t"); err == nil {
		t.Fatal("expected lcov profile path validation error")
	}
}
// TestNormalizeFrontendCoveragePaths_EmptyInput ensures blank input still
// yields fallback candidate paths.
func TestNormalizeFrontendCoveragePaths_EmptyInput(t *testing.T) {
	t.Parallel()
	if got := normalizeFrontendCoveragePaths(" "); len(got) == 0 {
		t.Fatalf("expected normalized fallback paths, got %#v", got)
	}
}
// TestAddLine_IgnoresInvalidInputs verifies addLine drops empty paths and
// non-positive line numbers.
func TestAddLine_IgnoresInvalidInputs(t *testing.T) {
	t.Parallel()
	lineSet := make(FileLineSet)
	addLine(lineSet, "", 10)
	addLine(lineSet, "backend/internal/x.go", 0)
	if len(lineSet) != 0 {
		t.Fatalf("expected no entries for invalid addLine input, got %#v", lineSet)
	}
}

View File

@@ -198,6 +198,30 @@ func TestAccessListService_GetByUUID(t *testing.T) {
})
}
// TestAccessListService_GetByID_DBError asserts GetByID surfaces errors once
// the underlying connection is closed.
func TestAccessListService_GetByID_DBError(t *testing.T) {
	db := setupTestDB(t)
	svc := NewAccessListService(db)
	sqlDB, err := db.DB()
	assert.NoError(t, err)
	assert.NoError(t, sqlDB.Close())
	_, err = svc.GetByID(1)
	assert.Error(t, err)
}
// TestAccessListService_GetByUUID_DBError asserts GetByUUID surfaces errors
// once the underlying connection is closed.
func TestAccessListService_GetByUUID_DBError(t *testing.T) {
	db := setupTestDB(t)
	svc := NewAccessListService(db)
	sqlDB, err := db.DB()
	assert.NoError(t, err)
	assert.NoError(t, sqlDB.Close())
	_, err = svc.GetByUUID("any")
	assert.Error(t, err)
}
func TestAccessListService_List(t *testing.T) {
db := setupTestDB(t)
service := NewAccessListService(db)

View File

@@ -7,6 +7,7 @@ import (
"github.com/Wikid82/charon/backend/internal/config"
"github.com/Wikid82/charon/backend/internal/models"
"github.com/golang-jwt/jwt/v5"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gorm.io/driver/sqlite"
@@ -288,3 +289,45 @@ func TestAuthService_InvalidateSessions(t *testing.T) {
require.Error(t, err)
assert.Equal(t, "user not found", err.Error())
}
// TestAuthService_AuthenticateToken_InvalidUserIDInClaims signs a token whose
// user ID does not exist and expects a generic authentication failure.
func TestAuthService_AuthenticateToken_InvalidUserIDInClaims(t *testing.T) {
	db := setupAuthTestDB(t)
	cfg := config.Config{JWTSecret: "test-secret"}
	svc := NewAuthService(db, cfg)
	user, err := svc.Register("claims@example.com", "password123", "Claims User")
	require.NoError(t, err)
	now := time.Now()
	claims := Claims{
		UserID:         user.ID + 9999, // no such user in the DB
		Role:           "user",
		SessionVersion: user.SessionVersion,
		RegisteredClaims: jwt.RegisteredClaims{
			ExpiresAt: jwt.NewNumericDate(now.Add(24 * time.Hour)),
			IssuedAt:  jwt.NewNumericDate(now),
		},
	}
	signed, err := jwt.NewWithClaims(jwt.SigningMethodHS256, claims).SignedString([]byte(cfg.JWTSecret))
	require.NoError(t, err)
	_, _, err = svc.AuthenticateToken(signed)
	require.Error(t, err)
	assert.Equal(t, "invalid token", err.Error())
}
// TestAuthService_InvalidateSessions_DBError asserts InvalidateSessions fails
// once the underlying connection is closed.
func TestAuthService_InvalidateSessions_DBError(t *testing.T) {
	db := setupAuthTestDB(t)
	svc := NewAuthService(db, config.Config{JWTSecret: "test-secret"})
	user, err := svc.Register("dberror@example.com", "password123", "DB Error User")
	require.NoError(t, err)
	sqlDB, err := db.DB()
	require.NoError(t, err)
	require.NoError(t, sqlDB.Close())
	require.Error(t, svc.InvalidateSessions(user.ID))
}

View File

@@ -2,6 +2,7 @@ package services
import (
"archive/zip"
"fmt"
"io"
"os"
"path/filepath"
@@ -16,6 +17,18 @@ import (
"gorm.io/gorm"
)
// TestCreateSQLiteSnapshot_InvalidDBPath expects an error when the database
// path's parent directory does not exist.
func TestCreateSQLiteSnapshot_InvalidDBPath(t *testing.T) {
	missing := filepath.Join(t.TempDir(), "missing-parent", "missing.db")
	_, _, err := createSQLiteSnapshot(missing)
	require.Error(t, err)
}
func TestCheckpointSQLiteDatabase_InvalidDBPath(t *testing.T) {
badPath := filepath.Join(t.TempDir(), "missing-parent", "missing.db")
err := checkpointSQLiteDatabase(badPath)
require.Error(t, err)
}
func TestBackupService_RehydrateLiveDatabase(t *testing.T) {
tmpDir := t.TempDir()
dataDir := filepath.Join(tmpDir, "data")
@@ -120,3 +133,122 @@ func TestBackupService_RehydrateLiveDatabase_FromBackupWithWAL(t *testing.T) {
require.Len(t, restoredUsers, 1)
assert.Equal(t, "restore-from-wal@example.com", restoredUsers[0].Email)
}
// TestBackupService_ExtractDatabaseFromBackup_WALCheckpointFailure builds a
// backup whose DB and WAL entries are both garbage and expects the WAL
// checkpoint step to fail.
func TestBackupService_ExtractDatabaseFromBackup_WALCheckpointFailure(t *testing.T) {
	tmpDir := t.TempDir()
	zipPath := filepath.Join(tmpDir, "with-invalid-wal.zip")
	zipFile, err := os.Create(zipPath) //nolint:gosec
	require.NoError(t, err)
	zw := zip.NewWriter(zipFile)
	// Ordered slice (not a map) so entry order in the archive is stable.
	entries := []struct{ name, body string }{
		{"charon.db", "not-a-valid-sqlite-db"},
		{"charon.db-wal", "not-a-valid-wal"},
	}
	for _, e := range entries {
		w, createErr := zw.Create(e.name)
		require.NoError(t, createErr)
		_, writeErr := w.Write([]byte(e.body))
		require.NoError(t, writeErr)
	}
	require.NoError(t, zw.Close())
	require.NoError(t, zipFile.Close())
	svc := &BackupService{DatabaseName: "charon.db"}
	_, err = svc.extractDatabaseFromBackup(zipPath)
	require.Error(t, err)
	require.Contains(t, err.Error(), "checkpoint extracted sqlite wal")
}
// TestBackupService_RehydrateLiveDatabase_InvalidRestoreDB expects rehydrate
// to fail when the staged restore file is not a valid SQLite database.
func TestBackupService_RehydrateLiveDatabase_InvalidRestoreDB(t *testing.T) {
	tmpDir := t.TempDir()
	dataDir := filepath.Join(tmpDir, "data")
	require.NoError(t, os.MkdirAll(dataDir, 0o700))
	liveDB, err := gorm.Open(sqlite.Open(filepath.Join(dataDir, "charon.db")), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, liveDB.Exec("CREATE TABLE IF NOT EXISTS healthcheck (id INTEGER PRIMARY KEY, value TEXT)").Error)
	badRestore := filepath.Join(tmpDir, "invalid-restore.sqlite")
	require.NoError(t, os.WriteFile(badRestore, []byte("invalid sqlite content"), 0o600))
	svc := &BackupService{
		DataDir:       dataDir,
		DatabaseName:  "charon.db",
		restoreDBPath: badRestore,
	}
	err = svc.RehydrateLiveDatabase(liveDB)
	require.Error(t, err)
	require.Contains(t, err.Error(), "attach restored database")
}
// TestBackupService_RehydrateLiveDatabase_InvalidTableIdentifier expects
// rehydrate to reject table names that cannot be safely quoted.
func TestBackupService_RehydrateLiveDatabase_InvalidTableIdentifier(t *testing.T) {
	tmpDir := t.TempDir()
	dataDir := filepath.Join(tmpDir, "data")
	require.NoError(t, os.MkdirAll(dataDir, 0o700))
	liveDB, err := gorm.Open(sqlite.Open(filepath.Join(dataDir, "charon.db")), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, liveDB.Exec("CREATE TABLE \"bad-name\" (id INTEGER PRIMARY KEY, value TEXT)").Error)
	restorePath := filepath.Join(tmpDir, "restore.sqlite")
	restoreDB, err := gorm.Open(sqlite.Open(restorePath), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, restoreDB.Exec("CREATE TABLE \"bad-name\" (id INTEGER PRIMARY KEY, value TEXT)").Error)
	require.NoError(t, restoreDB.Exec("INSERT INTO \"bad-name\" (value) VALUES (?)", "ok").Error)
	svc := &BackupService{
		DataDir:       dataDir,
		DatabaseName:  "charon.db",
		restoreDBPath: restorePath,
	}
	err = svc.RehydrateLiveDatabase(liveDB)
	require.Error(t, err)
	require.Contains(t, err.Error(), "quote table identifier")
}
// TestBackupService_CreateSQLiteSnapshot_TempDirInvalid verifies that
// createSQLiteSnapshot surfaces an error when the temporary directory used
// for the snapshot file does not exist.
func TestBackupService_CreateSQLiteSnapshot_TempDirInvalid(t *testing.T) {
	tmpDir := t.TempDir()
	dbPath := filepath.Join(tmpDir, "charon.db")
	createSQLiteTestDB(t, dbPath)
	// t.Setenv saves and restores (or unsets) the previous TMPDIR value
	// automatically at cleanup; the former manual os.Setenv defer was
	// redundant and would set TMPDIR to "" when it was originally unset.
	t.Setenv("TMPDIR", filepath.Join(tmpDir, "nonexistent-tmp"))
	_, _, err := createSQLiteSnapshot(dbPath)
	require.Error(t, err)
	require.Contains(t, err.Error(), "create sqlite snapshot file")
}
// TestBackupService_RunScheduledBackup_CreateBackupAndCleanupHooks stubs the
// create/cleanup hooks and verifies each is invoked exactly once.
func TestBackupService_RunScheduledBackup_CreateBackupAndCleanupHooks(t *testing.T) {
	tmpDir := t.TempDir()
	dataDir := filepath.Join(tmpDir, "data")
	require.NoError(t, os.MkdirAll(dataDir, 0o700))
	cfg := &config.Config{DatabasePath: filepath.Join(dataDir, "charon.db")}
	service := NewBackupService(cfg)
	defer service.Stop()
	var created, cleaned int
	service.createBackup = func() (string, error) {
		created++
		return fmt.Sprintf("backup-%d.zip", created), nil
	}
	service.cleanupOld = func(_ int) (int, error) {
		cleaned++
		return 1, nil
	}
	service.RunScheduledBackup()
	require.Equal(t, 1, created)
	require.Equal(t, 1, cleaned)
}

View File

@@ -1551,3 +1551,100 @@ func TestSafeJoinPath(t *testing.T) {
assert.Equal(t, "/data/backups/backup.2024.01.01.zip", path)
})
}
// TestBackupService_RehydrateLiveDatabase_NilHandle rejects a nil gorm handle.
func TestBackupService_RehydrateLiveDatabase_NilHandle(t *testing.T) {
	svc := &BackupService{DataDir: t.TempDir(), DatabaseName: "charon.db"}
	err := svc.RehydrateLiveDatabase(nil)
	require.Error(t, err)
	assert.Contains(t, err.Error(), "database handle is required")
}
// TestBackupService_RehydrateLiveDatabase_MissingSource expects an error when
// both the staged restore file and the on-disk database are gone.
func TestBackupService_RehydrateLiveDatabase_MissingSource(t *testing.T) {
	tmpDir := t.TempDir()
	dataDir := filepath.Join(tmpDir, "data")
	require.NoError(t, os.MkdirAll(dataDir, 0o700))
	dbPath := filepath.Join(dataDir, "charon.db")
	createSQLiteTestDB(t, dbPath)
	db, err := gorm.Open(sqlite.Open(dbPath), &gorm.Config{})
	require.NoError(t, err)
	svc := &BackupService{
		DataDir:       dataDir,
		DatabaseName:  "charon.db",
		restoreDBPath: filepath.Join(tmpDir, "missing-restore.sqlite"),
	}
	// Remove the live database file so no rehydrate source remains.
	require.NoError(t, os.Remove(dbPath))
	err = svc.RehydrateLiveDatabase(db)
	require.Error(t, err)
	assert.Contains(t, err.Error(), "restored database file missing")
}
// TestBackupService_ExtractDatabaseFromBackup_MissingDBEntry expects a clear
// error when the archive lacks the expected database entry.
func TestBackupService_ExtractDatabaseFromBackup_MissingDBEntry(t *testing.T) {
	archive := filepath.Join(t.TempDir(), "missing-db-entry.zip")
	f, err := os.Create(archive) //nolint:gosec
	require.NoError(t, err)
	zw := zip.NewWriter(f)
	w, err := zw.Create("not-charon.db")
	require.NoError(t, err)
	_, err = w.Write([]byte("placeholder"))
	require.NoError(t, err)
	require.NoError(t, zw.Close())
	require.NoError(t, f.Close())
	svc := &BackupService{DatabaseName: "charon.db"}
	_, err = svc.extractDatabaseFromBackup(archive)
	require.Error(t, err)
	assert.Contains(t, err.Error(), "database entry charon.db not found")
}
// TestBackupService_RestoreBackup_ReplacesStagedRestoreSnapshot verifies that
// restoring a second backup replaces the previously staged restore snapshot:
// the old snapshot file is deleted and restoreDBPath points at a new file.
func TestBackupService_RestoreBackup_ReplacesStagedRestoreSnapshot(t *testing.T) {
	tmpDir := t.TempDir()
	dataDir := filepath.Join(tmpDir, "data")
	backupDir := filepath.Join(tmpDir, "backups")
	require.NoError(t, os.MkdirAll(dataDir, 0o700))
	require.NoError(t, os.MkdirAll(backupDir, 0o700))
	// Helper: write a backup zip containing a single "charon.db" entry with
	// the given content, returning the archive path.
	createBackupZipWithDB := func(name string, content []byte) string {
		path := filepath.Join(backupDir, name)
		zipFile, err := os.Create(path) //nolint:gosec
		require.NoError(t, err)
		writer := zip.NewWriter(zipFile)
		entry, err := writer.Create("charon.db")
		require.NoError(t, err)
		_, err = entry.Write(content)
		require.NoError(t, err)
		require.NoError(t, writer.Close())
		require.NoError(t, zipFile.Close())
		return path
	}
	createBackupZipWithDB("backup-one.zip", []byte("one"))
	createBackupZipWithDB("backup-two.zip", []byte("two"))
	svc := &BackupService{
		DataDir:       dataDir,
		BackupDir:     backupDir,
		DatabaseName:  "charon.db",
		restoreDBPath: "",
	}
	require.NoError(t, svc.RestoreBackup("backup-one.zip"))
	firstRestore := svc.restoreDBPath
	assert.NotEmpty(t, firstRestore)
	assert.FileExists(t, firstRestore)
	// Restoring again must stage a fresh snapshot and remove the old one.
	require.NoError(t, svc.RestoreBackup("backup-two.zip"))
	secondRestore := svc.restoreDBPath
	assert.NotEqual(t, firstRestore, secondRestore)
	assert.NoFileExists(t, firstRestore)
	assert.FileExists(t, secondRestore)
}

View File

@@ -0,0 +1,92 @@
package services
import (
"archive/zip"
"os"
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/require"
)
// openZipInTempDir creates (or truncates) zipPath for writing after asserting
// that it resolves inside tempDir, guarding against path traversal in tests.
func openZipInTempDir(t *testing.T, tempDir, zipPath string) *os.File {
	t.Helper()
	dirAbs, err := filepath.Abs(tempDir)
	require.NoError(t, err)
	fileAbs, err := filepath.Abs(zipPath)
	require.NoError(t, err)
	rel, err := filepath.Rel(dirAbs, fileAbs)
	require.NoError(t, err)
	escaped := rel == ".." || strings.HasPrefix(rel, ".."+string(filepath.Separator))
	require.False(t, escaped)
	// #nosec G304 -- fileAbs is constrained to the test TempDir via the Abs+Rel checks above.
	f, err := os.OpenFile(fileAbs, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o600)
	require.NoError(t, err)
	return f
}
// TestBackupService_UnzipWithSkip_SkipsDatabaseEntries verifies that
// unzipWithSkip extracts regular entries (including nested paths) while
// skipping the database, WAL, and SHM entries named in the skip set.
func TestBackupService_UnzipWithSkip_SkipsDatabaseEntries(t *testing.T) {
	tmp := t.TempDir()
	destDir := filepath.Join(tmp, "data")
	require.NoError(t, os.MkdirAll(destDir, 0o700))
	zipPath := filepath.Join(tmp, "restore.zip")
	zipFile := openZipInTempDir(t, tmp, zipPath)
	writer := zip.NewWriter(zipFile)
	// Entry order in the archive is irrelevant here, so map iteration order
	// does not affect the assertions below.
	for name, content := range map[string]string{
		"charon.db":       "db",
		"charon.db-wal":   "wal",
		"charon.db-shm":   "shm",
		"caddy/config":    "cfg",
		"nested/file.txt": "hello",
	} {
		entry, createErr := writer.Create(name)
		require.NoError(t, createErr)
		_, writeErr := entry.Write([]byte(content))
		require.NoError(t, writeErr)
	}
	require.NoError(t, writer.Close())
	require.NoError(t, zipFile.Close())
	svc := &BackupService{DataDir: destDir, DatabaseName: "charon.db"}
	require.NoError(t, svc.unzipWithSkip(zipPath, destDir, map[string]struct{}{
		"charon.db":     {},
		"charon.db-wal": {},
		"charon.db-shm": {},
	}))
	// The database entry must NOT have been extracted...
	_, err := os.Stat(filepath.Join(destDir, "charon.db"))
	require.Error(t, err)
	// ...while all non-skipped entries were.
	require.FileExists(t, filepath.Join(destDir, "caddy", "config"))
	require.FileExists(t, filepath.Join(destDir, "nested", "file.txt"))
}
// TestBackupService_ExtractDatabaseFromBackup_ExtractWalFailure expects
// extraction to fail when the archived WAL cannot be applied.
func TestBackupService_ExtractDatabaseFromBackup_ExtractWalFailure(t *testing.T) {
	tmp := t.TempDir()
	zipPath := filepath.Join(tmp, "invalid-wal.zip")
	zipFile := openZipInTempDir(t, tmp, zipPath)
	zw := zip.NewWriter(zipFile)
	for _, e := range []struct{ name, body string }{
		{"charon.db", "sqlite header placeholder"},
		{"charon.db-wal", "invalid wal content"},
	} {
		w, createErr := zw.Create(e.name)
		require.NoError(t, createErr)
		_, writeErr := w.Write([]byte(e.body))
		require.NoError(t, writeErr)
	}
	require.NoError(t, zw.Close())
	require.NoError(t, zipFile.Close())
	svc := &BackupService{DatabaseName: "charon.db"}
	_, err := svc.extractDatabaseFromBackup(zipPath)
	require.Error(t, err)
}

View File

@@ -0,0 +1,267 @@
package services
import (
"archive/zip"
"fmt"
"os"
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/require"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
)
// openWave4ZipInTempDir creates (or truncates) zipPath for writing after
// asserting that it resolves inside tempDir, guarding against path traversal.
// NOTE(review): this appears to duplicate openZipInTempDir from a sibling test
// file; consider consolidating into a shared test helper — confirm package
// layout allows it.
func openWave4ZipInTempDir(t *testing.T, tempDir, zipPath string) *os.File {
	t.Helper()
	absTempDir, err := filepath.Abs(tempDir)
	require.NoError(t, err)
	absZipPath, err := filepath.Abs(zipPath)
	require.NoError(t, err)
	relPath, err := filepath.Rel(absTempDir, absZipPath)
	require.NoError(t, err)
	// Reject paths that escape tempDir ("..", "../...").
	require.False(t, relPath == ".." || strings.HasPrefix(relPath, ".."+string(filepath.Separator)))
	// #nosec G304 -- absZipPath is constrained to test TempDir via Abs+Rel checks above.
	zipFile, err := os.OpenFile(absZipPath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o600)
	require.NoError(t, err)
	return zipFile
}
// registerBackupRawErrorHook installs a gorm "before gorm:raw" callback under
// the given name that injects a forced error whenever shouldFail matches the
// statement about to run. The callback is removed via t.Cleanup.
func registerBackupRawErrorHook(t *testing.T, db *gorm.DB, name string, shouldFail func(*gorm.DB) bool) {
	t.Helper()
	require.NoError(t, db.Callback().Raw().Before("gorm:raw").Register(name, func(tx *gorm.DB) {
		if shouldFail(tx) {
			_ = tx.AddError(fmt.Errorf("forced raw failure"))
		}
	}))
	t.Cleanup(func() {
		// Best-effort removal; the DB may already be closed at cleanup time.
		_ = db.Callback().Raw().Remove(name)
	})
}
// backupSQLContains reports whether the SQL of the statement attached to tx
// contains fragment, compared case-insensitively. Nil tx or statement yields
// false.
func backupSQLContains(tx *gorm.DB, fragment string) bool {
	if tx == nil || tx.Statement == nil {
		return false
	}
	sql := strings.ToLower(tx.Statement.SQL.String())
	return strings.Contains(sql, strings.ToLower(fragment))
}
// setupRehydrateDBPair creates an active DB with an empty users table and a
// restore DB containing one row, returning the active handle, the data dir,
// and the restore DB path.
func setupRehydrateDBPair(t *testing.T) (*gorm.DB, string, string) {
	t.Helper()
	base := t.TempDir()
	dataDir := filepath.Join(base, "data")
	require.NoError(t, os.MkdirAll(dataDir, 0o700))
	active, err := gorm.Open(sqlite.Open(filepath.Join(base, "active.db")), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, active.Exec(`CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)`).Error)
	restorePath := filepath.Join(base, "restore.db")
	restore, err := gorm.Open(sqlite.Open(restorePath), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, restore.Exec(`CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)`).Error)
	require.NoError(t, restore.Exec(`INSERT INTO users (name) VALUES ('alice')`).Error)
	return active, dataDir, restorePath
}
// TestBackupServiceWave4_Rehydrate_CheckpointWarningPath stages a non-SQLite
// file at DataDir/DatabaseName so the checkpoint of the restored file fails.
func TestBackupServiceWave4_Rehydrate_CheckpointWarningPath(t *testing.T) {
	base := t.TempDir()
	dataDir := filepath.Join(base, "data")
	require.NoError(t, os.MkdirAll(dataDir, 0o700))
	activeDB, err := gorm.Open(sqlite.Open(filepath.Join(base, "active.db")), &gorm.Config{})
	require.NoError(t, err)
	// A non-SQLite payload makes checkpointSQLiteDatabase fail.
	require.NoError(t, os.WriteFile(filepath.Join(dataDir, "charon.db"), []byte("not-sqlite"), 0o600))
	svc := &BackupService{DataDir: dataDir, DatabaseName: "charon.db"}
	require.Error(t, svc.RehydrateLiveDatabase(activeDB))
}
// TestBackupServiceWave4_Rehydrate_CreateTempFailure points TMPDIR at a
// missing directory so the temporary restore copy cannot be created.
func TestBackupServiceWave4_Rehydrate_CreateTempFailure(t *testing.T) {
	base := t.TempDir()
	dataDir := filepath.Join(base, "data")
	require.NoError(t, os.MkdirAll(dataDir, 0o700))
	createSQLiteTestDB(t, filepath.Join(dataDir, "charon.db"))
	activeDB, err := gorm.Open(sqlite.Open(filepath.Join(base, "active.db")), &gorm.Config{})
	require.NoError(t, err)
	t.Setenv("TMPDIR", filepath.Join(base, "missing-temp-dir"))
	svc := &BackupService{DataDir: dataDir, DatabaseName: "charon.db"}
	err = svc.RehydrateLiveDatabase(activeDB)
	require.Error(t, err)
	require.Contains(t, err.Error(), "create temporary restore database copy")
}
// TestBackupServiceWave4_Rehydrate_CopyErrorFromDirectorySource uses a
// directory as the restore source so the file copy fails deterministically.
func TestBackupServiceWave4_Rehydrate_CopyErrorFromDirectorySource(t *testing.T) {
	base := t.TempDir()
	dataDir := filepath.Join(base, "data")
	require.NoError(t, os.MkdirAll(dataDir, 0o700))
	activeDB, err := gorm.Open(sqlite.Open(filepath.Join(base, "active.db")), &gorm.Config{})
	require.NoError(t, err)
	dirSource := filepath.Join(base, "restore-source-dir")
	require.NoError(t, os.MkdirAll(dirSource, 0o700))
	svc := &BackupService{DataDir: dataDir, DatabaseName: "charon.db", restoreDBPath: dirSource}
	err = svc.RehydrateLiveDatabase(activeDB)
	require.Error(t, err)
	require.Contains(t, err.Error(), "copy restored database to temporary file")
}
// TestBackupServiceWave4_Rehydrate_CopyTableErrorOnSchemaMismatch gives the
// restore DB an extra column so copying rows into the active schema fails.
func TestBackupServiceWave4_Rehydrate_CopyTableErrorOnSchemaMismatch(t *testing.T) {
	base := t.TempDir()
	dataDir := filepath.Join(base, "data")
	require.NoError(t, os.MkdirAll(dataDir, 0o700))
	activeDB, err := gorm.Open(sqlite.Open(filepath.Join(base, "active.db")), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, activeDB.Exec(`CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)`).Error)
	restorePath := filepath.Join(base, "restore.db")
	restoreDB, err := gorm.Open(sqlite.Open(restorePath), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, restoreDB.Exec(`CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, extra TEXT)`).Error)
	require.NoError(t, restoreDB.Exec(`INSERT INTO users (name, extra) VALUES ('alice', 'x')`).Error)
	svc := &BackupService{DataDir: dataDir, DatabaseName: "charon.db", restoreDBPath: restorePath}
	err = svc.RehydrateLiveDatabase(activeDB)
	require.Error(t, err)
	require.Contains(t, err.Error(), "copy table users")
}
// TestBackupServiceWave4_ExtractDatabaseFromBackup_CreateTempError points
// TMPDIR at a missing directory so the restore snapshot cannot be created.
func TestBackupServiceWave4_ExtractDatabaseFromBackup_CreateTempError(t *testing.T) {
	base := t.TempDir()
	zipPath := filepath.Join(base, "backup.zip")
	zf := openWave4ZipInTempDir(t, base, zipPath)
	zw := zip.NewWriter(zf)
	w, err := zw.Create("charon.db")
	require.NoError(t, err)
	_, err = w.Write([]byte("sqlite-header-placeholder"))
	require.NoError(t, err)
	require.NoError(t, zw.Close())
	require.NoError(t, zf.Close())
	t.Setenv("TMPDIR", filepath.Join(base, "missing-temp-dir"))
	svc := &BackupService{DatabaseName: "charon.db"}
	_, err = svc.extractDatabaseFromBackup(zipPath)
	require.Error(t, err)
	require.Contains(t, err.Error(), "create restore snapshot file")
}
// TestBackupServiceWave4_UnzipWithSkip_MkdirParentError uses a regular file
// as the extraction destination so parent-directory creation fails (ENOTDIR).
func TestBackupServiceWave4_UnzipWithSkip_MkdirParentError(t *testing.T) {
	base := t.TempDir()
	zipPath := filepath.Join(base, "nested.zip")
	zf := openWave4ZipInTempDir(t, base, zipPath)
	zw := zip.NewWriter(zf)
	w, err := zw.Create("nested/file.txt")
	require.NoError(t, err)
	_, err = w.Write([]byte("hello"))
	require.NoError(t, err)
	require.NoError(t, zw.Close())
	require.NoError(t, zf.Close())
	blocker := filepath.Join(base, "dest-as-file")
	require.NoError(t, os.WriteFile(blocker, []byte("block"), 0o600))
	svc := &BackupService{}
	require.Error(t, svc.unzipWithSkip(zipPath, blocker, nil))
}
// TestBackupServiceWave4_Rehydrate_ClearSQLiteSequenceError forces the
// sqlite_sequence DELETE to fail via a raw-callback hook.
func TestBackupServiceWave4_Rehydrate_ClearSQLiteSequenceError(t *testing.T) {
	base := t.TempDir()
	dataDir := filepath.Join(base, "data")
	require.NoError(t, os.MkdirAll(dataDir, 0o700))
	activeDB, err := gorm.Open(sqlite.Open(filepath.Join(base, "active.db")), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, activeDB.Exec(`CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT)`).Error)
	restorePath := filepath.Join(base, "restore.db")
	restoreDB, err := gorm.Open(sqlite.Open(restorePath), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, restoreDB.Exec(`CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT)`).Error)
	require.NoError(t, restoreDB.Exec(`INSERT INTO users (name) VALUES ('alice')`).Error)
	registerBackupRawErrorHook(t, activeDB, "wave4-clear-sqlite-sequence", func(tx *gorm.DB) bool {
		return backupSQLContains(tx, "delete from sqlite_sequence")
	})
	svc := &BackupService{DataDir: dataDir, DatabaseName: "charon.db", restoreDBPath: restorePath}
	err = svc.RehydrateLiveDatabase(activeDB)
	require.Error(t, err)
	require.Contains(t, err.Error(), "clear sqlite_sequence")
}
// TestBackupServiceWave4_Rehydrate_CopySQLiteSequenceError forces the
// sqlite_sequence copy INSERT to fail via a raw-callback hook.
func TestBackupServiceWave4_Rehydrate_CopySQLiteSequenceError(t *testing.T) {
	base := t.TempDir()
	dataDir := filepath.Join(base, "data")
	require.NoError(t, os.MkdirAll(dataDir, 0o700))
	activeDB, err := gorm.Open(sqlite.Open(filepath.Join(base, "active.db")), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, activeDB.Exec(`CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT)`).Error)
	restorePath := filepath.Join(base, "restore.db")
	restoreDB, err := gorm.Open(sqlite.Open(restorePath), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, restoreDB.Exec(`CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT)`).Error)
	require.NoError(t, restoreDB.Exec(`INSERT INTO users (name) VALUES ('alice')`).Error)
	registerBackupRawErrorHook(t, activeDB, "wave4-copy-sqlite-sequence", func(tx *gorm.DB) bool {
		return backupSQLContains(tx, "insert into sqlite_sequence select * from restore_src.sqlite_sequence")
	})
	svc := &BackupService{DataDir: dataDir, DatabaseName: "charon.db", restoreDBPath: restorePath}
	err = svc.RehydrateLiveDatabase(activeDB)
	require.Error(t, err)
	require.Contains(t, err.Error(), "copy sqlite_sequence")
}
// TestBackupServiceWave4_Rehydrate_DetachErrorNotBusyOrLocked forces the
// DETACH statement to fail and expects the error to propagate.
func TestBackupServiceWave4_Rehydrate_DetachErrorNotBusyOrLocked(t *testing.T) {
	activeDB, dataDir, restorePath := setupRehydrateDBPair(t)
	registerBackupRawErrorHook(t, activeDB, "wave4-detach-error", func(tx *gorm.DB) bool {
		return backupSQLContains(tx, "detach database restore_src")
	})
	svc := &BackupService{DataDir: dataDir, DatabaseName: "charon.db", restoreDBPath: restorePath}
	err := svc.RehydrateLiveDatabase(activeDB)
	require.Error(t, err)
	require.Contains(t, err.Error(), "detach restored database")
}
// TestBackupServiceWave4_Rehydrate_WALCheckpointErrorNotBusyOrLocked forces
// the post-rehydrate WAL checkpoint to fail and expects the error to surface.
func TestBackupServiceWave4_Rehydrate_WALCheckpointErrorNotBusyOrLocked(t *testing.T) {
	activeDB, dataDir, restorePath := setupRehydrateDBPair(t)
	registerBackupRawErrorHook(t, activeDB, "wave4-wal-checkpoint-error", func(tx *gorm.DB) bool {
		return backupSQLContains(tx, "pragma wal_checkpoint(truncate)")
	})
	svc := &BackupService{DataDir: dataDir, DatabaseName: "charon.db", restoreDBPath: restorePath}
	err := svc.RehydrateLiveDatabase(activeDB)
	require.Error(t, err)
	require.Contains(t, err.Error(), "checkpoint wal after rehydrate")
}

View File

@@ -0,0 +1,56 @@
package services
import (
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
)
// TestBackupServiceWave5_Rehydrate_FallbackWhenRestorePathMissing verifies
// rehydrate falls back to DataDir/DatabaseName when the staged restore path
// no longer exists.
func TestBackupServiceWave5_Rehydrate_FallbackWhenRestorePathMissing(t *testing.T) {
	base := t.TempDir()
	dataDir := filepath.Join(base, "data")
	require.NoError(t, os.MkdirAll(dataDir, 0o700))
	createSQLiteTestDB(t, filepath.Join(dataDir, "charon.db"))
	activeDB, err := gorm.Open(sqlite.Open(filepath.Join(base, "active.db")), &gorm.Config{})
	require.NoError(t, err)
	require.NoError(t, activeDB.Exec(`CREATE TABLE healthcheck (id INTEGER PRIMARY KEY, value TEXT)`).Error)
	svc := &BackupService{
		DataDir:       dataDir,
		DatabaseName:  "charon.db",
		restoreDBPath: filepath.Join(base, "missing-restore.sqlite"),
	}
	require.NoError(t, svc.RehydrateLiveDatabase(activeDB))
}
// TestBackupServiceWave5_Rehydrate_DisableForeignKeysError forces the
// foreign-key PRAGMA to fail and expects the error to propagate.
func TestBackupServiceWave5_Rehydrate_DisableForeignKeysError(t *testing.T) {
	activeDB, dataDir, restorePath := setupRehydrateDBPair(t)
	registerBackupRawErrorHook(t, activeDB, "wave5-disable-fk", func(tx *gorm.DB) bool {
		return backupSQLContains(tx, "pragma foreign_keys = off")
	})
	svc := &BackupService{DataDir: dataDir, DatabaseName: "charon.db", restoreDBPath: restorePath}
	err := svc.RehydrateLiveDatabase(activeDB)
	require.Error(t, err)
	require.Contains(t, err.Error(), "disable foreign keys")
}
func TestBackupServiceWave5_Rehydrate_ClearTableError(t *testing.T) {
	// Make the DELETE that clears the users table fail and verify the
	// rehydrate error names the affected table.
	activeDB, dataDir, restoreDBPath := setupRehydrateDBPair(t)
	registerBackupRawErrorHook(t, activeDB, "wave5-clear-users", func(tx *gorm.DB) bool {
		return backupSQLContains(tx, `delete from "users"`)
	})

	service := &BackupService{DataDir: dataDir, DatabaseName: "charon.db", restoreDBPath: restoreDBPath}
	rehydrateErr := service.RehydrateLiveDatabase(activeDB)
	require.Error(t, rehydrateErr)
	require.Contains(t, rehydrateErr.Error(), "clear table users")
}

View File

@@ -0,0 +1,49 @@
package services
import (
"archive/zip"
"io"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
)
func TestBackupServiceWave6_ExtractDatabaseFromBackup_WithShmEntry(t *testing.T) {
	// Build a backup archive containing the database plus WAL and SHM side
	// files, then verify extraction succeeds. The side files carry invalid
	// placeholder bytes on purpose: extraction should tolerate them as long
	// as the main database entry is valid.
	tmpDir := t.TempDir()
	dbPath := filepath.Join(tmpDir, "charon.db")
	createSQLiteTestDB(t, dbPath)

	zipPath := filepath.Join(tmpDir, "with-shm.zip")
	zipFile, err := os.Create(zipPath) // #nosec G304 -- path is derived from t.TempDir()
	require.NoError(t, err)
	writer := zip.NewWriter(zipFile)

	sourceDB, err := os.Open(dbPath) // #nosec G304 -- path is derived from t.TempDir()
	require.NoError(t, err)
	defer func() { _ = sourceDB.Close() }()

	dbEntry, err := writer.Create("charon.db")
	require.NoError(t, err)
	_, err = io.Copy(dbEntry, sourceDB)
	require.NoError(t, err)

	walEntry, err := writer.Create("charon.db-wal")
	require.NoError(t, err)
	_, err = walEntry.Write([]byte("invalid wal content"))
	require.NoError(t, err)

	shmEntry, err := writer.Create("charon.db-shm")
	require.NoError(t, err)
	_, err = shmEntry.Write([]byte("shm placeholder"))
	require.NoError(t, err)

	require.NoError(t, writer.Close())
	require.NoError(t, zipFile.Close())

	svc := &BackupService{DatabaseName: "charon.db"}
	restoredPath, err := svc.extractDatabaseFromBackup(zipPath)
	require.NoError(t, err)
	require.FileExists(t, restoredPath)
	// The extracted file may be created outside t.TempDir() (the service
	// picks the destination); remove it so the test does not leak artifacts.
	t.Cleanup(func() { _ = os.Remove(restoredPath) })
}

View File

@@ -0,0 +1,97 @@
package services
import (
"archive/zip"
"bytes"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
)
// writeLargeZipEntry streams sizeBytes of zero bytes into a new archive
// entry named name, writing in 1 MiB chunks so the test never holds the
// whole payload in memory.
func writeLargeZipEntry(t *testing.T, writer *zip.Writer, name string, sizeBytes int64) {
	t.Helper()
	entry, err := writer.Create(name)
	require.NoError(t, err)

	const chunkSize = int64(1 << 20) // 1 MiB scratch buffer of zeros
	zeros := make([]byte, chunkSize)
	for written := int64(0); written < sizeBytes; {
		n := sizeBytes - written
		if n > chunkSize {
			n = chunkSize
		}
		_, writeErr := entry.Write(zeros[:n])
		require.NoError(t, writeErr)
		written += n
	}
}
func TestBackupServiceWave7_CreateBackup_SnapshotFailureForNonSQLiteDB(t *testing.T) {
	// A data file that is not a real SQLite database must make the snapshot
	// step fail, and CreateBackup must report that failure.
	tmpDir := t.TempDir()
	backupDir := filepath.Join(tmpDir, "backups")
	require.NoError(t, os.MkdirAll(backupDir, 0o700))
	require.NoError(t, os.WriteFile(filepath.Join(tmpDir, "charon.db"), []byte("not-a-sqlite-db"), 0o600))

	service := &BackupService{
		DataDir:      tmpDir,
		BackupDir:    backupDir,
		DatabaseName: "charon.db",
	}
	_, err := service.CreateBackup()
	require.Error(t, err)
	require.Contains(t, err.Error(), "create sqlite snapshot before backup")
}
func TestBackupServiceWave7_ExtractDatabaseFromBackup_DBEntryOverLimit(t *testing.T) {
	// A database entry larger than the decompression limit must be rejected
	// with an error that names both the entry and the limit.
	tmpDir := t.TempDir()
	zipPath := filepath.Join(tmpDir, "db-over-limit.zip")
	zipFile, err := os.Create(zipPath) // #nosec G304 -- path is derived from t.TempDir()
	require.NoError(t, err)

	archive := zip.NewWriter(zipFile)
	writeLargeZipEntry(t, archive, "charon.db", int64(101*1024*1024))
	require.NoError(t, archive.Close())
	require.NoError(t, zipFile.Close())

	service := &BackupService{DatabaseName: "charon.db"}
	_, err = service.extractDatabaseFromBackup(zipPath)
	require.Error(t, err)
	require.Contains(t, err.Error(), "extract database entry from backup archive")
	require.Contains(t, err.Error(), "decompression limit")
}
func TestBackupServiceWave7_ExtractDatabaseFromBackup_WALEntryOverLimit(t *testing.T) {
	// A valid database entry paired with an oversized WAL entry must fail on
	// the WAL side with the decompression-limit error.
	tmpDir := t.TempDir()
	dbPath := filepath.Join(tmpDir, "charon.db")
	createSQLiteTestDB(t, dbPath)

	zipPath := filepath.Join(tmpDir, "wal-over-limit.zip")
	zipFile, err := os.Create(zipPath) // #nosec G304 -- path is derived from t.TempDir()
	require.NoError(t, err)
	archive := zip.NewWriter(zipFile)

	dbBytes, err := os.ReadFile(dbPath) // #nosec G304 -- path is derived from t.TempDir()
	require.NoError(t, err)
	dbEntry, err := archive.Create("charon.db")
	require.NoError(t, err)
	_, err = dbEntry.Write(dbBytes)
	require.NoError(t, err)

	writeLargeZipEntry(t, archive, "charon.db-wal", int64(101*1024*1024))
	require.NoError(t, archive.Close())
	require.NoError(t, zipFile.Close())

	service := &BackupService{DatabaseName: "charon.db"}
	_, err = service.extractDatabaseFromBackup(zipPath)
	require.Error(t, err)
	require.Contains(t, err.Error(), "extract wal entry from backup archive")
	require.Contains(t, err.Error(), "decompression limit")
}

View File

@@ -2,6 +2,7 @@ package services
import (
"context"
"fmt"
"os"
"path/filepath"
"testing"
@@ -542,6 +543,30 @@ func TestReconcileCrowdSecOnStartup_CreateConfigDBError(t *testing.T) {
assert.False(t, exec.startCalled)
}
func TestReconcileCrowdSecOnStartup_CreateConfigCallbackError(t *testing.T) {
	// Inject a create error for SecurityConfig rows and verify that startup
	// reconciliation bails out without ever starting the executor.
	db := setupCrowdsecTestDB(t)
	binPath, dataDir, cleanup := setupCrowdsecTestFixtures(t)
	defer cleanup()

	const cbName = "test:force-create-config-error"
	registerErr := db.Callback().Create().Before("gorm:create").Register(cbName, func(tx *gorm.DB) {
		stmt := tx.Statement
		if stmt == nil || stmt.Schema == nil {
			return
		}
		if stmt.Schema.Name == "SecurityConfig" {
			_ = tx.AddError(fmt.Errorf("forced security config create error"))
		}
	})
	require.NoError(t, registerErr)
	t.Cleanup(func() { _ = db.Callback().Create().Remove(cbName) })

	exec := &smartMockCrowdsecExecutor{startPid: 99999}
	cmdExec := &mockCommandExecutor{}
	ReconcileCrowdSecOnStartup(db, exec, binPath, dataDir, cmdExec)
	assert.False(t, exec.startCalled)
}
func TestReconcileCrowdSecOnStartup_SettingsTableQueryError(t *testing.T) {
db := setupCrowdsecTestDB(t)
binPath, dataDir, cleanup := setupCrowdsecTestFixtures(t)

View File

@@ -192,3 +192,23 @@ func TestLogService_logDirsAndSymlinkDedup(t *testing.T) {
assert.Len(t, logs, 1)
assert.Equal(t, "access.log", logs[0].Name)
}
func TestLogService_logDirs_SkipsDotAndEmpty(t *testing.T) {
	// With LogDir set to "." and CaddyLogDir empty, logDirs should return a
	// single directory (derived from the env var) and never "." itself.
	accessLog := filepath.Join(t.TempDir(), "caddy", "access.log")
	t.Setenv("CHARON_CADDY_ACCESS_LOG", accessLog)

	svc := &LogService{LogDir: ".", CaddyLogDir: ""}
	dirs := svc.logDirs()
	require.Len(t, dirs, 1)
	assert.NotEqual(t, ".", dirs[0])
}
func TestLogService_ListLogs_ReadDirError(t *testing.T) {
	// Pointing LogDir at a regular file makes the directory read fail, and
	// ListLogs must propagate that error to the caller.
	notDir := filepath.Join(t.TempDir(), "not-a-dir")
	require.NoError(t, os.WriteFile(notDir, []byte("x"), 0o600))

	svc := &LogService{LogDir: notDir}
	_, err := svc.ListLogs()
	require.Error(t, err)
}

View File

@@ -210,6 +210,7 @@ func TestProxyHostService_ValidateHostname(t *testing.T) {
}{
{name: "plain hostname", host: "example.com", wantErr: false},
{name: "hostname with scheme", host: "https://example.com", wantErr: false},
{name: "hostname with http scheme", host: "http://example.com", wantErr: false},
{name: "hostname with port", host: "example.com:8080", wantErr: false},
{name: "ipv4 address", host: "127.0.0.1", wantErr: false},
{name: "bracketed ipv6 with port", host: "[::1]:443", wantErr: false},

View File

@@ -1,10 +1,12 @@
package services
import (
"fmt"
"testing"
"github.com/Wikid82/charon/backend/internal/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
)
@@ -330,3 +332,41 @@ func TestApplyPreset_MultipleProfiles(t *testing.T) {
db.Model(&models.SecurityHeaderProfile{}).Count(&count)
assert.Equal(t, int64(2), count)
}
func TestEnsurePresetsExist_CreateError(t *testing.T) {
	// EnsurePresetsExist must surface create failures; force one via a GORM
	// create callback that injects an error into every insert.
	db := setupSecurityHeadersServiceDB(t)
	service := NewSecurityHeadersService(db)
	cbName := "test:create-error"
	// Setup must succeed before exercising the service: use require (not
	// assert) so the test aborts instead of continuing with an unregistered
	// callback, which would make the assertions below meaningless.
	err := db.Callback().Create().Before("gorm:create").Register(cbName, func(tx *gorm.DB) {
		_ = tx.AddError(fmt.Errorf("forced create error"))
	})
	require.NoError(t, err)
	t.Cleanup(func() {
		_ = db.Callback().Create().Remove(cbName)
	})
	err = service.EnsurePresetsExist()
	assert.Error(t, err)
	assert.Contains(t, err.Error(), "failed to create preset")
}
func TestEnsurePresetsExist_SaveError(t *testing.T) {
	// After presets already exist, a second EnsurePresetsExist run takes the
	// update path; force that update to fail and verify the error message.
	db := setupSecurityHeadersServiceDB(t)
	service := NewSecurityHeadersService(db)
	require.NoError(t, service.EnsurePresetsExist())
	cbName := "test:update-error"
	// Setup must succeed before exercising the service: use require (not
	// assert) so the test aborts instead of continuing with an unregistered
	// callback, which would make the assertions below meaningless.
	err := db.Callback().Update().Before("gorm:update").Register(cbName, func(tx *gorm.DB) {
		_ = tx.AddError(fmt.Errorf("forced update error"))
	})
	require.NoError(t, err)
	t.Cleanup(func() {
		_ = db.Callback().Update().Remove(cbName)
	})
	err = service.EnsurePresetsExist()
	assert.Error(t, err)
	assert.Contains(t, err.Error(), "failed to update preset")
}

View File

@@ -5,6 +5,7 @@ import (
"fmt"
"os"
"path/filepath"
"runtime"
"syscall"
"testing"
)
@@ -189,3 +190,47 @@ func TestMapSaveErrorCode_PermissionDeniedText(t *testing.T) {
t.Fatalf("expected permissions_write_denied, got %q", code)
}
}
func TestCheckPathPermissions_NullBytePath(t *testing.T) {
	// A path containing a NUL byte is never a valid filesystem path and must
	// map to the dedicated invalid-path error code with Writable=false.
	res := CheckPathPermissions("bad\x00path", "rw")
	if got := res.ErrorCode; got != "permissions_invalid_path" {
		t.Fatalf("expected permissions_invalid_path, got %q", got)
	}
	if res.Writable {
		t.Fatalf("expected writable=false for null-byte path")
	}
}
func TestCheckPathPermissions_SymlinkPath(t *testing.T) {
	// Symlinks must be classified as an unsupported file type. Skip on
	// Windows (and wherever symlink creation fails) since symlink support is
	// environment-dependent there.
	if runtime.GOOS == "windows" {
		t.Skip("symlink test is environment-dependent on windows")
	}
	tmpDir := t.TempDir()

	target := filepath.Join(tmpDir, "target.txt")
	if err := os.WriteFile(target, []byte("ok"), 0o600); err != nil {
		t.Fatalf("write target: %v", err)
	}
	link := filepath.Join(tmpDir, "target-link.txt")
	if err := os.Symlink(target, link); err != nil {
		t.Skipf("symlink not available in this environment: %v", err)
	}

	res := CheckPathPermissions(link, "rw")
	if got := res.ErrorCode; got != "permissions_unsupported_type" {
		t.Fatalf("expected permissions_unsupported_type, got %q", got)
	}
	if res.Writable {
		t.Fatalf("expected writable=false for symlink path")
	}
}
func TestMapSaveErrorCode_ReadOnlyFilesystem(t *testing.T) {
	// EROFS must be recognized and mapped to the read-only database code.
	code, recognized := MapSaveErrorCode(syscall.EROFS)
	switch {
	case !recognized:
		t.Fatalf("expected readonly filesystem to be recognized")
	case code != "permissions_db_readonly":
		t.Fatalf("expected permissions_db_readonly, got %q", code)
	}
}

View File

@@ -0,0 +1,68 @@
---
title: Manual Checklist - Local Patch Report DoD Ordering
status: Open
priority: High
assignee: QA
labels: testing, coverage, dod
---
# Goal
Validate that local patch-report workflow is executed in Definition of Done (DoD) order and produces required artifacts for handoff.
# Preconditions
- Work from repository root: `/projects/Charon`
- Branch has local changes to evaluate
- Docker E2E environment is healthy
# Manual Checklist
## 1) E2E First (Mandatory)
- [ ] Run: `cd /projects/Charon && npx playwright test --project=firefox`
- [ ] Confirm run completes without blocking failures
- [ ] Record run timestamp for ordering evidence
## 2) Local Patch Report Preflight (Before Unit Coverage)
- [ ] Run: `cd /projects/Charon && bash scripts/local-patch-report.sh`
- [ ] Confirm artifacts exist:
- [ ] `test-results/local-patch-report.md`
- [ ] `test-results/local-patch-report.json`
- [ ] Confirm JSON includes:
- [ ] `baseline = origin/main...HEAD`
- [ ] `mode = warn`
- [ ] `overall`, `backend`, `frontend` coverage blocks
- [ ] `files_needing_coverage` list
## 3) Backend Coverage Run
- [ ] Run: `cd /projects/Charon/backend && go test ./... -coverprofile=coverage.txt`
- [ ] Confirm `backend/coverage.txt` exists and is current
- [ ] Confirm run exit code is 0
## 4) Frontend Coverage Run
- [ ] Run: `cd /projects/Charon/frontend && npm run test:coverage`
- [ ] Confirm `frontend/coverage/lcov.info` exists and is current
- [ ] Confirm run exit code is 0
## 5) Refresh Local Patch Report After Coverage Updates
- [ ] Run again: `cd /projects/Charon && bash scripts/local-patch-report.sh`
- [ ] Confirm report reflects latest coverage inputs and updated file gaps
## 6) DoD Ordering Verification (Practical)
- [ ] Verify command history/logs show this order:
1. E2E
2. Local patch report preflight
3. Backend/frontend coverage runs
4. Local patch report refresh
- [ ] Verify no skipped step in the sequence
## 7) Handoff Artifact Verification
- [ ] Verify required handoff artifacts are present:
- [ ] `test-results/local-patch-report.md`
- [ ] `test-results/local-patch-report.json`
- [ ] `backend/coverage.txt`
- [ ] `frontend/coverage/lcov.info`
- [ ] Verify latest QA report includes current patch-coverage summary section
# Pass Criteria
- All checklist items complete in order.
- Local patch report artifacts are generated and current.
- Any below-threshold overall patch coverage is explicitly documented as warn-mode during rollout.

View File

@@ -54,6 +54,7 @@ Both artifacts are mandatory per run. Missing either artifact is a failed local
- Local patch report does not fail DoD on low patch coverage during initial rollout.
- Local runner emits warnings (stdout + markdown/json status fields) when thresholds are not met.
- DoD requires the report to run and artifacts to exist, even in warning mode.
- Execution and final merge checks in this plan follow this same warn-mode policy during rollout.
### Threshold Defaults and Source Precedence
@@ -121,6 +122,10 @@ Minimum JSON fields:
- `patch_coverage_pct`
- `status` (`pass` | `warn`)
- `backend` and `frontend` objects with same coverage counters and status
- `files_needing_coverage` (required array for execution baselines), where each item includes at minimum:
- `path`
- `uncovered_changed_lines`
- `patch_coverage_pct`
- `artifacts` with emitted file paths
Minimum Markdown sections:
@@ -243,3 +248,198 @@ jq -r '.baseline' test-results/local-patch-report.json
- [ ] Concrete script + task wiring tasks are present and executable.
- [ ] Validation commands are present and reproducible.
- [ ] Stale unrelated placeholder gates are removed from this active spec.
## 10) Concrete Execution Plan — Patch Gap Closure (PR Merge Objective)
Single-scope objective: close current patch gaps for this PR merge by adding targeted tests and iterating local patch reports until changed-line coverage is merge-ready under DoD.
### Authoritative Gap Baseline (2026-02-17)
Use this list as the only planning baseline for this execution cycle:
- `backend/cmd/localpatchreport/main.go`: 0%, 200 uncovered changed lines, ranges `46-59`, `61-73`, `75-79`, `81-85`, `87-96`, `98-123`, `125-156`, `158-165`, `167-172`, `175-179`, `182-187`, `190-198`, `201-207`, `210-219`, `222-254`, `257-264`, `267-269`
- `frontend/src/pages/UsersPage.tsx`: 30.8%, 9 uncovered (`152-160`)
- `frontend/src/pages/CrowdSecConfig.tsx`: 36.8%, 12 uncovered (`975-977`, `1220`, `1248-1249`, `1281-1282`, `1316`, `1324-1325`, `1335`)
- `frontend/src/pages/DNSProviders.tsx`: 70.6%, 10 uncovered
- `frontend/src/pages/AuditLogs.tsx`: 75.0%, 1 uncovered
- `frontend/src/components/ProxyHostForm.tsx`: 75.5%, 12 uncovered
- `backend/internal/api/middleware/auth.go`: 86.4%, 3 uncovered
- `frontend/src/pages/Notifications.tsx`: 88.9%, 3 uncovered
- `backend/internal/cerberus/rate_limit.go`: 91.9%, 12 uncovered
### DoD Entry Gate (Mandatory Before Phase 1)
All execution phases are blocked until this gate is completed in order:
1) E2E first:
```bash
cd /projects/Charon && npx playwright test --project=firefox
```
2) Local patch preflight (baseline refresh trigger):
```bash
cd /projects/Charon && bash scripts/local-patch-report.sh
```
3) Baseline refresh checkpoint (must pass before phase execution):
```bash
cd /projects/Charon && jq -r '.files_needing_coverage[].path' test-results/local-patch-report.json | sort > /tmp/charon-baseline-files.txt
cd /projects/Charon && while read -r f; do git diff --name-only origin/main...HEAD -- "$f" | grep -qx "$f" || echo "baseline file missing from current diff: $f"; done < /tmp/charon-baseline-files.txt
```
4) If checkpoint output is non-empty, refresh this baseline list to match the latest `test-results/local-patch-report.json` before starting Phase 1.
### Ordered Phases (Highest Impact First)
#### Phase 1 — Backend Local Patch Report CLI (Highest Delta)
Targets:
- `backend/cmd/localpatchreport/main.go` (all listed uncovered ranges)
Suggested test file:
- `backend/cmd/localpatchreport/main_test.go`
Test focus:
- argument parsing and mode selection
- coverage input validation paths
- baseline/diff resolution flow
- report generation branches (markdown/json)
- warning/error branches for missing inputs and malformed coverage
Pass criteria:
- maximize reduction of uncovered changed lines in `backend/cmd/localpatchreport/main.go` from the `200` baseline, with priority on highest-impact uncovered ranges and no new uncovered changed lines introduced
- backend targeted test command passes
Targeted test command:
```bash
cd /projects/Charon/backend && go test ./cmd/localpatchreport -coverprofile=coverage.txt
```
#### Phase 2 — Frontend Lowest-Coverage, Highest-Uncovered Pages
Targets:
- `frontend/src/pages/CrowdSecConfig.tsx` (`975-977`, `1220`, `1248-1249`, `1281-1282`, `1316`, `1324-1325`, `1335`)
- `frontend/src/pages/UsersPage.tsx` (`152-160`)
- `frontend/src/pages/DNSProviders.tsx` (10 uncovered changed lines)
Suggested test files:
- `frontend/src/pages/__tests__/CrowdSecConfig.patch-gap.test.tsx`
- `frontend/src/pages/__tests__/UsersPage.patch-gap.test.tsx`
- `frontend/src/pages/__tests__/DNSProviders.patch-gap.test.tsx`
Test focus:
- branch/error-state rendering tied to uncovered lines
- conditional action handlers and callback guards
- edge-case interaction states not hit by existing tests
Pass criteria:
- maximize reduction of changed-line gaps for the three targets, prioritize highest-impact uncovered lines first, and avoid introducing new uncovered changed lines
- frontend targeted test command passes
Targeted test command:
```bash
cd /projects/Charon/frontend && npm run test:coverage -- src/pages/__tests__/CrowdSecConfig.patch-gap.test.tsx src/pages/__tests__/UsersPage.patch-gap.test.tsx src/pages/__tests__/DNSProviders.patch-gap.test.tsx
```
#### Phase 3 — Backend Residual Middleware/Security Gaps
Targets:
- `backend/internal/api/middleware/auth.go` (3 uncovered changed lines)
- `backend/internal/cerberus/rate_limit.go` (12 uncovered changed lines)
Suggested test targets/files:
- extend `backend/internal/api/middleware/auth_test.go`
- extend `backend/internal/cerberus/rate_limit_test.go`
Test focus:
- auth middleware edge branches (token/context failure paths)
- rate-limit boundary and deny/allow branch coverage
Pass criteria:
- maximize reduction of changed-line gaps for both backend files, prioritize highest-impact uncovered lines first, and avoid introducing new uncovered changed lines
- backend targeted test command passes
Targeted test command:
```bash
cd /projects/Charon/backend && go test ./internal/api/middleware ./internal/cerberus -coverprofile=coverage.txt
```
#### Phase 4 — Frontend Component + Residual Page Gaps
Targets:
- `frontend/src/components/ProxyHostForm.tsx` (12 uncovered changed lines)
- `frontend/src/pages/AuditLogs.tsx` (1 uncovered changed line)
- `frontend/src/pages/Notifications.tsx` (3 uncovered changed lines)
Suggested test files:
- `frontend/src/components/__tests__/ProxyHostForm.patch-gap.test.tsx`
- `frontend/src/pages/__tests__/AuditLogs.patch-gap.test.tsx`
- `frontend/src/pages/__tests__/Notifications.patch-gap.test.tsx`
Test focus:
- form branch paths and validation fallbacks
- single-line residual branch in audit logs
- notification branch handling for low-frequency states
Pass criteria:
- maximize reduction of changed-line gaps for all three targets, prioritize highest-impact uncovered lines first, and avoid introducing new uncovered changed lines
- frontend targeted test command passes
Targeted test command:
```bash
cd /projects/Charon/frontend && npm run test:coverage -- src/components/__tests__/ProxyHostForm.patch-gap.test.tsx src/pages/__tests__/AuditLogs.patch-gap.test.tsx src/pages/__tests__/Notifications.patch-gap.test.tsx
```
### Execution Commands
Run from repository root unless stated otherwise.
1) Backend coverage:
```bash
cd backend && go test ./... -coverprofile=coverage.txt
```
2) Frontend coverage:
```bash
cd frontend && npm run test:coverage
```
3) Local patch report iteration:
```bash
bash scripts/local-patch-report.sh
```
4) Iteration loop (repeat until all target gaps are closed):
```bash
cd backend && go test ./... -coverprofile=coverage.txt
cd /projects/Charon/frontend && npm run test:coverage
cd /projects/Charon && bash scripts/local-patch-report.sh
```
### Phase Completion Checks
- After each phase, rerun `bash scripts/local-patch-report.sh` and confirm that only the next planned target set remains uncovered.
- Do not advance phases when a phase target still shows uncovered changed lines.
### Final Merge-Ready Gate (DoD-Aligned, Warn-Mode Rollout)
This PR is merge-ready only when all conditions are true:
- local patch report runs in warn mode and required artifacts are generated
- practical merge objective: drive a significant reduction in authoritative baseline uncovered changed lines in this PR, prioritizing highest-impact files; `0` remains aspirational and is not a warn-mode merge blocker
- required artifacts exist and are current:
- `test-results/local-patch-report.md`
- `test-results/local-patch-report.json`
- backend and frontend coverage commands complete successfully
- DoD checks remain satisfied (E2E first, local patch report preflight, required security/coverage/type/build validations)

View File

@@ -11,6 +11,120 @@ summary: "Definition of Done validation results, including coverage, security sc
post_date: "2026-02-10"
---
## Current Branch QA/Security Audit - 2026-02-17
### Patch Coverage Push Handoff (Latest Local Report)
- Source: `test-results/local-patch-report.json`
- Generated: `2026-02-17T18:40:46Z`
- Mode: **warn**
- Summary:
- Overall patch coverage: **85.4%** (threshold 90%) → **warn**
- Backend patch coverage: **85.1%** (threshold 85%) → **pass**
- Frontend patch coverage: **91.0%** (threshold 85%) → **pass**
- Current warn-mode trigger:
- Overall is below threshold by **4.6 points**; rollout remains non-blocking while artifacts are still required.
- Key files still needing patch coverage (highest handoff priority):
- `backend/internal/services/mail_service.go` — 20.8% patch coverage, 19 uncovered changed lines
- `frontend/src/pages/UsersPage.tsx` — 30.8% patch coverage, 9 uncovered changed lines
- `backend/internal/crowdsec/hub_sync.go` — 37.5% patch coverage, 10 uncovered changed lines
- `backend/internal/services/security_service.go` — 46.4% patch coverage, 15 uncovered changed lines
- `backend/internal/api/handlers/backup_handler.go` — 53.6% patch coverage, 26 uncovered changed lines
- `backend/internal/api/handlers/import_handler.go` — 67.5% patch coverage, 26 uncovered changed lines
- `backend/internal/api/handlers/settings_handler.go` — 73.6% patch coverage, 24 uncovered changed lines
- `backend/internal/util/permissions.go` — 74.4% patch coverage, 34 uncovered changed lines
### 1) E2E Ordering Requirement and Evidence
- Status: **FAIL (missing current-cycle evidence)**
- Requirement: E2E must run before unit coverage and local patch preflight.
- Evidence found this cycle:
- Local patch preflight was run (`bash scripts/local-patch-report.sh`).
- No fresh Playwright execution artifact/report was found for this cycle before the preflight.
- Conclusion: Ordering proof is not satisfied for this audit cycle.
### 2) Local Patch Preflight Artifacts (Presence + Validity)
- Status: **PASS (warn-mode valid)**
- Artifacts present:
- `test-results/local-patch-report.md`
- `test-results/local-patch-report.json`
- Generated: `2026-02-17T18:40:46Z`
- Validity summary:
- Overall patch coverage: `85.4%` (**warn**, threshold `90%`)
- Backend patch coverage: `85.1%` (**pass**, threshold `85%`)
- Frontend patch coverage: `91.0%` (**pass**, threshold `85%`)
### 3) Backend/Frontend Coverage Status and Thresholds
- Threshold baseline: **85% minimum** (project QA/testing instructions)
- Backend coverage (current artifact `backend/coverage.txt`): **87.0%** → **PASS**
- Frontend line coverage (current artifact `frontend/coverage/lcov.info`): **74.70%** (`LH=1072`, `LF=1435`) → **FAIL**
- Note: Frontend coverage is currently below required threshold and blocks merge readiness.
### 4) Fast Lint / Pre-commit Status
- Command run: `pre-commit run --all-files`
- Status: **FAIL**
- Failing gate: `golangci-lint-fast`
- Current blocker categories from output:
- `errcheck`: unchecked `AddError` return values in tests
- `gosec`: test file permission/path safety findings
- `unused`: unused helper functions in tests
### 5) Security Scans Required by DoD (This Cycle)
- **Go vulnerability scan (`security-scan-go-vuln`)**: **PASS** (`No vulnerabilities found`)
- **GORM security scan (`security-scan-gorm --check`)**: **PASS** (0 critical/high/medium; info-only suggestions)
- **CodeQL (CI-aligned via skill)**: **PASS (non-blocking)**
- Go SARIF: `5` results (non-error/non-warning categories in this run)
- JavaScript SARIF: `0` results
- **Trivy filesystem scan (`security-scan-trivy`)**: **FAIL**
- Reported security issues, including Dockerfile misconfiguration (`DS-0002`: container user should not be root)
- **Docker image scan (`security-scan-docker-image`)**: **FAIL**
- Vulnerabilities found: `0 critical`, `1 high`, `9 medium`, `1 low`
- High finding: `GHSA-69x3-g4r3-p962` in `github.com/slackhq/nebula@v1.9.7` (fixed in `1.10.3`)
### 6) Merge-Readiness Summary (Blockers + Exact Next Commands)
- Merge readiness: **NOT READY**
#### Explicit blockers
1. Missing E2E-first ordering evidence for this cycle.
2. Frontend coverage below threshold (`74.70% < 85%`).
3. Fast pre-commit/lint failing (`golangci-lint-fast`).
4. Security scans failing:
- Trivy filesystem scan
- Docker image scan (1 High vulnerability)
#### Exact next commands
```bash
cd /projects/Charon && .github/skills/scripts/skill-runner.sh docker-rebuild-e2e
cd /projects/Charon && npx playwright test --project=firefox
cd /projects/Charon && bash scripts/local-patch-report.sh
cd /projects/Charon && .github/skills/scripts/skill-runner.sh test-frontend-coverage
cd /projects/Charon && pre-commit run --all-files
cd /projects/Charon && .github/skills/scripts/skill-runner.sh security-scan-trivy vuln,secret,misconfig json
cd /projects/Charon && .github/skills/scripts/skill-runner.sh security-scan-docker-image
cd /projects/Charon && .github/skills/scripts/skill-runner.sh security-scan-codeql all summary
```
#### Re-check command set after fixes
```bash
cd /projects/Charon && npx playwright test --project=firefox
cd /projects/Charon && bash scripts/local-patch-report.sh
cd /projects/Charon && .github/skills/scripts/skill-runner.sh test-frontend-coverage
cd /projects/Charon && pre-commit run --all-files
cd /projects/Charon && .github/skills/scripts/skill-runner.sh security-scan-go-vuln
cd /projects/Charon && .github/skills/scripts/skill-runner.sh security-scan-gorm --check
cd /projects/Charon && .github/skills/scripts/skill-runner.sh security-scan-codeql all summary
```
## Validation Checklist
- Phase 1 - E2E Tests: PASS (provided: notification tests now pass)

View File

@@ -1,5 +1,5 @@
import { describe, it, expect, vi, afterEach, beforeEach } from 'vitest'
import { render, screen, waitFor, within } from '@testing-library/react'
import { render, screen, waitFor, within, fireEvent } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import { act } from 'react'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
@@ -731,6 +731,33 @@ describe('ProxyHostForm', () => {
expect(blockExploitsCheckbox).toBeChecked()
await userEvent.click(blockExploitsCheckbox)
expect(blockExploitsCheckbox).not.toBeChecked()
// Toggle HSTS Subdomains (default is true)
const hstsSubdomainsCheckbox = screen.getByLabelText('HSTS Subdomains')
expect(hstsSubdomainsCheckbox).toBeChecked()
await userEvent.click(hstsSubdomainsCheckbox)
expect(hstsSubdomainsCheckbox).not.toBeChecked()
})
it('submits updated hsts_subdomains flag', async () => {
await renderWithClientAct(
<ProxyHostForm onSubmit={mockOnSubmit} onCancel={mockOnCancel} />
)
// Fill the minimum required fields so the form can be submitted.
await userEvent.type(screen.getByPlaceholderText('My Service'), 'HSTS Toggle')
await userEvent.type(screen.getByPlaceholderText('example.com, www.example.com'), 'hsts.existing.com')
await userEvent.type(screen.getByLabelText(/^Host$/), '192.168.1.10')
// Toggle HSTS Subdomains off (the sibling test shows its default is on).
const hstsSubdomainsCheckbox = screen.getByLabelText('HSTS Subdomains')
await userEvent.click(hstsSubdomainsCheckbox)
await userEvent.click(screen.getByText('Save'))
// The submitted payload must carry the flipped flag.
await waitFor(() => {
expect(mockOnSubmit).toHaveBeenCalledWith(expect.objectContaining({
hsts_subdomains: false,
}))
})
})
})
@@ -897,6 +924,25 @@ describe('ProxyHostForm', () => {
}))
})
})
it('renders and selects non-preset security header profile options', async () => {
// Mock the profiles hook with one preset and one custom (non-preset)
// profile so both option-rendering branches are exercised.
const { useSecurityHeaderProfiles } = await import('../../hooks/useSecurityHeaders')
vi.mocked(useSecurityHeaderProfiles).mockReturnValue({
data: [
{ id: 100, name: 'Strict Profile', description: 'Very strict', security_score: 90, is_preset: true, preset_type: 'strict' },
{ id: 101, name: 'Custom Profile', description: 'Custom profile', security_score: 70, is_preset: false },
],
isLoading: false,
error: null,
} as unknown as ReturnType<typeof useSecurityHeaderProfiles>)
renderWithClient(
<ProxyHostForm onSubmit={mockOnSubmit} onCancel={mockOnCancel} />
)
// Selecting the non-preset option should update the combobox label.
await selectComboboxOption(/Security Headers/i, 'Custom Profile (Score: 70/100)')
expect(screen.getByRole('combobox', { name: /Security Headers/i })).toHaveTextContent('Custom Profile')
})
})
describe('Edit Mode vs Create Mode', () => {
@@ -1072,6 +1118,34 @@ describe('ProxyHostForm', () => {
})
describe('Port Input Handling', () => {
it('shows required-port validation branch when submit is triggered with empty port', async () => {
  await renderWithClientAct(
    <ProxyHostForm onSubmit={mockOnSubmit} onCancel={mockOnCancel} />
  )
  // Fill every required field except the port so only the port branch fires.
  await userEvent.type(screen.getByPlaceholderText('My Service'), 'Port Required')
  await userEvent.type(screen.getByPlaceholderText('example.com, www.example.com'), 'required.existing.com')
  await userEvent.type(screen.getByLabelText(/^Host$/), '192.168.1.100')
  const portInput = screen.getByLabelText(/^Port$/) as HTMLInputElement
  await userEvent.clear(portInput)
  // Spy on the prototype so the component's validity calls are observable;
  // reportValidity returns false to simulate a blocked native submit.
  const setCustomValiditySpy = vi.spyOn(HTMLInputElement.prototype, 'setCustomValidity')
  const reportValiditySpy = vi.spyOn(HTMLInputElement.prototype, 'reportValidity').mockReturnValue(false)
  try {
    const form = document.querySelector('form') as HTMLFormElement
    fireEvent.submit(form)
    await waitFor(() => {
      expect(setCustomValiditySpy).toHaveBeenCalledWith('Port is required')
      expect(reportValiditySpy).toHaveBeenCalled()
      expect(mockOnSubmit).not.toHaveBeenCalled()
    })
  } finally {
    // Restore the prototype spies even if the assertions above throw, so a
    // failing run does not leak a stubbed reportValidity into later tests.
    setCustomValiditySpy.mockRestore()
    reportValiditySpy.mockRestore()
  }
})
it('validates port number range', async () => {
await renderWithClientAct(
<ProxyHostForm onSubmit={mockOnSubmit} onCancel={mockOnCancel} />
@@ -1092,6 +1166,87 @@ describe('ProxyHostForm', () => {
expect(portInput).toBeInvalid()
expect(mockOnSubmit).not.toHaveBeenCalled()
})
it('shows out-of-range validation branch when submit is triggered with invalid port', async () => {
  await renderWithClientAct(
    <ProxyHostForm onSubmit={mockOnSubmit} onCancel={mockOnCancel} />
  )
  // Fill the required fields, then enter a port above the valid 1-65535 range.
  await userEvent.type(screen.getByPlaceholderText('My Service'), 'Port Range Branch')
  await userEvent.type(screen.getByPlaceholderText('example.com, www.example.com'), 'range.existing.com')
  await userEvent.type(screen.getByLabelText(/^Host$/), '192.168.1.100')
  const portInput = screen.getByLabelText(/^Port$/) as HTMLInputElement
  await userEvent.clear(portInput)
  await userEvent.type(portInput, '70000')
  // Spy on the prototype so the component's validity calls are observable;
  // reportValidity returns false to simulate a blocked native submit.
  const setCustomValiditySpy = vi.spyOn(HTMLInputElement.prototype, 'setCustomValidity')
  const reportValiditySpy = vi.spyOn(HTMLInputElement.prototype, 'reportValidity').mockReturnValue(false)
  try {
    const form = document.querySelector('form') as HTMLFormElement
    fireEvent.submit(form)
    await waitFor(() => {
      expect(setCustomValiditySpy).toHaveBeenCalledWith('Port must be between 1 and 65535')
      expect(reportValiditySpy).toHaveBeenCalled()
      expect(mockOnSubmit).not.toHaveBeenCalled()
    })
  } finally {
    // Restore the prototype spies even if the assertions above throw, so a
    // failing run does not leak a stubbed reportValidity into later tests.
    setCustomValiditySpy.mockRestore()
    reportValiditySpy.mockRestore()
  }
})
})
// Remote docker sources: selecting a source option and verifying that picking
// a container maps its host/public port into the submitted payload.
describe('Remote Server Container Mapping', () => {
  it('allows selecting a remote docker source option', async () => {
    await renderWithClientAct(
      <ProxyHostForm onSubmit={mockOnSubmit} onCancel={mockOnCancel} />
    )
    await selectComboboxOption('Source', 'Local Docker Registry (localhost)')
    expect(screen.getByRole('combobox', { name: 'Source' })).toHaveTextContent('Local Docker Registry')
  })

  it('maps remote docker container to remote host and public port', async () => {
    const { useDocker } = await import('../../hooks/useDocker')
    // A single running container exposing private port 80 on public 18080.
    const remoteContainer = {
      id: 'remote-container-1',
      names: ['remote-app'],
      image: 'nginx:latest',
      state: 'running',
      status: 'Up 1 hour',
      network: 'bridge',
      ip: '172.18.0.10',
      ports: [{ private_port: 80, public_port: 18080, type: 'tcp' }],
    }
    vi.mocked(useDocker).mockReturnValue({
      containers: [remoteContainer],
      isLoading: false,
      error: null,
      refetch: vi.fn(),
    })

    renderWithClient(
      <ProxyHostForm onSubmit={mockOnSubmit} onCancel={mockOnCancel} />
    )

    await userEvent.type(screen.getByLabelText(/^Name/), 'Remote Mapping')
    await userEvent.type(screen.getByPlaceholderText('example.com, www.example.com'), 'remote.existing.com')
    await selectComboboxOption('Source', 'Local Docker Registry (localhost)')
    await selectComboboxOption('Containers', 'remote-app (nginx:latest)')
    await userEvent.click(screen.getByText('Save'))

    // The container's public port must land in the submitted payload.
    await waitFor(() => {
      expect(mockOnSubmit).toHaveBeenCalledWith(expect.objectContaining({
        forward_host: 'localhost',
        forward_port: 18080,
      }))
    })
  })
})
describe('Host and Port Combination', () => {

View File

@@ -374,6 +374,37 @@ describe('<AuditLogs />', () => {
})
})
// An audit log whose `details` payload is not parseable JSON: the details
// dialog should wrap the raw string as {"raw": "..."} instead of crashing.
it('falls back to raw details when details are not valid JSON', async () => {
  const invalidDetailsLog = {
    ...mockAuditLogs[0],
    uuid: 'raw-details-log',
    details: 'not-json',
  }
  vi.spyOn(auditLogsApi, 'getAuditLogs').mockResolvedValue({
    logs: [invalidDetailsLog],
    total: 1,
    page: 1,
    limit: 50,
  })

  renderWithProviders(<AuditLogs />)
  await waitFor(() => {
    expect(screen.getByText('admin@example.com')).toBeInTheDocument()
  })

  // Fail fast if the row is missing instead of silently skipping the click,
  // which previously surfaced only as a confusing waitFor timeout below.
  const row = screen.getByText('admin@example.com').closest('tr')
  expect(row).not.toBeNull()
  fireEvent.click(row as HTMLTableRowElement)

  await waitFor(() => {
    expect(screen.getByText('Audit Log Details')).toBeInTheDocument()
    expect(screen.getByText(/"raw": "not-json"/)).toBeInTheDocument()
  })
})
it('shows filter count badge', async () => {
vi.spyOn(auditLogsApi, 'getAuditLogs').mockResolvedValue({
logs: [],

View File

@@ -1,5 +1,5 @@
import { AxiosError } from 'axios'
import { screen, waitFor, act, cleanup, within } from '@testing-library/react'
import { screen, waitFor, act, cleanup, within, fireEvent } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import { QueryClient } from '@tanstack/react-query'
import { describe, it, expect, vi, beforeEach } from 'vitest'
@@ -288,6 +288,45 @@ describe('CrowdSecConfig coverage', () => {
await waitFor(() => expect(screen.getByTestId('preset-apply-info')).toHaveTextContent('Backup: /tmp/backup.tar.gz'))
})
// Preset cards must be operable from the keyboard: Enter on the first card
// and the Space key on the second should each trigger a preset pull.
it('supports keyboard selection for preset cards (Enter and Space)', async () => {
  vi.mocked(presetsApi.listCrowdsecPresets).mockResolvedValueOnce({
    presets: [
      {
        slug: CROWDSEC_PRESETS[0].slug,
        title: CROWDSEC_PRESETS[0].title,
        summary: CROWDSEC_PRESETS[0].description,
        source: 'hub',
        requires_hub: false,
        available: true,
        cached: false,
        cache_key: 'cache-a',
      },
      {
        slug: CROWDSEC_PRESETS[1].slug,
        title: CROWDSEC_PRESETS[1].title,
        summary: CROWDSEC_PRESETS[1].description,
        source: 'hub',
        requires_hub: false,
        available: true,
        cached: false,
        cache_key: 'cache-b',
      },
    ],
  })

  await renderPage()

  const cardOne = await screen.findByRole('button', { name: new RegExp(CROWDSEC_PRESETS[0].title, 'i') })
  const cardTwo = await screen.findByRole('button', { name: new RegExp(CROWDSEC_PRESETS[1].title, 'i') })

  // Enter activates the first card…
  cardOne.focus()
  await userEvent.keyboard('{Enter}')
  // …and Space activates the second.
  cardTwo.focus()
  await userEvent.keyboard(' ')

  await waitFor(() => expect(presetsApi.pullCrowdsecPreset).toHaveBeenCalledTimes(2))
})
it('falls back to local apply on 501 and covers validation/hub/offline branches', async () => {
vi.mocked(crowdsecApi.writeCrowdsecFile).mockResolvedValue({})
vi.mocked(presetsApi.applyCrowdsecPreset).mockRejectedValueOnce(axiosError(501, 'not implemented'))
@@ -460,6 +499,79 @@ describe('CrowdSecConfig coverage', () => {
await waitFor(() => expect(toast.error).toHaveBeenCalledWith('unban fail'))
})
// Exercises the Ban IP modal: dismissal via the backdrop, then submission via
// Ctrl+Enter, plain Enter, and Enter pressed inside the reason field.
it('supports ban modal click and keyboard interactions', async () => {
  await renderPage()

  // Re-opens the modal and returns the dialog element for scoped queries.
  const openBanModal = async () => {
    await userEvent.click(screen.getByRole('button', { name: /Ban IP/ }))
    return screen.getByRole('dialog', { name: 'Ban IP Address' })
  }

  // Clicking the dark backdrop dismisses the modal.
  await userEvent.click(screen.getByRole('button', { name: /Ban IP/ }))
  expect(await screen.findByText('Ban IP Address')).toBeInTheDocument()
  const banDialog = screen.getByRole('dialog', { name: 'Ban IP Address' })
  const backdrop = banDialog.parentElement?.querySelector('[class*="bg-black/60"]') as HTMLElement
  fireEvent.click(backdrop)
  await waitFor(() => expect(screen.queryByText('Ban IP Address')).not.toBeInTheDocument())

  // Ctrl+Enter submits the ban.
  const firstDialog = await openBanModal()
  await userEvent.type(within(firstDialog).getByPlaceholderText('192.168.1.100'), '9.9.9.9')
  await userEvent.keyboard('{Control>}{Enter}{/Control}')
  await waitFor(() => expect(crowdsecApi.banIP).toHaveBeenCalledWith('9.9.9.9', '24h', ''))

  // Plain Enter submits as well.
  const secondDialog = await openBanModal()
  await userEvent.type(within(secondDialog).getByPlaceholderText('192.168.1.100'), '8.8.8.8')
  await userEvent.keyboard('{Enter}')
  await waitFor(() => expect(crowdsecApi.banIP).toHaveBeenCalledWith('8.8.8.8', '24h', ''))

  // Enter inside the reason field submits with the reason attached.
  const thirdDialog = await openBanModal()
  await userEvent.type(within(thirdDialog).getByPlaceholderText('192.168.1.100'), '8.8.8.8')
  await userEvent.type(within(thirdDialog).getByLabelText('Reason'), 'manual reason{Enter}')
  await waitFor(() => expect(crowdsecApi.banIP).toHaveBeenCalledWith('8.8.8.8', '24h', 'manual reason'))
})
// The Confirm Unban dialog must close on backdrop click and Escape, confirm
// on Enter, and also close via its Cancel button on a fresh render.
it('supports unban modal overlay, Escape, Enter, and cancel button', async () => {
  vi.mocked(crowdsecApi.listCrowdsecDecisions).mockResolvedValueOnce({
    decisions: [
      { id: '1', ip: '7.7.7.7', reason: 'bot', duration: '24h', created_at: '2024-01-01T00:00:00Z', source: 'manual' },
    ],
  })
  await renderPage()

  // Backdrop click dismisses the dialog.
  await userEvent.click(await screen.findByRole('button', { name: 'Unban' }))
  expect(await screen.findByText('Confirm Unban')).toBeInTheDocument()
  const dialog = screen.getByRole('dialog', { name: 'Confirm Unban' })
  const backdrop = dialog.parentElement?.querySelector('[class*="bg-black/60"]') as HTMLElement
  fireEvent.click(backdrop)
  await waitFor(() => expect(screen.queryByText('Confirm Unban')).not.toBeInTheDocument())

  // Escape dismisses the dialog as well.
  await userEvent.click(await screen.findByRole('button', { name: 'Unban' }))
  expect(await screen.findByText('Confirm Unban')).toBeInTheDocument()
  await userEvent.keyboard('{Escape}')
  await waitFor(() => expect(screen.queryByText('Confirm Unban')).not.toBeInTheDocument())

  // Enter confirms the unban.
  await userEvent.click(await screen.findByRole('button', { name: 'Unban' }))
  expect(await screen.findByText('Confirm Unban')).toBeInTheDocument()
  await userEvent.keyboard('{Enter}')
  await waitFor(() => expect(crowdsecApi.unbanIP).toHaveBeenCalledWith('7.7.7.7'))

  // Fresh render: the Cancel button inside the dialog closes it.
  vi.mocked(crowdsecApi.listCrowdsecDecisions).mockResolvedValueOnce({
    decisions: [
      { id: '1', ip: '7.7.7.7', reason: 'bot', duration: '24h', created_at: '2024-01-01T00:00:00Z', source: 'manual' },
    ],
  })
  await renderPage()
  await userEvent.click(await screen.findByRole('button', { name: 'Unban' }))
  const cancelDialog = screen.getByRole('dialog', { name: 'Confirm Unban' })
  await userEvent.click(within(cancelDialog).getByRole('button', { name: 'Cancel' }))
  await waitFor(() => expect(screen.queryByText('Confirm Unban')).not.toBeInTheDocument())
})
it('bans and unbans IPs with overlay messaging', async () => {
vi.mocked(crowdsecApi.listCrowdsecDecisions).mockResolvedValue({
decisions: [

View File

@@ -6,6 +6,7 @@ import DNSProviders from '../DNSProviders'
import { renderWithQueryClient } from '../../test-utils/renderWithQueryClient'
import { useDNSProviders, useDNSProviderMutations, type DNSProvider } from '../../hooks/useDNSProviders'
import { getChallenge } from '../../api/manualChallenge'
import { toast } from '../../utils/toast'
vi.mock('react-i18next', () => ({
useTranslation: () => ({
@@ -55,8 +56,20 @@ vi.mock('../../components/DNSProviderForm', () => ({
}))
vi.mock('../../components/dns-providers', () => ({
ManualDNSChallenge: ({ challenge }: { challenge: { fqdn: string } }) => (
<section data-testid="manual-dns-challenge">{challenge.fqdn}</section>
ManualDNSChallenge: ({
challenge,
onComplete,
onCancel,
}: {
challenge: { fqdn: string }
onComplete: () => void
onCancel: () => void
}) => (
<section data-testid="manual-dns-challenge">
<div>{challenge.fqdn}</div>
<button type="button" onClick={onComplete}>complete-manual</button>
<button type="button" onClick={onCancel}>cancel-manual</button>
</section>
),
}))
@@ -140,5 +153,59 @@ describe('DNSProviders page state behavior', () => {
await user.click(screen.getByRole('button', { name: 'dnsProvider.manual.title' }))
expect(await screen.findByTestId('manual-dns-challenge')).toBeInTheDocument()
await user.click(screen.getByRole('button', { name: 'complete-manual' }))
await waitFor(() => {
expect(getChallenge).toHaveBeenCalledTimes(2)
})
await user.click(screen.getByRole('button', { name: 'cancel-manual' }))
await waitFor(() => {
expect(screen.queryByTestId('manual-dns-challenge')).not.toBeInTheDocument()
})
})
// After completion triggers a refresh, the challenge fetch rejects, so the
// manual challenge panel must be removed rather than stay stale.
it('re-evaluates manual challenge visibility after completion refresh', async () => {
  vi.mocked(getChallenge)
    .mockResolvedValueOnce({
      id: 'active',
      status: 'pending',
      fqdn: '_acme-challenge.example.com',
      value: 'token',
      ttl: 300,
      created_at: '2026-02-15T00:00:00Z',
      expires_at: '2026-02-15T00:10:00Z',
      dns_propagated: false,
    })
    .mockRejectedValueOnce(new Error('challenge missing after refresh'))

  const actor = userEvent.setup()
  renderWithQueryClient(<DNSProviders />)

  // First fetch resolves, so the panel appears.
  await actor.click(screen.getByRole('button', { name: 'dnsProvider.manual.title' }))
  expect(await screen.findByTestId('manual-dns-challenge')).toBeInTheDocument()

  // Completing triggers the second (rejecting) fetch and hides the panel.
  await actor.click(screen.getByRole('button', { name: 'complete-manual' }))
  await waitFor(() => {
    expect(getChallenge).toHaveBeenCalledTimes(2)
    expect(screen.queryByTestId('manual-dns-challenge')).not.toBeInTheDocument()
  })
})
// With no DNS providers configured, requesting a manual challenge should only
// raise an error toast and never touch the challenge API.
it('shows no provider toast when manual challenge is requested without providers', async () => {
  vi.mocked(useDNSProviders).mockReturnValue({
    data: [],
    isLoading: false,
    refetch: vi.fn(),
  } as unknown as ReturnType<typeof useDNSProviders>)

  const actor = userEvent.setup()
  renderWithQueryClient(<DNSProviders />)
  await actor.click(screen.getByRole('button', { name: 'dnsProvider.manual.title' }))

  expect(toast.error).toHaveBeenCalledWith('dnsProviders.noProviders')
  expect(getChallenge).not.toHaveBeenCalled()
})
})

View File

@@ -198,4 +198,102 @@ describe('Notifications', () => {
const resetNotifyProxyHosts = screen.getByTestId('notify-proxy-hosts') as HTMLInputElement
expect(resetNotifyProxyHosts.checked).toBe(true)
})
// Covers both template-list states: a pending fetch shows the loading marker,
// and a resolved fetch renders one row per template.
it('renders external template loading and rows when templates are present', async () => {
  const opsTemplate = {
    id: 'template-1',
    name: 'Ops Payload',
    description: 'Template for ops alerts',
    template: 'custom' as const,
    config: '{"text":"{{.Message}}"}',
    created_at: '2024-01-01T00:00:00Z',
    updated_at: '2024-01-01T00:00:00Z',
  }

  // A promise that never settles keeps the loading state on screen.
  vi.mocked(notificationsApi.getExternalTemplates).mockReturnValue(new Promise(() => {}))
  const { unmount } = renderWithQueryClient(<Notifications />)
  await userEvent.click(await screen.findByRole('button', { name: 'notificationProviders.manageTemplates' }))
  expect(screen.getByTestId('external-templates-loading')).toBeInTheDocument()
  unmount()

  // Second mount: the fetch resolves and the template row is rendered.
  vi.mocked(notificationsApi.getExternalTemplates).mockResolvedValue([opsTemplate])
  renderWithQueryClient(<Notifications />)
  await userEvent.click(await screen.findByRole('button', { name: 'notificationProviders.manageTemplates' }))
  expect(await screen.findByTestId('external-template-row-template-1')).toBeInTheDocument()
  expect(screen.getByText('Ops Payload')).toBeInTheDocument()
})
// Opening the editor pre-fills the template name; confirming the delete
// prompt must call the delete API with the template id.
it('opens external template editor and deletes template on confirm', async () => {
  const securityTemplate = {
    id: 'template-2',
    name: 'Security Payload',
    description: 'Template for security alerts',
    template: 'custom' as const,
    config: '{"text":"{{.Message}}"}',
    created_at: '2024-01-01T00:00:00Z',
    updated_at: '2024-01-01T00:00:00Z',
  }
  vi.mocked(notificationsApi.getExternalTemplates).mockResolvedValue([securityTemplate])
  vi.mocked(notificationsApi.deleteExternalTemplate).mockResolvedValue(undefined)
  const confirmSpy = vi.spyOn(window, 'confirm').mockReturnValue(true)
  const actor = userEvent.setup()

  renderWithQueryClient(<Notifications />)
  await actor.click(await screen.findByRole('button', { name: 'notificationProviders.manageTemplates' }))
  expect(await screen.findByTestId('external-template-row-template-2')).toBeInTheDocument()

  // Edit opens the editor with the template's name populated.
  await actor.click(screen.getByTestId('external-template-edit-template-2'))
  await waitFor(() => {
    expect((screen.getByTestId('template-name') as HTMLInputElement).value).toBe('Security Payload')
  })

  // Delete prompts for confirmation, then calls the API.
  await actor.click(screen.getByTestId('external-template-delete-template-2'))
  await waitFor(() => {
    expect(confirmSpy).toHaveBeenCalled()
    expect(notificationsApi.deleteExternalTemplate).toHaveBeenCalledWith('template-2')
  })
  confirmSpy.mockRestore()
})
// When the confirm dialog is dismissed, the delete API must not be called,
// while the edit button still opens the populated editor.
it('renders external template action buttons and skips delete when confirm is cancelled', async () => {
  const cancelTemplate = {
    id: 'template-cancel',
    name: 'Cancel Delete Template',
    description: 'Template used for cancel delete branch',
    template: 'custom' as const,
    config: '{"text":"{{.Message}}"}',
    created_at: '2024-01-01T00:00:00Z',
    updated_at: '2024-01-01T00:00:00Z',
  }
  vi.mocked(notificationsApi.getExternalTemplates).mockResolvedValue([cancelTemplate])
  vi.mocked(notificationsApi.deleteExternalTemplate).mockResolvedValue(undefined)
  const confirmSpy = vi.spyOn(window, 'confirm').mockReturnValue(false)
  const actor = userEvent.setup()

  renderWithQueryClient(<Notifications />)
  await actor.click(await screen.findByRole('button', { name: 'notificationProviders.manageTemplates' }))
  expect(await screen.findByTestId('external-template-row-template-cancel')).toBeInTheDocument()

  // Edit still opens the editor with the name filled in.
  await actor.click(screen.getByTestId('external-template-edit-template-cancel'))
  await waitFor(() => {
    expect((screen.getByTestId('template-name') as HTMLInputElement).value).toBe('Cancel Delete Template')
  })

  // Cancelled confirm: no delete call.
  await actor.click(screen.getByTestId('external-template-delete-template-cancel'))
  expect(confirmSpy).toHaveBeenCalled()
  expect(notificationsApi.deleteExternalTemplate).not.toHaveBeenCalled()
  confirmSpy.mockRestore()
})
})

View File

@@ -363,6 +363,113 @@ describe('UsersPage', () => {
}
})
// Clipboard API absent: copying the invite link must fall back to the hidden
// textarea + document.execCommand('copy') path and still report success.
it('uses textarea fallback copy when clipboard API fails', async () => {
  vi.mocked(usersApi.listUsers).mockResolvedValue(mockUsers)
  vi.mocked(usersApi.inviteUser).mockResolvedValue({
    id: 6,
    uuid: 'invitee-fallback',
    email: 'fallback@example.com',
    role: 'user',
    invite_token: 'token-fallback',
    invite_url: 'https://charon.example.com/accept-invite?token=token-fallback',
    email_sent: false,
    expires_at: '2025-01-01T00:00:00Z',
  })

  // Simulate an environment with no navigator.clipboard at all.
  const originalDescriptor = Object.getOwnPropertyDescriptor(navigator, 'clipboard')
  Object.defineProperty(navigator, 'clipboard', {
    get: () => undefined,
    configurable: true,
  })
  const appendSpy = vi.spyOn(document.body, 'appendChild')
  const removeSpy = vi.spyOn(document.body, 'removeChild')
  // Capture the original execCommand descriptor so the stub can be undone;
  // previously the stub leaked into every subsequent test.
  const originalExecCommand = Object.getOwnPropertyDescriptor(document, 'execCommand')
  Object.defineProperty(document, 'execCommand', {
    value: vi.fn(),
    configurable: true,
    writable: true,
  })

  renderWithQueryClient(<UsersPage />)
  const user = userEvent.setup()
  await waitFor(() => expect(screen.getByText('Invite User')).toBeInTheDocument())
  await user.click(screen.getByRole('button', { name: /Invite User/i }))
  await user.type(screen.getByPlaceholderText('user@example.com'), 'fallback@example.com')
  await user.click(screen.getByRole('button', { name: /^Send Invite$/i }))
  await screen.findByDisplayValue(/accept-invite\?token=token-fallback/)
  await user.click(screen.getByRole('button', { name: /copy invite link/i }))

  await waitFor(() => {
    expect(appendSpy).toHaveBeenCalled()
    expect(toast.success).toHaveBeenCalledWith('Invite link copied to clipboard')
  })

  appendSpy.mockRestore()
  removeSpy.mockRestore()
  // Restore document.execCommand (own-property stub removed if there was no
  // original own descriptor, letting the prototype implementation show again).
  if (originalExecCommand) {
    Object.defineProperty(document, 'execCommand', originalExecCommand)
  } else {
    delete (document as unknown as { execCommand?: unknown }).execCommand
  }
  if (originalDescriptor) {
    Object.defineProperty(navigator, 'clipboard', originalDescriptor)
  } else {
    delete (navigator as unknown as { clipboard?: unknown }).clipboard
  }
})
// Clipboard API present but writeText rejects: the copy action must fall back
// to the textarea + execCommand path and still surface the success toast.
it('uses textarea fallback copy when clipboard writeText rejects', async () => {
  vi.mocked(usersApi.listUsers).mockResolvedValue(mockUsers)
  vi.mocked(usersApi.inviteUser).mockResolvedValue({
    id: 7,
    uuid: 'invitee-reject',
    email: 'reject@example.com',
    role: 'user',
    invite_token: 'token-reject',
    invite_url: 'https://charon.example.com/accept-invite?token=token-reject',
    email_sent: false,
    expires_at: '2025-01-01T00:00:00Z',
  })

  // clipboard.writeText exists but always rejects.
  const writeText = vi.fn().mockRejectedValue(new Error('clipboard denied'))
  const originalDescriptor = Object.getOwnPropertyDescriptor(navigator, 'clipboard')
  Object.defineProperty(navigator, 'clipboard', {
    get: () => ({ writeText }),
    configurable: true,
  })
  const appendSpy = vi.spyOn(document.body, 'appendChild')
  const removeSpy = vi.spyOn(document.body, 'removeChild')
  // Capture the original execCommand descriptor so the stub can be undone;
  // previously the stub leaked into every subsequent test.
  const originalExecCommand = Object.getOwnPropertyDescriptor(document, 'execCommand')
  Object.defineProperty(document, 'execCommand', {
    value: vi.fn().mockReturnValue(true),
    configurable: true,
    writable: true,
  })

  renderWithQueryClient(<UsersPage />)
  const user = userEvent.setup()
  await waitFor(() => expect(screen.getByText('Invite User')).toBeInTheDocument())
  await user.click(screen.getByRole('button', { name: /Invite User/i }))
  await user.type(screen.getByPlaceholderText('user@example.com'), 'reject@example.com')
  await user.click(screen.getByRole('button', { name: /^Send Invite$/i }))
  await screen.findByDisplayValue(/accept-invite\?token=token-reject/)
  await user.click(screen.getByRole('button', { name: /copy invite link/i }))

  await waitFor(() => {
    expect(appendSpy).toHaveBeenCalled()
    expect(toast.success).toHaveBeenCalledWith('Invite link copied to clipboard')
  })

  appendSpy.mockRestore()
  removeSpy.mockRestore()
  // Restore document.execCommand (own-property stub removed if there was no
  // original own descriptor, letting the prototype implementation show again).
  if (originalExecCommand) {
    Object.defineProperty(document, 'execCommand', originalExecCommand)
  } else {
    delete (document as unknown as { execCommand?: unknown }).execCommand
  }
  if (originalDescriptor) {
    Object.defineProperty(navigator, 'clipboard', originalDescriptor)
  } else {
    delete (navigator as unknown as { clipboard?: unknown }).clipboard
  }
})
describe('URL Preview in InviteModal', () => {
afterEach(() => {
vi.useRealTimers()