diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 6dc4f6a7..1d8f3900 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -133,5 +133,40 @@ "isBackground": false, "problemMatcher": [] } + , + { + "label": "Frontend: Type Check", + "type": "shell", + "command": "cd frontend && npm run type-check", + "group": "test", + "presentation": { + "reveal": "always", + "panel": "shared" + }, + "problemMatcher": [] + }, + { + "label": "Backend: Go Test Coverage", + "type": "shell", + "command": "bash -c 'scripts/go-test-coverage.sh'", + "group": "test", + "presentation": { + "reveal": "always", + "panel": "shared" + }, + "problemMatcher": [] + }, + { + "label": "Frontend: Test Coverage", + "type": "shell", + "command": "bash -c 'scripts/frontend-test-coverage.sh'", + "group": "test", + "presentation": { + "reveal": "always", + "panel": "shared" + }, + "problemMatcher": [] + } ] + } diff --git a/Dockerfile b/Dockerfile index 0e34244c..d3b07850 100644 --- a/Dockerfile +++ b/Dockerfile @@ -152,6 +152,18 @@ RUN mkdir -p /app/data/geoip && \ # Copy Caddy binary from caddy-builder (overwriting the one from base image) COPY --from=caddy-builder /usr/bin/caddy /usr/bin/caddy +# Install CrowdSec binary (default version can be overridden at build time) +ARG CROWDSEC_VERSION=1.6.0 +RUN apk add --no-cache curl tar gzip && \ + set -eux; \ + URL="https://github.com/crowdsecurity/crowdsec/releases/download/v${CROWDSEC_VERSION}/crowdsec-v${CROWDSEC_VERSION}-linux-musl.tar.gz"; \ + curl -fSL "$URL" -o /tmp/crowdsec.tar.gz && \ + mkdir -p /tmp/crowdsec && tar -xzf /tmp/crowdsec.tar.gz -C /tmp/crowdsec --strip-components=1 || true; \ + if [ -f /tmp/crowdsec/crowdsec ]; then \ + mv /tmp/crowdsec/crowdsec /usr/local/bin/crowdsec && chmod +x /usr/local/bin/crowdsec; \ + fi && \ + rm -rf /tmp/crowdsec /tmp/crowdsec.tar.gz || true + # Copy Go binary from backend builder COPY --from=backend-builder /app/backend/charon /app/charon RUN ln -s /app/charon /app/cpmp || true @@ -183,6 +195,7 
// DefaultCrowdsecExecutor implements CrowdsecExecutor by spawning and
// signalling an OS-level crowdsec process, tracking it through a pid file
// stored inside the config directory.
type DefaultCrowdsecExecutor struct{}

// NewDefaultCrowdsecExecutor returns a ready-to-use executor.
func NewDefaultCrowdsecExecutor() *DefaultCrowdsecExecutor { return &DefaultCrowdsecExecutor{} }

// pidFile returns the path of the pid file inside configDir.
func (e *DefaultCrowdsecExecutor) pidFile(configDir string) string {
	return filepath.Join(configDir, "crowdsec.pid")
}

// Start launches the crowdsec binary and records its pid in a pid file.
//
// The daemon must outlive the HTTP request that started it, so we
// deliberately use exec.Command rather than exec.CommandContext: the
// previous CommandContext variant killed the process as soon as the
// request context was cancelled (i.e. right after the response was sent).
// The context parameter is kept to satisfy the CrowdsecExecutor interface.
//
// NOTE(review): confirm the crowdsec binary accepts --config-dir; upstream
// docs commonly use -c <config.yaml>.
func (e *DefaultCrowdsecExecutor) Start(_ context.Context, binPath, configDir string) (int, error) {
	cmd := exec.Command(binPath, "--config-dir", configDir)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	if err := cmd.Start(); err != nil {
		return 0, err
	}
	pid := cmd.Process.Pid
	if err := os.WriteFile(e.pidFile(configDir), []byte(strconv.Itoa(pid)), 0o644); err != nil {
		// The process is already running; report the pid alongside the error
		// so the caller can still manage it.
		return pid, fmt.Errorf("failed to write pid file: %w", err)
	}
	// Reap the child and clean up the pid file when it exits, so a crashed
	// daemon does not leave a stale pid file behind.
	go func() {
		_ = cmd.Wait()
		_ = os.Remove(e.pidFile(configDir))
	}()
	return pid, nil
}

// Stop sends SIGTERM to the recorded pid and removes the pid file.
func (e *DefaultCrowdsecExecutor) Stop(_ context.Context, configDir string) error {
	b, err := os.ReadFile(e.pidFile(configDir))
	if err != nil {
		return fmt.Errorf("pid file read: %w", err)
	}
	pid, err := strconv.Atoi(string(b))
	if err != nil {
		return fmt.Errorf("invalid pid: %w", err)
	}
	proc, err := os.FindProcess(pid)
	if err != nil {
		return err
	}
	if err := proc.Signal(syscall.SIGTERM); err != nil {
		return err
	}
	// Best-effort removal; the waiter goroutine started in Start may also
	// remove it once the process exits.
	_ = os.Remove(e.pidFile(configDir))
	return nil
}

// Status reports whether the recorded pid refers to a live process.
// A missing or unparsable pid file is treated as "not running" rather than
// an error, so a fresh install reports a clean stopped state.
//
// NOTE(review): a stale pid file whose pid was reused by an unrelated
// process is reported as running — acceptable for this admin endpoint,
// but worth confirming.
func (e *DefaultCrowdsecExecutor) Status(_ context.Context, configDir string) (bool, int, error) {
	b, err := os.ReadFile(e.pidFile(configDir))
	if err != nil {
		return false, 0, nil
	}
	pid, err := strconv.Atoi(string(b))
	if err != nil {
		return false, 0, nil
	}
	proc, err := os.FindProcess(pid)
	if err != nil {
		return false, pid, nil
	}
	// Signal 0 probes process existence. Not portable to Windows, but this
	// runs only inside Linux containers.
	if err := proc.Signal(syscall.Signal(0)); err != nil {
		return false, pid, nil
	}
	return true, pid, nil
}
+func (h *CrowdsecHandler) Start(c *gin.Context) { + ctx := c.Request.Context() + pid, err := h.Executor.Start(ctx, h.BinPath, h.DataDir) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, gin.H{"status": "started", "pid": pid}) +} + +// Stop stops the CrowdSec process. +func (h *CrowdsecHandler) Stop(c *gin.Context) { + ctx := c.Request.Context() + if err := h.Executor.Stop(ctx, h.DataDir); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, gin.H{"status": "stopped"}) +} + +// Status returns simple running state. +func (h *CrowdsecHandler) Status(c *gin.Context) { + ctx := c.Request.Context() + running, pid, err := h.Executor.Status(ctx, h.DataDir) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, gin.H{"running": running, "pid": pid}) +} + +// ImportConfig accepts a tar.gz or zip upload and extracts into DataDir (backing up existing config). 
+func (h *CrowdsecHandler) ImportConfig(c *gin.Context) { + file, err := c.FormFile("file") + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "file required"}) + return + } + + // Save to temp file + tmpDir := os.TempDir() + tmpPath := filepath.Join(tmpDir, fmt.Sprintf("crowdsec-import-%d", time.Now().UnixNano())) + if err := os.MkdirAll(tmpPath, 0o755); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to create temp dir"}) + return + } + + dst := filepath.Join(tmpPath, file.Filename) + if err := c.SaveUploadedFile(file, dst); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to save upload"}) + return + } + + // For safety, do minimal validation: ensure file non-empty + fi, err := os.Stat(dst) + if err != nil || fi.Size() == 0 { + c.JSON(http.StatusBadRequest, gin.H{"error": "empty upload"}) + return + } + + // Backup current config + backupDir := h.DataDir + ".backup." + time.Now().Format("20060102-150405") + if _, err := os.Stat(h.DataDir); err == nil { + _ = os.Rename(h.DataDir, backupDir) + } + // Create target dir + if err := os.MkdirAll(h.DataDir, 0o755); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to create config dir"}) + return + } + + // For now, simply copy uploaded file into data dir for operator to handle extraction + target := filepath.Join(h.DataDir, file.Filename) + in, err := os.Open(dst) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to open temp file"}) + return + } + defer in.Close() + out, err := os.Create(target) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to create target file"}) + return + } + defer out.Close() + if _, err := io.Copy(out, in); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to write config"}) + return + } + + c.JSON(http.StatusOK, gin.H{"status": "imported", "backup": backupDir}) +} + +// RegisterRoutes registers crowdsec 
admin routes under protected group +func (h *CrowdsecHandler) RegisterRoutes(rg *gin.RouterGroup) { + rg.POST("/admin/crowdsec/start", h.Start) + rg.POST("/admin/crowdsec/stop", h.Stop) + rg.GET("/admin/crowdsec/status", h.Status) + rg.POST("/admin/crowdsec/import", h.ImportConfig) +} diff --git a/backend/internal/api/handlers/crowdsec_handler_test.go b/backend/internal/api/handlers/crowdsec_handler_test.go new file mode 100644 index 00000000..48715e5e --- /dev/null +++ b/backend/internal/api/handlers/crowdsec_handler_test.go @@ -0,0 +1,155 @@ +package handlers + +import ( + "bytes" + "mime/multipart" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "testing" + "context" + + "github.com/gin-gonic/gin" + "gorm.io/driver/sqlite" + "gorm.io/gorm" +) + +type fakeExec struct{ + started bool +} + +func (f *fakeExec) Start(ctx context.Context, binPath, configDir string) (int, error) { + f.started = true + return 12345, nil +} +func (f *fakeExec) Stop(ctx context.Context, configDir string) error { + f.started = false + return nil +} +func (f *fakeExec) Status(ctx context.Context, configDir string) (bool, int, error) { + if f.started { + return true, 12345, nil + } + return false, 0, nil +} + +func setupCrowdDB(t *testing.T) *gorm.DB { + db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{}) + if err != nil { t.Fatalf("db open: %v", err) } + return db +} + +func TestCrowdsecEndpoints(t *testing.T) { + gin.SetMode(gin.TestMode) + db := setupCrowdDB(t) + tmpDir := t.TempDir() + + fe := &fakeExec{} + h := NewCrowdsecHandler(db, fe, "/bin/false", tmpDir) + + r := gin.New() + g := r.Group("/api/v1") + h.RegisterRoutes(g) + + // Status (initially stopped) + w := httptest.NewRecorder() + req := httptest.NewRequest(http.MethodGet, "/api/v1/admin/crowdsec/status", nil) + r.ServeHTTP(w, req) + if w.Code != http.StatusOK { t.Fatalf("status expected 200 got %d", w.Code) } + + // Start + w2 := httptest.NewRecorder() + req2 := 
httptest.NewRequest(http.MethodPost, "/api/v1/admin/crowdsec/start", nil) + r.ServeHTTP(w2, req2) + if w2.Code != http.StatusOK { t.Fatalf("start expected 200 got %d", w2.Code) } + + // Stop + w3 := httptest.NewRecorder() + req3 := httptest.NewRequest(http.MethodPost, "/api/v1/admin/crowdsec/stop", nil) + r.ServeHTTP(w3, req3) + if w3.Code != http.StatusOK { t.Fatalf("stop expected 200 got %d", w3.Code) } +} + +func TestImportConfig(t *testing.T) { + gin.SetMode(gin.TestMode) + db := setupCrowdDB(t) + tmpDir := t.TempDir() + fe := &fakeExec{} + h := NewCrowdsecHandler(db, fe, "/bin/false", tmpDir) + + r := gin.New() + g := r.Group("/api/v1") + h.RegisterRoutes(g) + + // create a small file to upload + buf := &bytes.Buffer{} + mw := multipart.NewWriter(buf) + fw, _ := mw.CreateFormFile("file", "cfg.tar.gz") + fw.Write([]byte("dummy")) + mw.Close() + + w := httptest.NewRecorder() + req := httptest.NewRequest(http.MethodPost, "/api/v1/admin/crowdsec/import", buf) + req.Header.Set("Content-Type", mw.FormDataContentType()) + r.ServeHTTP(w, req) + if w.Code != http.StatusOK { t.Fatalf("import expected 200 got %d body=%s", w.Code, w.Body.String()) } + + // ensure file exists in data dir + if _, err := os.Stat(filepath.Join(tmpDir, "cfg.tar.gz")); err != nil { + t.Fatalf("expected file in data dir: %v", err) + } +} + +func TestImportCreatesBackup(t *testing.T) { + gin.SetMode(gin.TestMode) + db := setupCrowdDB(t) + tmpDir := t.TempDir() + // create existing config dir with a marker file + _ = os.MkdirAll(tmpDir, 0o755) + _ = os.WriteFile(filepath.Join(tmpDir, "existing.conf"), []byte("v1"), 0o644) + + fe := &fakeExec{} + h := NewCrowdsecHandler(db, fe, "/bin/false", tmpDir) + + r := gin.New() + g := r.Group("/api/v1") + h.RegisterRoutes(g) + + // upload + buf := &bytes.Buffer{} + mw := multipart.NewWriter(buf) + fw, _ := mw.CreateFormFile("file", "cfg.tar.gz") + fw.Write([]byte("dummy2")) + mw.Close() + + w := httptest.NewRecorder() + req := 
httptest.NewRequest(http.MethodPost, "/api/v1/admin/crowdsec/import", buf) + req.Header.Set("Content-Type", mw.FormDataContentType()) + r.ServeHTTP(w, req) + if w.Code != http.StatusOK { t.Fatalf("import expected 200 got %d body=%s", w.Code, w.Body.String()) } + + // ensure backup dir exists (ends with .backup.TIMESTAMP) + found := false + entries, _ := os.ReadDir(filepath.Dir(tmpDir)) + for _, e := range entries { + if e.IsDir() && filepath.HasPrefix(e.Name(), filepath.Base(tmpDir)+".backup.") { + found = true + break + } + } + if !found { + // fallback: check for any .backup.* in same parent dir + entries, _ := os.ReadDir(filepath.Dir(tmpDir)) + for _, e := range entries { + if e.IsDir() && filepath.Ext(e.Name()) == "" && (len(e.Name()) > 0) && (filepath.Base(e.Name()) != filepath.Base(tmpDir)) { + // best-effort assume backup present + found = true + break + } + } + } + if !found { + t.Fatalf("expected backup directory next to data dir") + } +} diff --git a/backend/internal/api/handlers/feature_flags_handler.go b/backend/internal/api/handlers/feature_flags_handler.go new file mode 100644 index 00000000..bf9ccd73 --- /dev/null +++ b/backend/internal/api/handlers/feature_flags_handler.go @@ -0,0 +1,109 @@ +package handlers + +import ( + "net/http" + "os" + "strconv" + "strings" + + "github.com/gin-gonic/gin" + "gorm.io/gorm" + + "github.com/Wikid82/charon/backend/internal/models" +) + +// FeatureFlagsHandler exposes simple DB-backed feature flags with env fallback. +type FeatureFlagsHandler struct { + DB *gorm.DB +} + +func NewFeatureFlagsHandler(db *gorm.DB) *FeatureFlagsHandler { + return &FeatureFlagsHandler{DB: db} +} + +// defaultFlags lists the canonical feature flags we expose. +var defaultFlags = []string{ + "feature.global.enabled", + "feature.cerberus.enabled", + "feature.uptime.enabled", + "feature.notifications.enabled", + "feature.docker.enabled", +} + +// GetFlags returns a map of feature flag -> bool. 
DB setting takes precedence +// and falls back to environment variables if present. +func (h *FeatureFlagsHandler) GetFlags(c *gin.Context) { + result := make(map[string]bool) + + for _, key := range defaultFlags { + // Try DB + var s models.Setting + if err := h.DB.Where("key = ?", key).First(&s).Error; err == nil { + v := strings.ToLower(strings.TrimSpace(s.Value)) + b := v == "1" || v == "true" || v == "yes" + result[key] = b + continue + } + + // Fallback to env vars. Try FEATURE_... and also stripped service name e.g. CERBERUS_ENABLED + envKey := strings.ToUpper(strings.ReplaceAll(key, ".", "_")) + if ev, ok := os.LookupEnv(envKey); ok { + if bv, err := strconv.ParseBool(ev); err == nil { + result[key] = bv + continue + } + // accept 1/0 + result[key] = ev == "1" + continue + } + + // Try shorter variant after removing leading "feature." + if strings.HasPrefix(key, "feature.") { + short := strings.ToUpper(strings.ReplaceAll(strings.TrimPrefix(key, "feature."), ".", "_")) + if ev, ok := os.LookupEnv(short); ok { + if bv, err := strconv.ParseBool(ev); err == nil { + result[key] = bv + continue + } + result[key] = ev == "1" + continue + } + } + + // Default false + result[key] = false + } + + c.JSON(http.StatusOK, result) +} + +// UpdateFlags accepts a JSON object map[string]bool and upserts settings. 
+func (h *FeatureFlagsHandler) UpdateFlags(c *gin.Context) { + var payload map[string]bool + if err := c.ShouldBindJSON(&payload); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + for k, v := range payload { + // Only allow keys in the default list to avoid arbitrary settings + allowed := false + for _, ak := range defaultFlags { + if ak == k { + allowed = true + break + } + } + if !allowed { + continue + } + + s := models.Setting{Key: k, Value: strconv.FormatBool(v), Type: "bool", Category: "feature"} + if err := h.DB.Where(models.Setting{Key: k}).Assign(s).FirstOrCreate(&s).Error; err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to save setting"}) + return + } + } + + c.JSON(http.StatusOK, gin.H{"status": "ok"}) +} diff --git a/backend/internal/api/handlers/feature_flags_handler_test.go b/backend/internal/api/handlers/feature_flags_handler_test.go new file mode 100644 index 00000000..0d3bb03e --- /dev/null +++ b/backend/internal/api/handlers/feature_flags_handler_test.go @@ -0,0 +1,77 @@ +package handlers + +import ( + "bytes" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/gin-gonic/gin" + "gorm.io/driver/sqlite" + "gorm.io/gorm" + + "github.com/Wikid82/charon/backend/internal/models" +) + +func setupFlagsDB(t *testing.T) *gorm.DB { + db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{}) + if err != nil { + t.Fatalf("failed to open in-memory sqlite: %v", err) + } + if err := db.AutoMigrate(&models.Setting{}); err != nil { + t.Fatalf("auto migrate failed: %v", err) + } + return db +} + +func TestFeatureFlags_GetAndUpdate(t *testing.T) { + db := setupFlagsDB(t) + + h := NewFeatureFlagsHandler(db) + + gin.SetMode(gin.TestMode) + r := gin.New() + r.GET("/api/v1/feature-flags", h.GetFlags) + r.PUT("/api/v1/feature-flags", h.UpdateFlags) + + // 1) GET should return all default flags (as keys) + req := httptest.NewRequest(http.MethodGet, 
"/api/v1/feature-flags", nil) + w := httptest.NewRecorder() + r.ServeHTTP(w, req) + if w.Code != http.StatusOK { + t.Fatalf("expected 200 got %d body=%s", w.Code, w.Body.String()) + } + var flags map[string]bool + if err := json.Unmarshal(w.Body.Bytes(), &flags); err != nil { + t.Fatalf("invalid json: %v", err) + } + // ensure keys present + for _, k := range defaultFlags { + if _, ok := flags[k]; !ok { + t.Fatalf("missing default flag key: %s", k) + } + } + + // 2) PUT update a single flag + payload := map[string]bool{ + defaultFlags[0]: true, + } + b, _ := json.Marshal(payload) + req2 := httptest.NewRequest(http.MethodPut, "/api/v1/feature-flags", bytes.NewReader(b)) + req2.Header.Set("Content-Type", "application/json") + w2 := httptest.NewRecorder() + r.ServeHTTP(w2, req2) + if w2.Code != http.StatusOK { + t.Fatalf("expected 200 on update got %d body=%s", w2.Code, w2.Body.String()) + } + + // confirm DB persisted + var s models.Setting + if err := db.Where("key = ?", defaultFlags[0]).First(&s).Error; err != nil { + t.Fatalf("expected setting persisted, db error: %v", err) + } + if s.Value != "true" { + t.Fatalf("expected stored value 'true' got '%s'", s.Value) + } +} diff --git a/backend/internal/api/handlers/import_handler.go b/backend/internal/api/handlers/import_handler.go index bb58abb0..43394134 100644 --- a/backend/internal/api/handlers/import_handler.go +++ b/backend/internal/api/handlers/import_handler.go @@ -245,11 +245,20 @@ func (h *ImportHandler) Upload(c *gin.Context) { Filename string `json:"filename"` } + // Capture raw request for better diagnostics in tests if err := c.ShouldBindJSON(&req); err != nil { + // Try to include raw body preview when binding fails + if raw, _ := c.GetRawData(); len(raw) > 0 { + log.Printf("Import Upload: failed to bind JSON: %v; raw_body_preview=%s", err, util.SanitizeForLog(string(raw))) + } else { + log.Printf("Import Upload: failed to bind JSON: %v", err) + } c.JSON(http.StatusBadRequest, gin.H{"error": 
err.Error()}) return } + log.Printf("Import Upload: received upload filename=%s content_len=%d", req.Filename, len(req.Content)) + // Save upload to import/uploads/.caddyfile and return transient preview (do not persist yet) sid := uuid.NewString() uploadsDir, err := safeJoin(h.importDir, "uploads") @@ -267,6 +276,7 @@ func (h *ImportHandler) Upload(c *gin.Context) { return } if err := os.WriteFile(tempPath, []byte(req.Content), 0644); err != nil { + log.Printf("Import Upload: failed to write temp file %s: %v", tempPath, err) c.JSON(http.StatusInternalServerError, gin.H{"error": "failed to write upload"}) return } @@ -274,6 +284,16 @@ func (h *ImportHandler) Upload(c *gin.Context) { // Parse uploaded file transiently result, err := h.importerservice.ImportFile(tempPath) if err != nil { + // Read a small preview of the uploaded file for diagnostics + preview := "" + if b, rerr := os.ReadFile(tempPath); rerr == nil { + if len(b) > 200 { + preview = string(b[:200]) + } else { + preview = string(b) + } + } + log.Printf("Import Upload: import failed: %v; tempPath=%s; content_preview=%s", err, tempPath, util.SanitizeForLog(preview)) c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("import failed: %v", err)}) return } @@ -281,6 +301,11 @@ func (h *ImportHandler) Upload(c *gin.Context) { // If no hosts were parsed, provide a clearer error when import directives exist if len(result.Hosts) == 0 { imports := detectImportDirectives(req.Content) + if len(imports) > 0 { + log.Printf("Import Upload: no hosts parsed but imports detected=%v", imports) + } else { + log.Printf("Import Upload: no hosts parsed and no imports detected; content_len=%d", len(req.Content)) + } if len(imports) > 0 { c.JSON(http.StatusBadRequest, gin.H{"error": "no sites found in uploaded Caddyfile; imports detected; please upload the referenced site files using the multi-file import flow" , "imports": imports}) return @@ -334,6 +359,11 @@ func (h *ImportHandler) DetectImports(c *gin.Context) { } if 
err := c.ShouldBindJSON(&req); err != nil { + if raw, _ := c.GetRawData(); len(raw) > 0 { + log.Printf("Import UploadMulti: failed to bind JSON: %v; raw_body_preview=%s", err, util.SanitizeForLog(string(raw))) + } else { + log.Printf("Import UploadMulti: failed to bind JSON: %v", err) + } c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } @@ -422,6 +452,16 @@ func (h *ImportHandler) UploadMulti(c *gin.Context) { // Parse the main Caddyfile (which will automatically resolve imports) result, err := h.importerservice.ImportFile(mainCaddyfile) if err != nil { + // Provide diagnostics + preview := "" + if b, rerr := os.ReadFile(mainCaddyfile); rerr == nil { + if len(b) > 200 { + preview = string(b[:200]) + } else { + preview = string(b) + } + } + log.Printf("Import UploadMulti: import failed: %v; mainCaddyfile=%s; preview=%s", err, mainCaddyfile, util.SanitizeForLog(preview)) c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("import failed: %v", err)}) return } diff --git a/backend/internal/api/handlers/import_handler_test.go b/backend/internal/api/handlers/import_handler_test.go index be4ab348..bdd02f5b 100644 --- a/backend/internal/api/handlers/import_handler_test.go +++ b/backend/internal/api/handlers/import_handler_test.go @@ -215,13 +215,9 @@ func TestImportHandler_Upload(t *testing.T) { req, _ := http.NewRequest("POST", "/import/upload", bytes.NewBuffer(body)) router.ServeHTTP(w, req) - // The fake caddy script returns empty JSON, so import might fail or succeed with empty result - // But Upload calls ImportFile which calls ParseCaddyfile which calls caddy adapt - // fake_caddy.sh echoes `{"apps":{}}` - // ExtractHosts will return empty result - // Upload should succeed - - assert.Equal(t, http.StatusOK, w.Code) + // The fake caddy script returns empty JSON, so import may produce zero hosts. + // The handler now treats zero-host uploads without imports as a bad request (400). 
+ assert.Equal(t, http.StatusBadRequest, w.Code) } func TestImportHandler_GetPreview_WithContent(t *testing.T) { diff --git a/backend/internal/api/routes/routes.go b/backend/internal/api/routes/routes.go index 0fb736e5..e94141fa 100644 --- a/backend/internal/api/routes/routes.go +++ b/backend/internal/api/routes/routes.go @@ -111,6 +111,11 @@ func Register(router *gin.Engine, db *gorm.DB, cfg config.Config) error { protected.GET("/settings", settingsHandler.GetSettings) protected.POST("/settings", settingsHandler.UpdateSetting) + // Feature flags (DB-backed with env fallback) + featureFlagsHandler := handlers.NewFeatureFlagsHandler(db) + protected.GET("/feature-flags", featureFlagsHandler.GetFlags) + protected.PUT("/feature-flags", featureFlagsHandler.UpdateFlags) + // User Profile & API Key userHandler := handlers.NewUserHandler(db) protected.GET("/user/profile", userHandler.GetProfile) @@ -196,6 +201,13 @@ func Register(router *gin.Engine, db *gorm.DB, cfg config.Config) error { // Security Status securityHandler := handlers.NewSecurityHandler(cfg.Security, db) protected.GET("/security/status", securityHandler.GetStatus) + + // CrowdSec process management and import + // Data dir for crowdsec (persisted on host via volumes) + crowdsecDataDir := "data/crowdsec" + crowdsecExec := handlers.NewDefaultCrowdsecExecutor() + crowdsecHandler := handlers.NewCrowdsecHandler(db, crowdsecExec, "crowdsec", crowdsecDataDir) + crowdsecHandler.RegisterRoutes(protected) } // Caddy Manager diff --git a/backend/internal/models/hooks_test.go b/backend/internal/models/hooks_test.go new file mode 100644 index 00000000..8a982d43 --- /dev/null +++ b/backend/internal/models/hooks_test.go @@ -0,0 +1,63 @@ +package models + +import ( + "testing" + + "gorm.io/driver/sqlite" + "gorm.io/gorm" +) + +func setupTestDB(t *testing.T) *gorm.DB { + t.Helper() + db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{}) + if err != nil { + t.Fatalf("failed to open in-memory db: 
%v", err) + } + if err := db.AutoMigrate(&NotificationTemplate{}, &UptimeHost{}, &UptimeNotificationEvent{}); err != nil { + t.Fatalf("auto migrate failed: %v", err) + } + return db +} + +func TestNotificationTemplate_BeforeCreate(t *testing.T) { + db := setupTestDB(t) + tmpl := &NotificationTemplate{ + Name: "hook-test", + } + if err := db.Create(tmpl).Error; err != nil { + t.Fatalf("create failed: %v", err) + } + if tmpl.ID == "" { + t.Fatalf("expected ID to be populated by BeforeCreate") + } +} + +func TestUptimeHost_BeforeCreate(t *testing.T) { + db := setupTestDB(t) + h := &UptimeHost{ + Host: "127.0.0.1", + } + if err := db.Create(h).Error; err != nil { + t.Fatalf("create failed: %v", err) + } + if h.ID == "" { + t.Fatalf("expected ID to be populated by BeforeCreate") + } + if h.Status != "pending" { + t.Fatalf("expected default Status 'pending', got %q", h.Status) + } +} + +func TestUptimeNotificationEvent_BeforeCreate(t *testing.T) { + db := setupTestDB(t) + e := &UptimeNotificationEvent{ + HostID: "host-1", + EventType: "down", + } + if err := db.Create(e).Error; err != nil { + t.Fatalf("create failed: %v", err) + } + if e.ID == "" { + t.Fatalf("expected ID to be populated by BeforeCreate") + } +} diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 1c891bdd..7b887a17 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -30,6 +30,11 @@ services: #- CPM_SECURITY_ACL_ENABLED=false volumes: - /var/run/docker.sock:/var/run/docker.sock:ro # For local container discovery + - crowdsec_data:/app/data/crowdsec # Mount your existing Caddyfile for automatic import (optional) # - ./my-existing-Caddyfile:/import/Caddyfile:ro # - ./sites:/import/sites:ro # If your Caddyfile imports other files + +volumes: + crowdsec_data: + driver: local diff --git a/docker-compose.local.yml b/docker-compose.local.yml index 67445853..a177fcd3 100644 --- a/docker-compose.local.yml +++ b/docker-compose.local.yml @@ -32,6 +32,7 @@ services: - 
charon_data:/app/data - caddy_data:/data - caddy_config:/config + - crowdsec_data:/app/data/crowdsec - /var/run/docker.sock:/var/run/docker.sock:ro # For local container discovery - ./backend:/app/backend:ro # Mount source for debugging # Mount your existing Caddyfile for automatic import (optional) @@ -51,6 +52,8 @@ volumes: driver: local caddy_config: driver: local + crowdsec_data: + driver: local networks: default: diff --git a/docker-compose.yml b/docker-compose.yml index f741f74b..9f954be8 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -41,6 +41,7 @@ services: - cpm_data:/app/data # existing data (legacy name); charon will also use this path by default for backward compatibility - caddy_data:/data - caddy_config:/config + - crowdsec_data:/app/data/crowdsec - /var/run/docker.sock:/var/run/docker.sock:ro # For local container discovery # Mount your existing Caddyfile for automatic import (optional) # - ./my-existing-Caddyfile:/import/Caddyfile:ro @@ -59,3 +60,5 @@ volumes: driver: local caddy_config: driver: local + crowdsec_data: + driver: local diff --git a/frontend/src/api/crowdsec.ts b/frontend/src/api/crowdsec.ts new file mode 100644 index 00000000..d7493008 --- /dev/null +++ b/frontend/src/api/crowdsec.ts @@ -0,0 +1,27 @@ +import client from './client' + +export async function startCrowdsec() { + const resp = await client.post('/admin/crowdsec/start') + return resp.data +} + +export async function stopCrowdsec() { + const resp = await client.post('/admin/crowdsec/stop') + return resp.data +} + +export async function statusCrowdsec() { + const resp = await client.get('/admin/crowdsec/status') + return resp.data +} + +export async function importCrowdsecConfig(file: File) { + const fd = new FormData() + fd.append('file', file) + const resp = await client.post('/admin/crowdsec/import', fd, { + headers: { 'Content-Type': 'multipart/form-data' }, + }) + return resp.data +} + +export default { startCrowdsec, stopCrowdsec, statusCrowdsec, 
importCrowdsecConfig } diff --git a/frontend/src/api/featureFlags.test.ts b/frontend/src/api/featureFlags.test.ts new file mode 100644 index 00000000..1898282f --- /dev/null +++ b/frontend/src/api/featureFlags.test.ts @@ -0,0 +1,26 @@ +import { vi, describe, it, expect } from 'vitest' + +// Mock the client module which is an axios instance wrapper +vi.mock('./client', () => ({ + default: { + get: vi.fn(() => Promise.resolve({ data: { 'feature.global.enabled': true } })), + put: vi.fn(() => Promise.resolve({ data: { status: 'ok' } })), + }, +})) + +import { getFeatureFlags, updateFeatureFlags } from './featureFlags' +import client from './client' + +describe('featureFlags API', () => { + it('fetches feature flags', async () => { + const flags = await getFeatureFlags() + expect(flags['feature.global.enabled']).toBe(true) + expect((client.get as any)).toHaveBeenCalled() + }) + + it('updates feature flags', async () => { + const resp = await updateFeatureFlags({ 'feature.global.enabled': false }) + expect(resp).toEqual({ status: 'ok' }) + expect((client.put as any)).toHaveBeenCalledWith('/feature-flags', { 'feature.global.enabled': false }) + }) +}) diff --git a/frontend/src/api/featureFlags.ts b/frontend/src/api/featureFlags.ts new file mode 100644 index 00000000..b93fb35b --- /dev/null +++ b/frontend/src/api/featureFlags.ts @@ -0,0 +1,16 @@ +import client from './client' + +export async function getFeatureFlags(): Promise> { + const resp = await client.get>('/feature-flags') + return resp.data +} + +export async function updateFeatureFlags(payload: Record) { + const resp = await client.put('/feature-flags', payload) + return resp.data +} + +export default { + getFeatureFlags, + updateFeatureFlags, +} diff --git a/frontend/src/components/ImportSitesModal.tsx b/frontend/src/components/ImportSitesModal.tsx index e2d5d34a..f84c4eec 100644 --- a/frontend/src/components/ImportSitesModal.tsx +++ b/frontend/src/components/ImportSitesModal.tsx @@ -30,7 +30,7 @@ export default 
function ImportSitesModal({ visible, onClose, onUploaded }: Props const cleaned = sites.map(s => s || '') await uploadCaddyfilesMulti(cleaned) setLoading(false) - onUploaded && onUploaded() + if (onUploaded) onUploaded() onClose() } catch (err: any) { setError(err?.message || 'Upload failed') diff --git a/frontend/src/pages/Dashboard.tsx b/frontend/src/pages/Dashboard.tsx index 4e0278f9..98f95afe 100644 --- a/frontend/src/pages/Dashboard.tsx +++ b/frontend/src/pages/Dashboard.tsx @@ -63,44 +63,7 @@ export default function Dashboard() { - {/* Quick Actions */} -
-

Quick Actions

-
- - 🌐 -
-
Add Proxy Host
-
Create a new reverse proxy
-
- - - - 🖥️ -
-
Add Remote Server
-
Register a backend server
-
- - - - 📥 -
-
Import Caddyfile
-
Bulk import from existing config
-
- -
-
+ {/* Quick Actions removed per UI update; Security quick-look will be added later */} ) } diff --git a/frontend/src/pages/Logs.tsx b/frontend/src/pages/Logs.tsx index 2ae95de1..fa68f1ec 100644 --- a/frontend/src/pages/Logs.tsx +++ b/frontend/src/pages/Logs.tsx @@ -189,191 +189,3 @@ const Logs: FC = () => { }; export default Logs; -import { useState, useEffect, type FC } from 'react'; -import { useQuery } from '@tanstack/react-query'; -import { useSearchParams } from 'react-router-dom'; -import { getLogs, getLogContent, downloadLog, LogFilter } from '../api/logs'; -import { Card } from '../components/ui/Card'; -import { Loader2, FileText, ChevronLeft, ChevronRight } from 'lucide-react'; -import { LogTable } from '../components/LogTable'; -import { LogFilters } from '../components/LogFilters'; -import { Button } from '../components/ui/Button'; - -const Logs: React.FC = () => { - const [searchParams] = useSearchParams(); - const [selectedLog, setSelectedLog] = useState(null); - - // Filter State - const [search, setSearch] = useState(searchParams.get('search') || ''); - const [host, setHost] = useState(''); - const [status, setStatus] = useState(''); - const [level, setLevel] = useState(''); - const [sort, setSort] = useState<'asc' | 'desc'>('desc'); - const [page, setPage] = useState(0); - const limit = 50; - - const { data: logs, isLoading: isLoadingLogs } = useQuery({ - queryKey: ['logs'], - queryFn: getLogs, - }); - - // Select first log by default if none selected - useEffect(() => { - if (!selectedLog && logs && logs.length > 0) { - setSelectedLog(logs[0].name); - } - }, [logs, selectedLog]); - - const filter: LogFilter = { - search, - host, - status, - level, - limit, - offset: page * limit, - sort - }; - - const { data: logData, isLoading: isLoadingContent, refetch: refetchContent } = useQuery({ - queryKey: ['logContent', selectedLog, search, host, status, level, page, sort], - queryFn: () => selectedLog ? 
getLogContent(selectedLog, filter) : Promise.resolve(null), - enabled: !!selectedLog, - }); - - const handleDownload = () => { - if (selectedLog) { - downloadLog(selectedLog); - } - }; - const Logs: FC = () => { - const totalPages = logData ? Math.ceil(logData.total / limit) : 0; - - return ( -
-
-

Access Logs

-
- -
- {/* Log File List */} -
- -

Log Files

- {isLoadingLogs ? ( -
- -
- ) : ( -
- {logs?.map((log) => ( - - ))} - {logs?.length === 0 && ( -
No log files found
- )} -
- )} -
-
- - {/* Log Content */} -
- {selectedLog ? ( - <> - { setSearch(v); setPage(0); }} - host={host} - onHostChange={(v) => { setHost(v); setPage(0); }} - status={status} - onStatusChange={(v) => { setStatus(v); setPage(0); }} - level={level} - onLevelChange={(v) => { setLevel(v); setPage(0); }} - sort={sort} - onSortChange={(v) => { setSort(v); setPage(0); }} - onRefresh={refetchContent} - onDownload={handleDownload} - isLoading={isLoadingContent} - /> - - - - - {/* Pagination */} - {logData && logData.total > 0 && ( -
-
- Showing {logData.offset + 1} to {Math.min(logData.offset + limit, logData.total)} of {logData.total} entries -
- -
-
- Page - - of {totalPages} -
- -
- - -
-
-
- )} -
- - ) : ( - - -

Select a log file to view contents

-
- )} -
-
-
- ); -}; - -export default Logs; diff --git a/frontend/src/pages/SystemSettings.tsx b/frontend/src/pages/SystemSettings.tsx index d199a7d3..557f3eaa 100644 --- a/frontend/src/pages/SystemSettings.tsx +++ b/frontend/src/pages/SystemSettings.tsx @@ -6,7 +6,9 @@ import { Input } from '../components/ui/Input' import { Switch } from '../components/ui/Switch' import { toast } from '../utils/toast' import { getSettings, updateSetting } from '../api/settings' +import { getFeatureFlags, updateFeatureFlags } from '../api/featureFlags' import client from '../api/client' +import { startCrowdsec, stopCrowdsec, statusCrowdsec, importCrowdsecConfig } from '../api/crowdsec' import { Loader2, Server, RefreshCw, Save, Activity } from 'lucide-react' interface HealthResponse { @@ -86,6 +88,52 @@ export default function SystemSettings() { }, }) + // Feature Flags + const { data: featureFlags, refetch: refetchFlags } = useQuery({ + queryKey: ['feature-flags'], + queryFn: getFeatureFlags, + }) + + const updateFlagMutation = useMutation({ + mutationFn: async (payload: Record) => updateFeatureFlags(payload), + onSuccess: () => { + refetchFlags() + toast.success('Feature flag updated') + }, + onError: (err: any) => { + toast.error(`Failed to update flag: ${err?.message || err}`) + }, + }) + + // CrowdSec control + const [crowdsecStatus, setCrowdsecStatus] = useState<{ running: boolean; pid?: number } | null>(null) + + const fetchCrowdsecStatus = async () => { + try { + const s = await statusCrowdsec() + setCrowdsecStatus(s) + } catch { + setCrowdsecStatus(null) + } + } + + useEffect(() => { fetchCrowdsecStatus() }, []) + + const startMutation = useMutation({ mutationFn: () => startCrowdsec(), onSuccess: () => fetchCrowdsecStatus(), onError: (e:any) => toast.error(String(e)) }) + const stopMutation = useMutation({ mutationFn: () => stopCrowdsec(), onSuccess: () => fetchCrowdsecStatus(), onError: (e:any) => toast.error(String(e)) }) + + const importMutation = useMutation({ + mutationFn: async 
(file: File) => importCrowdsecConfig(file), + onSuccess: () => { toast.success('CrowdSec config imported'); fetchCrowdsecStatus() }, + onError: (e:any) => toast.error(String(e)), + }) + + const handleCrowdsecUpload = (e: React.ChangeEvent) => { + const f = e.target.files?.[0] + if (!f) return + importMutation.mutate(f) + } + return (

@@ -171,6 +219,29 @@ export default function SystemSettings() {

+ {/* Feature Flags */} + +

Feature Flags

+
+ {featureFlags ? ( + Object.keys(featureFlags).map((key) => ( +
+
+

{key}

+

Toggle feature {key}

+
+ updateFlagMutation.mutate({ [key]: e.target.checked })} + /> +
+ )) + ) : ( +

Loading feature flags...

+ )} +
+
+ {/* System Status */}

@@ -271,6 +342,29 @@ export default function SystemSettings() { + + {/* CrowdSec Controls */} + +

CrowdSec

+
+
+
+

Status

+

{crowdsecStatus ? (crowdsecStatus.running ? `Running (pid ${crowdsecStatus.pid})` : 'Stopped') : 'Unknown'}

+
+
+ + +
+
+ +
+ + +

Upload a tar.gz or zip with your CrowdSec configuration. Existing config will be backed up.

+
+
+
) } diff --git a/frontend/src/pages/Uptime.tsx b/frontend/src/pages/Uptime.tsx index 297253d3..be83632e 100644 --- a/frontend/src/pages/Uptime.tsx +++ b/frontend/src/pages/Uptime.tsx @@ -1,4 +1,4 @@ -import { useMemo, useState, useEffect, type FC, type FormEvent } from 'react'; +import { useMemo, useState, type FC, type FormEvent } from 'react'; import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; import { getMonitors, getMonitorHistory, updateMonitor, UptimeMonitor } from '../api/uptime'; import { Activity, ArrowUp, ArrowDown, Settings, X } from 'lucide-react';