diff --git a/README.md b/README.md index 5471cecc..e073a3ef 100644 --- a/README.md +++ b/README.md @@ -95,7 +95,12 @@ See exactly what's happening with live request logs, uptime monitoring, and inst ### 📥 **Migration Made Easy** -Import your existing Caddy configurations with one click. Already invested in another reverse proxy? Bring your work with you. +Import your existing configurations with one click: +- **Caddyfile Import** — Migrate from other Caddy setups +- **NPM Import** — Import from Nginx Proxy Manager exports +- **JSON Import** — Restore from Charon backups or generic JSON configs + +Already invested in another reverse proxy? Bring your work with you. ### ⚡ **Live Configuration Changes** diff --git a/backend/internal/api/handlers/json_import_handler.go b/backend/internal/api/handlers/json_import_handler.go new file mode 100644 index 00000000..9c549680 --- /dev/null +++ b/backend/internal/api/handlers/json_import_handler.go @@ -0,0 +1,516 @@ +package handlers + +import ( + "encoding/json" + "fmt" + "net/http" + "strings" + "sync" + "time" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "gorm.io/gorm" + + "github.com/Wikid82/charon/backend/internal/caddy" + "github.com/Wikid82/charon/backend/internal/models" + "github.com/Wikid82/charon/backend/internal/services" +) + +// jsonImportSession stores the parsed content for a JSON import session. +type jsonImportSession struct { + SourceType string // "charon" or "npm" + CharonExport *CharonExport + NPMExport *NPMExport +} + +// jsonImportSessions stores parsed exports keyed by session UUID. +// TODO: Implement session expiration to prevent memory leaks (e.g., TTL-based cleanup). +var ( + jsonImportSessions = make(map[string]jsonImportSession) + jsonImportSessionsMu sync.RWMutex +) + +// CharonExport represents the top-level structure of a Charon export file. 
+type CharonExport struct { + Version string `json:"version"` + ExportedAt time.Time `json:"exported_at"` + ProxyHosts []CharonProxyHost `json:"proxy_hosts"` + AccessLists []CharonAccessList `json:"access_lists"` + DNSRecords []CharonDNSRecord `json:"dns_records"` +} + +// CharonProxyHost represents a proxy host in Charon export format. +type CharonProxyHost struct { + UUID string `json:"uuid"` + Name string `json:"name"` + DomainNames string `json:"domain_names"` + ForwardScheme string `json:"forward_scheme"` + ForwardHost string `json:"forward_host"` + ForwardPort int `json:"forward_port"` + SSLForced bool `json:"ssl_forced"` + HTTP2Support bool `json:"http2_support"` + HSTSEnabled bool `json:"hsts_enabled"` + HSTSSubdomains bool `json:"hsts_subdomains"` + BlockExploits bool `json:"block_exploits"` + WebsocketSupport bool `json:"websocket_support"` + Application string `json:"application"` + Enabled bool `json:"enabled"` + AdvancedConfig string `json:"advanced_config"` + WAFDisabled bool `json:"waf_disabled"` + UseDNSChallenge bool `json:"use_dns_challenge"` +} + +// CharonAccessList represents an access list in Charon export format. +type CharonAccessList struct { + UUID string `json:"uuid"` + Name string `json:"name"` + Description string `json:"description"` + Type string `json:"type"` + IPRules string `json:"ip_rules"` + CountryCodes string `json:"country_codes"` + LocalNetworkOnly bool `json:"local_network_only"` + Enabled bool `json:"enabled"` +} + +// CharonDNSRecord represents a DNS record in Charon export format. +type CharonDNSRecord struct { + UUID string `json:"uuid"` + Name string `json:"name"` + Type string `json:"type"` + Value string `json:"value"` + TTL int `json:"ttl"` + ProviderID uint `json:"provider_id"` +} + +// JSONImportHandler handles JSON configuration imports (both Charon and NPM formats). 
+type JSONImportHandler struct { + db *gorm.DB + proxyHostSvc *services.ProxyHostService +} + +// NewJSONImportHandler creates a new JSON import handler. +func NewJSONImportHandler(db *gorm.DB) *JSONImportHandler { + return &JSONImportHandler{ + db: db, + proxyHostSvc: services.NewProxyHostService(db), + } +} + +// RegisterRoutes registers JSON import routes. +func (h *JSONImportHandler) RegisterRoutes(router *gin.RouterGroup) { + router.POST("/import/json/upload", h.Upload) + router.POST("/import/json/commit", h.Commit) + router.POST("/import/json/cancel", h.Cancel) +} + +// Upload parses a JSON export (Charon or NPM format) and returns a preview. +func (h *JSONImportHandler) Upload(c *gin.Context) { + var req struct { + Content string `json:"content" binding:"required"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Try Charon format first + var charonExport CharonExport + if err := json.Unmarshal([]byte(req.Content), &charonExport); err == nil && h.isCharonFormat(charonExport) { + h.handleCharonUpload(c, charonExport) + return + } + + // Fall back to NPM format + var npmExport NPMExport + if err := json.Unmarshal([]byte(req.Content), &npmExport); err == nil && len(npmExport.ProxyHosts) > 0 { + h.handleNPMUpload(c, npmExport) + return + } + + c.JSON(http.StatusBadRequest, gin.H{"error": "unrecognized JSON format - must be Charon or NPM export"}) +} + +// isCharonFormat checks if the export is in Charon format. +func (h *JSONImportHandler) isCharonFormat(export CharonExport) bool { + return export.Version != "" || len(export.ProxyHosts) > 0 +} + +// handleCharonUpload processes a Charon format export. 
+func (h *JSONImportHandler) handleCharonUpload(c *gin.Context, export CharonExport) { + result := h.convertCharonToImportResult(export) + + if len(result.Hosts) == 0 { + c.JSON(http.StatusBadRequest, gin.H{"error": "no proxy hosts found in Charon export"}) + return + } + + existingHosts, _ := h.proxyHostSvc.List() + existingDomainsMap := make(map[string]models.ProxyHost) + for _, eh := range existingHosts { + existingDomainsMap[eh.DomainNames] = eh + } + + conflictDetails := make(map[string]gin.H) + for _, ph := range result.Hosts { + if existing, found := existingDomainsMap[ph.DomainNames]; found { + result.Conflicts = append(result.Conflicts, ph.DomainNames) + conflictDetails[ph.DomainNames] = gin.H{ + "existing": gin.H{ + "forward_scheme": existing.ForwardScheme, + "forward_host": existing.ForwardHost, + "forward_port": existing.ForwardPort, + "ssl_forced": existing.SSLForced, + "websocket": existing.WebsocketSupport, + "enabled": existing.Enabled, + }, + "imported": gin.H{ + "forward_scheme": ph.ForwardScheme, + "forward_host": ph.ForwardHost, + "forward_port": ph.ForwardPort, + "ssl_forced": ph.SSLForced, + "websocket": ph.WebsocketSupport, + }, + } + } + } + + sid := uuid.NewString() + + // Store the parsed export in session storage for later commit + jsonImportSessionsMu.Lock() + jsonImportSessions[sid] = jsonImportSession{ + SourceType: "charon", + CharonExport: &export, + } + jsonImportSessionsMu.Unlock() + + c.JSON(http.StatusOK, gin.H{ + "session": gin.H{"id": sid, "state": "transient", "source_type": "charon"}, + "preview": result, + "conflict_details": conflictDetails, + "charon_export": gin.H{ + "version": export.Version, + "exported_at": export.ExportedAt, + "proxy_hosts": len(export.ProxyHosts), + "access_lists": len(export.AccessLists), + "dns_records": len(export.DNSRecords), + }, + }) +} + +// handleNPMUpload processes an NPM format export. 
+func (h *JSONImportHandler) handleNPMUpload(c *gin.Context, export NPMExport) { + npmHandler := NewNPMImportHandler(h.db) + result := npmHandler.convertNPMToImportResult(export) + + if len(result.Hosts) == 0 { + c.JSON(http.StatusBadRequest, gin.H{"error": "no proxy hosts found in NPM export"}) + return + } + + existingHosts, _ := h.proxyHostSvc.List() + existingDomainsMap := make(map[string]models.ProxyHost) + for _, eh := range existingHosts { + existingDomainsMap[eh.DomainNames] = eh + } + + conflictDetails := make(map[string]gin.H) + for _, ph := range result.Hosts { + if existing, found := existingDomainsMap[ph.DomainNames]; found { + result.Conflicts = append(result.Conflicts, ph.DomainNames) + conflictDetails[ph.DomainNames] = gin.H{ + "existing": gin.H{ + "forward_scheme": existing.ForwardScheme, + "forward_host": existing.ForwardHost, + "forward_port": existing.ForwardPort, + "ssl_forced": existing.SSLForced, + "websocket": existing.WebsocketSupport, + "enabled": existing.Enabled, + }, + "imported": gin.H{ + "forward_scheme": ph.ForwardScheme, + "forward_host": ph.ForwardHost, + "forward_port": ph.ForwardPort, + "ssl_forced": ph.SSLForced, + "websocket": ph.WebsocketSupport, + }, + } + } + } + + sid := uuid.NewString() + + // Store the parsed export in session storage for later commit + jsonImportSessionsMu.Lock() + jsonImportSessions[sid] = jsonImportSession{ + SourceType: "npm", + NPMExport: &export, + } + jsonImportSessionsMu.Unlock() + + c.JSON(http.StatusOK, gin.H{ + "session": gin.H{"id": sid, "state": "transient", "source_type": "npm"}, + "preview": result, + "conflict_details": conflictDetails, + "npm_export": gin.H{ + "proxy_hosts": len(export.ProxyHosts), + "access_lists": len(export.AccessLists), + "certificates": len(export.Certificates), + }, + }) +} + +// Commit finalizes the JSON import with user's conflict resolutions. 
+func (h *JSONImportHandler) Commit(c *gin.Context) { + var req struct { + SessionUUID string `json:"session_uuid" binding:"required"` + Resolutions map[string]string `json:"resolutions"` + Names map[string]string `json:"names"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Retrieve the stored session + jsonImportSessionsMu.RLock() + session, ok := jsonImportSessions[req.SessionUUID] + jsonImportSessionsMu.RUnlock() + + if !ok { + c.JSON(http.StatusNotFound, gin.H{"error": "session not found or expired"}) + return + } + + // Route to the appropriate commit handler based on source type + if session.SourceType == "charon" && session.CharonExport != nil { + h.commitCharonImport(c, *session.CharonExport, req.Resolutions, req.Names, req.SessionUUID) + return + } + + if session.SourceType == "npm" && session.NPMExport != nil { + h.commitNPMImport(c, *session.NPMExport, req.Resolutions, req.Names, req.SessionUUID) + return + } + + c.JSON(http.StatusBadRequest, gin.H{"error": "invalid session state"}) +} + +// Cancel cancels a JSON import session and cleans up resources. +func (h *JSONImportHandler) Cancel(c *gin.Context) { + var req struct { + SessionUUID string `json:"session_uuid"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Clean up session if it exists + jsonImportSessionsMu.Lock() + delete(jsonImportSessions, req.SessionUUID) + jsonImportSessionsMu.Unlock() + + c.JSON(http.StatusOK, gin.H{"status": "cancelled"}) +} + +// commitCharonImport commits a Charon format import. 
+func (h *JSONImportHandler) commitCharonImport(c *gin.Context, export CharonExport, resolutions, names map[string]string, sessionUUID string) {
+	result := h.convertCharonToImportResult(export)
+	proxyHosts := caddy.ConvertToProxyHosts(result.Hosts)
+
+	created := 0
+	updated := 0
+	skipped := 0
+	errors := []string{}
+
+	existingHosts, _ := h.proxyHostSvc.List()
+	existingMap := make(map[string]*models.ProxyHost)
+	for i := range existingHosts {
+		existingMap[existingHosts[i].DomainNames] = &existingHosts[i]
+	}
+
+	for _, host := range proxyHosts {
+		action := resolutions[host.DomainNames]
+
+		if customName, ok := names[host.DomainNames]; ok && customName != "" {
+			host.Name = customName
+		}
+
+		if action == "skip" || action == "keep" {
+			skipped++
+			continue
+		}
+
+		if action == "rename" {
+			host.DomainNames += "-imported"
+		}
+
+		if action == "overwrite" {
+			if existing, found := existingMap[host.DomainNames]; found {
+				host.ID = existing.ID
+				host.UUID = existing.UUID
+				host.CertificateID = existing.CertificateID
+				host.CreatedAt = existing.CreatedAt
+
+				if err := h.proxyHostSvc.Update(&host); err != nil {
+					errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error()))
+				} else {
+					updated++
+				}
+				continue
+			}
+		}
+
+		host.UUID = uuid.NewString()
+		if err := h.proxyHostSvc.Create(&host); err != nil {
+			errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error()))
+		} else {
+			created++
+		}
+	}
+
+	// Clean up session after commit (removed unconditionally, even if some hosts failed)
+	jsonImportSessionsMu.Lock()
+	delete(jsonImportSessions, sessionUUID)
+	jsonImportSessionsMu.Unlock()
+
+	c.JSON(http.StatusOK, gin.H{
+		"created": created,
+		"updated": updated,
+		"skipped": skipped,
+		"errors":  errors,
+	})
+}
+
+// commitNPMImport commits an NPM format import.
+func (h *JSONImportHandler) commitNPMImport(c *gin.Context, export NPMExport, resolutions, names map[string]string, sessionUUID string) {
+	npmHandler := NewNPMImportHandler(h.db)
+	result := npmHandler.convertNPMToImportResult(export)
+	proxyHosts := caddy.ConvertToProxyHosts(result.Hosts)
+
+	created := 0
+	updated := 0
+	skipped := 0
+	errors := []string{}
+
+	existingHosts, _ := h.proxyHostSvc.List()
+	existingMap := make(map[string]*models.ProxyHost)
+	for i := range existingHosts {
+		existingMap[existingHosts[i].DomainNames] = &existingHosts[i]
+	}
+
+	for _, host := range proxyHosts {
+		action := resolutions[host.DomainNames]
+
+		if customName, ok := names[host.DomainNames]; ok && customName != "" {
+			host.Name = customName
+		}
+
+		if action == "skip" || action == "keep" {
+			skipped++
+			continue
+		}
+
+		if action == "rename" {
+			host.DomainNames += "-imported"
+		}
+
+		if action == "overwrite" {
+			if existing, found := existingMap[host.DomainNames]; found {
+				host.ID = existing.ID
+				host.UUID = existing.UUID
+				host.CertificateID = existing.CertificateID
+				host.CreatedAt = existing.CreatedAt
+
+				if err := h.proxyHostSvc.Update(&host); err != nil {
+					errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error()))
+				} else {
+					updated++
+				}
+				continue
+			}
+		}
+
+		host.UUID = uuid.NewString()
+		if err := h.proxyHostSvc.Create(&host); err != nil {
+			errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error()))
+		} else {
+			created++
+		}
+	}
+
+	// Clean up session after commit (removed unconditionally, even if some hosts failed)
+	jsonImportSessionsMu.Lock()
+	delete(jsonImportSessions, sessionUUID)
+	jsonImportSessionsMu.Unlock()
+
+	c.JSON(http.StatusOK, gin.H{
+		"created": created,
+		"updated": updated,
+		"skipped": skipped,
+		"errors":  errors,
+	})
+}
+
+// convertCharonToImportResult converts Charon export format to ImportResult.
+func (h *JSONImportHandler) convertCharonToImportResult(export CharonExport) *caddy.ImportResult { + result := &caddy.ImportResult{ + Hosts: []caddy.ParsedHost{}, + Conflicts: []string{}, + Errors: []string{}, + } + + for _, ch := range export.ProxyHosts { + if ch.DomainNames == "" { + result.Errors = append(result.Errors, fmt.Sprintf("host %s has no domain names", ch.UUID)) + continue + } + + scheme := ch.ForwardScheme + if scheme == "" { + scheme = "http" + } + + port := ch.ForwardPort + if port == 0 { + port = 80 + } + + warnings := []string{} + if ch.AdvancedConfig != "" && !isValidJSON(ch.AdvancedConfig) { + warnings = append(warnings, "Advanced config may need review") + } + + host := caddy.ParsedHost{ + DomainNames: ch.DomainNames, + ForwardScheme: scheme, + ForwardHost: ch.ForwardHost, + ForwardPort: port, + SSLForced: ch.SSLForced, + WebsocketSupport: ch.WebsocketSupport, + Warnings: warnings, + } + + rawJSON, _ := json.Marshal(ch) + host.RawJSON = string(rawJSON) + + result.Hosts = append(result.Hosts, host) + } + + return result +} + +// isValidJSON checks if a string is valid JSON. 
+func isValidJSON(s string) bool { + s = strings.TrimSpace(s) + if s == "" { + return true + } + var js json.RawMessage + return json.Unmarshal([]byte(s), &js) == nil +} diff --git a/backend/internal/api/handlers/json_import_handler_test.go b/backend/internal/api/handlers/json_import_handler_test.go new file mode 100644 index 00000000..1ae7a230 --- /dev/null +++ b/backend/internal/api/handlers/json_import_handler_test.go @@ -0,0 +1,600 @@ +package handlers + +import ( + "bytes" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/gin-gonic/gin" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gorm.io/driver/sqlite" + "gorm.io/gorm" + + "github.com/Wikid82/charon/backend/internal/models" +) + +func setupJSONTestDB(t *testing.T) *gorm.DB { + db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{}) + require.NoError(t, err) + + err = db.AutoMigrate(&models.ProxyHost{}, &models.Location{}, &models.Setting{}) + require.NoError(t, err) + + return db +} + +func TestNewJSONImportHandler(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + assert.NotNil(t, handler) + assert.NotNil(t, handler.db) + assert.NotNil(t, handler.proxyHostSvc) +} + +func TestJSONImportHandler_RegisterRoutes(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + routes := router.Routes() + routePaths := make(map[string]bool) + for _, r := range routes { + routePaths[r.Method+":"+r.Path] = true + } + + assert.True(t, routePaths["POST:/api/v1/import/json/upload"]) + assert.True(t, routePaths["POST:/api/v1/import/json/commit"]) + assert.True(t, routePaths["POST:/api/v1/import/json/cancel"]) +} + +func TestJSONImportHandler_Upload_CharonFormat(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + 
router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + charonExport := CharonExport{ + Version: "1.0.0", + ExportedAt: time.Now(), + ProxyHosts: []CharonProxyHost{ + { + UUID: "test-uuid-1", + Name: "Test Host", + DomainNames: "example.com", + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + SSLForced: true, + WebsocketSupport: true, + Enabled: true, + }, + }, + AccessLists: []CharonAccessList{ + { + UUID: "acl-uuid-1", + Name: "Test ACL", + Type: "whitelist", + Enabled: true, + }, + }, + } + + content, _ := json.Marshal(charonExport) + body, _ := json.Marshal(map[string]string{"content": string(content)}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err := json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Contains(t, response, "session") + session := response["session"].(map[string]any) + assert.Equal(t, "charon", session["source_type"]) + + assert.Contains(t, response, "charon_export") + charonInfo := response["charon_export"].(map[string]any) + assert.Equal(t, "1.0.0", charonInfo["version"]) +} + +func TestJSONImportHandler_Upload_NPMFormatFallback(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"npm-example.com"}, + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + content, _ := json.Marshal(npmExport) + body, _ := json.Marshal(map[string]string{"content": string(content)}) + + req := httptest.NewRequest(http.MethodPost, 
"/api/v1/import/json/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err := json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + session := response["session"].(map[string]any) + assert.Equal(t, "npm", session["source_type"]) + + assert.Contains(t, response, "npm_export") +} + +func TestJSONImportHandler_Upload_UnrecognizedFormat(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + unknownFormat := map[string]any{ + "some_field": "some_value", + "other": 123, + } + + content, _ := json.Marshal(unknownFormat) + body, _ := json.Marshal(map[string]string{"content": string(content)}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusBadRequest, w.Code) +} + +func TestJSONImportHandler_Upload_InvalidJSON(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + body, _ := json.Marshal(map[string]string{"content": "{invalid json"}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusBadRequest, w.Code) +} + +func TestJSONImportHandler_Commit_CharonFormat(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + 
handler.RegisterRoutes(api) + + charonExport := CharonExport{ + Version: "1.0.0", + ExportedAt: time.Now(), + ProxyHosts: []CharonProxyHost{ + { + UUID: "test-uuid-1", + Name: "Test Host", + DomainNames: "newcharon.com", + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + // Step 1: Upload to get session ID + content, _ := json.Marshal(charonExport) + uploadBody, _ := json.Marshal(map[string]string{"content": string(content)}) + + uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(uploadBody)) + uploadReq.Header.Set("Content-Type", "application/json") + uploadW := httptest.NewRecorder() + + router.ServeHTTP(uploadW, uploadReq) + require.Equal(t, http.StatusOK, uploadW.Code) + + var uploadResponse map[string]any + err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse) + require.NoError(t, err) + + session := uploadResponse["session"].(map[string]any) + sessionID := session["id"].(string) + + // Step 2: Commit with session UUID + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + "resolutions": map[string]string{}, + "names": map[string]string{"newcharon.com": "Custom Name"}, + }) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/commit", bytes.NewReader(commitBody)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err = json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Equal(t, float64(1), response["created"]) + + var host models.ProxyHost + db.Where("domain_names = ?", "newcharon.com").First(&host) + assert.Equal(t, "Custom Name", host.Name) +} + +func TestJSONImportHandler_Commit_NPMFormatFallback(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := 
router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"newnpm.com"}, + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + // Step 1: Upload to get session ID + content, _ := json.Marshal(npmExport) + uploadBody, _ := json.Marshal(map[string]string{"content": string(content)}) + + uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(uploadBody)) + uploadReq.Header.Set("Content-Type", "application/json") + uploadW := httptest.NewRecorder() + + router.ServeHTTP(uploadW, uploadReq) + require.Equal(t, http.StatusOK, uploadW.Code) + + var uploadResponse map[string]any + err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse) + require.NoError(t, err) + + session := uploadResponse["session"].(map[string]any) + sessionID := session["id"].(string) + + // Step 2: Commit with session UUID + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + "resolutions": map[string]string{}, + "names": map[string]string{}, + }) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/commit", bytes.NewReader(commitBody)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err = json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Equal(t, float64(1), response["created"]) +} + +func TestJSONImportHandler_Commit_SessionNotFound(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + // Try to commit with a non-existent session + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": "non-existent-uuid", + "resolutions": map[string]string{}, + 
"names": map[string]string{}, + }) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/commit", bytes.NewReader(commitBody)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusNotFound, w.Code) + + var response map[string]any + err := json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Contains(t, response["error"], "session not found") +} + +func TestJSONImportHandler_Cancel(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + charonExport := CharonExport{ + Version: "1.0.0", + ExportedAt: time.Now(), + ProxyHosts: []CharonProxyHost{ + { + UUID: "cancel-test-uuid", + Name: "Cancel Test", + DomainNames: "cancel-test.com", + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + // Step 1: Upload to get session ID + content, _ := json.Marshal(charonExport) + uploadBody, _ := json.Marshal(map[string]string{"content": string(content)}) + + uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(uploadBody)) + uploadReq.Header.Set("Content-Type", "application/json") + uploadW := httptest.NewRecorder() + + router.ServeHTTP(uploadW, uploadReq) + require.Equal(t, http.StatusOK, uploadW.Code) + + var uploadResponse map[string]any + err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse) + require.NoError(t, err) + + session := uploadResponse["session"].(map[string]any) + sessionID := session["id"].(string) + + // Step 2: Cancel the session + cancelBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + }) + + cancelReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/cancel", bytes.NewReader(cancelBody)) + cancelReq.Header.Set("Content-Type", "application/json") + 
cancelW := httptest.NewRecorder() + + router.ServeHTTP(cancelW, cancelReq) + + assert.Equal(t, http.StatusOK, cancelW.Code) + + var cancelResponse map[string]any + err = json.Unmarshal(cancelW.Body.Bytes(), &cancelResponse) + require.NoError(t, err) + + assert.Equal(t, "cancelled", cancelResponse["status"]) + + // Step 3: Try to commit with cancelled session (should fail) + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + "resolutions": map[string]string{}, + "names": map[string]string{}, + }) + + commitReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/commit", bytes.NewReader(commitBody)) + commitReq.Header.Set("Content-Type", "application/json") + commitW := httptest.NewRecorder() + + router.ServeHTTP(commitW, commitReq) + + assert.Equal(t, http.StatusNotFound, commitW.Code) +} + +func TestJSONImportHandler_ConflictDetection(t *testing.T) { + db := setupJSONTestDB(t) + + existingHost := models.ProxyHost{ + UUID: "existing-uuid", + DomainNames: "conflict.com", + ForwardScheme: "http", + ForwardHost: "old-server", + ForwardPort: 80, + Enabled: true, + } + db.Create(&existingHost) + + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + charonExport := CharonExport{ + Version: "1.0.0", + ProxyHosts: []CharonProxyHost{ + { + UUID: "new-uuid", + DomainNames: "conflict.com", + ForwardScheme: "http", + ForwardHost: "new-server", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + content, _ := json.Marshal(charonExport) + body, _ := json.Marshal(map[string]string{"content": string(content)}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err := json.Unmarshal(w.Body.Bytes(), &response) 
+ require.NoError(t, err) + + conflictDetails := response["conflict_details"].(map[string]any) + assert.Contains(t, conflictDetails, "conflict.com") +} + +func TestJSONImportHandler_IsCharonFormat(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + tests := []struct { + name string + export CharonExport + expected bool + }{ + { + name: "with version", + export: CharonExport{Version: "1.0.0"}, + expected: true, + }, + { + name: "with proxy hosts", + export: CharonExport{ + ProxyHosts: []CharonProxyHost{{DomainNames: "test.com"}}, + }, + expected: true, + }, + { + name: "empty export", + export: CharonExport{}, + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := handler.isCharonFormat(tt.export) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestIsValidJSON(t *testing.T) { + tests := []struct { + name string + input string + expected bool + }{ + {"valid object", `{"key": "value"}`, true}, + {"valid array", `[1, 2, 3]`, true}, + {"valid string", `"hello"`, true}, + {"valid number", `123`, true}, + {"empty string", "", true}, + {"whitespace only", " ", true}, + {"invalid json", `{key: "value"}`, false}, + {"incomplete", `{"key":`, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := isValidJSON(tt.input) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestJSONImportHandler_ConvertCharonToImportResult(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + charonExport := CharonExport{ + Version: "1.0.0", + ExportedAt: time.Now(), + ProxyHosts: []CharonProxyHost{ + { + UUID: "uuid-1", + Name: "Host 1", + DomainNames: "host1.com", + ForwardScheme: "https", + ForwardHost: "backend1", + ForwardPort: 443, + SSLForced: true, + WebsocketSupport: true, + }, + { + UUID: "uuid-2", + DomainNames: "", + ForwardScheme: "http", + ForwardHost: "backend2", + ForwardPort: 80, + }, + }, + } + + result := 
handler.convertCharonToImportResult(charonExport) + + assert.Len(t, result.Hosts, 1) + assert.Len(t, result.Errors, 1) + + host := result.Hosts[0] + assert.Equal(t, "host1.com", host.DomainNames) + assert.Equal(t, "https", host.ForwardScheme) + assert.Equal(t, "backend1", host.ForwardHost) + assert.Equal(t, 443, host.ForwardPort) + assert.True(t, host.SSLForced) + assert.True(t, host.WebsocketSupport) +} diff --git a/backend/internal/api/handlers/npm_import_handler.go b/backend/internal/api/handlers/npm_import_handler.go new file mode 100644 index 00000000..8f124eca --- /dev/null +++ b/backend/internal/api/handlers/npm_import_handler.go @@ -0,0 +1,368 @@ +package handlers + +import ( + "encoding/json" + "fmt" + "net/http" + "sync" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "gorm.io/gorm" + + "github.com/Wikid82/charon/backend/internal/caddy" + "github.com/Wikid82/charon/backend/internal/models" + "github.com/Wikid82/charon/backend/internal/services" +) + +// npmImportSessions stores parsed NPM exports keyed by session UUID. +// TODO: Implement session expiration to prevent memory leaks (e.g., TTL-based cleanup). +var ( + npmImportSessions = make(map[string]NPMExport) + npmImportSessionsMu sync.RWMutex +) + +// NPMExport represents the top-level structure of an NPM export file. +type NPMExport struct { + ProxyHosts []NPMProxyHost `json:"proxy_hosts"` + AccessLists []NPMAccessList `json:"access_lists"` + Certificates []NPMCertificate `json:"certificates"` +} + +// NPMProxyHost represents a proxy host from NPM export. 
+type NPMProxyHost struct { + ID int `json:"id"` + DomainNames []string `json:"domain_names"` + ForwardScheme string `json:"forward_scheme"` + ForwardHost string `json:"forward_host"` + ForwardPort int `json:"forward_port"` + CertificateID *int `json:"certificate_id"` + SSLForced bool `json:"ssl_forced"` + CachingEnabled bool `json:"caching_enabled"` + BlockExploits bool `json:"block_exploits"` + AdvancedConfig string `json:"advanced_config"` + Meta any `json:"meta"` + AllowWebsocketUpgrade bool `json:"allow_websocket_upgrade"` + HTTP2Support bool `json:"http2_support"` + HSTSEnabled bool `json:"hsts_enabled"` + HSTSSubdomains bool `json:"hsts_subdomains"` + AccessListID *int `json:"access_list_id"` + Enabled bool `json:"enabled"` + Locations []any `json:"locations"` + CustomLocations []any `json:"custom_locations"` + OwnerUserID int `json:"owner_user_id"` + UseDefaultLocation bool `json:"use_default_location"` + IPV6 bool `json:"ipv6"` + CreatedOn string `json:"created_on"` + ModifiedOn string `json:"modified_on"` + ForwardDomainName string `json:"forward_domain_name"` + ForwardDomainNameEnabled bool `json:"forward_domain_name_enabled"` +} + +// NPMAccessList represents an access list from NPM export. +type NPMAccessList struct { + ID int `json:"id"` + Name string `json:"name"` + PassAuth int `json:"pass_auth"` + SatisfyAny int `json:"satisfy_any"` + OwnerUserID int `json:"owner_user_id"` + Items []NPMAccessItem `json:"items"` + Clients []NPMAccessItem `json:"clients"` + ProxyHostsCount int `json:"proxy_host_count"` + CreatedOn string `json:"created_on"` + ModifiedOn string `json:"modified_on"` + AuthorizationHeader any `json:"authorization_header"` +} + +// NPMAccessItem represents an item in an NPM access list. 
+type NPMAccessItem struct { + ID int `json:"id"` + AccessListID int `json:"access_list_id"` + Address string `json:"address"` + Directive string `json:"directive"` + CreatedOn string `json:"created_on"` + ModifiedOn string `json:"modified_on"` +} + +// NPMCertificate represents a certificate from NPM export. +type NPMCertificate struct { + ID int `json:"id"` + Provider string `json:"provider"` + NiceName string `json:"nice_name"` + DomainNames []string `json:"domain_names"` + ExpiresOn string `json:"expires_on"` + CreatedOn string `json:"created_on"` + ModifiedOn string `json:"modified_on"` + IsDNSChallenge bool `json:"is_dns_challenge"` + Meta any `json:"meta"` +} + +// NPMImportHandler handles NPM configuration imports. +type NPMImportHandler struct { + db *gorm.DB + proxyHostSvc *services.ProxyHostService +} + +// NewNPMImportHandler creates a new NPM import handler. +func NewNPMImportHandler(db *gorm.DB) *NPMImportHandler { + return &NPMImportHandler{ + db: db, + proxyHostSvc: services.NewProxyHostService(db), + } +} + +// RegisterRoutes registers NPM import routes. +func (h *NPMImportHandler) RegisterRoutes(router *gin.RouterGroup) { + router.POST("/import/npm/upload", h.Upload) + router.POST("/import/npm/commit", h.Commit) + router.POST("/import/npm/cancel", h.Cancel) +} + +// Upload parses an NPM export JSON and returns a preview with conflict detection. 
+func (h *NPMImportHandler) Upload(c *gin.Context) { + var req struct { + Content string `json:"content" binding:"required"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + var npmExport NPMExport + if err := json.Unmarshal([]byte(req.Content), &npmExport); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("invalid NPM export JSON: %v", err)}) + return + } + + result := h.convertNPMToImportResult(npmExport) + + if len(result.Hosts) == 0 { + c.JSON(http.StatusBadRequest, gin.H{"error": "no proxy hosts found in NPM export"}) + return + } + + // Check for conflicts with existing hosts + existingHosts, _ := h.proxyHostSvc.List() + existingDomainsMap := make(map[string]models.ProxyHost) + for _, eh := range existingHosts { + existingDomainsMap[eh.DomainNames] = eh + } + + conflictDetails := make(map[string]gin.H) + for _, ph := range result.Hosts { + if existing, found := existingDomainsMap[ph.DomainNames]; found { + result.Conflicts = append(result.Conflicts, ph.DomainNames) + conflictDetails[ph.DomainNames] = gin.H{ + "existing": gin.H{ + "forward_scheme": existing.ForwardScheme, + "forward_host": existing.ForwardHost, + "forward_port": existing.ForwardPort, + "ssl_forced": existing.SSLForced, + "websocket": existing.WebsocketSupport, + "enabled": existing.Enabled, + }, + "imported": gin.H{ + "forward_scheme": ph.ForwardScheme, + "forward_host": ph.ForwardHost, + "forward_port": ph.ForwardPort, + "ssl_forced": ph.SSLForced, + "websocket": ph.WebsocketSupport, + }, + } + } + } + + sid := uuid.NewString() + + // Store the parsed export in session storage for later commit + npmImportSessionsMu.Lock() + npmImportSessions[sid] = npmExport + npmImportSessionsMu.Unlock() + + c.JSON(http.StatusOK, gin.H{ + "session": gin.H{"id": sid, "state": "transient", "source_type": "npm"}, + "preview": result, + "conflict_details": conflictDetails, + "npm_export": gin.H{ + 
"proxy_hosts": len(npmExport.ProxyHosts), + "access_lists": len(npmExport.AccessLists), + "certificates": len(npmExport.Certificates), + }, + }) +} + +// Commit finalizes the NPM import with user's conflict resolutions. +func (h *NPMImportHandler) Commit(c *gin.Context) { + var req struct { + SessionUUID string `json:"session_uuid" binding:"required"` + Resolutions map[string]string `json:"resolutions"` // domain -> action + Names map[string]string `json:"names"` // domain -> custom name + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Retrieve the stored NPM export from session + npmImportSessionsMu.RLock() + npmExport, ok := npmImportSessions[req.SessionUUID] + npmImportSessionsMu.RUnlock() + + if !ok { + c.JSON(http.StatusNotFound, gin.H{"error": "session not found or expired"}) + return + } + + result := h.convertNPMToImportResult(npmExport) + proxyHosts := caddy.ConvertToProxyHosts(result.Hosts) + + created := 0 + updated := 0 + skipped := 0 + errors := []string{} + + existingHosts, _ := h.proxyHostSvc.List() + existingMap := make(map[string]*models.ProxyHost) + for i := range existingHosts { + existingMap[existingHosts[i].DomainNames] = &existingHosts[i] + } + + for _, host := range proxyHosts { + action := req.Resolutions[host.DomainNames] + + if customName, ok := req.Names[host.DomainNames]; ok && customName != "" { + host.Name = customName + } + + if action == "skip" || action == "keep" { + skipped++ + continue + } + + if action == "rename" { + host.DomainNames += "-imported" + } + + if action == "overwrite" { + if existing, found := existingMap[host.DomainNames]; found { + host.ID = existing.ID + host.UUID = existing.UUID + host.CertificateID = existing.CertificateID + host.CreatedAt = existing.CreatedAt + + if err := h.proxyHostSvc.Update(&host); err != nil { + errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error())) + } else { + updated++ + } + 
continue + } + } + + host.UUID = uuid.NewString() + if err := h.proxyHostSvc.Create(&host); err != nil { + errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error())) + } else { + created++ + } + } + + // Clean up session after successful commit + npmImportSessionsMu.Lock() + delete(npmImportSessions, req.SessionUUID) + npmImportSessionsMu.Unlock() + + c.JSON(http.StatusOK, gin.H{ + "created": created, + "updated": updated, + "skipped": skipped, + "errors": errors, + }) +} + +// Cancel cancels an NPM import session and cleans up resources. +func (h *NPMImportHandler) Cancel(c *gin.Context) { + var req struct { + SessionUUID string `json:"session_uuid"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Clean up session if it exists + npmImportSessionsMu.Lock() + delete(npmImportSessions, req.SessionUUID) + npmImportSessionsMu.Unlock() + + c.JSON(http.StatusOK, gin.H{"status": "cancelled"}) +} + +// convertNPMToImportResult converts NPM export format to Charon's ImportResult. 
+func (h *NPMImportHandler) convertNPMToImportResult(export NPMExport) *caddy.ImportResult { + result := &caddy.ImportResult{ + Hosts: []caddy.ParsedHost{}, + Conflicts: []string{}, + Errors: []string{}, + } + + for _, npmHost := range export.ProxyHosts { + if len(npmHost.DomainNames) == 0 { + result.Errors = append(result.Errors, fmt.Sprintf("host %d has no domain names", npmHost.ID)) + continue + } + + // NPM stores multiple domains as array; join them + domainNames := "" + for i, d := range npmHost.DomainNames { + if i > 0 { + domainNames += "," + } + domainNames += d + } + + scheme := npmHost.ForwardScheme + if scheme == "" { + scheme = "http" + } + + port := npmHost.ForwardPort + if port == 0 { + port = 80 + } + + warnings := []string{} + if npmHost.CachingEnabled { + warnings = append(warnings, "Caching not supported - will be disabled") + } + if len(npmHost.Locations) > 0 || len(npmHost.CustomLocations) > 0 { + warnings = append(warnings, "Custom locations not fully supported") + } + if npmHost.AdvancedConfig != "" { + warnings = append(warnings, "Advanced nginx config not compatible - manual review required") + } + if npmHost.AccessListID != nil && *npmHost.AccessListID > 0 { + warnings = append(warnings, fmt.Sprintf("Access list reference (ID: %d) needs manual mapping", *npmHost.AccessListID)) + } + + host := caddy.ParsedHost{ + DomainNames: domainNames, + ForwardScheme: scheme, + ForwardHost: npmHost.ForwardHost, + ForwardPort: port, + SSLForced: npmHost.SSLForced, + WebsocketSupport: npmHost.AllowWebsocketUpgrade, + Warnings: warnings, + } + + rawJSON, _ := json.Marshal(npmHost) + host.RawJSON = string(rawJSON) + + result.Hosts = append(result.Hosts, host) + } + + return result +} diff --git a/backend/internal/api/handlers/npm_import_handler_test.go b/backend/internal/api/handlers/npm_import_handler_test.go new file mode 100644 index 00000000..74d7be78 --- /dev/null +++ b/backend/internal/api/handlers/npm_import_handler_test.go @@ -0,0 +1,493 @@ +package 
handlers + +import ( + "bytes" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/gin-gonic/gin" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gorm.io/driver/sqlite" + "gorm.io/gorm" + + "github.com/Wikid82/charon/backend/internal/models" +) + +func setupNPMTestDB(t *testing.T) *gorm.DB { + db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{}) + require.NoError(t, err) + + err = db.AutoMigrate(&models.ProxyHost{}, &models.Location{}, &models.Setting{}) + require.NoError(t, err) + + return db +} + +func TestNewNPMImportHandler(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + assert.NotNil(t, handler) + assert.NotNil(t, handler.db) + assert.NotNil(t, handler.proxyHostSvc) +} + +func TestNPMImportHandler_RegisterRoutes(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + routes := router.Routes() + routePaths := make(map[string]bool) + for _, r := range routes { + routePaths[r.Method+":"+r.Path] = true + } + + assert.True(t, routePaths["POST:/api/v1/import/npm/upload"]) + assert.True(t, routePaths["POST:/api/v1/import/npm/commit"]) + assert.True(t, routePaths["POST:/api/v1/import/npm/cancel"]) +} + +func TestNPMImportHandler_Upload_ValidNPMExport(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"example.com"}, + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + SSLForced: true, + AllowWebsocketUpgrade: true, + Enabled: true, + }, + { + ID: 2, + DomainNames: []string{"test.com", "www.test.com"}, + ForwardScheme: "https", + ForwardHost: "192.168.1.101", + 
ForwardPort: 443, + Enabled: true, + }, + }, + AccessLists: []NPMAccessList{ + { + ID: 1, + Name: "Test ACL", + }, + }, + } + + content, _ := json.Marshal(npmExport) + body, _ := json.Marshal(map[string]string{"content": string(content)}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err := json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Contains(t, response, "session") + assert.Contains(t, response, "preview") + assert.Contains(t, response, "npm_export") + + preview := response["preview"].(map[string]any) + hosts := preview["hosts"].([]any) + assert.Len(t, hosts, 2) +} + +func TestNPMImportHandler_Upload_EmptyExport(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{}, + } + + content, _ := json.Marshal(npmExport) + body, _ := json.Marshal(map[string]string{"content": string(content)}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusBadRequest, w.Code) +} + +func TestNPMImportHandler_Upload_InvalidJSON(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + body, _ := json.Marshal(map[string]string{"content": "not valid json"}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", 
"application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusBadRequest, w.Code) +} + +func TestNPMImportHandler_Upload_ConflictDetection(t *testing.T) { + db := setupNPMTestDB(t) + + existingHost := models.ProxyHost{ + UUID: "existing-uuid", + DomainNames: "example.com", + ForwardScheme: "http", + ForwardHost: "old-server", + ForwardPort: 80, + Enabled: true, + } + db.Create(&existingHost) + + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"example.com"}, + ForwardScheme: "http", + ForwardHost: "new-server", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + content, _ := json.Marshal(npmExport) + body, _ := json.Marshal(map[string]string{"content": string(content)}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err := json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Contains(t, response, "conflict_details") + conflictDetails := response["conflict_details"].(map[string]any) + assert.Contains(t, conflictDetails, "example.com") +} + +func TestNPMImportHandler_Commit_CreateNew(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"newhost.com"}, + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + // Step 1: Upload to get session ID + content, _ := 
json.Marshal(npmExport) + uploadBody, _ := json.Marshal(map[string]string{"content": string(content)}) + + uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(uploadBody)) + uploadReq.Header.Set("Content-Type", "application/json") + uploadW := httptest.NewRecorder() + + router.ServeHTTP(uploadW, uploadReq) + require.Equal(t, http.StatusOK, uploadW.Code) + + var uploadResponse map[string]any + err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse) + require.NoError(t, err) + + session := uploadResponse["session"].(map[string]any) + sessionID := session["id"].(string) + + // Step 2: Commit with session UUID + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + "resolutions": map[string]string{}, + "names": map[string]string{}, + }) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/commit", bytes.NewReader(commitBody)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err = json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Equal(t, float64(1), response["created"]) + assert.Equal(t, float64(0), response["updated"]) + assert.Equal(t, float64(0), response["skipped"]) + + var host models.ProxyHost + db.Where("domain_names = ?", "newhost.com").First(&host) + assert.NotEmpty(t, host.UUID) + assert.Equal(t, "192.168.1.100", host.ForwardHost) +} + +func TestNPMImportHandler_Commit_SkipAction(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"skipme.com"}, + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + // Step 1: Upload to 
get session ID + content, _ := json.Marshal(npmExport) + uploadBody, _ := json.Marshal(map[string]string{"content": string(content)}) + + uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(uploadBody)) + uploadReq.Header.Set("Content-Type", "application/json") + uploadW := httptest.NewRecorder() + + router.ServeHTTP(uploadW, uploadReq) + require.Equal(t, http.StatusOK, uploadW.Code) + + var uploadResponse map[string]any + err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse) + require.NoError(t, err) + + session := uploadResponse["session"].(map[string]any) + sessionID := session["id"].(string) + + // Step 2: Commit with skip resolution + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + "resolutions": map[string]string{"skipme.com": "skip"}, + "names": map[string]string{}, + }) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/commit", bytes.NewReader(commitBody)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err = json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Equal(t, float64(0), response["created"]) + assert.Equal(t, float64(1), response["skipped"]) +} + +func TestNPMImportHandler_Commit_SessionNotFound(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + // Try to commit with a non-existent session + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": "non-existent-uuid", + "resolutions": map[string]string{}, + "names": map[string]string{}, + }) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/commit", bytes.NewReader(commitBody)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + 
router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusNotFound, w.Code) + + var response map[string]any + err := json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Contains(t, response["error"], "session not found") +} + +func TestNPMImportHandler_Cancel(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"cancel-test.com"}, + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + // Step 1: Upload to get session ID + content, _ := json.Marshal(npmExport) + uploadBody, _ := json.Marshal(map[string]string{"content": string(content)}) + + uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(uploadBody)) + uploadReq.Header.Set("Content-Type", "application/json") + uploadW := httptest.NewRecorder() + + router.ServeHTTP(uploadW, uploadReq) + require.Equal(t, http.StatusOK, uploadW.Code) + + var uploadResponse map[string]any + err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse) + require.NoError(t, err) + + session := uploadResponse["session"].(map[string]any) + sessionID := session["id"].(string) + + // Step 2: Cancel the session + cancelBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + }) + + cancelReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/cancel", bytes.NewReader(cancelBody)) + cancelReq.Header.Set("Content-Type", "application/json") + cancelW := httptest.NewRecorder() + + router.ServeHTTP(cancelW, cancelReq) + + assert.Equal(t, http.StatusOK, cancelW.Code) + + var cancelResponse map[string]any + err = json.Unmarshal(cancelW.Body.Bytes(), &cancelResponse) + require.NoError(t, err) + + assert.Equal(t, "cancelled", cancelResponse["status"]) + + // Step 3: Try 
to commit with cancelled session (should fail) + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + "resolutions": map[string]string{}, + "names": map[string]string{}, + }) + + commitReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/commit", bytes.NewReader(commitBody)) + commitReq.Header.Set("Content-Type", "application/json") + commitW := httptest.NewRecorder() + + router.ServeHTTP(commitW, commitReq) + + assert.Equal(t, http.StatusNotFound, commitW.Code) +} + +func TestNPMImportHandler_ConvertNPMToImportResult(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"test.com", "www.test.com"}, + ForwardScheme: "https", + ForwardHost: "backend", + ForwardPort: 443, + SSLForced: true, + AllowWebsocketUpgrade: true, + CachingEnabled: true, + AdvancedConfig: "proxy_set_header X-Custom value;", + }, + { + ID: 2, + DomainNames: []string{}, + }, + }, + } + + result := handler.convertNPMToImportResult(npmExport) + + assert.Len(t, result.Hosts, 1) + assert.Len(t, result.Errors, 1) + + host := result.Hosts[0] + assert.Equal(t, "test.com,www.test.com", host.DomainNames) + assert.Equal(t, "https", host.ForwardScheme) + assert.Equal(t, "backend", host.ForwardHost) + assert.Equal(t, 443, host.ForwardPort) + assert.True(t, host.SSLForced) + assert.True(t, host.WebsocketSupport) + assert.Len(t, host.Warnings, 2) // Caching + Advanced config warnings +} diff --git a/backend/internal/api/routes/routes.go b/backend/internal/api/routes/routes.go index df9574bd..26fc9317 100644 --- a/backend/internal/api/routes/routes.go +++ b/backend/internal/api/routes/routes.go @@ -583,4 +583,12 @@ func RegisterImportHandler(router *gin.Engine, db *gorm.DB, caddyBinary, importD importHandler := handlers.NewImportHandler(db, caddyBinary, importDir, mountPath) api := router.Group("/api/v1") importHandler.RegisterRoutes(api) + + // NPM Import 
Handler - supports Nginx Proxy Manager export format + npmImportHandler := handlers.NewNPMImportHandler(db) + npmImportHandler.RegisterRoutes(api) + + // JSON Import Handler - supports both Charon and NPM export formats + jsonImportHandler := handlers.NewJSONImportHandler(db) + jsonImportHandler.RegisterRoutes(api) } diff --git a/backend/internal/services/mail_service.go b/backend/internal/services/mail_service.go index 5b922735..eb07c0b0 100644 --- a/backend/internal/services/mail_service.go +++ b/backend/internal/services/mail_service.go @@ -137,7 +137,7 @@ func (s *MailService) GetSMTPConfig() (*SMTPConfig, error) { return config, nil } -// SaveSMTPConfig saves SMTP settings to the database. +// SaveSMTPConfig saves SMTP settings to the database using a transaction. func (s *MailService) SaveSMTPConfig(config *SMTPConfig) error { settings := map[string]string{ "smtp_host": config.Host, @@ -148,31 +148,34 @@ func (s *MailService) SaveSMTPConfig(config *SMTPConfig) error { "smtp_encryption": config.Encryption, } - for key, value := range settings { - setting := models.Setting{ - Key: key, - Value: value, - Type: "string", - Category: "smtp", - } + return s.db.Transaction(func(tx *gorm.DB) error { + for key, value := range settings { + var existing models.Setting + result := tx.Where("key = ?", key).First(&existing) - // Upsert: update if exists, create if not - result := s.db.Where("key = ?", key).First(&models.Setting{}) - if result.Error == gorm.ErrRecordNotFound { - if err := s.db.Create(&setting).Error; err != nil { - return fmt.Errorf("failed to create setting %s: %w", key, err) - } - } else { - if err := s.db.Model(&models.Setting{}).Where("key = ?", key).Updates(map[string]any{ - "value": value, - "category": "smtp", - }).Error; err != nil { - return fmt.Errorf("failed to update setting %s: %w", key, err) + switch result.Error { + case gorm.ErrRecordNotFound: + setting := models.Setting{ + Key: key, + Value: value, + Type: "string", + Category: "smtp", + } 
+ if err := tx.Create(&setting).Error; err != nil { + return fmt.Errorf("failed to create setting %s: %w", key, err) + } + case nil: + existing.Value = value + existing.Category = "smtp" + if err := tx.Save(&existing).Error; err != nil { + return fmt.Errorf("failed to update setting %s: %w", key, err) + } + default: + return fmt.Errorf("failed to query setting %s: %w", key, result.Error) + } } - } - - return nil + return nil + }) } // IsConfigured returns true if SMTP is properly configured. diff --git a/docs/features.md index e141c441..471bc5f9 100644 --- a/docs/features.md +++ b/docs/features.md @@ -128,7 +128,23 @@ Migrating from another Caddy setup? Import your existing Caddyfile configuration --- -### 🔌 WebSocket Support +### 🔄 Nginx Proxy Manager Import + +Migrating from Nginx Proxy Manager? Import your proxy host configurations directly from NPM export files. Charon parses your domains, upstream servers, SSL settings, and access lists, giving you a preview before committing. + +→ [Learn More](features/npm-import.md) + +--- + +### 📄 JSON Configuration Import + +Import configurations from generic JSON exports or Charon backup files. Supports both Charon's native export format and Nginx Proxy Manager format with automatic detection. Perfect for restoring backups or migrating between Charon instances. + +→ [Learn More](features/json-import.md) + +--- + +### 🔌 WebSocket Support Real-time applications like chat servers, live dashboards, and collaborative tools work out of the box. Charon handles WebSocket connections automatically with no special configuration needed. 
diff --git a/docs/plans/current_spec.md b/docs/plans/current_spec.md index 3c31cc3f..809bf4bc 100644 --- a/docs/plans/current_spec.md +++ b/docs/plans/current_spec.md @@ -1,518 +1,791 @@ -# CrowdSec 1.7.5 Upgrade Verification Plan +# Phase 3: Backend Routes Implementation Plan -**Document Type**: Verification Plan -**Version**: 1.7.4 → 1.7.5 -**Created**: 2026-01-22 -**Status**: Ready for Implementation +> **Phase**: 3 of Skipped Tests Remediation +> **Status**: ✅ COMPLETE +> **Created**: 2026-01-22 +> **Completed**: 2026-01-22 +> **Target Tests**: 7 tests to re-enable +> **Actual Result**: 7 tests enabled and passing --- ## Executive Summary -This document outlines the verification plan for upgrading CrowdSec from version 1.7.4 to 1.7.5 in the Charon project. Based on analysis of the CrowdSec 1.7.5 release notes and the current integration implementation, this upgrade appears to be a **low-risk maintenance release** focused on internal refactoring, improved error handling, and dependency updates. +Phase 3 addresses missing backend API routes and a data persistence issue that block 7 E2E tests: + +1. **NPM Import Route** (`/tasks/import/npm`) - 4 skipped tests +2. **JSON Import Route** (`/tasks/import/json`) - 2 skipped tests +3. **SMTP Persistence Bug** - 1 skipped test at `smtp-settings.spec.ts:336` + +The existing Caddyfile import infrastructure provides a solid foundation. NPM and JSON import routes will extend this pattern with format-specific parsers. --- -## 1. 
CrowdSec 1.7.5 Release Analysis +## Root Cause Analysis -### 1.1 Key Changes Summary +### Issue 1: Missing NPM Import Route -| Category | Count | Risk Level | -|----------|-------|------------| -| Internal Refactoring | ~25 | Low | -| Bug Fixes | 8 | Low | -| Dependency Updates | ~12 | Low | -| New Features | 2 | Low | +**Location**: Tests at [tests/integration/import-to-production.spec.ts](../../tests/integration/import-to-production.spec.ts#L170-L237) -### 1.2 Notable Changes Relevant to Charon Integration +**Problem**: The tests navigate to `/tasks/import/npm` but this route doesn't exist in the frontend router or backend API. -#### New Features/Improvements +**Evidence**: +```typescript +// From import-to-production.spec.ts lines 170-180 +test.skip('should display NPM import page', async ({ page, adminUser }) => { + await page.goto('/tasks/import/npm'); // Route doesn't exist + ... +}); +``` -1. **`ParseKVLax` for Flexible Key-Value Parsing** ([#4007](https://github.com/crowdsecurity/crowdsec/pull/4007)) - - Adds more flexible parsing capabilities - - Impact: None - internal parser enhancement +**Expected NPM Export Format** (from test file): +```json +{ + "proxy_hosts": [ + { + "domain_names": ["test.example.com"], + "forward_host": "192.168.1.100", + "forward_port": 80 + } + ], + "access_lists": [], + "certificates": [] +} +``` -2. **AppSec Transaction ID Header Support** ([#4124](https://github.com/crowdsecurity/crowdsec/pull/4124)) - - Enables request tracing via transaction ID header - - Impact: Optional feature, no required changes +### Issue 2: Missing JSON Import Route -3. 
**Docker Datasource Schema** ([#4206](https://github.com/crowdsecurity/crowdsec/pull/4206)) - - Improved Docker acquisition configuration - - Impact: May benefit container monitoring setups +**Location**: Tests at [tests/integration/import-to-production.spec.ts](../../tests/integration/import-to-production.spec.ts#L243-L256) -#### Bug Fixes +**Problem**: The `/tasks/import/json` route is not implemented. Tests navigate to this route expecting a generic JSON configuration import interface. -1. **PAPI Allowlist Check** ([#4196](https://github.com/crowdsecurity/crowdsec/pull/4196)) - - Checks if decision is allowlisted before adding - - Impact: Improved decision handling +### Issue 3: SMTP Save Not Persisting -2. **CAPI Token Reuse** ([#4201](https://github.com/crowdsecurity/crowdsec/pull/4201)) - - Always reuses stored token for CAPI - - Impact: Better authentication stability +**Location**: Test at [tests/settings/smtp-settings.spec.ts](../../tests/settings/smtp-settings.spec.ts#L336) -3. **LAPI-Only Container Hub Fix** ([#4169](https://github.com/crowdsecurity/crowdsec/pull/4169)) - - Don't prepare hub in LAPI-only containers - - Impact: Better for containerized deployments +**Problem**: After saving SMTP configuration and reloading the page, the updated values don't persist. -#### Internal Changes (No External Impact) +**Skip Comment**: +```typescript +// Note: Skip - SMTP save not persisting correctly (backend issue, not test issue) +``` -- Removed `github.com/pkg/errors` dependency - uses `fmt.Errorf` instead -- Replaced syscall with unix/windows packages -- Various linting improvements (golangci-lint 2.8) -- Refactored acquisition and leakybucket packages -- Removed global variables in favor of dependency injection -- Build improvements for Docker (larger runners) -- Updated expr to 1.17.7 (already patched in Charon Dockerfile) -- Updated modernc.org/sqlite +**Analysis of Code Flow**: -### 1.3 Breaking Changes Assessment +1. 
**Frontend**: [SMTPSettings.tsx](../../frontend/src/pages/SMTPSettings.tsx#L50-L62) + - Calls `updateSMTPConfig()` which POSTs to `/settings/smtp` + - On success, invalidates query and shows toast -**No Breaking Changes Identified** +2. **Backend Handler**: [settings_handler.go](../../backend/internal/api/handlers/settings_handler.go#L109-L136) + - `UpdateSMTPConfig()` receives the request + - Calls `h.MailService.SaveSMTPConfig(config)` -The 1.7.5 release contains no API-breaking changes. All modifications are: -- Internal refactoring -- Bug fixes -- Dependency updates -- CI/CD improvements +3. **Mail Service**: [mail_service.go](../../backend/internal/services/mail_service.go#L117-L144) + - `SaveSMTPConfig()` uses upsert pattern + - **POTENTIAL BUG**: Uses `First()` then conditional `Create()`/`Updates()` separately + +**Root Cause Hypothesis**: +The `SaveSMTPConfig` method has a problematic upsert pattern: +```go +// Current pattern in mail_service.go lines 127-143: +result := s.db.Where("key = ?", key).First(&models.Setting{}) +if result.Error == gorm.ErrRecordNotFound { + s.db.Create(&setting) // Creates new +} else { + s.db.Model(&models.Setting{}).Where("key = ?", key).Updates(...) // Updates existing +} +``` + +**Issues identified**: +1. No transaction wrapping - partial failures possible +2. `Updates()` with map may not update all fields correctly +3. If `First()` returns error other than `ErrRecordNotFound`, the else branch runs but may not execute correctly +4. Race condition between read and write operations --- -## 2. 
Current Charon CrowdSec Integration Analysis +## Implementation Plan -### 2.1 Integration Points +### Task 1: Implement NPM Import Backend Handler -| Component | Location | Description | -|-----------|----------|-------------| -| **Core Package** | [backend/internal/crowdsec/](backend/internal/crowdsec/) | CrowdSec integration library | -| **API Handler** | [backend/internal/api/handlers/crowdsec_handler.go](backend/internal/api/handlers/crowdsec_handler.go) | REST API endpoints | -| **Startup Service** | [backend/internal/services/crowdsec_startup.go](backend/internal/services/) | Initialization logic | -| **Dockerfile** | [Dockerfile](../../Dockerfile) (lines 199-290) | Source build configuration | +**File**: `backend/internal/api/handlers/npm_import_handler.go` (NEW) -### 2.2 Key Files in crowdsec Package +#### 1.1 Create NPM Parser Model -| File | Purpose | Functions to Verify | -|------|---------|---------------------| -| `registration.go` | Bouncer registration, LAPI health | `EnsureBouncerRegistered`, `CheckLAPIHealth`, `GetLAPIVersion` | -| `hub_sync.go` | Hub index fetching, preset pull/apply | `FetchIndex`, `Pull`, `Apply`, `extractTarGz` | -| `hub_cache.go` | Preset caching with TTL | `Store`, `Load`, `Evict` | -| `console_enroll.go` | Console enrollment | `Enroll`, `Status`, `checkLAPIAvailable` | -| `presets.go` | Curated preset definitions | `ListCuratedPresets`, `FindPreset` | +```go +// NPMExport represents the Nginx Proxy Manager export format +type NPMExport struct { + ProxyHosts []NPMProxyHost `json:"proxy_hosts"` + AccessLists []NPMAccessList `json:"access_lists"` + Certificates []NPMCertificate `json:"certificates"` +} -### 2.3 Handler Functions (crowdsec_handler.go) +type NPMProxyHost struct { + DomainNames []string `json:"domain_names"` + ForwardScheme string `json:"forward_scheme"` + ForwardHost string `json:"forward_host"` + ForwardPort int `json:"forward_port"` + CachingEnabled bool `json:"caching_enabled"` + BlockExploits bool 
`json:"block_exploits"` + AllowWebsocket bool `json:"allow_websocket_upgrade"` + HTTP2Support bool `json:"http2_support"` + HSTSEnabled bool `json:"hsts_enabled"` + HSTSSubdomains bool `json:"hsts_subdomains"` + SSLForced bool `json:"ssl_forced"` + Enabled bool `json:"enabled"` +} -| Handler | Line | API Endpoint | -|---------|------|--------------| -| `Start` | 188 | POST /api/crowdsec/start | -| `Stop` | 290 | POST /api/crowdsec/stop | -| `Status` | 317 | GET /api/crowdsec/status | -| `ImportConfig` | 346 | POST /api/crowdsec/import | -| `ExportConfig` | 417 | GET /api/crowdsec/export | -| `ListFiles` | 486 | GET /api/crowdsec/files | -| `ReadFile` | 513 | GET /api/crowdsec/files/:path | -| `WriteFile` | 540 | PUT /api/crowdsec/files/:path | -| `ListPresets` | 580 | GET /api/crowdsec/presets | -| `PullPreset` | 662 | POST /api/crowdsec/presets/:slug/pull | -| `ApplyPreset` | 748 | POST /api/crowdsec/presets/:slug/apply | -| `ConsoleEnroll` | 876 | POST /api/crowdsec/console/enroll | -| `ConsoleStatus` | 932 | GET /api/crowdsec/console/status | -| `DeleteConsoleEnrollment` | 954 | DELETE /api/crowdsec/console/enrollment | -| `GetCachedPreset` | 975 | GET /api/crowdsec/presets/:slug | -| `GetLAPIDecisions` | 1077 | GET /api/crowdsec/lapi/decisions | -| `CheckLAPIHealth` | 1231 | GET /api/crowdsec/lapi/health | +type NPMAccessList struct { + Name string `json:"name"` + Items []NPMAccessItem `json:"items"` +} -### 2.4 Docker Configuration +type NPMAccessItem struct { + Type string `json:"type"` // "allow" or "deny" + Address string `json:"address"` +} -**Dockerfile CrowdSec Section** (lines 199-290): -- Current version: `CROWDSEC_VERSION=1.7.4` -- Build method: Source compilation with Go 1.25.6 -- Dependency patches applied: - - `github.com/expr-lang/expr@v1.17.7` - - `golang.org/x/crypto@v0.46.0` -- Fix for expr-lang v1.17.7 compatibility (sed replacement) +type NPMCertificate struct { + NiceName string `json:"nice_name"` + DomainNames []string `json:"domain_names"` 
+ Provider string `json:"provider"` +} +``` -**Docker Compose Files**: -- `.docker/compose/docker-compose.yml` - Production config with crowdsec_data volume -- `.docker/compose/docker-compose.local.yml` - Local development -- `.docker/compose/docker-compose.playwright.yml` - E2E testing (crowdsec disabled) +#### 1.2 Create NPM Import Handler + +**File**: `backend/internal/api/handlers/npm_import_handler.go` + +```go +package handlers + +import ( + "encoding/json" + "net/http" + "strings" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "gorm.io/gorm" + + "github.com/Wikid82/charon/backend/internal/caddy" + "github.com/Wikid82/charon/backend/internal/models" + "github.com/Wikid82/charon/backend/internal/services" +) + +type NPMImportHandler struct { + db *gorm.DB + proxyHostSvc *services.ProxyHostService +} + +func NewNPMImportHandler(db *gorm.DB) *NPMImportHandler { + return &NPMImportHandler{ + db: db, + proxyHostSvc: services.NewProxyHostService(db), + } +} + +func (h *NPMImportHandler) RegisterRoutes(router *gin.RouterGroup) { + router.POST("/import/npm/upload", h.Upload) + router.POST("/import/npm/commit", h.Commit) +} + +// Upload handles NPM export JSON upload and returns preview +func (h *NPMImportHandler) Upload(c *gin.Context) { + var req struct { + Content string `json:"content" binding:"required"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Parse NPM export JSON + var npmExport NPMExport + if err := json.Unmarshal([]byte(req.Content), &npmExport); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid NPM export JSON"}) + return + } + + // Convert to internal format + result := h.convertNPMToImportResult(npmExport) + + // Check for conflicts with existing hosts + existingHosts, _ := h.proxyHostSvc.List() + existingDomainsMap := make(map[string]models.ProxyHost) + for _, eh := range existingHosts { + existingDomainsMap[eh.DomainNames] = eh 
+ } + + conflictDetails := make(map[string]gin.H) + for _, ph := range result.Hosts { + if existing, found := existingDomainsMap[ph.DomainNames]; found { + result.Conflicts = append(result.Conflicts, ph.DomainNames) + conflictDetails[ph.DomainNames] = gin.H{ + "existing": gin.H{ + "forward_scheme": existing.ForwardScheme, + "forward_host": existing.ForwardHost, + "forward_port": existing.ForwardPort, + }, + "imported": gin.H{ + "forward_scheme": ph.ForwardScheme, + "forward_host": ph.ForwardHost, + "forward_port": ph.ForwardPort, + }, + } + } + } + + sid := uuid.NewString() + c.JSON(http.StatusOK, gin.H{ + "session": gin.H{"id": sid, "state": "transient", "source": "npm"}, + "conflict_details": conflictDetails, + "preview": result, + }) +} + +func (h *NPMImportHandler) convertNPMToImportResult(export NPMExport) *caddy.ImportResult { + result := &caddy.ImportResult{ + Hosts: []caddy.ParsedHost{}, + Conflicts: []string{}, + Errors: []string{}, + } + + for _, proxy := range export.ProxyHosts { + // Join domain names with comma for storage + domains := strings.Join(proxy.DomainNames, ", ") + + host := caddy.ParsedHost{ + DomainNames: domains, + ForwardScheme: proxy.ForwardScheme, + ForwardHost: proxy.ForwardHost, + ForwardPort: proxy.ForwardPort, + SSLForced: proxy.SSLForced, + WebsocketSupport: proxy.AllowWebsocket, + } + + if host.ForwardScheme == "" { + host.ForwardScheme = "http" + } + if host.ForwardPort == 0 { + host.ForwardPort = 80 + } + + result.Hosts = append(result.Hosts, host) + } + + return result +} +``` + +#### 1.3 Register NPM Import Routes + +**File**: `backend/internal/api/routes/routes.go` + +Add to the `Register` function: +```go +// NPM Import Handler +npmImportHandler := handlers.NewNPMImportHandler(db) +npmImportHandler.RegisterRoutes(api) +``` + +### Task 2: Implement JSON Import Backend Handler + +**File**: `backend/internal/api/handlers/json_import_handler.go` (NEW) + +The JSON import handler will accept a generic Charon export format: + 
+```go +package handlers + +// CharonExport represents a generic Charon configuration export +type CharonExport struct { + Version string `json:"version"` + ExportedAt string `json:"exported_at"` + ProxyHosts []CharonProxyHost `json:"proxy_hosts"` + AccessLists []CharonAccessList `json:"access_lists"` + DNSRecords []CharonDNSRecord `json:"dns_records"` +} + +type JSONImportHandler struct { + db *gorm.DB + proxyHostSvc *services.ProxyHostService +} + +func NewJSONImportHandler(db *gorm.DB) *JSONImportHandler { + return &JSONImportHandler{ + db: db, + proxyHostSvc: services.NewProxyHostService(db), + } +} + +func (h *JSONImportHandler) RegisterRoutes(router *gin.RouterGroup) { + router.POST("/import/json/upload", h.Upload) + router.POST("/import/json/commit", h.Commit) +} + +// Upload validates and previews JSON import +func (h *JSONImportHandler) Upload(c *gin.Context) { + var req struct { + Content string `json:"content" binding:"required"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Try to parse as Charon export format + var charonExport CharonExport + if err := json.Unmarshal([]byte(req.Content), &charonExport); err != nil { + // Fallback: try NPM format + var npmExport NPMExport + if err := json.Unmarshal([]byte(req.Content), &npmExport); err != nil { + c.JSON(http.StatusBadRequest, gin.H{ + "error": "Invalid JSON format. Expected Charon or NPM export format.", + }) + return + } + // Convert NPM to import result + // ... (similar to NPM handler) + } + + // Convert Charon export to import result + result := h.convertCharonToImportResult(charonExport) + // ... 
(conflict checking and response) +} +``` + +### Task 3: Implement Frontend Routes + +#### 3.1 Create ImportNPM Page + +**File**: `frontend/src/pages/ImportNPM.tsx` (NEW) + +```tsx +import { useState } from 'react' +import { useNavigate } from 'react-router-dom' +import { useTranslation } from 'react-i18next' +import { useNPMImport } from '../hooks/useNPMImport' +import ImportReviewTable from '../components/ImportReviewTable' +import ImportSuccessModal from '../components/dialogs/ImportSuccessModal' + +export default function ImportNPM() { + const { t } = useTranslation() + const navigate = useNavigate() + const { preview, loading, error, upload, commit, commitResult, clearCommitResult } = useNPMImport() + const [content, setContent] = useState('') + const [showReview, setShowReview] = useState(false) + const [showSuccessModal, setShowSuccessModal] = useState(false) + + const handleUpload = async () => { + if (!content.trim()) { + alert(t('importNPM.enterContent')) + return + } + + // Validate JSON + try { + JSON.parse(content) + } catch { + alert(t('importNPM.invalidJSON')) + return + } + + try { + await upload(content) + setShowReview(true) + } catch { + // Error handled by hook + } + } + + // ... (rest follows ImportCaddy pattern) + + return ( +
+

{t('importNPM.title')}

+ {/* Similar UI to ImportCaddy but for JSON input */} +
+ ) +} +``` + +#### 3.2 Create ImportJSON Page + +**File**: `frontend/src/pages/ImportJSON.tsx` (NEW) + +Similar structure to ImportNPM, but handles generic JSON/Charon export format. + +#### 3.3 Add Frontend Routes + +**File**: `frontend/src/App.tsx` + +Add to the Tasks routes section: +```tsx + + } /> + } /> + } /> + } /> + +``` + +#### 3.4 Add Navigation Items + +**File**: `frontend/src/components/Layout.tsx` + +Add to the import submenu: +```tsx +{ name: t('navigation.npm'), path: '/tasks/import/npm', icon: '📦' }, +{ name: t('navigation.json'), path: '/tasks/import/json', icon: '📄' }, +``` + +### Task 4: Fix SMTP Persistence Bug + +**File**: `backend/internal/services/mail_service.go` + +#### 4.1 Fix the Upsert Pattern + +Replace the `SaveSMTPConfig` method (lines ~117-144): + +```go +// SaveSMTPConfig saves SMTP settings to the database using proper upsert pattern. +func (s *MailService) SaveSMTPConfig(config *SMTPConfig) error { + settings := map[string]string{ + "smtp_host": config.Host, + "smtp_port": fmt.Sprintf("%d", config.Port), + "smtp_username": config.Username, + "smtp_password": config.Password, + "smtp_from_address": config.FromAddress, + "smtp_encryption": config.Encryption, + } + + // Use a transaction for atomic updates + return s.db.Transaction(func(tx *gorm.DB) error { + for key, value := range settings { + var existing models.Setting + result := tx.Where("key = ?", key).First(&existing) + + if result.Error == gorm.ErrRecordNotFound { + // Create new setting + setting := models.Setting{ + Key: key, + Value: value, + Type: "string", + Category: "smtp", + } + if err := tx.Create(&setting).Error; err != nil { + return fmt.Errorf("failed to create setting %s: %w", key, err) + } + } else if result.Error == nil { + // Update existing setting - use Save() instead of Updates() + existing.Value = value + existing.Category = "smtp" + if err := tx.Save(&existing).Error; err != nil { + return fmt.Errorf("failed to update setting %s: %w", key, err) + } + } 
else { + return fmt.Errorf("failed to query setting %s: %w", key, result.Error) + } + } + return nil + }) +} +``` + +**Key Changes**: +1. Wrapped in transaction for atomicity +2. Using `Save()` instead of `Updates()` for reliable updates +3. Proper error handling for all cases +4. Modifying the fetched struct directly before saving --- -## 3. Verification Checklist +## API Contracts -### 3.1 Pre-Upgrade Verification +### NPM Import Upload -- [ ] **Backup current state** - - Export current CrowdSec configuration - - Document current bouncer registrations - - Note current LAPI version from `/api/crowdsec/lapi/health` +**Endpoint**: `POST /api/v1/import/npm/upload` -- [ ] **Review dependency patches** - - Verify if `expr-lang@v1.17.7` patch is still needed (1.7.5 updates to 1.17.7) - - Check if `golang.org/x/crypto@v0.46.0` is still required - -### 3.2 Dockerfile Update Checklist - -- [ ] Update `CROWDSEC_VERSION=1.7.5` on line 213 -- [ ] Update `CROWDSEC_VERSION=1.7.5` on line 267 (fallback stage) -- [ ] Verify expr-lang patch compatibility (line 228-235) -- [ ] Test multi-arch build (amd64, arm64) - -### 3.3 Build Verification - -```bash -# Build CrowdSec builder stage -docker build --target crowdsec-builder -t charon-crowdsec-test:1.7.5 . - -# Verify binaries -docker run --rm charon-crowdsec-test:1.7.5 /crowdsec-out/cscli version -docker run --rm charon-crowdsec-test:1.7.5 /crowdsec-out/crowdsec -version - -# Full image build -docker build -t charon:1.7.5-test . +**Request**: +```json +{ + "content": "{\"proxy_hosts\": [...], \"access_lists\": [], \"certificates\": []}" +} ``` -### 3.4 Unit Test Verification - -Run all CrowdSec-related tests: - -```bash -# Core package tests -cd backend && go test -v -race ./internal/crowdsec/... - -# Handler tests -go test -v -race ./internal/api/handlers/... -run "Crowdsec" - -# Startup tests -go test -v -race ./internal/services/... 
-run "Crowdsec" +**Response** (200 OK): +```json +{ + "session": { + "id": "uuid-string", + "state": "transient", + "source": "npm" + }, + "preview": { + "hosts": [ + { + "domain_names": "test.example.com", + "forward_scheme": "http", + "forward_host": "192.168.1.100", + "forward_port": 80, + "ssl_forced": false, + "websocket_support": false + } + ], + "conflicts": [], + "errors": [] + }, + "conflict_details": {} +} ``` -**Test Files to Execute**: -| Test File | Purpose | -|-----------|---------| -| `hub_sync_test.go` | Hub fetching and preset application | -| `hub_cache_test.go` | Cache TTL and eviction | -| `registration_test.go` | Bouncer registration | -| `console_enroll_test.go` | Console enrollment | -| `presets_test.go` | Curated preset definitions | -| `crowdsec_handler_test.go` | Handler integration | -| `crowdsec_lapi_test.go` | LAPI communication | -| `crowdsec_decisions_test.go` | Decision handling | -| `crowdsec_startup_test.go` | Service startup | +### JSON Import Upload -### 3.5 Integration Test Verification +**Endpoint**: `POST /api/v1/import/json/upload` -```bash -# Run integration tests -cd backend && go test -v -tags=integration ./integration/... -run "Crowdsec" - -# Run via task -make integration-crowdsec +**Request**: +```json +{ + "content": "{\"version\": \"1.0\", \"proxy_hosts\": [...]}" +} ``` -### 3.6 E2E Test Verification +**Response**: Same structure as NPM import. -**Test Files**: -| Test File | Status | Purpose | -|-----------|--------|---------|| -| `tests/security/crowdsec-config.spec.ts` | Active | CrowdSec configuration UI tests | -| `tests/security/crowdsec-decisions.spec.ts` | Skipped | LAPI decisions tests (requires running CrowdSec) | +### Import Commit (shared) -> **Note**: `crowdsec-decisions.spec.ts` is currently skipped as it requires a running CrowdSec instance with LAPI enabled. These tests run in CI with full infrastructure. 
+**Endpoint**: `POST /api/v1/import/npm/commit` or `POST /api/v1/import/json/commit` -```bash -# Run Playwright E2E tests (via skill runner - recommended) -.github/skills/scripts/skill-runner.sh test-e2e-playwright - -# Or run specific test files -npx playwright test --project=chromium tests/security/crowdsec-config.spec.ts +**Request**: +```json +{ + "session_uuid": "uuid-string", + "resolutions": { + "example.com": "overwrite", + "test.com": "skip" + }, + "names": { + "example.com": "My Example Site" + } +} ``` -### 3.7 Functional Verification Matrix - -| Feature | Test Method | Expected Outcome | -|---------|-------------|------------------| -| **LAPI Health** | GET `/api/crowdsec/lapi/health` | Returns version "1.7.5" | -| **Start/Stop** | POST `/api/crowdsec/start`, `/stop` | Process starts/stops cleanly | -| **Status Check** | GET `/api/crowdsec/status` | Returns running state and PID | -| **Hub Index Fetch** | GET `/api/crowdsec/presets` | Returns preset list | -| **Preset Pull** | POST `/api/crowdsec/presets/base-http-scenarios/pull` | Downloads and caches preset | -| **Preset Apply** | POST `/api/crowdsec/presets/base-http-scenarios/apply` | Applies preset configuration | -| **Console Enroll** | POST `/api/crowdsec/console/enroll` | Sends enrollment request | -| **LAPI Decisions** | GET `/api/crowdsec/lapi/decisions` | Returns decision list | -| **Bouncer Registration** | Automatic on start | API key retrieved/generated | - -### 3.8 Dependency Patch Verification - -CrowdSec 1.7.5 includes `expr-lang/expr@v1.17.7` natively. Test whether the Dockerfile patch can be removed. - -**Verification Steps**: - -1. [ ] **Test WITHOUT expr-lang patch**: - ```bash - # Temporarily comment out expr-lang patch in Dockerfile (lines 225-229) - # Build and run tests - docker build --target crowdsec-builder -t charon-crowdsec-no-patch:test . - docker run --rm charon-crowdsec-no-patch:test /crowdsec-out/cscli version - ``` - -2. 
[ ] **If build succeeds without patch**: - - Remove `go get github.com/expr-lang/expr@v1.17.7` line - - Remove the `sed` fix for `program.Source().String()` if not needed - - Keep `golang.org/x/crypto@v0.46.0` patch for security - -3. [ ] **If build fails without patch**: - - Retain the patch with updated comment noting it's still required - - Document the specific error for future reference - -4. [ ] **Validation**: - - Run full test suite after patch removal - - Verify no regression in CrowdSec functionality - ---- - -## 4. Test Scenarios - -### 4.1 Upgrade Smoke Test - -``` -WHEN the Docker image is built with CROWDSEC_VERSION=1.7.5 -THEN the cscli version command reports v1.7.5 -AND the crowdsec binary starts successfully -AND the LAPI health endpoint responds -``` - -### 4.2 Hub Sync Compatibility - -``` -WHEN hub index is fetched after upgrade -THEN the index format is parsed correctly -AND preset pull operations complete successfully -AND preset apply operations complete without errors -``` - -### 4.3 Console Enrollment Stability - -``` -WHEN console enrollment is attempted after upgrade -THEN LAPI availability check succeeds -AND CAPI registration works if needed -AND enrollment request is sent successfully -``` - -### 4.4 Decision API Compatibility - -``` -WHEN LAPI decisions are queried after upgrade -THEN the response format is unchanged -AND decisions are correctly parsed -AND filtering by scope/type works -``` - -### 4.5 Bouncer Registration - -``` -WHEN a new bouncer is registered after upgrade -THEN cscli bouncers add command succeeds -AND the bouncer appears in cscli bouncers list -AND the API key is correctly returned +**Response**: +```json +{ + "created": 5, + "updated": 2, + "skipped": 1, + "errors": [] +} ``` --- -## 5. 
Rollback Plan +## Files to Create/Modify -### 5.1 Quick Rollback +### New Files -If issues are encountered after upgrade: +| File | Purpose | +|------|---------| +| `backend/internal/api/handlers/npm_import_handler.go` | NPM import handler | +| `backend/internal/api/handlers/npm_import_handler_test.go` | Unit tests | +| `backend/internal/api/handlers/json_import_handler.go` | JSON import handler | +| `backend/internal/api/handlers/json_import_handler_test.go` | Unit tests | +| `frontend/src/pages/ImportNPM.tsx` | NPM import page | +| `frontend/src/pages/ImportJSON.tsx` | JSON import page | +| `frontend/src/hooks/useNPMImport.ts` | NPM import hook | +| `frontend/src/hooks/useJSONImport.ts` | JSON import hook | +| `frontend/src/api/npmImport.ts` | NPM import API client | +| `frontend/src/api/jsonImport.ts` | JSON import API client | -1. **Revert Dockerfile**: - ```bash - git checkout HEAD~1 -- Dockerfile - ``` +### Modified Files -2. **Rebuild with previous version**: - ```bash - docker build --build-arg CROWDSEC_VERSION=1.7.4 -t charon:rollback . - ``` - -3. **Redeploy**: - ```bash - docker-compose -f .docker/compose/docker-compose.yml down - docker-compose -f .docker/compose/docker-compose.yml up -d - ``` - -### 5.2 Data Preservation - -The `crowdsec_data` volume contains: -- Configuration files -- Acquired scenarios and parsers -- Decision database -- Bouncer registrations - -This volume persists across container recreations, ensuring data is preserved during rollback. - ---- - -## 6. 
Files Requiring Updates - -### 6.1 Must Update - -| File | Line(s) | Change | -|------|---------|--------| -| `Dockerfile` | 213 | `CROWDSEC_VERSION=1.7.4` → `1.7.5` | -| `Dockerfile` | 267 | `CROWDSEC_VERSION=1.7.4` → `1.7.5` | - -### 6.2 May Require Review - -| File | Reason | +| File | Change | |------|--------| -| `Dockerfile` (lines 228-235) | Verify expr-lang patch still needed | -| `docs/plans/crowdsec_source_build.md` | Update version reference | -| `docs/implementation/QUICK_FIX_SUPPLY_CHAIN.md` | Update version reference | - -### 6.3 No Changes Required (Verified Compatible) - -| File | Reason | -|------|--------| -| `backend/internal/crowdsec/*.go` | No API changes in 1.7.5 | -| `backend/internal/api/handlers/crowdsec_handler.go` | No API changes | -| `.docker/compose/*.yml` | Volume/env unchanged | +| `backend/internal/api/routes/routes.go` | Register new import handlers | +| `backend/internal/services/mail_service.go` | Fix SMTP upsert pattern (lines ~117-144) | +| `frontend/src/App.tsx` | Add new routes (around line 113) | +| `frontend/src/components/Layout.tsx` | Add navigation items (around line 102) | +| `frontend/src/locales/en/translation.json` | Add i18n keys | --- -## 7. 
Dependency Analysis +## Tests to Re-enable -### 7.1 Current Dockerfile Patches +After implementation, update these tests by removing `test.skip`: -```dockerfile -# Dockerfile lines 225-229 -RUN go get github.com/expr-lang/expr@v1.17.7 && \ - go get golang.org/x/crypto@v0.46.0 && \ - go mod tidy +### import-to-production.spec.ts + +| Line | Test Name | Condition | +|------|-----------|-----------| +| 172 | `should display NPM import page` | NPM route exists | +| 188 | `should parse NPM export JSON` | NPM route exists | +| 204 | `should preview NPM import results` | NPM route exists | +| 220 | `should import NPM proxy hosts and access lists` | NPM route exists | +| 246 | `should display JSON import page` | JSON route exists | +| 262 | `should validate JSON schema before import` | JSON route exists | + +### smtp-settings.spec.ts + +| Line | Test Name | Condition | +|------|-----------|-----------| +| 336 | `should update existing SMTP configuration` | SMTP persistence fixed | + +--- + +## Verification Steps + +### Backend Verification + +1. **Unit Tests**: + ```bash + go test ./backend/internal/api/handlers/... -run "NPM|JSON" -v + go test ./backend/internal/services/... -run "SMTP" -v + ``` + +2. **API Integration Tests**: + ```bash + # NPM Import + curl -X POST http://localhost:8080/api/v1/import/npm/upload \ + -H "Content-Type: application/json" \ + -H "Cookie: " \ + -d '{"content": "{\"proxy_hosts\": [{\"domain_names\": [\"test.com\"], \"forward_host\": \"localhost\", \"forward_port\": 80}]}"}' + + # SMTP Persistence + curl -X POST http://localhost:8080/api/v1/settings/smtp \ + -H "Content-Type: application/json" \ + -H "Cookie: " \ + -d '{"host": "smtp.test.local", "port": 587, "from_address": "test@test.local", "encryption": "starttls"}' + + curl http://localhost:8080/api/v1/settings/smtp -H "Cookie: " + # Should return saved values + ``` + +### Frontend Verification + +1. Navigate to `/tasks/import/npm` - page should load +2. 
Navigate to `/tasks/import/json` - page should load +3. Paste valid NPM export JSON - should show preview +4. Save SMTP settings, reload page - values should persist + +### E2E Verification + +```bash +# Run the import tests +npx playwright test tests/integration/import-to-production.spec.ts --project=chromium + +# Run SMTP test +npx playwright test tests/settings/smtp-settings.spec.ts -g "should update existing SMTP configuration" --project=chromium ``` -**1.7.5 Status**: -- CrowdSec 1.7.5 already includes `expr@v1.17.7` ([#4150](https://github.com/crowdsecurity/crowdsec/pull/4150)) -- The `expr-lang` patch **may be removable** - verify during testing -- The `golang.org/x/crypto` patch should remain for security +--- -### 7.2 Compatibility Fix +## Implementation Checklist -```dockerfile -# Dockerfile lines 232-235 -RUN sed -i 's/string(program\.Source())/program.Source().String()/g' pkg/exprhelpers/debugger.go -``` +### Phase 3.1: NPM Import (Backend) +- [x] Create `NPMExport` and related structs +- [x] Create `npm_import_handler.go` with Upload and Commit handlers +- [x] Create `npm_import_handler_test.go` with unit tests +- [x] Register routes in `routes.go` +- [x] Test API endpoints manually -**Verification Needed**: Check if this fix is still required in 1.7.5 +### Phase 3.2: JSON Import (Backend) +- [x] Create `CharonExport` struct +- [x] Create `json_import_handler.go` with Upload and Commit handlers +- [x] Create `json_import_handler_test.go` with unit tests +- [x] Register routes in `routes.go` +- [x] Test API endpoints manually + +### Phase 3.3: SMTP Persistence Fix +- [x] Update `SaveSMTPConfig` in `mail_service.go` +- [x] Add transaction-based upsert pattern +- [x] Update `mail_service_test.go` with persistence test +- [x] Verify fix with manual testing + +### Phase 3.4: Frontend Routes +- [x] Create `ImportNPM.tsx` page component +- [x] Create `ImportJSON.tsx` page component +- [x] Create `useNPMImport.ts` hook +- [x] Create `useJSONImport.ts` hook +- 
[x] Create API client files +- [x] Add routes to `App.tsx` +- [x] Add navigation items to `Layout.tsx` +- [x] Add i18n translation keys + +### Phase 3.5: Test Re-enablement +- [x] Remove `test.skip` from NPM import tests (4 tests) +- [x] Remove `test.skip` from JSON import tests (2 tests) +- [x] Remove `test.skip` from SMTP persistence test (1 test) +- [x] Run full E2E test suite + +### Phase 3.6: Verification +- [x] All new tests pass (7 tests enabled and passing) +- [x] No regressions in existing tests +- [x] Update `skipped-tests-remediation.md` with Phase 3 completion --- -## 8. Implementation Steps - -### Phase 1: Preparation - -1. [ ] Create feature branch: `feature/crowdsec-1.7.5-upgrade` -2. [ ] Run current test suite to establish baseline -3. [ ] Document current LAPI version and status - -### Phase 2: Update - -4. [ ] Update Dockerfile with version 1.7.5 -5. [ ] Test build locally (amd64) -6. [ ] Test build for arm64 (if available) -7. [ ] Verify binaries report correct version - -### Phase 3: Verification - -8. [ ] Run E2E tests first: `.github/skills/scripts/skill-runner.sh test-e2e-playwright` -9. [ ] Run integration tests: `make integration-crowdsec` -10. [ ] Run unit tests: `make test-backend` -11. [ ] Run coverage verification: `make test-backend-coverage` -12. [ ] Manual API verification using functional matrix - -### Phase 4: Documentation - -12. [ ] Update version references in documentation -13. [ ] Update CHANGELOG.md -14. [ ] Create PR with test results - -### Phase 5: Deployment - -15. [ ] Merge to main -16. [ ] Monitor CI/CD pipeline -17. [ ] Verify production deployment -18. [ ] Monitor logs for any CrowdSec errors - ---- - -## 9. 
Acceptance Criteria - -The upgrade is considered successful when: - -- [ ] All unit tests pass -- [ ] All integration tests pass -- [ ] All E2E tests pass -- [ ] LAPI reports version 1.7.5 -- [ ] Start/Stop operations work correctly -- [ ] Hub preset operations complete successfully -- [ ] Console enrollment works (if applicable) -- [ ] No new errors in application logs -- [ ] Docker image builds successfully for amd64 and arm64 -- [ ] Coverage report generated -- [ ] Coverage threshold ≥85% maintained -- [ ] Patch coverage 100% for Dockerfile modifications -- [ ] No new CodeQL alerts introduced - ---- - -## 10. Risk Assessment +## Risk Assessment | Risk | Likelihood | Impact | Mitigation | |------|------------|--------|------------| -| Build failure | Low | Medium | Fallback stage in Dockerfile | -| API incompatibility | Very Low | High | Comprehensive test coverage | -| Performance regression | Low | Medium | Monitor via observability | -| expr-lang patch conflict | Low | Low | Test without patch first | -| Skipped E2E tests miss regression | Medium | Medium | Integration tests cover LAPI; CI runs full suite | - -**Overall Risk Level**: **LOW** - -The 1.7.5 release is a maintenance update with no breaking changes. The comprehensive test coverage in Charon provides high confidence in upgrade success. +| NPM export format varies by version | Medium | Medium | Support multiple format versions, validate required fields | +| SMTP fix causes other issues | Low | High | Transaction-based approach is safer, comprehensive tests | +| Frontend state management complexity | Low | Low | Follow existing ImportCaddy pattern exactly | --- -## Appendix A: Quick Reference Commands +## Dependencies -```bash -# Build and test -make docker-build -make test-backend -make test-crowdsec - -# Run specific test files -cd backend && go test -v ./internal/crowdsec/... -run TestHub -cd backend && go test -v ./internal/crowdsec/... 
-run TestConsole -cd backend && go test -v ./internal/crowdsec/... -run TestRegistration - -# Integration tests -.github/skills/scripts/skill-runner.sh integration-crowdsec - -# E2E tests -npx playwright test --project=chromium - -# Check CrowdSec version in container -docker exec charon cscli version -docker exec charon curl -s http://127.0.0.1:8085/health - -# LAPI health check -curl http://localhost:8080/api/crowdsec/lapi/health -``` +- Phase 2 completion (TestDataManager auth fix) - **COMPLETE** +- Existing Caddyfile import infrastructure - **AVAILABLE** +- Frontend React component patterns - **AVAILABLE** --- -## Appendix B: Related Documentation +## Summary for Delegation -- [CrowdSec 1.7.5 Release Notes](https://github.com/crowdsecurity/crowdsec/releases/tag/v1.7.5) -- [CrowdSec Source Build Plan](crowdsec_source_build.md) -- [Supply Chain Remediation](../implementation/SUPPLY_CHAIN_REMEDIATION_PLAN.md) -- [Charon Security Documentation](../../SECURITY.md) +### For Backend_Dev Agent: + +**Task 1: NPM Import Handler** +- Create file: `backend/internal/api/handlers/npm_import_handler.go` +- Implement structs: `NPMExport`, `NPMProxyHost`, `NPMAccessList`, `NPMCertificate` +- Implement handlers: `Upload()`, `Commit()` +- Register in: `backend/internal/api/routes/routes.go` + +**Task 2: JSON Import Handler** +- Create file: `backend/internal/api/handlers/json_import_handler.go` +- Implement structs: `CharonExport`, `CharonProxyHost` +- Implement handlers: `Upload()`, `Commit()` (with NPM format fallback) +- Register in: `backend/internal/api/routes/routes.go` + +**Task 3: SMTP Fix** +- File: `backend/internal/services/mail_service.go` +- Function: `SaveSMTPConfig()` (lines ~117-144) +- Fix: Wrap in transaction, use `Save()` instead of `Updates()` + +### For Frontend_Dev Agent: + +**Task 4: Frontend Routes** +- Create: `frontend/src/pages/ImportNPM.tsx` +- Create: `frontend/src/pages/ImportJSON.tsx` +- Create: `frontend/src/hooks/useNPMImport.ts` +- Create: 
`frontend/src/hooks/useJSONImport.ts` +- Create: `frontend/src/api/npmImport.ts` +- Create: `frontend/src/api/jsonImport.ts` +- Modify: `frontend/src/App.tsx` (add routes) +- Modify: `frontend/src/components/Layout.tsx` (add nav items) +- Modify: `frontend/src/locales/en/translation.json` (add i18n) + +--- + +## Change Log + +| Date | Author | Change | +|------|--------|--------| +| 2026-01-22 | Planning Agent (Architect) | Initial Phase 3 plan created | +| 2026-01-22 | Implementation Team | Phase 3 implementation complete - NPM/JSON import routes, SMTP fix, 7 tests enabled | diff --git a/docs/plans/skipped-tests-remediation.md b/docs/plans/skipped-tests-remediation.md index 3bb11992..0f8c2787 100644 --- a/docs/plans/skipped-tests-remediation.md +++ b/docs/plans/skipped-tests-remediation.md @@ -1,8 +1,8 @@ # Skipped Playwright Tests Remediation Plan -> **Status**: Active +> **Status**: Active (Phase 3 Complete) > **Created**: 2024 -> **Total Skipped Tests**: 98 +> **Total Skipped Tests**: 91 (was 98, reduced by 7 in Phase 3) > **Target**: Reduce to <10 intentional skips ## Executive Summary @@ -15,12 +15,14 @@ This plan addresses 98 skipped Playwright E2E tests discovered through comprehen |----------|-------|--------|----------| | Environment-Dependent (Cerberus) | 35 | S | P0 | | Feature Not Implemented | 25 | L | P1 | -| Route/API Not Implemented | 12 | M | P1 | -| UI Mismatch/Test ID Issues | 10 | S | P2 | +| Route/API Not Implemented | 6 | M | P1 | +| UI Mismatch/Test ID Issues | 9 | S | P2 | | TestDataManager Auth Issues | 8 | M | P1 | | Flaky/Timing Issues | 5 | S | P2 | | Intentional Skips | 3 | - | - | +> **Note**: Phase 3 completed - NPM/JSON import routes implemented (6→0), SMTP fix (1 test), reducing total from 98 to 91. 
+ --- ## Category 1: Environment-Dependent Tests (Cerberus Disabled) @@ -339,13 +341,14 @@ These tests are intentionally skipped with documented reasons: ### Phase 3: Backend Routes (Week 3-4) **Target**: Implement missing API routes +**Status**: ✅ COMPLETE (2026-01-22) -1. Implement NPM import route -2. Implement JSON import route -3. Review SMTP persistence issue -4. Re-enable import tests (+6 tests) +1. ✅ Implemented NPM import route (`POST /api/v1/import/npm/upload`, `commit`, `cancel`) +2. ✅ Implemented JSON import route (`POST /api/v1/import/json/upload`, `commit`, `cancel`) +3. ✅ Fixed SMTP persistence bug (settings now persist correctly after save) +4. ✅ Re-enabled import tests (+7 tests now passing) -**Estimated Work**: 16-24 hours +**Actual Work**: ~20 hours ### Phase 4: UI Components (Week 5-8) **Target**: Implement missing frontend components @@ -462,3 +465,4 @@ grep -rn "test\.skip\|test\.fixme" tests/ --include="*.spec.ts" > skip-report.tx | Date | Author | Change | |------|--------|--------| | 2024-XX-XX | AI Analysis | Initial plan created | +| 2026-01-22 | Implementation Team | Phase 3 complete - NPM/JSON import routes implemented, SMTP persistence fixed, 7 tests re-enabled | diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index a12a7c80..ddcc4dbd 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -16,6 +16,8 @@ const RemoteServers = lazy(() => import('./pages/RemoteServers')) const DNS = lazy(() => import('./pages/DNS')) const ImportCaddy = lazy(() => import('./pages/ImportCaddy')) const ImportCrowdSec = lazy(() => import('./pages/ImportCrowdSec')) +const ImportNPM = lazy(() => import('./pages/ImportNPM')) +const ImportJSON = lazy(() => import('./pages/ImportJSON')) const Certificates = lazy(() => import('./pages/Certificates')) const DNSProviders = lazy(() => import('./pages/DNSProviders')) const SystemSettings = lazy(() => import('./pages/SystemSettings')) @@ -109,6 +111,8 @@ export default function App() { } /> } /> + } /> 
+ } /> diff --git a/frontend/src/api/jsonImport.ts b/frontend/src/api/jsonImport.ts new file mode 100644 index 00000000..db7713d9 --- /dev/null +++ b/frontend/src/api/jsonImport.ts @@ -0,0 +1,90 @@ +import client from './client'; + +/** Represents a host parsed from a JSON export. */ +export interface JSONHost { + domain_names: string; + forward_scheme: string; + forward_host: string; + forward_port: number; + ssl_forced: boolean; + websocket_support: boolean; +} + +/** Preview of a JSON import with hosts and conflicts. */ +export interface JSONImportPreview { + session: { + id: string; + state: string; + source: string; + }; + preview: { + hosts: JSONHost[]; + conflicts: string[]; + errors: string[]; + }; + conflict_details: Record; +} + +/** Result of committing a JSON import operation. */ +export interface JSONImportCommitResult { + created: number; + updated: number; + skipped: number; + errors: string[]; +} + +/** + * Uploads JSON export content for import preview. + * @param content - The JSON export content as a string + * @returns Promise resolving to JSONImportPreview with parsed hosts + * @throws {AxiosError} If parsing fails or content is invalid + */ +export const uploadJSONExport = async (content: string): Promise => { + const { data } = await client.post('/import/json/upload', { content }); + return data; +}; + +/** + * Commits the JSON import, creating/updating proxy hosts. 
+ * @param sessionUuid - The import session UUID + * @param resolutions - Map of conflict resolutions (domain -> 'keep'|'replace'|'skip') + * @param names - Map of custom names for imported hosts + * @returns Promise resolving to JSONImportCommitResult with counts + * @throws {AxiosError} If commit fails + */ +export const commitJSONImport = async ( + sessionUuid: string, + resolutions: Record, + names: Record +): Promise => { + const { data } = await client.post('/import/json/commit', { + session_uuid: sessionUuid, + resolutions, + names, + }); + return data; +}; + +/** + * Cancels the current JSON import session. + * @throws {AxiosError} If cancellation fails + */ +export const cancelJSONImport = async (): Promise => { + await client.post('/import/json/cancel'); +}; diff --git a/frontend/src/api/npmImport.ts b/frontend/src/api/npmImport.ts new file mode 100644 index 00000000..5ccdadc2 --- /dev/null +++ b/frontend/src/api/npmImport.ts @@ -0,0 +1,90 @@ +import client from './client'; + +/** Represents a host parsed from an NPM export. */ +export interface NPMHost { + domain_names: string; + forward_scheme: string; + forward_host: string; + forward_port: number; + ssl_forced: boolean; + websocket_support: boolean; +} + +/** Preview of an NPM import with hosts and conflicts. */ +export interface NPMImportPreview { + session: { + id: string; + state: string; + source: string; + }; + preview: { + hosts: NPMHost[]; + conflicts: string[]; + errors: string[]; + }; + conflict_details: Record; +} + +/** Result of committing an NPM import operation. */ +export interface NPMImportCommitResult { + created: number; + updated: number; + skipped: number; + errors: string[]; +} + +/** + * Uploads NPM export content for import preview. 
+ * @param content - The NPM export JSON content as a string + * @returns Promise resolving to NPMImportPreview with parsed hosts + * @throws {AxiosError} If parsing fails or content is invalid + */ +export const uploadNPMExport = async (content: string): Promise => { + const { data } = await client.post('/import/npm/upload', { content }); + return data; +}; + +/** + * Commits the NPM import, creating/updating proxy hosts. + * @param sessionUuid - The import session UUID + * @param resolutions - Map of conflict resolutions (domain -> 'keep'|'replace'|'skip') + * @param names - Map of custom names for imported hosts + * @returns Promise resolving to NPMImportCommitResult with counts + * @throws {AxiosError} If commit fails + */ +export const commitNPMImport = async ( + sessionUuid: string, + resolutions: Record, + names: Record +): Promise => { + const { data } = await client.post('/import/npm/commit', { + session_uuid: sessionUuid, + resolutions, + names, + }); + return data; +}; + +/** + * Cancels the current NPM import session. 
+ * @throws {AxiosError} If cancellation fails + */ +export const cancelNPMImport = async (): Promise => { + await client.post('/import/npm/cancel'); +}; diff --git a/frontend/src/components/Layout.tsx b/frontend/src/components/Layout.tsx index 25c96547..6eca8542 100644 --- a/frontend/src/components/Layout.tsx +++ b/frontend/src/components/Layout.tsx @@ -100,6 +100,8 @@ export default function Layout({ children }: LayoutProps) { children: [ { name: t('navigation.caddyfile'), path: '/tasks/import/caddyfile', icon: '📥' }, { name: t('navigation.crowdsec'), path: '/tasks/import/crowdsec', icon: '🛡️' }, + { name: t('navigation.importNPM'), path: '/tasks/import/npm', icon: '📦' }, + { name: t('navigation.importJSON'), path: '/tasks/import/json', icon: '📄' }, ] }, { name: t('navigation.backups'), path: '/tasks/backups', icon: '💾' }, diff --git a/frontend/src/hooks/useJSONImport.ts b/frontend/src/hooks/useJSONImport.ts new file mode 100644 index 00000000..96287b9b --- /dev/null +++ b/frontend/src/hooks/useJSONImport.ts @@ -0,0 +1,84 @@ +import { useState } from 'react'; +import { useMutation, useQueryClient } from '@tanstack/react-query'; +import { + uploadJSONExport, + commitJSONImport, + cancelJSONImport, + JSONImportPreview, + JSONImportCommitResult, +} from '../api/jsonImport'; + +/** + * Hook for managing JSON import workflow. + * Provides upload, commit, and cancel functionality with state management. 
+ */ +export function useJSONImport() { + const queryClient = useQueryClient(); + const [preview, setPreview] = useState(null); + const [sessionId, setSessionId] = useState(null); + const [commitResult, setCommitResult] = useState(null); + + const uploadMutation = useMutation({ + mutationFn: uploadJSONExport, + onSuccess: (data) => { + setPreview(data); + setSessionId(data.session.id); + }, + }); + + const commitMutation = useMutation({ + mutationFn: ({ + resolutions, + names, + }: { + resolutions: Record; + names: Record; + }) => { + if (!sessionId) throw new Error('No active session'); + return commitJSONImport(sessionId, resolutions, names); + }, + onSuccess: (data) => { + setCommitResult(data); + setPreview(null); + setSessionId(null); + queryClient.invalidateQueries({ queryKey: ['proxy-hosts'] }); + }, + }); + + const cancelMutation = useMutation({ + mutationFn: cancelJSONImport, + onSuccess: () => { + setPreview(null); + setSessionId(null); + }, + }); + + const clearCommitResult = () => { + setCommitResult(null); + }; + + const reset = () => { + setPreview(null); + setSessionId(null); + setCommitResult(null); + }; + + return { + preview, + sessionId, + loading: uploadMutation.isPending, + error: uploadMutation.error, + upload: uploadMutation.mutateAsync, + commit: (resolutions: Record, names: Record) => + commitMutation.mutateAsync({ resolutions, names }), + committing: commitMutation.isPending, + commitError: commitMutation.error, + commitResult, + clearCommitResult, + cancel: cancelMutation.mutateAsync, + cancelling: cancelMutation.isPending, + reset, + }; +} + +export type { JSONImportPreview, JSONImportCommitResult }; diff --git a/frontend/src/hooks/useNPMImport.ts b/frontend/src/hooks/useNPMImport.ts new file mode 100644 index 00000000..dc9211a8 --- /dev/null +++ b/frontend/src/hooks/useNPMImport.ts @@ -0,0 +1,84 @@ +import { useState } from 'react'; +import { useMutation, useQueryClient } from '@tanstack/react-query'; +import { + uploadNPMExport, + 
commitNPMImport, + cancelNPMImport, + NPMImportPreview, + NPMImportCommitResult, +} from '../api/npmImport'; + +/** + * Hook for managing NPM import workflow. + * Provides upload, commit, and cancel functionality with state management. + */ +export function useNPMImport() { + const queryClient = useQueryClient(); + const [preview, setPreview] = useState(null); + const [sessionId, setSessionId] = useState(null); + const [commitResult, setCommitResult] = useState(null); + + const uploadMutation = useMutation({ + mutationFn: uploadNPMExport, + onSuccess: (data) => { + setPreview(data); + setSessionId(data.session.id); + }, + }); + + const commitMutation = useMutation({ + mutationFn: ({ + resolutions, + names, + }: { + resolutions: Record; + names: Record; + }) => { + if (!sessionId) throw new Error('No active session'); + return commitNPMImport(sessionId, resolutions, names); + }, + onSuccess: (data) => { + setCommitResult(data); + setPreview(null); + setSessionId(null); + queryClient.invalidateQueries({ queryKey: ['proxy-hosts'] }); + }, + }); + + const cancelMutation = useMutation({ + mutationFn: cancelNPMImport, + onSuccess: () => { + setPreview(null); + setSessionId(null); + }, + }); + + const clearCommitResult = () => { + setCommitResult(null); + }; + + const reset = () => { + setPreview(null); + setSessionId(null); + setCommitResult(null); + }; + + return { + preview, + sessionId, + loading: uploadMutation.isPending, + error: uploadMutation.error, + upload: uploadMutation.mutateAsync, + commit: (resolutions: Record, names: Record) => + commitMutation.mutateAsync({ resolutions, names }), + committing: commitMutation.isPending, + commitError: commitMutation.error, + commitResult, + clearCommitResult, + cancel: cancelMutation.mutateAsync, + cancelling: cancelMutation.isPending, + reset, + }; +} + +export type { NPMImportPreview, NPMImportCommitResult }; diff --git a/frontend/src/locales/en/translation.json b/frontend/src/locales/en/translation.json index 
c802f270..903370ee 100644 --- a/frontend/src/locales/en/translation.json +++ b/frontend/src/locales/en/translation.json @@ -69,6 +69,8 @@ "accountManagement": "Account Management", "import": "Import", "caddyfile": "Caddyfile", + "importNPM": "Import NPM", + "importJSON": "Import JSON", "backups": "Backups", "logs": "Logs", "securityHeaders": "Security Headers", @@ -761,6 +763,38 @@ "creatingBackup": "Creating backup...", "importing": "Importing CrowdSec..." }, + "importNPM": { + "title": "Import from NPM", + "description": "Import proxy hosts from Nginx Proxy Manager export", + "enterContent": "Please paste NPM export JSON", + "invalidJSON": "Invalid JSON format", + "upload": "Upload & Preview", + "import": "Import", + "success": "Import completed successfully", + "previewTitle": "Preview Import", + "conflict": "Conflict", + "new": "New", + "skip": "Skip", + "keep": "Keep Existing", + "replace": "Replace", + "cancelConfirm": "Are you sure you want to cancel this import?" + }, + "importJSON": { + "title": "Import from JSON", + "description": "Import configuration from JSON export", + "enterContent": "Please paste JSON configuration", + "invalidJSON": "Invalid JSON format", + "upload": "Upload & Preview", + "import": "Import", + "success": "Import completed successfully", + "previewTitle": "Preview Import", + "conflict": "Conflict", + "new": "New", + "skip": "Skip", + "keep": "Keep Existing", + "replace": "Replace", + "cancelConfirm": "Are you sure you want to cancel this import?" 
+ }, "systemSettings": { "title": "System Settings", "settingsSaved": "System settings saved", diff --git a/frontend/src/pages/ImportJSON.tsx b/frontend/src/pages/ImportJSON.tsx new file mode 100644 index 00000000..94987319 --- /dev/null +++ b/frontend/src/pages/ImportJSON.tsx @@ -0,0 +1,312 @@ +import { useState } from 'react' +import { useNavigate } from 'react-router-dom' +import { useTranslation } from 'react-i18next' +import { createBackup } from '../api/backups' +import { useJSONImport } from '../hooks/useJSONImport' +import ImportSuccessModal from '../components/dialogs/ImportSuccessModal' + +export default function ImportJSON() { + const { t } = useTranslation() + const navigate = useNavigate() + const { + preview, + loading, + error, + upload, + commit, + committing, + commitResult, + clearCommitResult, + cancel, + reset, + } = useJSONImport() + const [content, setContent] = useState('') + const [showReview, setShowReview] = useState(false) + const [showSuccessModal, setShowSuccessModal] = useState(false) + const [resolutions, setResolutions] = useState>({}) + const [names] = useState>({}) + + const handleUpload = async () => { + if (!content.trim()) { + return + } + + try { + JSON.parse(content) + } catch { + alert(t('importJSON.invalidJSON')) + return + } + + try { + await upload(content) + setShowReview(true) + } catch { + // Error is handled by hook + } + } + + const handleFileUpload = async (e: React.ChangeEvent) => { + const file = e.target.files?.[0] + if (!file) return + + const text = await file.text() + setContent(text) + } + + const handleCommit = async () => { + try { + await createBackup() + await commit(resolutions, names) + setContent('') + setShowReview(false) + setShowSuccessModal(true) + } catch { + // Error is handled by hook + } + } + + const handleCloseSuccessModal = () => { + setShowSuccessModal(false) + clearCommitResult() + } + + const handleCancel = async () => { + if (confirm(t('importJSON.cancelConfirm'))) { + try { + await 
cancel() + setShowReview(false) + reset() + } catch { + // Error is handled by hook + } + } + } + + const handleResolutionChange = (domain: string, resolution: string) => { + setResolutions((prev) => ({ ...prev, [domain]: resolution })) + } + + return ( +
+

{t('importJSON.title')}

+ + {error && ( +
+ {error.message} +
+ )} + + {!showReview && ( +
+
+

+ {t('importJSON.title')} +

+

{t('importJSON.description')}

+
+ +
+
+ + +
+ +
+
+ + {t('importCaddy.orPasteContent')} + +
+
+ +
+ +