diff --git a/README.md b/README.md index 5471cecc..e073a3ef 100644 --- a/README.md +++ b/README.md @@ -95,7 +95,12 @@ See exactly what's happening with live request logs, uptime monitoring, and inst ### 📥 **Migration Made Easy** -Import your existing Caddy configurations with one click. Already invested in another reverse proxy? Bring your work with you. +Import your existing configurations with one click: +- **Caddyfile Import** — Migrate from other Caddy setups +- **NPM Import** — Import from Nginx Proxy Manager exports +- **JSON Import** — Restore from Charon backups or generic JSON configs + +Already invested in another reverse proxy? Bring your work with you. ### ⚡ **Live Configuration Changes** diff --git a/backend/internal/api/handlers/json_import_handler.go b/backend/internal/api/handlers/json_import_handler.go new file mode 100644 index 00000000..9c549680 --- /dev/null +++ b/backend/internal/api/handlers/json_import_handler.go @@ -0,0 +1,516 @@ +package handlers + +import ( + "encoding/json" + "fmt" + "net/http" + "strings" + "sync" + "time" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "gorm.io/gorm" + + "github.com/Wikid82/charon/backend/internal/caddy" + "github.com/Wikid82/charon/backend/internal/models" + "github.com/Wikid82/charon/backend/internal/services" +) + +// jsonImportSession stores the parsed content for a JSON import session. +type jsonImportSession struct { + SourceType string // "charon" or "npm" + CharonExport *CharonExport + NPMExport *NPMExport +} + +// jsonImportSessions stores parsed exports keyed by session UUID. +// TODO: Implement session expiration to prevent memory leaks (e.g., TTL-based cleanup). +var ( + jsonImportSessions = make(map[string]jsonImportSession) + jsonImportSessionsMu sync.RWMutex +) + +// CharonExport represents the top-level structure of a Charon export file. 
+type CharonExport struct { + Version string `json:"version"` + ExportedAt time.Time `json:"exported_at"` + ProxyHosts []CharonProxyHost `json:"proxy_hosts"` + AccessLists []CharonAccessList `json:"access_lists"` + DNSRecords []CharonDNSRecord `json:"dns_records"` +} + +// CharonProxyHost represents a proxy host in Charon export format. +type CharonProxyHost struct { + UUID string `json:"uuid"` + Name string `json:"name"` + DomainNames string `json:"domain_names"` + ForwardScheme string `json:"forward_scheme"` + ForwardHost string `json:"forward_host"` + ForwardPort int `json:"forward_port"` + SSLForced bool `json:"ssl_forced"` + HTTP2Support bool `json:"http2_support"` + HSTSEnabled bool `json:"hsts_enabled"` + HSTSSubdomains bool `json:"hsts_subdomains"` + BlockExploits bool `json:"block_exploits"` + WebsocketSupport bool `json:"websocket_support"` + Application string `json:"application"` + Enabled bool `json:"enabled"` + AdvancedConfig string `json:"advanced_config"` + WAFDisabled bool `json:"waf_disabled"` + UseDNSChallenge bool `json:"use_dns_challenge"` +} + +// CharonAccessList represents an access list in Charon export format. +type CharonAccessList struct { + UUID string `json:"uuid"` + Name string `json:"name"` + Description string `json:"description"` + Type string `json:"type"` + IPRules string `json:"ip_rules"` + CountryCodes string `json:"country_codes"` + LocalNetworkOnly bool `json:"local_network_only"` + Enabled bool `json:"enabled"` +} + +// CharonDNSRecord represents a DNS record in Charon export format. +type CharonDNSRecord struct { + UUID string `json:"uuid"` + Name string `json:"name"` + Type string `json:"type"` + Value string `json:"value"` + TTL int `json:"ttl"` + ProviderID uint `json:"provider_id"` +} + +// JSONImportHandler handles JSON configuration imports (both Charon and NPM formats). 
+type JSONImportHandler struct { + db *gorm.DB + proxyHostSvc *services.ProxyHostService +} + +// NewJSONImportHandler creates a new JSON import handler. +func NewJSONImportHandler(db *gorm.DB) *JSONImportHandler { + return &JSONImportHandler{ + db: db, + proxyHostSvc: services.NewProxyHostService(db), + } +} + +// RegisterRoutes registers JSON import routes. +func (h *JSONImportHandler) RegisterRoutes(router *gin.RouterGroup) { + router.POST("/import/json/upload", h.Upload) + router.POST("/import/json/commit", h.Commit) + router.POST("/import/json/cancel", h.Cancel) +} + +// Upload parses a JSON export (Charon or NPM format) and returns a preview. +func (h *JSONImportHandler) Upload(c *gin.Context) { + var req struct { + Content string `json:"content" binding:"required"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Try Charon format first + var charonExport CharonExport + if err := json.Unmarshal([]byte(req.Content), &charonExport); err == nil && h.isCharonFormat(charonExport) { + h.handleCharonUpload(c, charonExport) + return + } + + // Fall back to NPM format + var npmExport NPMExport + if err := json.Unmarshal([]byte(req.Content), &npmExport); err == nil && len(npmExport.ProxyHosts) > 0 { + h.handleNPMUpload(c, npmExport) + return + } + + c.JSON(http.StatusBadRequest, gin.H{"error": "unrecognized JSON format - must be Charon or NPM export"}) +} + +// isCharonFormat checks if the export is in Charon format. +func (h *JSONImportHandler) isCharonFormat(export CharonExport) bool { + return export.Version != "" || len(export.ProxyHosts) > 0 +} + +// handleCharonUpload processes a Charon format export. 
+func (h *JSONImportHandler) handleCharonUpload(c *gin.Context, export CharonExport) { + result := h.convertCharonToImportResult(export) + + if len(result.Hosts) == 0 { + c.JSON(http.StatusBadRequest, gin.H{"error": "no proxy hosts found in Charon export"}) + return + } + + existingHosts, _ := h.proxyHostSvc.List() + existingDomainsMap := make(map[string]models.ProxyHost) + for _, eh := range existingHosts { + existingDomainsMap[eh.DomainNames] = eh + } + + conflictDetails := make(map[string]gin.H) + for _, ph := range result.Hosts { + if existing, found := existingDomainsMap[ph.DomainNames]; found { + result.Conflicts = append(result.Conflicts, ph.DomainNames) + conflictDetails[ph.DomainNames] = gin.H{ + "existing": gin.H{ + "forward_scheme": existing.ForwardScheme, + "forward_host": existing.ForwardHost, + "forward_port": existing.ForwardPort, + "ssl_forced": existing.SSLForced, + "websocket": existing.WebsocketSupport, + "enabled": existing.Enabled, + }, + "imported": gin.H{ + "forward_scheme": ph.ForwardScheme, + "forward_host": ph.ForwardHost, + "forward_port": ph.ForwardPort, + "ssl_forced": ph.SSLForced, + "websocket": ph.WebsocketSupport, + }, + } + } + } + + sid := uuid.NewString() + + // Store the parsed export in session storage for later commit + jsonImportSessionsMu.Lock() + jsonImportSessions[sid] = jsonImportSession{ + SourceType: "charon", + CharonExport: &export, + } + jsonImportSessionsMu.Unlock() + + c.JSON(http.StatusOK, gin.H{ + "session": gin.H{"id": sid, "state": "transient", "source_type": "charon"}, + "preview": result, + "conflict_details": conflictDetails, + "charon_export": gin.H{ + "version": export.Version, + "exported_at": export.ExportedAt, + "proxy_hosts": len(export.ProxyHosts), + "access_lists": len(export.AccessLists), + "dns_records": len(export.DNSRecords), + }, + }) +} + +// handleNPMUpload processes an NPM format export. 
+func (h *JSONImportHandler) handleNPMUpload(c *gin.Context, export NPMExport) { + npmHandler := NewNPMImportHandler(h.db) + result := npmHandler.convertNPMToImportResult(export) + + if len(result.Hosts) == 0 { + c.JSON(http.StatusBadRequest, gin.H{"error": "no proxy hosts found in NPM export"}) + return + } + + existingHosts, _ := h.proxyHostSvc.List() + existingDomainsMap := make(map[string]models.ProxyHost) + for _, eh := range existingHosts { + existingDomainsMap[eh.DomainNames] = eh + } + + conflictDetails := make(map[string]gin.H) + for _, ph := range result.Hosts { + if existing, found := existingDomainsMap[ph.DomainNames]; found { + result.Conflicts = append(result.Conflicts, ph.DomainNames) + conflictDetails[ph.DomainNames] = gin.H{ + "existing": gin.H{ + "forward_scheme": existing.ForwardScheme, + "forward_host": existing.ForwardHost, + "forward_port": existing.ForwardPort, + "ssl_forced": existing.SSLForced, + "websocket": existing.WebsocketSupport, + "enabled": existing.Enabled, + }, + "imported": gin.H{ + "forward_scheme": ph.ForwardScheme, + "forward_host": ph.ForwardHost, + "forward_port": ph.ForwardPort, + "ssl_forced": ph.SSLForced, + "websocket": ph.WebsocketSupport, + }, + } + } + } + + sid := uuid.NewString() + + // Store the parsed export in session storage for later commit + jsonImportSessionsMu.Lock() + jsonImportSessions[sid] = jsonImportSession{ + SourceType: "npm", + NPMExport: &export, + } + jsonImportSessionsMu.Unlock() + + c.JSON(http.StatusOK, gin.H{ + "session": gin.H{"id": sid, "state": "transient", "source_type": "npm"}, + "preview": result, + "conflict_details": conflictDetails, + "npm_export": gin.H{ + "proxy_hosts": len(export.ProxyHosts), + "access_lists": len(export.AccessLists), + "certificates": len(export.Certificates), + }, + }) +} + +// Commit finalizes the JSON import with user's conflict resolutions. 
+func (h *JSONImportHandler) Commit(c *gin.Context) { + var req struct { + SessionUUID string `json:"session_uuid" binding:"required"` + Resolutions map[string]string `json:"resolutions"` + Names map[string]string `json:"names"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Retrieve the stored session + jsonImportSessionsMu.RLock() + session, ok := jsonImportSessions[req.SessionUUID] + jsonImportSessionsMu.RUnlock() + + if !ok { + c.JSON(http.StatusNotFound, gin.H{"error": "session not found or expired"}) + return + } + + // Route to the appropriate commit handler based on source type + if session.SourceType == "charon" && session.CharonExport != nil { + h.commitCharonImport(c, *session.CharonExport, req.Resolutions, req.Names, req.SessionUUID) + return + } + + if session.SourceType == "npm" && session.NPMExport != nil { + h.commitNPMImport(c, *session.NPMExport, req.Resolutions, req.Names, req.SessionUUID) + return + } + + c.JSON(http.StatusBadRequest, gin.H{"error": "invalid session state"}) +} + +// Cancel cancels a JSON import session and cleans up resources. +func (h *JSONImportHandler) Cancel(c *gin.Context) { + var req struct { + SessionUUID string `json:"session_uuid"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Clean up session if it exists + jsonImportSessionsMu.Lock() + delete(jsonImportSessions, req.SessionUUID) + jsonImportSessionsMu.Unlock() + + c.JSON(http.StatusOK, gin.H{"status": "cancelled"}) +} + +// commitCharonImport commits a Charon format import. 
+func (h *JSONImportHandler) commitCharonImport(c *gin.Context, export CharonExport, resolutions, names map[string]string, sessionUUID string) { + result := h.convertCharonToImportResult(export) + proxyHosts := caddy.ConvertToProxyHosts(result.Hosts) + + created := 0 + updated := 0 + skipped := 0 + errors := []string{} + + existingHosts, _ := h.proxyHostSvc.List() + existingMap := make(map[string]*models.ProxyHost) + for i := range existingHosts { + existingMap[existingHosts[i].DomainNames] = &existingHosts[i] + } + + for _, host := range proxyHosts { + action := resolutions[host.DomainNames] + + if customName, ok := names[host.DomainNames]; ok && customName != "" { + host.Name = customName + } + + if action == "skip" || action == "keep" { + skipped++ + continue + } + + if action == "rename" { + host.DomainNames += "-imported" + } + + if action == "overwrite" { + if existing, found := existingMap[host.DomainNames]; found { + host.ID = existing.ID + host.UUID = existing.UUID + host.CertificateID = existing.CertificateID + host.CreatedAt = existing.CreatedAt + + if err := h.proxyHostSvc.Update(&host); err != nil { + errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error())) + } else { + updated++ + } + continue + } + } + + host.UUID = uuid.NewString() + if err := h.proxyHostSvc.Create(&host); err != nil { + errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error())) + } else { + created++ + } + } + + // Clean up session after successful commit + jsonImportSessionsMu.Lock() + delete(jsonImportSessions, sessionUUID) + jsonImportSessionsMu.Unlock() + + c.JSON(http.StatusOK, gin.H{ + "created": created, + "updated": updated, + "skipped": skipped, + "errors": errors, + }) +} + +// commitNPMImport commits an NPM format import. 
+func (h *JSONImportHandler) commitNPMImport(c *gin.Context, export NPMExport, resolutions, names map[string]string, sessionUUID string) { + npmHandler := NewNPMImportHandler(h.db) + result := npmHandler.convertNPMToImportResult(export) + proxyHosts := caddy.ConvertToProxyHosts(result.Hosts) + + created := 0 + updated := 0 + skipped := 0 + errors := []string{} + + existingHosts, _ := h.proxyHostSvc.List() + existingMap := make(map[string]*models.ProxyHost) + for i := range existingHosts { + existingMap[existingHosts[i].DomainNames] = &existingHosts[i] + } + + for _, host := range proxyHosts { + action := resolutions[host.DomainNames] + + if customName, ok := names[host.DomainNames]; ok && customName != "" { + host.Name = customName + } + + if action == "skip" || action == "keep" { + skipped++ + continue + } + + if action == "rename" { + host.DomainNames += "-imported" + } + + if action == "overwrite" { + if existing, found := existingMap[host.DomainNames]; found { + host.ID = existing.ID + host.UUID = existing.UUID + host.CertificateID = existing.CertificateID + host.CreatedAt = existing.CreatedAt + + if err := h.proxyHostSvc.Update(&host); err != nil { + errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error())) + } else { + updated++ + } + continue + } + } + + host.UUID = uuid.NewString() + if err := h.proxyHostSvc.Create(&host); err != nil { + errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error())) + } else { + created++ + } + } + + // Clean up session after successful commit + jsonImportSessionsMu.Lock() + delete(jsonImportSessions, sessionUUID) + jsonImportSessionsMu.Unlock() + + c.JSON(http.StatusOK, gin.H{ + "created": created, + "updated": updated, + "skipped": skipped, + "errors": errors, + }) +} + +// convertCharonToImportResult converts Charon export format to ImportResult. 
+func (h *JSONImportHandler) convertCharonToImportResult(export CharonExport) *caddy.ImportResult { + result := &caddy.ImportResult{ + Hosts: []caddy.ParsedHost{}, + Conflicts: []string{}, + Errors: []string{}, + } + + for _, ch := range export.ProxyHosts { + if ch.DomainNames == "" { + result.Errors = append(result.Errors, fmt.Sprintf("host %s has no domain names", ch.UUID)) + continue + } + + scheme := ch.ForwardScheme + if scheme == "" { + scheme = "http" + } + + port := ch.ForwardPort + if port == 0 { + port = 80 + } + + warnings := []string{} + if ch.AdvancedConfig != "" && !isValidJSON(ch.AdvancedConfig) { + warnings = append(warnings, "Advanced config may need review") + } + + host := caddy.ParsedHost{ + DomainNames: ch.DomainNames, + ForwardScheme: scheme, + ForwardHost: ch.ForwardHost, + ForwardPort: port, + SSLForced: ch.SSLForced, + WebsocketSupport: ch.WebsocketSupport, + Warnings: warnings, + } + + rawJSON, _ := json.Marshal(ch) + host.RawJSON = string(rawJSON) + + result.Hosts = append(result.Hosts, host) + } + + return result +} + +// isValidJSON checks if a string is valid JSON. 
+func isValidJSON(s string) bool { + s = strings.TrimSpace(s) + if s == "" { + return true + } + var js json.RawMessage + return json.Unmarshal([]byte(s), &js) == nil +} diff --git a/backend/internal/api/handlers/json_import_handler_test.go b/backend/internal/api/handlers/json_import_handler_test.go new file mode 100644 index 00000000..1ae7a230 --- /dev/null +++ b/backend/internal/api/handlers/json_import_handler_test.go @@ -0,0 +1,600 @@ +package handlers + +import ( + "bytes" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/gin-gonic/gin" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gorm.io/driver/sqlite" + "gorm.io/gorm" + + "github.com/Wikid82/charon/backend/internal/models" +) + +func setupJSONTestDB(t *testing.T) *gorm.DB { + db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{}) + require.NoError(t, err) + + err = db.AutoMigrate(&models.ProxyHost{}, &models.Location{}, &models.Setting{}) + require.NoError(t, err) + + return db +} + +func TestNewJSONImportHandler(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + assert.NotNil(t, handler) + assert.NotNil(t, handler.db) + assert.NotNil(t, handler.proxyHostSvc) +} + +func TestJSONImportHandler_RegisterRoutes(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + routes := router.Routes() + routePaths := make(map[string]bool) + for _, r := range routes { + routePaths[r.Method+":"+r.Path] = true + } + + assert.True(t, routePaths["POST:/api/v1/import/json/upload"]) + assert.True(t, routePaths["POST:/api/v1/import/json/commit"]) + assert.True(t, routePaths["POST:/api/v1/import/json/cancel"]) +} + +func TestJSONImportHandler_Upload_CharonFormat(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + 
router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + charonExport := CharonExport{ + Version: "1.0.0", + ExportedAt: time.Now(), + ProxyHosts: []CharonProxyHost{ + { + UUID: "test-uuid-1", + Name: "Test Host", + DomainNames: "example.com", + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + SSLForced: true, + WebsocketSupport: true, + Enabled: true, + }, + }, + AccessLists: []CharonAccessList{ + { + UUID: "acl-uuid-1", + Name: "Test ACL", + Type: "whitelist", + Enabled: true, + }, + }, + } + + content, _ := json.Marshal(charonExport) + body, _ := json.Marshal(map[string]string{"content": string(content)}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err := json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Contains(t, response, "session") + session := response["session"].(map[string]any) + assert.Equal(t, "charon", session["source_type"]) + + assert.Contains(t, response, "charon_export") + charonInfo := response["charon_export"].(map[string]any) + assert.Equal(t, "1.0.0", charonInfo["version"]) +} + +func TestJSONImportHandler_Upload_NPMFormatFallback(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"npm-example.com"}, + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + content, _ := json.Marshal(npmExport) + body, _ := json.Marshal(map[string]string{"content": string(content)}) + + req := httptest.NewRequest(http.MethodPost, 
"/api/v1/import/json/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err := json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + session := response["session"].(map[string]any) + assert.Equal(t, "npm", session["source_type"]) + + assert.Contains(t, response, "npm_export") +} + +func TestJSONImportHandler_Upload_UnrecognizedFormat(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + unknownFormat := map[string]any{ + "some_field": "some_value", + "other": 123, + } + + content, _ := json.Marshal(unknownFormat) + body, _ := json.Marshal(map[string]string{"content": string(content)}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusBadRequest, w.Code) +} + +func TestJSONImportHandler_Upload_InvalidJSON(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + body, _ := json.Marshal(map[string]string{"content": "{invalid json"}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusBadRequest, w.Code) +} + +func TestJSONImportHandler_Commit_CharonFormat(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + 
handler.RegisterRoutes(api) + + charonExport := CharonExport{ + Version: "1.0.0", + ExportedAt: time.Now(), + ProxyHosts: []CharonProxyHost{ + { + UUID: "test-uuid-1", + Name: "Test Host", + DomainNames: "newcharon.com", + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + // Step 1: Upload to get session ID + content, _ := json.Marshal(charonExport) + uploadBody, _ := json.Marshal(map[string]string{"content": string(content)}) + + uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(uploadBody)) + uploadReq.Header.Set("Content-Type", "application/json") + uploadW := httptest.NewRecorder() + + router.ServeHTTP(uploadW, uploadReq) + require.Equal(t, http.StatusOK, uploadW.Code) + + var uploadResponse map[string]any + err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse) + require.NoError(t, err) + + session := uploadResponse["session"].(map[string]any) + sessionID := session["id"].(string) + + // Step 2: Commit with session UUID + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + "resolutions": map[string]string{}, + "names": map[string]string{"newcharon.com": "Custom Name"}, + }) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/commit", bytes.NewReader(commitBody)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err = json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Equal(t, float64(1), response["created"]) + + var host models.ProxyHost + db.Where("domain_names = ?", "newcharon.com").First(&host) + assert.Equal(t, "Custom Name", host.Name) +} + +func TestJSONImportHandler_Commit_NPMFormatFallback(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := 
router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"newnpm.com"}, + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + // Step 1: Upload to get session ID + content, _ := json.Marshal(npmExport) + uploadBody, _ := json.Marshal(map[string]string{"content": string(content)}) + + uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(uploadBody)) + uploadReq.Header.Set("Content-Type", "application/json") + uploadW := httptest.NewRecorder() + + router.ServeHTTP(uploadW, uploadReq) + require.Equal(t, http.StatusOK, uploadW.Code) + + var uploadResponse map[string]any + err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse) + require.NoError(t, err) + + session := uploadResponse["session"].(map[string]any) + sessionID := session["id"].(string) + + // Step 2: Commit with session UUID + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + "resolutions": map[string]string{}, + "names": map[string]string{}, + }) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/commit", bytes.NewReader(commitBody)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err = json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Equal(t, float64(1), response["created"]) +} + +func TestJSONImportHandler_Commit_SessionNotFound(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + // Try to commit with a non-existent session + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": "non-existent-uuid", + "resolutions": map[string]string{}, + 
"names": map[string]string{}, + }) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/commit", bytes.NewReader(commitBody)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusNotFound, w.Code) + + var response map[string]any + err := json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Contains(t, response["error"], "session not found") +} + +func TestJSONImportHandler_Cancel(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + charonExport := CharonExport{ + Version: "1.0.0", + ExportedAt: time.Now(), + ProxyHosts: []CharonProxyHost{ + { + UUID: "cancel-test-uuid", + Name: "Cancel Test", + DomainNames: "cancel-test.com", + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + // Step 1: Upload to get session ID + content, _ := json.Marshal(charonExport) + uploadBody, _ := json.Marshal(map[string]string{"content": string(content)}) + + uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(uploadBody)) + uploadReq.Header.Set("Content-Type", "application/json") + uploadW := httptest.NewRecorder() + + router.ServeHTTP(uploadW, uploadReq) + require.Equal(t, http.StatusOK, uploadW.Code) + + var uploadResponse map[string]any + err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse) + require.NoError(t, err) + + session := uploadResponse["session"].(map[string]any) + sessionID := session["id"].(string) + + // Step 2: Cancel the session + cancelBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + }) + + cancelReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/cancel", bytes.NewReader(cancelBody)) + cancelReq.Header.Set("Content-Type", "application/json") + 
cancelW := httptest.NewRecorder() + + router.ServeHTTP(cancelW, cancelReq) + + assert.Equal(t, http.StatusOK, cancelW.Code) + + var cancelResponse map[string]any + err = json.Unmarshal(cancelW.Body.Bytes(), &cancelResponse) + require.NoError(t, err) + + assert.Equal(t, "cancelled", cancelResponse["status"]) + + // Step 3: Try to commit with cancelled session (should fail) + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + "resolutions": map[string]string{}, + "names": map[string]string{}, + }) + + commitReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/commit", bytes.NewReader(commitBody)) + commitReq.Header.Set("Content-Type", "application/json") + commitW := httptest.NewRecorder() + + router.ServeHTTP(commitW, commitReq) + + assert.Equal(t, http.StatusNotFound, commitW.Code) +} + +func TestJSONImportHandler_ConflictDetection(t *testing.T) { + db := setupJSONTestDB(t) + + existingHost := models.ProxyHost{ + UUID: "existing-uuid", + DomainNames: "conflict.com", + ForwardScheme: "http", + ForwardHost: "old-server", + ForwardPort: 80, + Enabled: true, + } + db.Create(&existingHost) + + handler := NewJSONImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + charonExport := CharonExport{ + Version: "1.0.0", + ProxyHosts: []CharonProxyHost{ + { + UUID: "new-uuid", + DomainNames: "conflict.com", + ForwardScheme: "http", + ForwardHost: "new-server", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + content, _ := json.Marshal(charonExport) + body, _ := json.Marshal(map[string]string{"content": string(content)}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err := json.Unmarshal(w.Body.Bytes(), &response) 
+ require.NoError(t, err) + + conflictDetails := response["conflict_details"].(map[string]any) + assert.Contains(t, conflictDetails, "conflict.com") +} + +func TestJSONImportHandler_IsCharonFormat(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + tests := []struct { + name string + export CharonExport + expected bool + }{ + { + name: "with version", + export: CharonExport{Version: "1.0.0"}, + expected: true, + }, + { + name: "with proxy hosts", + export: CharonExport{ + ProxyHosts: []CharonProxyHost{{DomainNames: "test.com"}}, + }, + expected: true, + }, + { + name: "empty export", + export: CharonExport{}, + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := handler.isCharonFormat(tt.export) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestIsValidJSON(t *testing.T) { + tests := []struct { + name string + input string + expected bool + }{ + {"valid object", `{"key": "value"}`, true}, + {"valid array", `[1, 2, 3]`, true}, + {"valid string", `"hello"`, true}, + {"valid number", `123`, true}, + {"empty string", "", true}, + {"whitespace only", " ", true}, + {"invalid json", `{key: "value"}`, false}, + {"incomplete", `{"key":`, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := isValidJSON(tt.input) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestJSONImportHandler_ConvertCharonToImportResult(t *testing.T) { + db := setupJSONTestDB(t) + handler := NewJSONImportHandler(db) + + charonExport := CharonExport{ + Version: "1.0.0", + ExportedAt: time.Now(), + ProxyHosts: []CharonProxyHost{ + { + UUID: "uuid-1", + Name: "Host 1", + DomainNames: "host1.com", + ForwardScheme: "https", + ForwardHost: "backend1", + ForwardPort: 443, + SSLForced: true, + WebsocketSupport: true, + }, + { + UUID: "uuid-2", + DomainNames: "", + ForwardScheme: "http", + ForwardHost: "backend2", + ForwardPort: 80, + }, + }, + } + + result := 
handler.convertCharonToImportResult(charonExport) + + assert.Len(t, result.Hosts, 1) + assert.Len(t, result.Errors, 1) + + host := result.Hosts[0] + assert.Equal(t, "host1.com", host.DomainNames) + assert.Equal(t, "https", host.ForwardScheme) + assert.Equal(t, "backend1", host.ForwardHost) + assert.Equal(t, 443, host.ForwardPort) + assert.True(t, host.SSLForced) + assert.True(t, host.WebsocketSupport) +} diff --git a/backend/internal/api/handlers/npm_import_handler.go b/backend/internal/api/handlers/npm_import_handler.go new file mode 100644 index 00000000..8f124eca --- /dev/null +++ b/backend/internal/api/handlers/npm_import_handler.go @@ -0,0 +1,368 @@ +package handlers + +import ( + "encoding/json" + "fmt" + "net/http" + "sync" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "gorm.io/gorm" + + "github.com/Wikid82/charon/backend/internal/caddy" + "github.com/Wikid82/charon/backend/internal/models" + "github.com/Wikid82/charon/backend/internal/services" +) + +// npmImportSessions stores parsed NPM exports keyed by session UUID. +// TODO: Implement session expiration to prevent memory leaks (e.g., TTL-based cleanup). +var ( + npmImportSessions = make(map[string]NPMExport) + npmImportSessionsMu sync.RWMutex +) + +// NPMExport represents the top-level structure of an NPM export file. +type NPMExport struct { + ProxyHosts []NPMProxyHost `json:"proxy_hosts"` + AccessLists []NPMAccessList `json:"access_lists"` + Certificates []NPMCertificate `json:"certificates"` +} + +// NPMProxyHost represents a proxy host from NPM export. 
+type NPMProxyHost struct { + ID int `json:"id"` + DomainNames []string `json:"domain_names"` + ForwardScheme string `json:"forward_scheme"` + ForwardHost string `json:"forward_host"` + ForwardPort int `json:"forward_port"` + CertificateID *int `json:"certificate_id"` + SSLForced bool `json:"ssl_forced"` + CachingEnabled bool `json:"caching_enabled"` + BlockExploits bool `json:"block_exploits"` + AdvancedConfig string `json:"advanced_config"` + Meta any `json:"meta"` + AllowWebsocketUpgrade bool `json:"allow_websocket_upgrade"` + HTTP2Support bool `json:"http2_support"` + HSTSEnabled bool `json:"hsts_enabled"` + HSTSSubdomains bool `json:"hsts_subdomains"` + AccessListID *int `json:"access_list_id"` + Enabled bool `json:"enabled"` + Locations []any `json:"locations"` + CustomLocations []any `json:"custom_locations"` + OwnerUserID int `json:"owner_user_id"` + UseDefaultLocation bool `json:"use_default_location"` + IPV6 bool `json:"ipv6"` + CreatedOn string `json:"created_on"` + ModifiedOn string `json:"modified_on"` + ForwardDomainName string `json:"forward_domain_name"` + ForwardDomainNameEnabled bool `json:"forward_domain_name_enabled"` +} + +// NPMAccessList represents an access list from NPM export. +type NPMAccessList struct { + ID int `json:"id"` + Name string `json:"name"` + PassAuth int `json:"pass_auth"` + SatisfyAny int `json:"satisfy_any"` + OwnerUserID int `json:"owner_user_id"` + Items []NPMAccessItem `json:"items"` + Clients []NPMAccessItem `json:"clients"` + ProxyHostsCount int `json:"proxy_host_count"` + CreatedOn string `json:"created_on"` + ModifiedOn string `json:"modified_on"` + AuthorizationHeader any `json:"authorization_header"` +} + +// NPMAccessItem represents an item in an NPM access list. 
+type NPMAccessItem struct { + ID int `json:"id"` + AccessListID int `json:"access_list_id"` + Address string `json:"address"` + Directive string `json:"directive"` + CreatedOn string `json:"created_on"` + ModifiedOn string `json:"modified_on"` +} + +// NPMCertificate represents a certificate from NPM export. +type NPMCertificate struct { + ID int `json:"id"` + Provider string `json:"provider"` + NiceName string `json:"nice_name"` + DomainNames []string `json:"domain_names"` + ExpiresOn string `json:"expires_on"` + CreatedOn string `json:"created_on"` + ModifiedOn string `json:"modified_on"` + IsDNSChallenge bool `json:"is_dns_challenge"` + Meta any `json:"meta"` +} + +// NPMImportHandler handles NPM configuration imports. +type NPMImportHandler struct { + db *gorm.DB + proxyHostSvc *services.ProxyHostService +} + +// NewNPMImportHandler creates a new NPM import handler. +func NewNPMImportHandler(db *gorm.DB) *NPMImportHandler { + return &NPMImportHandler{ + db: db, + proxyHostSvc: services.NewProxyHostService(db), + } +} + +// RegisterRoutes registers NPM import routes. +func (h *NPMImportHandler) RegisterRoutes(router *gin.RouterGroup) { + router.POST("/import/npm/upload", h.Upload) + router.POST("/import/npm/commit", h.Commit) + router.POST("/import/npm/cancel", h.Cancel) +} + +// Upload parses an NPM export JSON and returns a preview with conflict detection. 
+func (h *NPMImportHandler) Upload(c *gin.Context) { + var req struct { + Content string `json:"content" binding:"required"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + var npmExport NPMExport + if err := json.Unmarshal([]byte(req.Content), &npmExport); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("invalid NPM export JSON: %v", err)}) + return + } + + result := h.convertNPMToImportResult(npmExport) + + if len(result.Hosts) == 0 { + c.JSON(http.StatusBadRequest, gin.H{"error": "no proxy hosts found in NPM export"}) + return + } + + // Check for conflicts with existing hosts + existingHosts, _ := h.proxyHostSvc.List() + existingDomainsMap := make(map[string]models.ProxyHost) + for _, eh := range existingHosts { + existingDomainsMap[eh.DomainNames] = eh + } + + conflictDetails := make(map[string]gin.H) + for _, ph := range result.Hosts { + if existing, found := existingDomainsMap[ph.DomainNames]; found { + result.Conflicts = append(result.Conflicts, ph.DomainNames) + conflictDetails[ph.DomainNames] = gin.H{ + "existing": gin.H{ + "forward_scheme": existing.ForwardScheme, + "forward_host": existing.ForwardHost, + "forward_port": existing.ForwardPort, + "ssl_forced": existing.SSLForced, + "websocket": existing.WebsocketSupport, + "enabled": existing.Enabled, + }, + "imported": gin.H{ + "forward_scheme": ph.ForwardScheme, + "forward_host": ph.ForwardHost, + "forward_port": ph.ForwardPort, + "ssl_forced": ph.SSLForced, + "websocket": ph.WebsocketSupport, + }, + } + } + } + + sid := uuid.NewString() + + // Store the parsed export in session storage for later commit + npmImportSessionsMu.Lock() + npmImportSessions[sid] = npmExport + npmImportSessionsMu.Unlock() + + c.JSON(http.StatusOK, gin.H{ + "session": gin.H{"id": sid, "state": "transient", "source_type": "npm"}, + "preview": result, + "conflict_details": conflictDetails, + "npm_export": gin.H{ + 
"proxy_hosts": len(npmExport.ProxyHosts), + "access_lists": len(npmExport.AccessLists), + "certificates": len(npmExport.Certificates), + }, + }) +} + +// Commit finalizes the NPM import with user's conflict resolutions. +func (h *NPMImportHandler) Commit(c *gin.Context) { + var req struct { + SessionUUID string `json:"session_uuid" binding:"required"` + Resolutions map[string]string `json:"resolutions"` // domain -> action + Names map[string]string `json:"names"` // domain -> custom name + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Retrieve the stored NPM export from session + npmImportSessionsMu.RLock() + npmExport, ok := npmImportSessions[req.SessionUUID] + npmImportSessionsMu.RUnlock() + + if !ok { + c.JSON(http.StatusNotFound, gin.H{"error": "session not found or expired"}) + return + } + + result := h.convertNPMToImportResult(npmExport) + proxyHosts := caddy.ConvertToProxyHosts(result.Hosts) + + created := 0 + updated := 0 + skipped := 0 + errors := []string{} + + existingHosts, _ := h.proxyHostSvc.List() + existingMap := make(map[string]*models.ProxyHost) + for i := range existingHosts { + existingMap[existingHosts[i].DomainNames] = &existingHosts[i] + } + + for _, host := range proxyHosts { + action := req.Resolutions[host.DomainNames] + + if customName, ok := req.Names[host.DomainNames]; ok && customName != "" { + host.Name = customName + } + + if action == "skip" || action == "keep" { + skipped++ + continue + } + + if action == "rename" { + host.DomainNames += "-imported" + } + + if action == "overwrite" { + if existing, found := existingMap[host.DomainNames]; found { + host.ID = existing.ID + host.UUID = existing.UUID + host.CertificateID = existing.CertificateID + host.CreatedAt = existing.CreatedAt + + if err := h.proxyHostSvc.Update(&host); err != nil { + errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error())) + } else { + updated++ + } + 
continue + } + } + + host.UUID = uuid.NewString() + if err := h.proxyHostSvc.Create(&host); err != nil { + errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error())) + } else { + created++ + } + } + + // Clean up session after successful commit + npmImportSessionsMu.Lock() + delete(npmImportSessions, req.SessionUUID) + npmImportSessionsMu.Unlock() + + c.JSON(http.StatusOK, gin.H{ + "created": created, + "updated": updated, + "skipped": skipped, + "errors": errors, + }) +} + +// Cancel cancels an NPM import session and cleans up resources. +func (h *NPMImportHandler) Cancel(c *gin.Context) { + var req struct { + SessionUUID string `json:"session_uuid"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Clean up session if it exists + npmImportSessionsMu.Lock() + delete(npmImportSessions, req.SessionUUID) + npmImportSessionsMu.Unlock() + + c.JSON(http.StatusOK, gin.H{"status": "cancelled"}) +} + +// convertNPMToImportResult converts NPM export format to Charon's ImportResult. 
+func (h *NPMImportHandler) convertNPMToImportResult(export NPMExport) *caddy.ImportResult { + result := &caddy.ImportResult{ + Hosts: []caddy.ParsedHost{}, + Conflicts: []string{}, + Errors: []string{}, + } + + for _, npmHost := range export.ProxyHosts { + if len(npmHost.DomainNames) == 0 { + result.Errors = append(result.Errors, fmt.Sprintf("host %d has no domain names", npmHost.ID)) + continue + } + + // NPM stores multiple domains as array; join them + domainNames := "" + for i, d := range npmHost.DomainNames { + if i > 0 { + domainNames += "," + } + domainNames += d + } + + scheme := npmHost.ForwardScheme + if scheme == "" { + scheme = "http" + } + + port := npmHost.ForwardPort + if port == 0 { + port = 80 + } + + warnings := []string{} + if npmHost.CachingEnabled { + warnings = append(warnings, "Caching not supported - will be disabled") + } + if len(npmHost.Locations) > 0 || len(npmHost.CustomLocations) > 0 { + warnings = append(warnings, "Custom locations not fully supported") + } + if npmHost.AdvancedConfig != "" { + warnings = append(warnings, "Advanced nginx config not compatible - manual review required") + } + if npmHost.AccessListID != nil && *npmHost.AccessListID > 0 { + warnings = append(warnings, fmt.Sprintf("Access list reference (ID: %d) needs manual mapping", *npmHost.AccessListID)) + } + + host := caddy.ParsedHost{ + DomainNames: domainNames, + ForwardScheme: scheme, + ForwardHost: npmHost.ForwardHost, + ForwardPort: port, + SSLForced: npmHost.SSLForced, + WebsocketSupport: npmHost.AllowWebsocketUpgrade, + Warnings: warnings, + } + + rawJSON, _ := json.Marshal(npmHost) + host.RawJSON = string(rawJSON) + + result.Hosts = append(result.Hosts, host) + } + + return result +} diff --git a/backend/internal/api/handlers/npm_import_handler_test.go b/backend/internal/api/handlers/npm_import_handler_test.go new file mode 100644 index 00000000..74d7be78 --- /dev/null +++ b/backend/internal/api/handlers/npm_import_handler_test.go @@ -0,0 +1,493 @@ +package 
handlers + +import ( + "bytes" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/gin-gonic/gin" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gorm.io/driver/sqlite" + "gorm.io/gorm" + + "github.com/Wikid82/charon/backend/internal/models" +) + +func setupNPMTestDB(t *testing.T) *gorm.DB { + db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{}) + require.NoError(t, err) + + err = db.AutoMigrate(&models.ProxyHost{}, &models.Location{}, &models.Setting{}) + require.NoError(t, err) + + return db +} + +func TestNewNPMImportHandler(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + assert.NotNil(t, handler) + assert.NotNil(t, handler.db) + assert.NotNil(t, handler.proxyHostSvc) +} + +func TestNPMImportHandler_RegisterRoutes(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + routes := router.Routes() + routePaths := make(map[string]bool) + for _, r := range routes { + routePaths[r.Method+":"+r.Path] = true + } + + assert.True(t, routePaths["POST:/api/v1/import/npm/upload"]) + assert.True(t, routePaths["POST:/api/v1/import/npm/commit"]) + assert.True(t, routePaths["POST:/api/v1/import/npm/cancel"]) +} + +func TestNPMImportHandler_Upload_ValidNPMExport(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"example.com"}, + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + SSLForced: true, + AllowWebsocketUpgrade: true, + Enabled: true, + }, + { + ID: 2, + DomainNames: []string{"test.com", "www.test.com"}, + ForwardScheme: "https", + ForwardHost: "192.168.1.101", + 
ForwardPort: 443, + Enabled: true, + }, + }, + AccessLists: []NPMAccessList{ + { + ID: 1, + Name: "Test ACL", + }, + }, + } + + content, _ := json.Marshal(npmExport) + body, _ := json.Marshal(map[string]string{"content": string(content)}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err := json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Contains(t, response, "session") + assert.Contains(t, response, "preview") + assert.Contains(t, response, "npm_export") + + preview := response["preview"].(map[string]any) + hosts := preview["hosts"].([]any) + assert.Len(t, hosts, 2) +} + +func TestNPMImportHandler_Upload_EmptyExport(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{}, + } + + content, _ := json.Marshal(npmExport) + body, _ := json.Marshal(map[string]string{"content": string(content)}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusBadRequest, w.Code) +} + +func TestNPMImportHandler_Upload_InvalidJSON(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + body, _ := json.Marshal(map[string]string{"content": "not valid json"}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", 
"application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusBadRequest, w.Code) +} + +func TestNPMImportHandler_Upload_ConflictDetection(t *testing.T) { + db := setupNPMTestDB(t) + + existingHost := models.ProxyHost{ + UUID: "existing-uuid", + DomainNames: "example.com", + ForwardScheme: "http", + ForwardHost: "old-server", + ForwardPort: 80, + Enabled: true, + } + db.Create(&existingHost) + + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"example.com"}, + ForwardScheme: "http", + ForwardHost: "new-server", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + content, _ := json.Marshal(npmExport) + body, _ := json.Marshal(map[string]string{"content": string(content)}) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(body)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err := json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Contains(t, response, "conflict_details") + conflictDetails := response["conflict_details"].(map[string]any) + assert.Contains(t, conflictDetails, "example.com") +} + +func TestNPMImportHandler_Commit_CreateNew(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"newhost.com"}, + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + // Step 1: Upload to get session ID + content, _ := 
json.Marshal(npmExport) + uploadBody, _ := json.Marshal(map[string]string{"content": string(content)}) + + uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(uploadBody)) + uploadReq.Header.Set("Content-Type", "application/json") + uploadW := httptest.NewRecorder() + + router.ServeHTTP(uploadW, uploadReq) + require.Equal(t, http.StatusOK, uploadW.Code) + + var uploadResponse map[string]any + err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse) + require.NoError(t, err) + + session := uploadResponse["session"].(map[string]any) + sessionID := session["id"].(string) + + // Step 2: Commit with session UUID + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + "resolutions": map[string]string{}, + "names": map[string]string{}, + }) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/commit", bytes.NewReader(commitBody)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err = json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Equal(t, float64(1), response["created"]) + assert.Equal(t, float64(0), response["updated"]) + assert.Equal(t, float64(0), response["skipped"]) + + var host models.ProxyHost + db.Where("domain_names = ?", "newhost.com").First(&host) + assert.NotEmpty(t, host.UUID) + assert.Equal(t, "192.168.1.100", host.ForwardHost) +} + +func TestNPMImportHandler_Commit_SkipAction(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"skipme.com"}, + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + // Step 1: Upload to 
get session ID + content, _ := json.Marshal(npmExport) + uploadBody, _ := json.Marshal(map[string]string{"content": string(content)}) + + uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(uploadBody)) + uploadReq.Header.Set("Content-Type", "application/json") + uploadW := httptest.NewRecorder() + + router.ServeHTTP(uploadW, uploadReq) + require.Equal(t, http.StatusOK, uploadW.Code) + + var uploadResponse map[string]any + err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse) + require.NoError(t, err) + + session := uploadResponse["session"].(map[string]any) + sessionID := session["id"].(string) + + // Step 2: Commit with skip resolution + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + "resolutions": map[string]string{"skipme.com": "skip"}, + "names": map[string]string{}, + }) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/commit", bytes.NewReader(commitBody)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusOK, w.Code) + + var response map[string]any + err = json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Equal(t, float64(0), response["created"]) + assert.Equal(t, float64(1), response["skipped"]) +} + +func TestNPMImportHandler_Commit_SessionNotFound(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + // Try to commit with a non-existent session + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": "non-existent-uuid", + "resolutions": map[string]string{}, + "names": map[string]string{}, + }) + + req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/commit", bytes.NewReader(commitBody)) + req.Header.Set("Content-Type", "application/json") + w := httptest.NewRecorder() + + 
router.ServeHTTP(w, req) + + assert.Equal(t, http.StatusNotFound, w.Code) + + var response map[string]any + err := json.Unmarshal(w.Body.Bytes(), &response) + require.NoError(t, err) + + assert.Contains(t, response["error"], "session not found") +} + +func TestNPMImportHandler_Cancel(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + gin.SetMode(gin.TestMode) + router := gin.New() + api := router.Group("/api/v1") + handler.RegisterRoutes(api) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"cancel-test.com"}, + ForwardScheme: "http", + ForwardHost: "192.168.1.100", + ForwardPort: 8080, + Enabled: true, + }, + }, + } + + // Step 1: Upload to get session ID + content, _ := json.Marshal(npmExport) + uploadBody, _ := json.Marshal(map[string]string{"content": string(content)}) + + uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(uploadBody)) + uploadReq.Header.Set("Content-Type", "application/json") + uploadW := httptest.NewRecorder() + + router.ServeHTTP(uploadW, uploadReq) + require.Equal(t, http.StatusOK, uploadW.Code) + + var uploadResponse map[string]any + err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse) + require.NoError(t, err) + + session := uploadResponse["session"].(map[string]any) + sessionID := session["id"].(string) + + // Step 2: Cancel the session + cancelBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + }) + + cancelReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/cancel", bytes.NewReader(cancelBody)) + cancelReq.Header.Set("Content-Type", "application/json") + cancelW := httptest.NewRecorder() + + router.ServeHTTP(cancelW, cancelReq) + + assert.Equal(t, http.StatusOK, cancelW.Code) + + var cancelResponse map[string]any + err = json.Unmarshal(cancelW.Body.Bytes(), &cancelResponse) + require.NoError(t, err) + + assert.Equal(t, "cancelled", cancelResponse["status"]) + + // Step 3: Try 
to commit with cancelled session (should fail) + commitBody, _ := json.Marshal(map[string]any{ + "session_uuid": sessionID, + "resolutions": map[string]string{}, + "names": map[string]string{}, + }) + + commitReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/commit", bytes.NewReader(commitBody)) + commitReq.Header.Set("Content-Type", "application/json") + commitW := httptest.NewRecorder() + + router.ServeHTTP(commitW, commitReq) + + assert.Equal(t, http.StatusNotFound, commitW.Code) +} + +func TestNPMImportHandler_ConvertNPMToImportResult(t *testing.T) { + db := setupNPMTestDB(t) + handler := NewNPMImportHandler(db) + + npmExport := NPMExport{ + ProxyHosts: []NPMProxyHost{ + { + ID: 1, + DomainNames: []string{"test.com", "www.test.com"}, + ForwardScheme: "https", + ForwardHost: "backend", + ForwardPort: 443, + SSLForced: true, + AllowWebsocketUpgrade: true, + CachingEnabled: true, + AdvancedConfig: "proxy_set_header X-Custom value;", + }, + { + ID: 2, + DomainNames: []string{}, + }, + }, + } + + result := handler.convertNPMToImportResult(npmExport) + + assert.Len(t, result.Hosts, 1) + assert.Len(t, result.Errors, 1) + + host := result.Hosts[0] + assert.Equal(t, "test.com,www.test.com", host.DomainNames) + assert.Equal(t, "https", host.ForwardScheme) + assert.Equal(t, "backend", host.ForwardHost) + assert.Equal(t, 443, host.ForwardPort) + assert.True(t, host.SSLForced) + assert.True(t, host.WebsocketSupport) + assert.Len(t, host.Warnings, 2) // Caching + Advanced config warnings +} diff --git a/backend/internal/api/routes/routes.go b/backend/internal/api/routes/routes.go index df9574bd..26fc9317 100644 --- a/backend/internal/api/routes/routes.go +++ b/backend/internal/api/routes/routes.go @@ -583,4 +583,12 @@ func RegisterImportHandler(router *gin.Engine, db *gorm.DB, caddyBinary, importD importHandler := handlers.NewImportHandler(db, caddyBinary, importDir, mountPath) api := router.Group("/api/v1") importHandler.RegisterRoutes(api) + + // NPM Import 
Handler - supports Nginx Proxy Manager export format + npmImportHandler := handlers.NewNPMImportHandler(db) + npmImportHandler.RegisterRoutes(api) + + // JSON Import Handler - supports both Charon and NPM export formats + jsonImportHandler := handlers.NewJSONImportHandler(db) + jsonImportHandler.RegisterRoutes(api) } diff --git a/backend/internal/services/mail_service.go b/backend/internal/services/mail_service.go index 5b922735..eb07c0b0 100644 --- a/backend/internal/services/mail_service.go +++ b/backend/internal/services/mail_service.go @@ -137,7 +137,7 @@ func (s *MailService) GetSMTPConfig() (*SMTPConfig, error) { return config, nil } -// SaveSMTPConfig saves SMTP settings to the database. +// SaveSMTPConfig saves SMTP settings to the database using a transaction. func (s *MailService) SaveSMTPConfig(config *SMTPConfig) error { settings := map[string]string{ "smtp_host": config.Host, @@ -148,31 +148,34 @@ func (s *MailService) SaveSMTPConfig(config *SMTPConfig) error { "smtp_encryption": config.Encryption, } - for key, value := range settings { - setting := models.Setting{ - Key: key, - Value: value, - Type: "string", - Category: "smtp", - } + return s.db.Transaction(func(tx *gorm.DB) error { + for key, value := range settings { + var existing models.Setting + result := tx.Where("key = ?", key).First(&existing) - // Upsert: update if exists, create if not - result := s.db.Where("key = ?", key).First(&models.Setting{}) - if result.Error == gorm.ErrRecordNotFound { - if err := s.db.Create(&setting).Error; err != nil { - return fmt.Errorf("failed to create setting %s: %w", key, err) - } - } else { - if err := s.db.Model(&models.Setting{}).Where("key = ?", key).Updates(map[string]any{ - "value": value, - "category": "smtp", - }).Error; err != nil { - return fmt.Errorf("failed to update setting %s: %w", key, err) + switch result.Error { + case gorm.ErrRecordNotFound: + setting := models.Setting{ + Key: key, + Value: value, + Type: "string", + Category: "smtp", + } 
+ if err := tx.Create(&setting).Error; err != nil {
+ return fmt.Errorf("failed to create setting %s: %w", key, err)
+ }
+ case nil:
+ existing.Value = value
+ existing.Category = "smtp"
+ if err := tx.Save(&existing).Error; err != nil {
+ return fmt.Errorf("failed to update setting %s: %w", key, err)
+ }
+ default:
+ return fmt.Errorf("failed to query setting %s: %w", key, result.Error)
 }
 }
- }
-
- return nil
+ return nil
+ })
 }

// IsConfigured returns true if SMTP is properly configured.
diff --git a/docs/features.md b/docs/features.md
index e141c441..471bc5f9 100644
--- a/docs/features.md
+++ b/docs/features.md
@@ -128,7 +128,23 @@ Migrating from another Caddy setup? Import your existing Caddyfile configuration

 ---

-### 🔌 WebSocket Support
+### 📥 Nginx Proxy Manager Import
+
+Migrating from Nginx Proxy Manager? Import your proxy host configurations directly from NPM export files. Charon parses your domains, upstream servers, SSL settings, and access lists, giving you a preview before committing.
+
+→ [Learn More](features/npm-import.md)
+
+---
+
+### 📄 JSON Configuration Import
+
+Import configurations from generic JSON exports or Charon backup files. Supports both Charon's native export format and Nginx Proxy Manager format with automatic detection. Perfect for restoring backups or migrating between Charon instances.
+
+→ [Learn More](features/json-import.md)
+
+---
+
+### 🔌 WebSocket Support

 Real-time applications like chat servers, live dashboards, and collaborative tools work out of the box. Charon handles WebSocket connections automatically with no special configuration needed.
diff --git a/docs/plans/current_spec.md b/docs/plans/current_spec.md index 3c31cc3f..809bf4bc 100644 --- a/docs/plans/current_spec.md +++ b/docs/plans/current_spec.md @@ -1,518 +1,791 @@ -# CrowdSec 1.7.5 Upgrade Verification Plan +# Phase 3: Backend Routes Implementation Plan -**Document Type**: Verification Plan -**Version**: 1.7.4 → 1.7.5 -**Created**: 2026-01-22 -**Status**: Ready for Implementation +> **Phase**: 3 of Skipped Tests Remediation +> **Status**: ✅ COMPLETE +> **Created**: 2026-01-22 +> **Completed**: 2026-01-22 +> **Target Tests**: 7 tests to re-enable +> **Actual Result**: 7 tests enabled and passing --- ## Executive Summary -This document outlines the verification plan for upgrading CrowdSec from version 1.7.4 to 1.7.5 in the Charon project. Based on analysis of the CrowdSec 1.7.5 release notes and the current integration implementation, this upgrade appears to be a **low-risk maintenance release** focused on internal refactoring, improved error handling, and dependency updates. +Phase 3 addresses missing backend API routes and a data persistence issue that block 7 E2E tests: + +1. **NPM Import Route** (`/tasks/import/npm`) - 4 skipped tests +2. **JSON Import Route** (`/tasks/import/json`) - 2 skipped tests +3. **SMTP Persistence Bug** - 1 skipped test at `smtp-settings.spec.ts:336` + +The existing Caddyfile import infrastructure provides a solid foundation. NPM and JSON import routes will extend this pattern with format-specific parsers. --- -## 1. 
CrowdSec 1.7.5 Release Analysis +## Root Cause Analysis -### 1.1 Key Changes Summary +### Issue 1: Missing NPM Import Route -| Category | Count | Risk Level | -|----------|-------|------------| -| Internal Refactoring | ~25 | Low | -| Bug Fixes | 8 | Low | -| Dependency Updates | ~12 | Low | -| New Features | 2 | Low | +**Location**: Tests at [tests/integration/import-to-production.spec.ts](../../tests/integration/import-to-production.spec.ts#L170-L237) -### 1.2 Notable Changes Relevant to Charon Integration +**Problem**: The tests navigate to `/tasks/import/npm` but this route doesn't exist in the frontend router or backend API. -#### New Features/Improvements +**Evidence**: +```typescript +// From import-to-production.spec.ts lines 170-180 +test.skip('should display NPM import page', async ({ page, adminUser }) => { + await page.goto('/tasks/import/npm'); // Route doesn't exist + ... +}); +``` -1. **`ParseKVLax` for Flexible Key-Value Parsing** ([#4007](https://github.com/crowdsecurity/crowdsec/pull/4007)) - - Adds more flexible parsing capabilities - - Impact: None - internal parser enhancement +**Expected NPM Export Format** (from test file): +```json +{ + "proxy_hosts": [ + { + "domain_names": ["test.example.com"], + "forward_host": "192.168.1.100", + "forward_port": 80 + } + ], + "access_lists": [], + "certificates": [] +} +``` -2. **AppSec Transaction ID Header Support** ([#4124](https://github.com/crowdsecurity/crowdsec/pull/4124)) - - Enables request tracing via transaction ID header - - Impact: Optional feature, no required changes +### Issue 2: Missing JSON Import Route -3. 
**Docker Datasource Schema** ([#4206](https://github.com/crowdsecurity/crowdsec/pull/4206)) - - Improved Docker acquisition configuration - - Impact: May benefit container monitoring setups +**Location**: Tests at [tests/integration/import-to-production.spec.ts](../../tests/integration/import-to-production.spec.ts#L243-L256) -#### Bug Fixes +**Problem**: The `/tasks/import/json` route is not implemented. Tests navigate to this route expecting a generic JSON configuration import interface. -1. **PAPI Allowlist Check** ([#4196](https://github.com/crowdsecurity/crowdsec/pull/4196)) - - Checks if decision is allowlisted before adding - - Impact: Improved decision handling +### Issue 3: SMTP Save Not Persisting -2. **CAPI Token Reuse** ([#4201](https://github.com/crowdsecurity/crowdsec/pull/4201)) - - Always reuses stored token for CAPI - - Impact: Better authentication stability +**Location**: Test at [tests/settings/smtp-settings.spec.ts](../../tests/settings/smtp-settings.spec.ts#L336) -3. **LAPI-Only Container Hub Fix** ([#4169](https://github.com/crowdsecurity/crowdsec/pull/4169)) - - Don't prepare hub in LAPI-only containers - - Impact: Better for containerized deployments +**Problem**: After saving SMTP configuration and reloading the page, the updated values don't persist. -#### Internal Changes (No External Impact) +**Skip Comment**: +```typescript +// Note: Skip - SMTP save not persisting correctly (backend issue, not test issue) +``` -- Removed `github.com/pkg/errors` dependency - uses `fmt.Errorf` instead -- Replaced syscall with unix/windows packages -- Various linting improvements (golangci-lint 2.8) -- Refactored acquisition and leakybucket packages -- Removed global variables in favor of dependency injection -- Build improvements for Docker (larger runners) -- Updated expr to 1.17.7 (already patched in Charon Dockerfile) -- Updated modernc.org/sqlite +**Analysis of Code Flow**: -### 1.3 Breaking Changes Assessment +1. 
**Frontend**: [SMTPSettings.tsx](../../frontend/src/pages/SMTPSettings.tsx#L50-L62) + - Calls `updateSMTPConfig()` which POSTs to `/settings/smtp` + - On success, invalidates query and shows toast -**No Breaking Changes Identified** +2. **Backend Handler**: [settings_handler.go](../../backend/internal/api/handlers/settings_handler.go#L109-L136) + - `UpdateSMTPConfig()` receives the request + - Calls `h.MailService.SaveSMTPConfig(config)` -The 1.7.5 release contains no API-breaking changes. All modifications are: -- Internal refactoring -- Bug fixes -- Dependency updates -- CI/CD improvements +3. **Mail Service**: [mail_service.go](../../backend/internal/services/mail_service.go#L117-L144) + - `SaveSMTPConfig()` uses upsert pattern + - **POTENTIAL BUG**: Uses `First()` then conditional `Create()`/`Updates()` separately + +**Root Cause Hypothesis**: +The `SaveSMTPConfig` method has a problematic upsert pattern: +```go +// Current pattern in mail_service.go lines 127-143: +result := s.db.Where("key = ?", key).First(&models.Setting{}) +if result.Error == gorm.ErrRecordNotFound { + s.db.Create(&setting) // Creates new +} else { + s.db.Model(&models.Setting{}).Where("key = ?", key).Updates(...) // Updates existing +} +``` + +**Issues identified**: +1. No transaction wrapping - partial failures possible +2. `Updates()` with map may not update all fields correctly +3. If `First()` returns an error other than `ErrRecordNotFound` (e.g. a connection failure), the else branch still runs and issues `Updates()` even though the existence check never succeeded +4. Race condition between read and write operations --- -## 2. 
Current Charon CrowdSec Integration Analysis +## Implementation Plan -### 2.1 Integration Points +### Task 1: Implement NPM Import Backend Handler -| Component | Location | Description | -|-----------|----------|-------------| -| **Core Package** | [backend/internal/crowdsec/](backend/internal/crowdsec/) | CrowdSec integration library | -| **API Handler** | [backend/internal/api/handlers/crowdsec_handler.go](backend/internal/api/handlers/crowdsec_handler.go) | REST API endpoints | -| **Startup Service** | [backend/internal/services/crowdsec_startup.go](backend/internal/services/) | Initialization logic | -| **Dockerfile** | [Dockerfile](../../Dockerfile) (lines 199-290) | Source build configuration | +**File**: `backend/internal/api/handlers/npm_import_handler.go` (NEW) -### 2.2 Key Files in crowdsec Package +#### 1.1 Create NPM Parser Model -| File | Purpose | Functions to Verify | -|------|---------|---------------------| -| `registration.go` | Bouncer registration, LAPI health | `EnsureBouncerRegistered`, `CheckLAPIHealth`, `GetLAPIVersion` | -| `hub_sync.go` | Hub index fetching, preset pull/apply | `FetchIndex`, `Pull`, `Apply`, `extractTarGz` | -| `hub_cache.go` | Preset caching with TTL | `Store`, `Load`, `Evict` | -| `console_enroll.go` | Console enrollment | `Enroll`, `Status`, `checkLAPIAvailable` | -| `presets.go` | Curated preset definitions | `ListCuratedPresets`, `FindPreset` | +```go +// NPMExport represents the Nginx Proxy Manager export format +type NPMExport struct { + ProxyHosts []NPMProxyHost `json:"proxy_hosts"` + AccessLists []NPMAccessList `json:"access_lists"` + Certificates []NPMCertificate `json:"certificates"` +} -### 2.3 Handler Functions (crowdsec_handler.go) +type NPMProxyHost struct { + DomainNames []string `json:"domain_names"` + ForwardScheme string `json:"forward_scheme"` + ForwardHost string `json:"forward_host"` + ForwardPort int `json:"forward_port"` + CachingEnabled bool `json:"caching_enabled"` + BlockExploits bool 
`json:"block_exploits"` + AllowWebsocket bool `json:"allow_websocket_upgrade"` + HTTP2Support bool `json:"http2_support"` + HSTSEnabled bool `json:"hsts_enabled"` + HSTSSubdomains bool `json:"hsts_subdomains"` + SSLForced bool `json:"ssl_forced"` + Enabled bool `json:"enabled"` +} -| Handler | Line | API Endpoint | -|---------|------|--------------| -| `Start` | 188 | POST /api/crowdsec/start | -| `Stop` | 290 | POST /api/crowdsec/stop | -| `Status` | 317 | GET /api/crowdsec/status | -| `ImportConfig` | 346 | POST /api/crowdsec/import | -| `ExportConfig` | 417 | GET /api/crowdsec/export | -| `ListFiles` | 486 | GET /api/crowdsec/files | -| `ReadFile` | 513 | GET /api/crowdsec/files/:path | -| `WriteFile` | 540 | PUT /api/crowdsec/files/:path | -| `ListPresets` | 580 | GET /api/crowdsec/presets | -| `PullPreset` | 662 | POST /api/crowdsec/presets/:slug/pull | -| `ApplyPreset` | 748 | POST /api/crowdsec/presets/:slug/apply | -| `ConsoleEnroll` | 876 | POST /api/crowdsec/console/enroll | -| `ConsoleStatus` | 932 | GET /api/crowdsec/console/status | -| `DeleteConsoleEnrollment` | 954 | DELETE /api/crowdsec/console/enrollment | -| `GetCachedPreset` | 975 | GET /api/crowdsec/presets/:slug | -| `GetLAPIDecisions` | 1077 | GET /api/crowdsec/lapi/decisions | -| `CheckLAPIHealth` | 1231 | GET /api/crowdsec/lapi/health | +type NPMAccessList struct { + Name string `json:"name"` + Items []NPMAccessItem `json:"items"` +} -### 2.4 Docker Configuration +type NPMAccessItem struct { + Type string `json:"type"` // "allow" or "deny" + Address string `json:"address"` +} -**Dockerfile CrowdSec Section** (lines 199-290): -- Current version: `CROWDSEC_VERSION=1.7.4` -- Build method: Source compilation with Go 1.25.6 -- Dependency patches applied: - - `github.com/expr-lang/expr@v1.17.7` - - `golang.org/x/crypto@v0.46.0` -- Fix for expr-lang v1.17.7 compatibility (sed replacement) +type NPMCertificate struct { + NiceName string `json:"nice_name"` + DomainNames []string `json:"domain_names"` 
+ Provider string `json:"provider"` +} +``` -**Docker Compose Files**: -- `.docker/compose/docker-compose.yml` - Production config with crowdsec_data volume -- `.docker/compose/docker-compose.local.yml` - Local development -- `.docker/compose/docker-compose.playwright.yml` - E2E testing (crowdsec disabled) +#### 1.2 Create NPM Import Handler + +**File**: `backend/internal/api/handlers/npm_import_handler.go` + +```go +package handlers + +import ( + "encoding/json" + "net/http" + "strings" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "gorm.io/gorm" + + "github.com/Wikid82/charon/backend/internal/caddy" + "github.com/Wikid82/charon/backend/internal/models" + "github.com/Wikid82/charon/backend/internal/services" +) + +type NPMImportHandler struct { + db *gorm.DB + proxyHostSvc *services.ProxyHostService +} + +func NewNPMImportHandler(db *gorm.DB) *NPMImportHandler { + return &NPMImportHandler{ + db: db, + proxyHostSvc: services.NewProxyHostService(db), + } +} + +func (h *NPMImportHandler) RegisterRoutes(router *gin.RouterGroup) { + router.POST("/import/npm/upload", h.Upload) + router.POST("/import/npm/commit", h.Commit) +} + +// Upload handles NPM export JSON upload and returns preview +func (h *NPMImportHandler) Upload(c *gin.Context) { + var req struct { + Content string `json:"content" binding:"required"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Parse NPM export JSON + var npmExport NPMExport + if err := json.Unmarshal([]byte(req.Content), &npmExport); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid NPM export JSON"}) + return + } + + // Convert to internal format + result := h.convertNPMToImportResult(npmExport) + + // Check for conflicts with existing hosts + existingHosts, _ := h.proxyHostSvc.List() + existingDomainsMap := make(map[string]models.ProxyHost) + for _, eh := range existingHosts { + existingDomainsMap[eh.DomainNames] = eh 
+ } + + conflictDetails := make(map[string]gin.H) + for _, ph := range result.Hosts { + if existing, found := existingDomainsMap[ph.DomainNames]; found { + result.Conflicts = append(result.Conflicts, ph.DomainNames) + conflictDetails[ph.DomainNames] = gin.H{ + "existing": gin.H{ + "forward_scheme": existing.ForwardScheme, + "forward_host": existing.ForwardHost, + "forward_port": existing.ForwardPort, + }, + "imported": gin.H{ + "forward_scheme": ph.ForwardScheme, + "forward_host": ph.ForwardHost, + "forward_port": ph.ForwardPort, + }, + } + } + } + + sid := uuid.NewString() + c.JSON(http.StatusOK, gin.H{ + "session": gin.H{"id": sid, "state": "transient", "source": "npm"}, + "conflict_details": conflictDetails, + "preview": result, + }) +} + +func (h *NPMImportHandler) convertNPMToImportResult(export NPMExport) *caddy.ImportResult { + result := &caddy.ImportResult{ + Hosts: []caddy.ParsedHost{}, + Conflicts: []string{}, + Errors: []string{}, + } + + for _, proxy := range export.ProxyHosts { + // Join domain names with comma for storage + domains := strings.Join(proxy.DomainNames, ", ") + + host := caddy.ParsedHost{ + DomainNames: domains, + ForwardScheme: proxy.ForwardScheme, + ForwardHost: proxy.ForwardHost, + ForwardPort: proxy.ForwardPort, + SSLForced: proxy.SSLForced, + WebsocketSupport: proxy.AllowWebsocket, + } + + if host.ForwardScheme == "" { + host.ForwardScheme = "http" + } + if host.ForwardPort == 0 { + host.ForwardPort = 80 + } + + result.Hosts = append(result.Hosts, host) + } + + return result +} +``` + +#### 1.3 Register NPM Import Routes + +**File**: `backend/internal/api/routes/routes.go` + +Add to the `Register` function: +```go +// NPM Import Handler +npmImportHandler := handlers.NewNPMImportHandler(db) +npmImportHandler.RegisterRoutes(api) +``` + +### Task 2: Implement JSON Import Backend Handler + +**File**: `backend/internal/api/handlers/json_import_handler.go` (NEW) + +The JSON import handler will accept a generic Charon export format: + 
+```go +package handlers + +// CharonExport represents a generic Charon configuration export +type CharonExport struct { + Version string `json:"version"` + ExportedAt string `json:"exported_at"` + ProxyHosts []CharonProxyHost `json:"proxy_hosts"` + AccessLists []CharonAccessList `json:"access_lists"` + DNSRecords []CharonDNSRecord `json:"dns_records"` +} + +type JSONImportHandler struct { + db *gorm.DB + proxyHostSvc *services.ProxyHostService +} + +func NewJSONImportHandler(db *gorm.DB) *JSONImportHandler { + return &JSONImportHandler{ + db: db, + proxyHostSvc: services.NewProxyHostService(db), + } +} + +func (h *JSONImportHandler) RegisterRoutes(router *gin.RouterGroup) { + router.POST("/import/json/upload", h.Upload) + router.POST("/import/json/commit", h.Commit) +} + +// Upload validates and previews JSON import +func (h *JSONImportHandler) Upload(c *gin.Context) { + var req struct { + Content string `json:"content" binding:"required"` + } + + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + // Try to parse as Charon export format + var charonExport CharonExport + if err := json.Unmarshal([]byte(req.Content), &charonExport); err != nil { + // Fallback: try NPM format + var npmExport NPMExport + if err := json.Unmarshal([]byte(req.Content), &npmExport); err != nil { + c.JSON(http.StatusBadRequest, gin.H{ + "error": "Invalid JSON format. Expected Charon or NPM export format.", + }) + return + } + // Convert NPM to import result + // ... (similar to NPM handler) + } + + // Convert Charon export to import result + result := h.convertCharonToImportResult(charonExport) + // ... 
(conflict checking and response) +} +``` + +### Task 3: Implement Frontend Routes + +#### 3.1 Create ImportNPM Page + +**File**: `frontend/src/pages/ImportNPM.tsx` (NEW) + +```tsx +import { useState } from 'react' +import { useNavigate } from 'react-router-dom' +import { useTranslation } from 'react-i18next' +import { useNPMImport } from '../hooks/useNPMImport' +import ImportReviewTable from '../components/ImportReviewTable' +import ImportSuccessModal from '../components/dialogs/ImportSuccessModal' + +export default function ImportNPM() { + const { t } = useTranslation() + const navigate = useNavigate() + const { preview, loading, error, upload, commit, commitResult, clearCommitResult } = useNPMImport() + const [content, setContent] = useState('') + const [showReview, setShowReview] = useState(false) + const [showSuccessModal, setShowSuccessModal] = useState(false) + + const handleUpload = async () => { + if (!content.trim()) { + alert(t('importNPM.enterContent')) + return + } + + // Validate JSON + try { + JSON.parse(content) + } catch { + alert(t('importNPM.invalidJSON')) + return + } + + try { + await upload(content) + setShowReview(true) + } catch { + // Error handled by hook + } + } + + // ... (rest follows ImportCaddy pattern) + + return ( +
{t('importJSON.description')}
+| + {t('proxyHosts.domainNames')} + | ++ {t('proxyHosts.forwardHost')} + | ++ {t('proxyHosts.forwardPort')} + | ++ {t('proxyHosts.sslForced')} + | ++ {t('common.status')} + | + {preview.preview.conflicts.length > 0 && ( ++ {t('common.actions')} + | + )} +
|---|---|---|---|---|---|
| {host.domain_names} | ++ {host.forward_scheme}://{host.forward_host} + | +{host.forward_port} | ++ {host.ssl_forced ? t('common.yes') : t('common.no')} + | ++ {isConflict ? ( + + {t('importJSON.conflict')} + + ) : ( + + {t('importJSON.new')} + + )} + | + {preview.preview.conflicts.length > 0 && ( ++ {isConflict && ( + + )} + | + )} +
{t('importNPM.description')}
+| + {t('proxyHosts.domainNames')} + | ++ {t('proxyHosts.forwardHost')} + | ++ {t('proxyHosts.forwardPort')} + | ++ {t('proxyHosts.sslForced')} + | ++ {t('common.status')} + | + {preview.preview.conflicts.length > 0 && ( ++ {t('common.actions')} + | + )} +
|---|---|---|---|---|---|
| {host.domain_names} | ++ {host.forward_scheme}://{host.forward_host} + | +{host.forward_port} | ++ {host.ssl_forced ? t('common.yes') : t('common.no')} + | ++ {isConflict ? ( + + {t('importNPM.conflict')} + + ) : ( + + {t('importNPM.new')} + + )} + | + {preview.preview.conflicts.length > 0 && ( ++ {isConflict && ( + + )} + | + )} +