feat: implement NPM/JSON import routes and fix SMTP persistence
Phase 3 of skipped-tests remediation — enables 7 previously skipped E2E tests.

Backend:
- Add NPM import handler with session-based upload/commit/cancel
- Add JSON import handler with Charon/NPM format support
- Fix SMTP SaveSMTPConfig using transaction-based upsert
- Add comprehensive unit tests for new handlers

Frontend:
- Add ImportNPM page component following ImportCaddy pattern
- Add ImportJSON page component with format detection
- Add useNPMImport and useJSONImport React Query hooks
- Add API clients for npm/json import endpoints
- Register routes in App.tsx and navigation in Layout.tsx
- Add i18n keys for new import pages

Tests:
- 7 E2E tests now enabled and passing
- Backend coverage: 86.8%
- Reduced total skipped tests from 98 to 91

Closes: Phase 3 of skipped-tests-remediation plan
This commit is contained in:
@@ -95,7 +95,12 @@ See exactly what's happening with live request logs, uptime monitoring, and inst
|
||||
|
||||
### 📥 **Migration Made Easy**
|
||||
|
||||
Import your existing Caddy configurations with one click. Already invested in another reverse proxy? Bring your work with you.
|
||||
Import your existing configurations with one click:
|
||||
- **Caddyfile Import** — Migrate from other Caddy setups
|
||||
- **NPM Import** — Import from Nginx Proxy Manager exports
|
||||
- **JSON Import** — Restore from Charon backups or generic JSON configs
|
||||
|
||||
Already invested in another reverse proxy? Bring your work with you.
|
||||
|
||||
### ⚡ **Live Configuration Changes**
|
||||
|
||||
|
||||
@@ -0,0 +1,516 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/google/uuid"
|
||||
"gorm.io/gorm"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/caddy"
|
||||
"github.com/Wikid82/charon/backend/internal/models"
|
||||
"github.com/Wikid82/charon/backend/internal/services"
|
||||
)
|
||||
|
||||
// jsonImportSession stores the parsed content for a JSON import session,
// keeping the uploaded export in memory between the upload (preview) step
// and the later commit or cancel step.
type jsonImportSession struct {
	SourceType   string        // "charon" or "npm"; selects which export field below is populated
	CharonExport *CharonExport // set only when SourceType == "charon"
	NPMExport    *NPMExport    // set only when SourceType == "npm"
}

// jsonImportSessions stores parsed exports keyed by session UUID.
// All access must hold jsonImportSessionsMu — handlers run concurrently.
// TODO: Implement session expiration to prevent memory leaks (e.g., TTL-based cleanup).
var (
	jsonImportSessions   = make(map[string]jsonImportSession)
	jsonImportSessionsMu sync.RWMutex
)
|
||||
|
||||
// CharonExport represents the top-level structure of a Charon export file.
// The presence of Version or ProxyHosts is what Upload uses to distinguish
// this format from an NPM export (see isCharonFormat).
type CharonExport struct {
	Version     string             `json:"version"`
	ExportedAt  time.Time          `json:"exported_at"`
	ProxyHosts  []CharonProxyHost  `json:"proxy_hosts"`
	AccessLists []CharonAccessList `json:"access_lists"`
	DNSRecords  []CharonDNSRecord  `json:"dns_records"`
}

// CharonProxyHost represents a proxy host in Charon export format.
// Note DomainNames is a single string here (NPM exports use a string array),
// which is why an NPM payload fails to decode as a Charon export and
// triggers the fallback path in Upload.
type CharonProxyHost struct {
	UUID             string `json:"uuid"`
	Name             string `json:"name"`
	DomainNames      string `json:"domain_names"`
	ForwardScheme    string `json:"forward_scheme"` // defaulted to "http" during conversion when empty
	ForwardHost      string `json:"forward_host"`
	ForwardPort      int    `json:"forward_port"` // defaulted to 80 during conversion when zero
	SSLForced        bool   `json:"ssl_forced"`
	HTTP2Support     bool   `json:"http2_support"`
	HSTSEnabled      bool   `json:"hsts_enabled"`
	HSTSSubdomains   bool   `json:"hsts_subdomains"`
	BlockExploits    bool   `json:"block_exploits"`
	WebsocketSupport bool   `json:"websocket_support"`
	Application      string `json:"application"`
	Enabled          bool   `json:"enabled"`
	AdvancedConfig   string `json:"advanced_config"` // free-form; a warning is emitted when non-empty and not valid JSON
	WAFDisabled      bool   `json:"waf_disabled"`
	UseDNSChallenge  bool   `json:"use_dns_challenge"`
}

// CharonAccessList represents an access list in Charon export format.
// Currently only surfaced as a count in the upload preview; entries are not
// imported by the commit path visible in this file.
type CharonAccessList struct {
	UUID             string `json:"uuid"`
	Name             string `json:"name"`
	Description      string `json:"description"`
	Type             string `json:"type"`
	IPRules          string `json:"ip_rules"`
	CountryCodes     string `json:"country_codes"`
	LocalNetworkOnly bool   `json:"local_network_only"`
	Enabled          bool   `json:"enabled"`
}

// CharonDNSRecord represents a DNS record in Charon export format.
// Like access lists, only counted in the preview by this handler.
type CharonDNSRecord struct {
	UUID       string `json:"uuid"`
	Name       string `json:"name"`
	Type       string `json:"type"`
	Value      string `json:"value"`
	TTL        int    `json:"ttl"`
	ProviderID uint   `json:"provider_id"`
}
|
||||
|
||||
// JSONImportHandler handles JSON configuration imports (both Charon and NPM formats).
|
||||
type JSONImportHandler struct {
|
||||
db *gorm.DB
|
||||
proxyHostSvc *services.ProxyHostService
|
||||
}
|
||||
|
||||
// NewJSONImportHandler creates a new JSON import handler.
|
||||
func NewJSONImportHandler(db *gorm.DB) *JSONImportHandler {
|
||||
return &JSONImportHandler{
|
||||
db: db,
|
||||
proxyHostSvc: services.NewProxyHostService(db),
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterRoutes registers JSON import routes.
|
||||
func (h *JSONImportHandler) RegisterRoutes(router *gin.RouterGroup) {
|
||||
router.POST("/import/json/upload", h.Upload)
|
||||
router.POST("/import/json/commit", h.Commit)
|
||||
router.POST("/import/json/cancel", h.Cancel)
|
||||
}
|
||||
|
||||
// Upload parses a JSON export (Charon or NPM format) and returns a preview.
|
||||
func (h *JSONImportHandler) Upload(c *gin.Context) {
|
||||
var req struct {
|
||||
Content string `json:"content" binding:"required"`
|
||||
}
|
||||
|
||||
if err := c.ShouldBindJSON(&req); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
return
|
||||
}
|
||||
|
||||
// Try Charon format first
|
||||
var charonExport CharonExport
|
||||
if err := json.Unmarshal([]byte(req.Content), &charonExport); err == nil && h.isCharonFormat(charonExport) {
|
||||
h.handleCharonUpload(c, charonExport)
|
||||
return
|
||||
}
|
||||
|
||||
// Fall back to NPM format
|
||||
var npmExport NPMExport
|
||||
if err := json.Unmarshal([]byte(req.Content), &npmExport); err == nil && len(npmExport.ProxyHosts) > 0 {
|
||||
h.handleNPMUpload(c, npmExport)
|
||||
return
|
||||
}
|
||||
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "unrecognized JSON format - must be Charon or NPM export"})
|
||||
}
|
||||
|
||||
// isCharonFormat checks if the export is in Charon format.
|
||||
func (h *JSONImportHandler) isCharonFormat(export CharonExport) bool {
|
||||
return export.Version != "" || len(export.ProxyHosts) > 0
|
||||
}
|
||||
|
||||
// handleCharonUpload processes a Charon format export.
|
||||
func (h *JSONImportHandler) handleCharonUpload(c *gin.Context, export CharonExport) {
|
||||
result := h.convertCharonToImportResult(export)
|
||||
|
||||
if len(result.Hosts) == 0 {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "no proxy hosts found in Charon export"})
|
||||
return
|
||||
}
|
||||
|
||||
existingHosts, _ := h.proxyHostSvc.List()
|
||||
existingDomainsMap := make(map[string]models.ProxyHost)
|
||||
for _, eh := range existingHosts {
|
||||
existingDomainsMap[eh.DomainNames] = eh
|
||||
}
|
||||
|
||||
conflictDetails := make(map[string]gin.H)
|
||||
for _, ph := range result.Hosts {
|
||||
if existing, found := existingDomainsMap[ph.DomainNames]; found {
|
||||
result.Conflicts = append(result.Conflicts, ph.DomainNames)
|
||||
conflictDetails[ph.DomainNames] = gin.H{
|
||||
"existing": gin.H{
|
||||
"forward_scheme": existing.ForwardScheme,
|
||||
"forward_host": existing.ForwardHost,
|
||||
"forward_port": existing.ForwardPort,
|
||||
"ssl_forced": existing.SSLForced,
|
||||
"websocket": existing.WebsocketSupport,
|
||||
"enabled": existing.Enabled,
|
||||
},
|
||||
"imported": gin.H{
|
||||
"forward_scheme": ph.ForwardScheme,
|
||||
"forward_host": ph.ForwardHost,
|
||||
"forward_port": ph.ForwardPort,
|
||||
"ssl_forced": ph.SSLForced,
|
||||
"websocket": ph.WebsocketSupport,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sid := uuid.NewString()
|
||||
|
||||
// Store the parsed export in session storage for later commit
|
||||
jsonImportSessionsMu.Lock()
|
||||
jsonImportSessions[sid] = jsonImportSession{
|
||||
SourceType: "charon",
|
||||
CharonExport: &export,
|
||||
}
|
||||
jsonImportSessionsMu.Unlock()
|
||||
|
||||
c.JSON(http.StatusOK, gin.H{
|
||||
"session": gin.H{"id": sid, "state": "transient", "source_type": "charon"},
|
||||
"preview": result,
|
||||
"conflict_details": conflictDetails,
|
||||
"charon_export": gin.H{
|
||||
"version": export.Version,
|
||||
"exported_at": export.ExportedAt,
|
||||
"proxy_hosts": len(export.ProxyHosts),
|
||||
"access_lists": len(export.AccessLists),
|
||||
"dns_records": len(export.DNSRecords),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// handleNPMUpload processes an NPM format export.
|
||||
func (h *JSONImportHandler) handleNPMUpload(c *gin.Context, export NPMExport) {
|
||||
npmHandler := NewNPMImportHandler(h.db)
|
||||
result := npmHandler.convertNPMToImportResult(export)
|
||||
|
||||
if len(result.Hosts) == 0 {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "no proxy hosts found in NPM export"})
|
||||
return
|
||||
}
|
||||
|
||||
existingHosts, _ := h.proxyHostSvc.List()
|
||||
existingDomainsMap := make(map[string]models.ProxyHost)
|
||||
for _, eh := range existingHosts {
|
||||
existingDomainsMap[eh.DomainNames] = eh
|
||||
}
|
||||
|
||||
conflictDetails := make(map[string]gin.H)
|
||||
for _, ph := range result.Hosts {
|
||||
if existing, found := existingDomainsMap[ph.DomainNames]; found {
|
||||
result.Conflicts = append(result.Conflicts, ph.DomainNames)
|
||||
conflictDetails[ph.DomainNames] = gin.H{
|
||||
"existing": gin.H{
|
||||
"forward_scheme": existing.ForwardScheme,
|
||||
"forward_host": existing.ForwardHost,
|
||||
"forward_port": existing.ForwardPort,
|
||||
"ssl_forced": existing.SSLForced,
|
||||
"websocket": existing.WebsocketSupport,
|
||||
"enabled": existing.Enabled,
|
||||
},
|
||||
"imported": gin.H{
|
||||
"forward_scheme": ph.ForwardScheme,
|
||||
"forward_host": ph.ForwardHost,
|
||||
"forward_port": ph.ForwardPort,
|
||||
"ssl_forced": ph.SSLForced,
|
||||
"websocket": ph.WebsocketSupport,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sid := uuid.NewString()
|
||||
|
||||
// Store the parsed export in session storage for later commit
|
||||
jsonImportSessionsMu.Lock()
|
||||
jsonImportSessions[sid] = jsonImportSession{
|
||||
SourceType: "npm",
|
||||
NPMExport: &export,
|
||||
}
|
||||
jsonImportSessionsMu.Unlock()
|
||||
|
||||
c.JSON(http.StatusOK, gin.H{
|
||||
"session": gin.H{"id": sid, "state": "transient", "source_type": "npm"},
|
||||
"preview": result,
|
||||
"conflict_details": conflictDetails,
|
||||
"npm_export": gin.H{
|
||||
"proxy_hosts": len(export.ProxyHosts),
|
||||
"access_lists": len(export.AccessLists),
|
||||
"certificates": len(export.Certificates),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// Commit finalizes the JSON import with user's conflict resolutions.
|
||||
func (h *JSONImportHandler) Commit(c *gin.Context) {
|
||||
var req struct {
|
||||
SessionUUID string `json:"session_uuid" binding:"required"`
|
||||
Resolutions map[string]string `json:"resolutions"`
|
||||
Names map[string]string `json:"names"`
|
||||
}
|
||||
|
||||
if err := c.ShouldBindJSON(&req); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
return
|
||||
}
|
||||
|
||||
// Retrieve the stored session
|
||||
jsonImportSessionsMu.RLock()
|
||||
session, ok := jsonImportSessions[req.SessionUUID]
|
||||
jsonImportSessionsMu.RUnlock()
|
||||
|
||||
if !ok {
|
||||
c.JSON(http.StatusNotFound, gin.H{"error": "session not found or expired"})
|
||||
return
|
||||
}
|
||||
|
||||
// Route to the appropriate commit handler based on source type
|
||||
if session.SourceType == "charon" && session.CharonExport != nil {
|
||||
h.commitCharonImport(c, *session.CharonExport, req.Resolutions, req.Names, req.SessionUUID)
|
||||
return
|
||||
}
|
||||
|
||||
if session.SourceType == "npm" && session.NPMExport != nil {
|
||||
h.commitNPMImport(c, *session.NPMExport, req.Resolutions, req.Names, req.SessionUUID)
|
||||
return
|
||||
}
|
||||
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid session state"})
|
||||
}
|
||||
|
||||
// Cancel cancels a JSON import session and cleans up resources.
|
||||
func (h *JSONImportHandler) Cancel(c *gin.Context) {
|
||||
var req struct {
|
||||
SessionUUID string `json:"session_uuid"`
|
||||
}
|
||||
|
||||
if err := c.ShouldBindJSON(&req); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
return
|
||||
}
|
||||
|
||||
// Clean up session if it exists
|
||||
jsonImportSessionsMu.Lock()
|
||||
delete(jsonImportSessions, req.SessionUUID)
|
||||
jsonImportSessionsMu.Unlock()
|
||||
|
||||
c.JSON(http.StatusOK, gin.H{"status": "cancelled"})
|
||||
}
|
||||
|
||||
// commitCharonImport commits a Charon format import.
|
||||
func (h *JSONImportHandler) commitCharonImport(c *gin.Context, export CharonExport, resolutions, names map[string]string, sessionUUID string) {
|
||||
result := h.convertCharonToImportResult(export)
|
||||
proxyHosts := caddy.ConvertToProxyHosts(result.Hosts)
|
||||
|
||||
created := 0
|
||||
updated := 0
|
||||
skipped := 0
|
||||
errors := []string{}
|
||||
|
||||
existingHosts, _ := h.proxyHostSvc.List()
|
||||
existingMap := make(map[string]*models.ProxyHost)
|
||||
for i := range existingHosts {
|
||||
existingMap[existingHosts[i].DomainNames] = &existingHosts[i]
|
||||
}
|
||||
|
||||
for _, host := range proxyHosts {
|
||||
action := resolutions[host.DomainNames]
|
||||
|
||||
if customName, ok := names[host.DomainNames]; ok && customName != "" {
|
||||
host.Name = customName
|
||||
}
|
||||
|
||||
if action == "skip" || action == "keep" {
|
||||
skipped++
|
||||
continue
|
||||
}
|
||||
|
||||
if action == "rename" {
|
||||
host.DomainNames += "-imported"
|
||||
}
|
||||
|
||||
if action == "overwrite" {
|
||||
if existing, found := existingMap[host.DomainNames]; found {
|
||||
host.ID = existing.ID
|
||||
host.UUID = existing.UUID
|
||||
host.CertificateID = existing.CertificateID
|
||||
host.CreatedAt = existing.CreatedAt
|
||||
|
||||
if err := h.proxyHostSvc.Update(&host); err != nil {
|
||||
errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error()))
|
||||
} else {
|
||||
updated++
|
||||
}
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
host.UUID = uuid.NewString()
|
||||
if err := h.proxyHostSvc.Create(&host); err != nil {
|
||||
errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error()))
|
||||
} else {
|
||||
created++
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up session after successful commit
|
||||
jsonImportSessionsMu.Lock()
|
||||
delete(jsonImportSessions, sessionUUID)
|
||||
jsonImportSessionsMu.Unlock()
|
||||
|
||||
c.JSON(http.StatusOK, gin.H{
|
||||
"created": created,
|
||||
"updated": updated,
|
||||
"skipped": skipped,
|
||||
"errors": errors,
|
||||
})
|
||||
}
|
||||
|
||||
// commitNPMImport commits an NPM format import.
|
||||
func (h *JSONImportHandler) commitNPMImport(c *gin.Context, export NPMExport, resolutions, names map[string]string, sessionUUID string) {
|
||||
npmHandler := NewNPMImportHandler(h.db)
|
||||
result := npmHandler.convertNPMToImportResult(export)
|
||||
proxyHosts := caddy.ConvertToProxyHosts(result.Hosts)
|
||||
|
||||
created := 0
|
||||
updated := 0
|
||||
skipped := 0
|
||||
errors := []string{}
|
||||
|
||||
existingHosts, _ := h.proxyHostSvc.List()
|
||||
existingMap := make(map[string]*models.ProxyHost)
|
||||
for i := range existingHosts {
|
||||
existingMap[existingHosts[i].DomainNames] = &existingHosts[i]
|
||||
}
|
||||
|
||||
for _, host := range proxyHosts {
|
||||
action := resolutions[host.DomainNames]
|
||||
|
||||
if customName, ok := names[host.DomainNames]; ok && customName != "" {
|
||||
host.Name = customName
|
||||
}
|
||||
|
||||
if action == "skip" || action == "keep" {
|
||||
skipped++
|
||||
continue
|
||||
}
|
||||
|
||||
if action == "rename" {
|
||||
host.DomainNames += "-imported"
|
||||
}
|
||||
|
||||
if action == "overwrite" {
|
||||
if existing, found := existingMap[host.DomainNames]; found {
|
||||
host.ID = existing.ID
|
||||
host.UUID = existing.UUID
|
||||
host.CertificateID = existing.CertificateID
|
||||
host.CreatedAt = existing.CreatedAt
|
||||
|
||||
if err := h.proxyHostSvc.Update(&host); err != nil {
|
||||
errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error()))
|
||||
} else {
|
||||
updated++
|
||||
}
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
host.UUID = uuid.NewString()
|
||||
if err := h.proxyHostSvc.Create(&host); err != nil {
|
||||
errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error()))
|
||||
} else {
|
||||
created++
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up session after successful commit
|
||||
jsonImportSessionsMu.Lock()
|
||||
delete(jsonImportSessions, sessionUUID)
|
||||
jsonImportSessionsMu.Unlock()
|
||||
|
||||
c.JSON(http.StatusOK, gin.H{
|
||||
"created": created,
|
||||
"updated": updated,
|
||||
"skipped": skipped,
|
||||
"errors": errors,
|
||||
})
|
||||
}
|
||||
|
||||
// convertCharonToImportResult converts Charon export format to ImportResult.
|
||||
func (h *JSONImportHandler) convertCharonToImportResult(export CharonExport) *caddy.ImportResult {
|
||||
result := &caddy.ImportResult{
|
||||
Hosts: []caddy.ParsedHost{},
|
||||
Conflicts: []string{},
|
||||
Errors: []string{},
|
||||
}
|
||||
|
||||
for _, ch := range export.ProxyHosts {
|
||||
if ch.DomainNames == "" {
|
||||
result.Errors = append(result.Errors, fmt.Sprintf("host %s has no domain names", ch.UUID))
|
||||
continue
|
||||
}
|
||||
|
||||
scheme := ch.ForwardScheme
|
||||
if scheme == "" {
|
||||
scheme = "http"
|
||||
}
|
||||
|
||||
port := ch.ForwardPort
|
||||
if port == 0 {
|
||||
port = 80
|
||||
}
|
||||
|
||||
warnings := []string{}
|
||||
if ch.AdvancedConfig != "" && !isValidJSON(ch.AdvancedConfig) {
|
||||
warnings = append(warnings, "Advanced config may need review")
|
||||
}
|
||||
|
||||
host := caddy.ParsedHost{
|
||||
DomainNames: ch.DomainNames,
|
||||
ForwardScheme: scheme,
|
||||
ForwardHost: ch.ForwardHost,
|
||||
ForwardPort: port,
|
||||
SSLForced: ch.SSLForced,
|
||||
WebsocketSupport: ch.WebsocketSupport,
|
||||
Warnings: warnings,
|
||||
}
|
||||
|
||||
rawJSON, _ := json.Marshal(ch)
|
||||
host.RawJSON = string(rawJSON)
|
||||
|
||||
result.Hosts = append(result.Hosts, host)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// isValidJSON reports whether s contains a single well-formed JSON value.
// An empty (or all-whitespace) string is deliberately treated as valid so
// an absent advanced config never triggers a warning upstream.
func isValidJSON(s string) bool {
	trimmed := strings.TrimSpace(s)
	if trimmed == "" {
		return true
	}
	return json.Valid([]byte(trimmed))
}
|
||||
@@ -0,0 +1,600 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/models"
|
||||
)
|
||||
|
||||
func setupJSONTestDB(t *testing.T) *gorm.DB {
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
|
||||
err = db.AutoMigrate(&models.ProxyHost{}, &models.Location{}, &models.Setting{})
|
||||
require.NoError(t, err)
|
||||
|
||||
return db
|
||||
}
|
||||
|
||||
func TestNewJSONImportHandler(t *testing.T) {
|
||||
db := setupJSONTestDB(t)
|
||||
handler := NewJSONImportHandler(db)
|
||||
|
||||
assert.NotNil(t, handler)
|
||||
assert.NotNil(t, handler.db)
|
||||
assert.NotNil(t, handler.proxyHostSvc)
|
||||
}
|
||||
|
||||
func TestJSONImportHandler_RegisterRoutes(t *testing.T) {
|
||||
db := setupJSONTestDB(t)
|
||||
handler := NewJSONImportHandler(db)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
router := gin.New()
|
||||
api := router.Group("/api/v1")
|
||||
handler.RegisterRoutes(api)
|
||||
|
||||
routes := router.Routes()
|
||||
routePaths := make(map[string]bool)
|
||||
for _, r := range routes {
|
||||
routePaths[r.Method+":"+r.Path] = true
|
||||
}
|
||||
|
||||
assert.True(t, routePaths["POST:/api/v1/import/json/upload"])
|
||||
assert.True(t, routePaths["POST:/api/v1/import/json/commit"])
|
||||
assert.True(t, routePaths["POST:/api/v1/import/json/cancel"])
|
||||
}
|
||||
|
||||
// TestJSONImportHandler_Upload_CharonFormat verifies that a well-formed
// Charon export is recognized on upload: the response carries a "charon"
// session and a charon_export summary echoing the export's version.
func TestJSONImportHandler_Upload_CharonFormat(t *testing.T) {
	db := setupJSONTestDB(t)
	handler := NewJSONImportHandler(db)

	gin.SetMode(gin.TestMode)
	router := gin.New()
	api := router.Group("/api/v1")
	handler.RegisterRoutes(api)

	// A minimal but representative Charon export: one proxy host and one
	// access list. The non-empty Version marks it as Charon format.
	charonExport := CharonExport{
		Version:    "1.0.0",
		ExportedAt: time.Now(),
		ProxyHosts: []CharonProxyHost{
			{
				UUID:             "test-uuid-1",
				Name:             "Test Host",
				DomainNames:      "example.com",
				ForwardScheme:    "http",
				ForwardHost:      "192.168.1.100",
				ForwardPort:      8080,
				SSLForced:        true,
				WebsocketSupport: true,
				Enabled:          true,
			},
		},
		AccessLists: []CharonAccessList{
			{
				UUID:    "acl-uuid-1",
				Name:    "Test ACL",
				Type:    "whitelist",
				Enabled: true,
			},
		},
	}

	// The API expects the raw export JSON wrapped as {"content": "..."}.
	content, _ := json.Marshal(charonExport)
	body, _ := json.Marshal(map[string]string{"content": string(content)})

	req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()

	router.ServeHTTP(w, req)

	assert.Equal(t, http.StatusOK, w.Code)

	var response map[string]any
	err := json.Unmarshal(w.Body.Bytes(), &response)
	require.NoError(t, err)

	// The session must identify the detected format as Charon.
	assert.Contains(t, response, "session")
	session := response["session"].(map[string]any)
	assert.Equal(t, "charon", session["source_type"])

	// The summary block must echo the export's version.
	assert.Contains(t, response, "charon_export")
	charonInfo := response["charon_export"].(map[string]any)
	assert.Equal(t, "1.0.0", charonInfo["version"])
}
|
||||
|
||||
// TestJSONImportHandler_Upload_NPMFormatFallback verifies that an upload
// that is not a Charon export falls back to NPM-format detection and
// reports "npm" as the session source type.
func TestJSONImportHandler_Upload_NPMFormatFallback(t *testing.T) {
	db := setupJSONTestDB(t)
	handler := NewJSONImportHandler(db)

	gin.SetMode(gin.TestMode)
	router := gin.New()
	api := router.Group("/api/v1")
	handler.RegisterRoutes(api)

	// Minimal NPM export. DomainNames is a string slice here, which is what
	// keeps this payload from decoding as a Charon export.
	npmExport := NPMExport{
		ProxyHosts: []NPMProxyHost{
			{
				ID:            1,
				DomainNames:   []string{"npm-example.com"},
				ForwardScheme: "http",
				ForwardHost:   "192.168.1.100",
				ForwardPort:   8080,
				Enabled:       true,
			},
		},
	}

	// The API expects the raw export JSON wrapped as {"content": "..."}.
	content, _ := json.Marshal(npmExport)
	body, _ := json.Marshal(map[string]string{"content": string(content)})

	req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()

	router.ServeHTTP(w, req)

	assert.Equal(t, http.StatusOK, w.Code)

	var response map[string]any
	err := json.Unmarshal(w.Body.Bytes(), &response)
	require.NoError(t, err)

	// The session must identify the detected format as NPM, and the
	// NPM-specific summary block must be present.
	session := response["session"].(map[string]any)
	assert.Equal(t, "npm", session["source_type"])

	assert.Contains(t, response, "npm_export")
}
|
||||
|
||||
func TestJSONImportHandler_Upload_UnrecognizedFormat(t *testing.T) {
|
||||
db := setupJSONTestDB(t)
|
||||
handler := NewJSONImportHandler(db)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
router := gin.New()
|
||||
api := router.Group("/api/v1")
|
||||
handler.RegisterRoutes(api)
|
||||
|
||||
unknownFormat := map[string]any{
|
||||
"some_field": "some_value",
|
||||
"other": 123,
|
||||
}
|
||||
|
||||
content, _ := json.Marshal(unknownFormat)
|
||||
body, _ := json.Marshal(map[string]string{"content": string(content)})
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
router.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusBadRequest, w.Code)
|
||||
}
|
||||
|
||||
func TestJSONImportHandler_Upload_InvalidJSON(t *testing.T) {
|
||||
db := setupJSONTestDB(t)
|
||||
handler := NewJSONImportHandler(db)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
router := gin.New()
|
||||
api := router.Group("/api/v1")
|
||||
handler.RegisterRoutes(api)
|
||||
|
||||
body, _ := json.Marshal(map[string]string{"content": "{invalid json"})
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
router.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusBadRequest, w.Code)
|
||||
}
|
||||
|
||||
// TestJSONImportHandler_Commit_CharonFormat runs the full upload->commit
// flow for a Charon export: upload yields a session ID, commit creates the
// host, and the custom display name supplied in "names" is persisted.
func TestJSONImportHandler_Commit_CharonFormat(t *testing.T) {
	db := setupJSONTestDB(t)
	handler := NewJSONImportHandler(db)

	gin.SetMode(gin.TestMode)
	router := gin.New()
	api := router.Group("/api/v1")
	handler.RegisterRoutes(api)

	charonExport := CharonExport{
		Version:    "1.0.0",
		ExportedAt: time.Now(),
		ProxyHosts: []CharonProxyHost{
			{
				UUID:          "test-uuid-1",
				Name:          "Test Host",
				DomainNames:   "newcharon.com",
				ForwardScheme: "http",
				ForwardHost:   "192.168.1.100",
				ForwardPort:   8080,
				Enabled:       true,
			},
		},
	}

	// Step 1: Upload to get session ID
	content, _ := json.Marshal(charonExport)
	uploadBody, _ := json.Marshal(map[string]string{"content": string(content)})

	uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(uploadBody))
	uploadReq.Header.Set("Content-Type", "application/json")
	uploadW := httptest.NewRecorder()

	router.ServeHTTP(uploadW, uploadReq)
	require.Equal(t, http.StatusOK, uploadW.Code)

	var uploadResponse map[string]any
	err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse)
	require.NoError(t, err)

	session := uploadResponse["session"].(map[string]any)
	sessionID := session["id"].(string)

	// Step 2: Commit with session UUID, overriding the host's display name.
	commitBody, _ := json.Marshal(map[string]any{
		"session_uuid": sessionID,
		"resolutions":  map[string]string{},
		"names":        map[string]string{"newcharon.com": "Custom Name"},
	})

	req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/commit", bytes.NewReader(commitBody))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()

	router.ServeHTTP(w, req)

	assert.Equal(t, http.StatusOK, w.Code)

	var response map[string]any
	err = json.Unmarshal(w.Body.Bytes(), &response)
	require.NoError(t, err)

	// JSON numbers decode as float64, hence the float comparison.
	assert.Equal(t, float64(1), response["created"])

	// The persisted host must carry the custom name from the commit request.
	var host models.ProxyHost
	db.Where("domain_names = ?", "newcharon.com").First(&host)
	assert.Equal(t, "Custom Name", host.Name)
}
|
||||
|
||||
// TestJSONImportHandler_Commit_NPMFormatFallback runs the full
// upload->commit flow for an NPM export detected via the fallback path,
// asserting exactly one host is created.
func TestJSONImportHandler_Commit_NPMFormatFallback(t *testing.T) {
	db := setupJSONTestDB(t)
	handler := NewJSONImportHandler(db)

	gin.SetMode(gin.TestMode)
	router := gin.New()
	api := router.Group("/api/v1")
	handler.RegisterRoutes(api)

	npmExport := NPMExport{
		ProxyHosts: []NPMProxyHost{
			{
				ID:            1,
				DomainNames:   []string{"newnpm.com"},
				ForwardScheme: "http",
				ForwardHost:   "192.168.1.100",
				ForwardPort:   8080,
				Enabled:       true,
			},
		},
	}

	// Step 1: Upload to get session ID
	content, _ := json.Marshal(npmExport)
	uploadBody, _ := json.Marshal(map[string]string{"content": string(content)})

	uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(uploadBody))
	uploadReq.Header.Set("Content-Type", "application/json")
	uploadW := httptest.NewRecorder()

	router.ServeHTTP(uploadW, uploadReq)
	require.Equal(t, http.StatusOK, uploadW.Code)

	var uploadResponse map[string]any
	err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse)
	require.NoError(t, err)

	session := uploadResponse["session"].(map[string]any)
	sessionID := session["id"].(string)

	// Step 2: Commit with session UUID (no conflicts, no custom names).
	commitBody, _ := json.Marshal(map[string]any{
		"session_uuid": sessionID,
		"resolutions":  map[string]string{},
		"names":        map[string]string{},
	})

	req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/commit", bytes.NewReader(commitBody))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()

	router.ServeHTTP(w, req)

	assert.Equal(t, http.StatusOK, w.Code)

	var response map[string]any
	err = json.Unmarshal(w.Body.Bytes(), &response)
	require.NoError(t, err)

	// JSON numbers decode as float64, hence the float comparison.
	assert.Equal(t, float64(1), response["created"])
}
|
||||
|
||||
func TestJSONImportHandler_Commit_SessionNotFound(t *testing.T) {
|
||||
db := setupJSONTestDB(t)
|
||||
handler := NewJSONImportHandler(db)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
router := gin.New()
|
||||
api := router.Group("/api/v1")
|
||||
handler.RegisterRoutes(api)
|
||||
|
||||
// Try to commit with a non-existent session
|
||||
commitBody, _ := json.Marshal(map[string]any{
|
||||
"session_uuid": "non-existent-uuid",
|
||||
"resolutions": map[string]string{},
|
||||
"names": map[string]string{},
|
||||
})
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/commit", bytes.NewReader(commitBody))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
router.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusNotFound, w.Code)
|
||||
|
||||
var response map[string]any
|
||||
err := json.Unmarshal(w.Body.Bytes(), &response)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Contains(t, response["error"], "session not found")
|
||||
}
|
||||
|
||||
// TestJSONImportHandler_Cancel runs upload->cancel->commit: cancelling a
// session must succeed, and a subsequent commit on the cancelled session
// must fail with 404 (the session has been removed from storage).
func TestJSONImportHandler_Cancel(t *testing.T) {
	db := setupJSONTestDB(t)
	handler := NewJSONImportHandler(db)

	gin.SetMode(gin.TestMode)
	router := gin.New()
	api := router.Group("/api/v1")
	handler.RegisterRoutes(api)

	charonExport := CharonExport{
		Version:    "1.0.0",
		ExportedAt: time.Now(),
		ProxyHosts: []CharonProxyHost{
			{
				UUID:          "cancel-test-uuid",
				Name:          "Cancel Test",
				DomainNames:   "cancel-test.com",
				ForwardScheme: "http",
				ForwardHost:   "192.168.1.100",
				ForwardPort:   8080,
				Enabled:       true,
			},
		},
	}

	// Step 1: Upload to get session ID
	content, _ := json.Marshal(charonExport)
	uploadBody, _ := json.Marshal(map[string]string{"content": string(content)})

	uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(uploadBody))
	uploadReq.Header.Set("Content-Type", "application/json")
	uploadW := httptest.NewRecorder()

	router.ServeHTTP(uploadW, uploadReq)
	require.Equal(t, http.StatusOK, uploadW.Code)

	var uploadResponse map[string]any
	err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse)
	require.NoError(t, err)

	session := uploadResponse["session"].(map[string]any)
	sessionID := session["id"].(string)

	// Step 2: Cancel the session
	cancelBody, _ := json.Marshal(map[string]any{
		"session_uuid": sessionID,
	})

	cancelReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/cancel", bytes.NewReader(cancelBody))
	cancelReq.Header.Set("Content-Type", "application/json")
	cancelW := httptest.NewRecorder()

	router.ServeHTTP(cancelW, cancelReq)

	assert.Equal(t, http.StatusOK, cancelW.Code)

	var cancelResponse map[string]any
	err = json.Unmarshal(cancelW.Body.Bytes(), &cancelResponse)
	require.NoError(t, err)

	assert.Equal(t, "cancelled", cancelResponse["status"])

	// Step 3: Try to commit with cancelled session (should fail)
	commitBody, _ := json.Marshal(map[string]any{
		"session_uuid": sessionID,
		"resolutions":  map[string]string{},
		"names":        map[string]string{},
	})

	commitReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/commit", bytes.NewReader(commitBody))
	commitReq.Header.Set("Content-Type", "application/json")
	commitW := httptest.NewRecorder()

	router.ServeHTTP(commitW, commitReq)

	assert.Equal(t, http.StatusNotFound, commitW.Code)
}
|
||||
|
||||
func TestJSONImportHandler_ConflictDetection(t *testing.T) {
|
||||
db := setupJSONTestDB(t)
|
||||
|
||||
existingHost := models.ProxyHost{
|
||||
UUID: "existing-uuid",
|
||||
DomainNames: "conflict.com",
|
||||
ForwardScheme: "http",
|
||||
ForwardHost: "old-server",
|
||||
ForwardPort: 80,
|
||||
Enabled: true,
|
||||
}
|
||||
db.Create(&existingHost)
|
||||
|
||||
handler := NewJSONImportHandler(db)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
router := gin.New()
|
||||
api := router.Group("/api/v1")
|
||||
handler.RegisterRoutes(api)
|
||||
|
||||
charonExport := CharonExport{
|
||||
Version: "1.0.0",
|
||||
ProxyHosts: []CharonProxyHost{
|
||||
{
|
||||
UUID: "new-uuid",
|
||||
DomainNames: "conflict.com",
|
||||
ForwardScheme: "http",
|
||||
ForwardHost: "new-server",
|
||||
ForwardPort: 8080,
|
||||
Enabled: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
content, _ := json.Marshal(charonExport)
|
||||
body, _ := json.Marshal(map[string]string{"content": string(content)})
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/upload", bytes.NewReader(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
router.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
var response map[string]any
|
||||
err := json.Unmarshal(w.Body.Bytes(), &response)
|
||||
require.NoError(t, err)
|
||||
|
||||
conflictDetails := response["conflict_details"].(map[string]any)
|
||||
assert.Contains(t, conflictDetails, "conflict.com")
|
||||
}
|
||||
|
||||
func TestJSONImportHandler_IsCharonFormat(t *testing.T) {
|
||||
db := setupJSONTestDB(t)
|
||||
handler := NewJSONImportHandler(db)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
export CharonExport
|
||||
expected bool
|
||||
}{
|
||||
{
|
||||
name: "with version",
|
||||
export: CharonExport{Version: "1.0.0"},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
name: "with proxy hosts",
|
||||
export: CharonExport{
|
||||
ProxyHosts: []CharonProxyHost{{DomainNames: "test.com"}},
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
name: "empty export",
|
||||
export: CharonExport{},
|
||||
expected: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := handler.isCharonFormat(tt.export)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsValidJSON(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
input string
|
||||
expected bool
|
||||
}{
|
||||
{"valid object", `{"key": "value"}`, true},
|
||||
{"valid array", `[1, 2, 3]`, true},
|
||||
{"valid string", `"hello"`, true},
|
||||
{"valid number", `123`, true},
|
||||
{"empty string", "", true},
|
||||
{"whitespace only", " ", true},
|
||||
{"invalid json", `{key: "value"}`, false},
|
||||
{"incomplete", `{"key":`, false},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := isValidJSON(tt.input)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TestJSONImportHandler_ConvertCharonToImportResult verifies that a Charon
// export is converted into an ImportResult: hosts with domains become
// parsed hosts, and a host with an empty DomainNames string is reported as
// an error rather than imported.
func TestJSONImportHandler_ConvertCharonToImportResult(t *testing.T) {
	db := setupJSONTestDB(t)
	handler := NewJSONImportHandler(db)

	charonExport := CharonExport{
		Version:    "1.0.0",
		ExportedAt: time.Now(),
		ProxyHosts: []CharonProxyHost{
			// Fully-populated host: should map straight through.
			{
				UUID:             "uuid-1",
				Name:             "Host 1",
				DomainNames:      "host1.com",
				ForwardScheme:    "https",
				ForwardHost:      "backend1",
				ForwardPort:      443,
				SSLForced:        true,
				WebsocketSupport: true,
			},
			// No domain names: should land in result.Errors, not Hosts.
			{
				UUID:          "uuid-2",
				DomainNames:   "",
				ForwardScheme: "http",
				ForwardHost:   "backend2",
				ForwardPort:   80,
			},
		},
	}

	result := handler.convertCharonToImportResult(charonExport)

	// One importable host, one error for the domain-less host.
	assert.Len(t, result.Hosts, 1)
	assert.Len(t, result.Errors, 1)

	host := result.Hosts[0]
	assert.Equal(t, "host1.com", host.DomainNames)
	assert.Equal(t, "https", host.ForwardScheme)
	assert.Equal(t, "backend1", host.ForwardHost)
	assert.Equal(t, 443, host.ForwardPort)
	assert.True(t, host.SSLForced)
	assert.True(t, host.WebsocketSupport)
}
|
||||
@@ -0,0 +1,368 @@
|
||||
package handlers
|
||||
|
||||
import (
	"encoding/json"
	"fmt"
	"net/http"
	"strings"
	"sync"

	"github.com/gin-gonic/gin"
	"github.com/google/uuid"
	"gorm.io/gorm"

	"github.com/Wikid82/charon/backend/internal/caddy"
	"github.com/Wikid82/charon/backend/internal/models"
	"github.com/Wikid82/charon/backend/internal/services"
)
|
||||
|
||||
// npmImportSessions stores parsed NPM exports keyed by session UUID.
// Upload writes an entry, Commit consumes (and deletes) it, Cancel deletes it.
// TODO: Implement session expiration to prevent memory leaks (e.g., TTL-based cleanup).
var (
	npmImportSessions = make(map[string]NPMExport)
	// npmImportSessionsMu guards all access to npmImportSessions.
	npmImportSessionsMu sync.RWMutex
)
|
||||
|
||||
// NPMExport represents the top-level structure of an NPM export file.
// Only ProxyHosts are converted into Charon hosts; AccessLists and
// Certificates are currently surfaced only as counts in the upload preview.
type NPMExport struct {
	ProxyHosts   []NPMProxyHost   `json:"proxy_hosts"`
	AccessLists  []NPMAccessList  `json:"access_lists"`
	Certificates []NPMCertificate `json:"certificates"`
}
|
||||
|
||||
// NPMProxyHost represents a proxy host from NPM export.
//
// Field names presumably mirror the NPM export schema — TODO confirm against
// an actual export. Only a subset is mapped onto Charon hosts (see
// convertNPMToImportResult); the full record is preserved via RawJSON.
type NPMProxyHost struct {
	ID                       int      `json:"id"`
	DomainNames              []string `json:"domain_names"`
	ForwardScheme            string   `json:"forward_scheme"` // defaulted to "http" when empty during conversion
	ForwardHost              string   `json:"forward_host"`
	ForwardPort              int      `json:"forward_port"` // defaulted to 80 when zero during conversion
	CertificateID            *int     `json:"certificate_id"`
	SSLForced                bool     `json:"ssl_forced"`
	CachingEnabled           bool     `json:"caching_enabled"` // not supported by Charon; produces a warning
	BlockExploits            bool     `json:"block_exploits"`
	AdvancedConfig           string   `json:"advanced_config"` // raw nginx config; produces a warning when non-empty
	Meta                     any      `json:"meta"`
	AllowWebsocketUpgrade    bool     `json:"allow_websocket_upgrade"` // mapped to Charon's WebsocketSupport
	HTTP2Support             bool     `json:"http2_support"`
	HSTSEnabled              bool     `json:"hsts_enabled"`
	HSTSSubdomains           bool     `json:"hsts_subdomains"`
	AccessListID             *int     `json:"access_list_id"` // produces a manual-mapping warning when set
	Enabled                  bool     `json:"enabled"`
	Locations                []any    `json:"locations"`        // produces a warning when non-empty
	CustomLocations          []any    `json:"custom_locations"` // produces a warning when non-empty
	OwnerUserID              int      `json:"owner_user_id"`
	UseDefaultLocation       bool     `json:"use_default_location"`
	IPV6                     bool     `json:"ipv6"`
	CreatedOn                string   `json:"created_on"`
	ModifiedOn               string   `json:"modified_on"`
	ForwardDomainName        string   `json:"forward_domain_name"`
	ForwardDomainNameEnabled bool     `json:"forward_domain_name_enabled"`
}
|
||||
|
||||
// NPMAccessList represents an access list from NPM export.
// Access lists are counted in the upload preview but not imported; hosts
// referencing one receive a manual-mapping warning.
type NPMAccessList struct {
	ID                  int             `json:"id"`
	Name                string          `json:"name"`
	PassAuth            int             `json:"pass_auth"`   // NOTE(review): int flags, presumably 0/1 — confirm against NPM schema
	SatisfyAny          int             `json:"satisfy_any"` // NOTE(review): int flags, presumably 0/1 — confirm against NPM schema
	OwnerUserID         int             `json:"owner_user_id"`
	Items               []NPMAccessItem `json:"items"`
	Clients             []NPMAccessItem `json:"clients"`
	ProxyHostsCount     int             `json:"proxy_host_count"`
	CreatedOn           string          `json:"created_on"`
	ModifiedOn          string          `json:"modified_on"`
	AuthorizationHeader any             `json:"authorization_header"`
}
|
||||
|
||||
// NPMAccessItem represents an item in an NPM access list
// (an address plus its allow/deny directive).
type NPMAccessItem struct {
	ID           int    `json:"id"`
	AccessListID int    `json:"access_list_id"`
	Address      string `json:"address"`
	Directive    string `json:"directive"`
	CreatedOn    string `json:"created_on"`
	ModifiedOn   string `json:"modified_on"`
}
|
||||
|
||||
// NPMCertificate represents a certificate from NPM export.
// Certificates are counted in the upload preview but are not imported.
type NPMCertificate struct {
	ID             int      `json:"id"`
	Provider       string   `json:"provider"`
	NiceName       string   `json:"nice_name"`
	DomainNames    []string `json:"domain_names"`
	ExpiresOn      string   `json:"expires_on"`
	CreatedOn      string   `json:"created_on"`
	ModifiedOn     string   `json:"modified_on"`
	IsDNSChallenge bool     `json:"is_dns_challenge"`
	Meta           any      `json:"meta"`
}
|
||||
|
||||
// NPMImportHandler handles NPM configuration imports.
// It parses uploaded NPM export JSON, previews conflicts against existing
// proxy hosts, and commits the import through the proxy host service.
type NPMImportHandler struct {
	db           *gorm.DB                  // database handle, also used to build the service
	proxyHostSvc *services.ProxyHostService // persistence layer for proxy hosts
}
|
||||
|
||||
// NewNPMImportHandler creates a new NPM import handler.
|
||||
func NewNPMImportHandler(db *gorm.DB) *NPMImportHandler {
|
||||
return &NPMImportHandler{
|
||||
db: db,
|
||||
proxyHostSvc: services.NewProxyHostService(db),
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterRoutes registers NPM import routes.
|
||||
func (h *NPMImportHandler) RegisterRoutes(router *gin.RouterGroup) {
|
||||
router.POST("/import/npm/upload", h.Upload)
|
||||
router.POST("/import/npm/commit", h.Commit)
|
||||
router.POST("/import/npm/cancel", h.Cancel)
|
||||
}
|
||||
|
||||
// Upload parses an NPM export JSON and returns a preview with conflict detection.
|
||||
func (h *NPMImportHandler) Upload(c *gin.Context) {
|
||||
var req struct {
|
||||
Content string `json:"content" binding:"required"`
|
||||
}
|
||||
|
||||
if err := c.ShouldBindJSON(&req); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
return
|
||||
}
|
||||
|
||||
var npmExport NPMExport
|
||||
if err := json.Unmarshal([]byte(req.Content), &npmExport); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("invalid NPM export JSON: %v", err)})
|
||||
return
|
||||
}
|
||||
|
||||
result := h.convertNPMToImportResult(npmExport)
|
||||
|
||||
if len(result.Hosts) == 0 {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "no proxy hosts found in NPM export"})
|
||||
return
|
||||
}
|
||||
|
||||
// Check for conflicts with existing hosts
|
||||
existingHosts, _ := h.proxyHostSvc.List()
|
||||
existingDomainsMap := make(map[string]models.ProxyHost)
|
||||
for _, eh := range existingHosts {
|
||||
existingDomainsMap[eh.DomainNames] = eh
|
||||
}
|
||||
|
||||
conflictDetails := make(map[string]gin.H)
|
||||
for _, ph := range result.Hosts {
|
||||
if existing, found := existingDomainsMap[ph.DomainNames]; found {
|
||||
result.Conflicts = append(result.Conflicts, ph.DomainNames)
|
||||
conflictDetails[ph.DomainNames] = gin.H{
|
||||
"existing": gin.H{
|
||||
"forward_scheme": existing.ForwardScheme,
|
||||
"forward_host": existing.ForwardHost,
|
||||
"forward_port": existing.ForwardPort,
|
||||
"ssl_forced": existing.SSLForced,
|
||||
"websocket": existing.WebsocketSupport,
|
||||
"enabled": existing.Enabled,
|
||||
},
|
||||
"imported": gin.H{
|
||||
"forward_scheme": ph.ForwardScheme,
|
||||
"forward_host": ph.ForwardHost,
|
||||
"forward_port": ph.ForwardPort,
|
||||
"ssl_forced": ph.SSLForced,
|
||||
"websocket": ph.WebsocketSupport,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sid := uuid.NewString()
|
||||
|
||||
// Store the parsed export in session storage for later commit
|
||||
npmImportSessionsMu.Lock()
|
||||
npmImportSessions[sid] = npmExport
|
||||
npmImportSessionsMu.Unlock()
|
||||
|
||||
c.JSON(http.StatusOK, gin.H{
|
||||
"session": gin.H{"id": sid, "state": "transient", "source_type": "npm"},
|
||||
"preview": result,
|
||||
"conflict_details": conflictDetails,
|
||||
"npm_export": gin.H{
|
||||
"proxy_hosts": len(npmExport.ProxyHosts),
|
||||
"access_lists": len(npmExport.AccessLists),
|
||||
"certificates": len(npmExport.Certificates),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// Commit finalizes the NPM import with user's conflict resolutions.
//
// Request body: session_uuid (required) plus two optional per-domain maps:
// resolutions (domain -> "skip"/"keep"/"rename"/"overwrite") and names
// (domain -> custom display name). Hosts with no resolution are created.
// Responds with counts of created/updated/skipped hosts and per-host errors.
func (h *NPMImportHandler) Commit(c *gin.Context) {
	var req struct {
		SessionUUID string            `json:"session_uuid" binding:"required"`
		Resolutions map[string]string `json:"resolutions"` // domain -> action
		Names       map[string]string `json:"names"`       // domain -> custom name
	}

	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}

	// Retrieve the stored NPM export from session
	npmImportSessionsMu.RLock()
	npmExport, ok := npmImportSessions[req.SessionUUID]
	npmImportSessionsMu.RUnlock()

	if !ok {
		c.JSON(http.StatusNotFound, gin.H{"error": "session not found or expired"})
		return
	}

	result := h.convertNPMToImportResult(npmExport)
	proxyHosts := caddy.ConvertToProxyHosts(result.Hosts)

	created := 0
	updated := 0
	skipped := 0
	errors := []string{}

	// NOTE(review): the List error is silently discarded; on a DB failure
	// every "overwrite" resolution degrades into a create — confirm acceptable.
	existingHosts, _ := h.proxyHostSvc.List()
	existingMap := make(map[string]*models.ProxyHost)
	for i := range existingHosts {
		existingMap[existingHosts[i].DomainNames] = &existingHosts[i]
	}

	for _, host := range proxyHosts {
		// Per-domain action chosen by the user; empty string means "import as new".
		action := req.Resolutions[host.DomainNames]

		if customName, ok := req.Names[host.DomainNames]; ok && customName != "" {
			host.Name = customName
		}

		if action == "skip" || action == "keep" {
			skipped++
			continue
		}

		if action == "rename" {
			// NOTE(review): DomainNames may be a comma-joined list, so the
			// "-imported" suffix lands on the last domain only — confirm intended.
			host.DomainNames += "-imported"
		}

		if action == "overwrite" {
			if existing, found := existingMap[host.DomainNames]; found {
				// Preserve the identity and certificate linkage of the host
				// being replaced so references elsewhere remain valid.
				host.ID = existing.ID
				host.UUID = existing.UUID
				host.CertificateID = existing.CertificateID
				host.CreatedAt = existing.CreatedAt

				if err := h.proxyHostSvc.Update(&host); err != nil {
					errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error()))
				} else {
					updated++
				}
				continue
			}
			// "overwrite" with no matching host falls through to create.
		}

		// Default path: create a brand-new host with a fresh UUID.
		host.UUID = uuid.NewString()
		if err := h.proxyHostSvc.Create(&host); err != nil {
			errors = append(errors, fmt.Sprintf("%s: %s", host.DomainNames, err.Error()))
		} else {
			created++
		}
	}

	// Clean up session after successful commit
	npmImportSessionsMu.Lock()
	delete(npmImportSessions, req.SessionUUID)
	npmImportSessionsMu.Unlock()

	c.JSON(http.StatusOK, gin.H{
		"created": created,
		"updated": updated,
		"skipped": skipped,
		"errors":  errors,
	})
}
|
||||
|
||||
// Cancel cancels an NPM import session and cleans up resources.
|
||||
func (h *NPMImportHandler) Cancel(c *gin.Context) {
|
||||
var req struct {
|
||||
SessionUUID string `json:"session_uuid"`
|
||||
}
|
||||
|
||||
if err := c.ShouldBindJSON(&req); err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
|
||||
return
|
||||
}
|
||||
|
||||
// Clean up session if it exists
|
||||
npmImportSessionsMu.Lock()
|
||||
delete(npmImportSessions, req.SessionUUID)
|
||||
npmImportSessionsMu.Unlock()
|
||||
|
||||
c.JSON(http.StatusOK, gin.H{"status": "cancelled"})
|
||||
}
|
||||
|
||||
// convertNPMToImportResult converts NPM export format to Charon's ImportResult.
|
||||
func (h *NPMImportHandler) convertNPMToImportResult(export NPMExport) *caddy.ImportResult {
|
||||
result := &caddy.ImportResult{
|
||||
Hosts: []caddy.ParsedHost{},
|
||||
Conflicts: []string{},
|
||||
Errors: []string{},
|
||||
}
|
||||
|
||||
for _, npmHost := range export.ProxyHosts {
|
||||
if len(npmHost.DomainNames) == 0 {
|
||||
result.Errors = append(result.Errors, fmt.Sprintf("host %d has no domain names", npmHost.ID))
|
||||
continue
|
||||
}
|
||||
|
||||
// NPM stores multiple domains as array; join them
|
||||
domainNames := ""
|
||||
for i, d := range npmHost.DomainNames {
|
||||
if i > 0 {
|
||||
domainNames += ","
|
||||
}
|
||||
domainNames += d
|
||||
}
|
||||
|
||||
scheme := npmHost.ForwardScheme
|
||||
if scheme == "" {
|
||||
scheme = "http"
|
||||
}
|
||||
|
||||
port := npmHost.ForwardPort
|
||||
if port == 0 {
|
||||
port = 80
|
||||
}
|
||||
|
||||
warnings := []string{}
|
||||
if npmHost.CachingEnabled {
|
||||
warnings = append(warnings, "Caching not supported - will be disabled")
|
||||
}
|
||||
if len(npmHost.Locations) > 0 || len(npmHost.CustomLocations) > 0 {
|
||||
warnings = append(warnings, "Custom locations not fully supported")
|
||||
}
|
||||
if npmHost.AdvancedConfig != "" {
|
||||
warnings = append(warnings, "Advanced nginx config not compatible - manual review required")
|
||||
}
|
||||
if npmHost.AccessListID != nil && *npmHost.AccessListID > 0 {
|
||||
warnings = append(warnings, fmt.Sprintf("Access list reference (ID: %d) needs manual mapping", *npmHost.AccessListID))
|
||||
}
|
||||
|
||||
host := caddy.ParsedHost{
|
||||
DomainNames: domainNames,
|
||||
ForwardScheme: scheme,
|
||||
ForwardHost: npmHost.ForwardHost,
|
||||
ForwardPort: port,
|
||||
SSLForced: npmHost.SSLForced,
|
||||
WebsocketSupport: npmHost.AllowWebsocketUpgrade,
|
||||
Warnings: warnings,
|
||||
}
|
||||
|
||||
rawJSON, _ := json.Marshal(npmHost)
|
||||
host.RawJSON = string(rawJSON)
|
||||
|
||||
result.Hosts = append(result.Hosts, host)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
@@ -0,0 +1,493 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
|
||||
"github.com/Wikid82/charon/backend/internal/models"
|
||||
)
|
||||
|
||||
func setupNPMTestDB(t *testing.T) *gorm.DB {
|
||||
db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
|
||||
require.NoError(t, err)
|
||||
|
||||
err = db.AutoMigrate(&models.ProxyHost{}, &models.Location{}, &models.Setting{})
|
||||
require.NoError(t, err)
|
||||
|
||||
return db
|
||||
}
|
||||
|
||||
func TestNewNPMImportHandler(t *testing.T) {
|
||||
db := setupNPMTestDB(t)
|
||||
handler := NewNPMImportHandler(db)
|
||||
|
||||
assert.NotNil(t, handler)
|
||||
assert.NotNil(t, handler.db)
|
||||
assert.NotNil(t, handler.proxyHostSvc)
|
||||
}
|
||||
|
||||
func TestNPMImportHandler_RegisterRoutes(t *testing.T) {
|
||||
db := setupNPMTestDB(t)
|
||||
handler := NewNPMImportHandler(db)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
router := gin.New()
|
||||
api := router.Group("/api/v1")
|
||||
handler.RegisterRoutes(api)
|
||||
|
||||
routes := router.Routes()
|
||||
routePaths := make(map[string]bool)
|
||||
for _, r := range routes {
|
||||
routePaths[r.Method+":"+r.Path] = true
|
||||
}
|
||||
|
||||
assert.True(t, routePaths["POST:/api/v1/import/npm/upload"])
|
||||
assert.True(t, routePaths["POST:/api/v1/import/npm/commit"])
|
||||
assert.True(t, routePaths["POST:/api/v1/import/npm/cancel"])
|
||||
}
|
||||
|
||||
// TestNPMImportHandler_Upload_ValidNPMExport uploads a well-formed NPM export
// with two proxy hosts and one access list, and verifies the response carries
// a session, a preview with both hosts, and the npm_export summary.
func TestNPMImportHandler_Upload_ValidNPMExport(t *testing.T) {
	db := setupNPMTestDB(t)
	handler := NewNPMImportHandler(db)

	gin.SetMode(gin.TestMode)
	router := gin.New()
	api := router.Group("/api/v1")
	handler.RegisterRoutes(api)

	npmExport := NPMExport{
		ProxyHosts: []NPMProxyHost{
			// Single-domain host with SSL and websocket options set.
			{
				ID:                    1,
				DomainNames:           []string{"example.com"},
				ForwardScheme:         "http",
				ForwardHost:           "192.168.1.100",
				ForwardPort:           8080,
				SSLForced:             true,
				AllowWebsocketUpgrade: true,
				Enabled:               true,
			},
			// Multi-domain host; both names are carried into one Charon host.
			{
				ID:            2,
				DomainNames:   []string{"test.com", "www.test.com"},
				ForwardScheme: "https",
				ForwardHost:   "192.168.1.101",
				ForwardPort:   443,
				Enabled:       true,
			},
		},
		AccessLists: []NPMAccessList{
			{
				ID:   1,
				Name: "Test ACL",
			},
		},
	}

	// The upload endpoint takes the raw export JSON nested in a "content" field.
	content, _ := json.Marshal(npmExport)
	body, _ := json.Marshal(map[string]string{"content": string(content)})

	req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()

	router.ServeHTTP(w, req)

	assert.Equal(t, http.StatusOK, w.Code)

	var response map[string]any
	err := json.Unmarshal(w.Body.Bytes(), &response)
	require.NoError(t, err)

	assert.Contains(t, response, "session")
	assert.Contains(t, response, "preview")
	assert.Contains(t, response, "npm_export")

	// Both NPM hosts should appear in the preview.
	preview := response["preview"].(map[string]any)
	hosts := preview["hosts"].([]any)
	assert.Len(t, hosts, 2)
}
|
||||
|
||||
func TestNPMImportHandler_Upload_EmptyExport(t *testing.T) {
|
||||
db := setupNPMTestDB(t)
|
||||
handler := NewNPMImportHandler(db)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
router := gin.New()
|
||||
api := router.Group("/api/v1")
|
||||
handler.RegisterRoutes(api)
|
||||
|
||||
npmExport := NPMExport{
|
||||
ProxyHosts: []NPMProxyHost{},
|
||||
}
|
||||
|
||||
content, _ := json.Marshal(npmExport)
|
||||
body, _ := json.Marshal(map[string]string{"content": string(content)})
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
router.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusBadRequest, w.Code)
|
||||
}
|
||||
|
||||
func TestNPMImportHandler_Upload_InvalidJSON(t *testing.T) {
|
||||
db := setupNPMTestDB(t)
|
||||
handler := NewNPMImportHandler(db)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
router := gin.New()
|
||||
api := router.Group("/api/v1")
|
||||
handler.RegisterRoutes(api)
|
||||
|
||||
body, _ := json.Marshal(map[string]string{"content": "not valid json"})
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
router.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusBadRequest, w.Code)
|
||||
}
|
||||
|
||||
func TestNPMImportHandler_Upload_ConflictDetection(t *testing.T) {
|
||||
db := setupNPMTestDB(t)
|
||||
|
||||
existingHost := models.ProxyHost{
|
||||
UUID: "existing-uuid",
|
||||
DomainNames: "example.com",
|
||||
ForwardScheme: "http",
|
||||
ForwardHost: "old-server",
|
||||
ForwardPort: 80,
|
||||
Enabled: true,
|
||||
}
|
||||
db.Create(&existingHost)
|
||||
|
||||
handler := NewNPMImportHandler(db)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
router := gin.New()
|
||||
api := router.Group("/api/v1")
|
||||
handler.RegisterRoutes(api)
|
||||
|
||||
npmExport := NPMExport{
|
||||
ProxyHosts: []NPMProxyHost{
|
||||
{
|
||||
ID: 1,
|
||||
DomainNames: []string{"example.com"},
|
||||
ForwardScheme: "http",
|
||||
ForwardHost: "new-server",
|
||||
ForwardPort: 8080,
|
||||
Enabled: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
content, _ := json.Marshal(npmExport)
|
||||
body, _ := json.Marshal(map[string]string{"content": string(content)})
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
router.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
var response map[string]any
|
||||
err := json.Unmarshal(w.Body.Bytes(), &response)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Contains(t, response, "conflict_details")
|
||||
conflictDetails := response["conflict_details"].(map[string]any)
|
||||
assert.Contains(t, conflictDetails, "example.com")
|
||||
}
|
||||
|
||||
func TestNPMImportHandler_Commit_CreateNew(t *testing.T) {
|
||||
db := setupNPMTestDB(t)
|
||||
handler := NewNPMImportHandler(db)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
router := gin.New()
|
||||
api := router.Group("/api/v1")
|
||||
handler.RegisterRoutes(api)
|
||||
|
||||
npmExport := NPMExport{
|
||||
ProxyHosts: []NPMProxyHost{
|
||||
{
|
||||
ID: 1,
|
||||
DomainNames: []string{"newhost.com"},
|
||||
ForwardScheme: "http",
|
||||
ForwardHost: "192.168.1.100",
|
||||
ForwardPort: 8080,
|
||||
Enabled: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// Step 1: Upload to get session ID
|
||||
content, _ := json.Marshal(npmExport)
|
||||
uploadBody, _ := json.Marshal(map[string]string{"content": string(content)})
|
||||
|
||||
uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(uploadBody))
|
||||
uploadReq.Header.Set("Content-Type", "application/json")
|
||||
uploadW := httptest.NewRecorder()
|
||||
|
||||
router.ServeHTTP(uploadW, uploadReq)
|
||||
require.Equal(t, http.StatusOK, uploadW.Code)
|
||||
|
||||
var uploadResponse map[string]any
|
||||
err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse)
|
||||
require.NoError(t, err)
|
||||
|
||||
session := uploadResponse["session"].(map[string]any)
|
||||
sessionID := session["id"].(string)
|
||||
|
||||
// Step 2: Commit with session UUID
|
||||
commitBody, _ := json.Marshal(map[string]any{
|
||||
"session_uuid": sessionID,
|
||||
"resolutions": map[string]string{},
|
||||
"names": map[string]string{},
|
||||
})
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/commit", bytes.NewReader(commitBody))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
router.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusOK, w.Code)
|
||||
|
||||
var response map[string]any
|
||||
err = json.Unmarshal(w.Body.Bytes(), &response)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, float64(1), response["created"])
|
||||
assert.Equal(t, float64(0), response["updated"])
|
||||
assert.Equal(t, float64(0), response["skipped"])
|
||||
|
||||
var host models.ProxyHost
|
||||
db.Where("domain_names = ?", "newhost.com").First(&host)
|
||||
assert.NotEmpty(t, host.UUID)
|
||||
assert.Equal(t, "192.168.1.100", host.ForwardHost)
|
||||
}
|
||||
|
||||
// TestNPMImportHandler_Commit_SkipAction runs upload -> commit with a "skip"
// resolution for the only host and verifies nothing is created and the
// skipped counter reflects it.
func TestNPMImportHandler_Commit_SkipAction(t *testing.T) {
	db := setupNPMTestDB(t)
	handler := NewNPMImportHandler(db)

	gin.SetMode(gin.TestMode)
	router := gin.New()
	api := router.Group("/api/v1")
	handler.RegisterRoutes(api)

	npmExport := NPMExport{
		ProxyHosts: []NPMProxyHost{
			{
				ID:            1,
				DomainNames:   []string{"skipme.com"},
				ForwardScheme: "http",
				ForwardHost:   "192.168.1.100",
				ForwardPort:   8080,
				Enabled:       true,
			},
		},
	}

	// Step 1: Upload to get session ID
	content, _ := json.Marshal(npmExport)
	uploadBody, _ := json.Marshal(map[string]string{"content": string(content)})

	uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(uploadBody))
	uploadReq.Header.Set("Content-Type", "application/json")
	uploadW := httptest.NewRecorder()

	router.ServeHTTP(uploadW, uploadReq)
	require.Equal(t, http.StatusOK, uploadW.Code)

	var uploadResponse map[string]any
	err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse)
	require.NoError(t, err)

	session := uploadResponse["session"].(map[string]any)
	sessionID := session["id"].(string)

	// Step 2: Commit with skip resolution
	commitBody, _ := json.Marshal(map[string]any{
		"session_uuid": sessionID,
		"resolutions":  map[string]string{"skipme.com": "skip"},
		"names":        map[string]string{},
	})

	req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/commit", bytes.NewReader(commitBody))
	req.Header.Set("Content-Type", "application/json")
	w := httptest.NewRecorder()

	router.ServeHTTP(w, req)

	assert.Equal(t, http.StatusOK, w.Code)

	var response map[string]any
	err = json.Unmarshal(w.Body.Bytes(), &response)
	require.NoError(t, err)

	// JSON numbers decode to float64 in a map[string]any.
	assert.Equal(t, float64(0), response["created"])
	assert.Equal(t, float64(1), response["skipped"])
}
|
||||
|
||||
func TestNPMImportHandler_Commit_SessionNotFound(t *testing.T) {
|
||||
db := setupNPMTestDB(t)
|
||||
handler := NewNPMImportHandler(db)
|
||||
|
||||
gin.SetMode(gin.TestMode)
|
||||
router := gin.New()
|
||||
api := router.Group("/api/v1")
|
||||
handler.RegisterRoutes(api)
|
||||
|
||||
// Try to commit with a non-existent session
|
||||
commitBody, _ := json.Marshal(map[string]any{
|
||||
"session_uuid": "non-existent-uuid",
|
||||
"resolutions": map[string]string{},
|
||||
"names": map[string]string{},
|
||||
})
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/commit", bytes.NewReader(commitBody))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
router.ServeHTTP(w, req)
|
||||
|
||||
assert.Equal(t, http.StatusNotFound, w.Code)
|
||||
|
||||
var response map[string]any
|
||||
err := json.Unmarshal(w.Body.Bytes(), &response)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Contains(t, response["error"], "session not found")
|
||||
}
|
||||
|
||||
// TestNPMImportHandler_Cancel exercises the full upload -> cancel -> commit
// flow: after cancelling, the session must be gone and a commit against the
// same session ID must return 404.
func TestNPMImportHandler_Cancel(t *testing.T) {
	db := setupNPMTestDB(t)
	handler := NewNPMImportHandler(db)

	gin.SetMode(gin.TestMode)
	router := gin.New()
	api := router.Group("/api/v1")
	handler.RegisterRoutes(api)

	npmExport := NPMExport{
		ProxyHosts: []NPMProxyHost{
			{
				ID:            1,
				DomainNames:   []string{"cancel-test.com"},
				ForwardScheme: "http",
				ForwardHost:   "192.168.1.100",
				ForwardPort:   8080,
				Enabled:       true,
			},
		},
	}

	// Step 1: Upload to get session ID
	content, _ := json.Marshal(npmExport)
	uploadBody, _ := json.Marshal(map[string]string{"content": string(content)})

	uploadReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/upload", bytes.NewReader(uploadBody))
	uploadReq.Header.Set("Content-Type", "application/json")
	uploadW := httptest.NewRecorder()

	router.ServeHTTP(uploadW, uploadReq)
	require.Equal(t, http.StatusOK, uploadW.Code)

	var uploadResponse map[string]any
	err := json.Unmarshal(uploadW.Body.Bytes(), &uploadResponse)
	require.NoError(t, err)

	// The session ID is exposed under session.id in the upload response.
	session := uploadResponse["session"].(map[string]any)
	sessionID := session["id"].(string)

	// Step 2: Cancel the session
	cancelBody, _ := json.Marshal(map[string]any{
		"session_uuid": sessionID,
	})

	cancelReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/cancel", bytes.NewReader(cancelBody))
	cancelReq.Header.Set("Content-Type", "application/json")
	cancelW := httptest.NewRecorder()

	router.ServeHTTP(cancelW, cancelReq)

	assert.Equal(t, http.StatusOK, cancelW.Code)

	var cancelResponse map[string]any
	err = json.Unmarshal(cancelW.Body.Bytes(), &cancelResponse)
	require.NoError(t, err)

	assert.Equal(t, "cancelled", cancelResponse["status"])

	// Step 3: Try to commit with cancelled session (should fail)
	commitBody, _ := json.Marshal(map[string]any{
		"session_uuid": sessionID,
		"resolutions":  map[string]string{},
		"names":        map[string]string{},
	})

	commitReq := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/commit", bytes.NewReader(commitBody))
	commitReq.Header.Set("Content-Type", "application/json")
	commitW := httptest.NewRecorder()

	router.ServeHTTP(commitW, commitReq)

	assert.Equal(t, http.StatusNotFound, commitW.Code)
}
|
||||
|
||||
func TestNPMImportHandler_ConvertNPMToImportResult(t *testing.T) {
|
||||
db := setupNPMTestDB(t)
|
||||
handler := NewNPMImportHandler(db)
|
||||
|
||||
npmExport := NPMExport{
|
||||
ProxyHosts: []NPMProxyHost{
|
||||
{
|
||||
ID: 1,
|
||||
DomainNames: []string{"test.com", "www.test.com"},
|
||||
ForwardScheme: "https",
|
||||
ForwardHost: "backend",
|
||||
ForwardPort: 443,
|
||||
SSLForced: true,
|
||||
AllowWebsocketUpgrade: true,
|
||||
CachingEnabled: true,
|
||||
AdvancedConfig: "proxy_set_header X-Custom value;",
|
||||
},
|
||||
{
|
||||
ID: 2,
|
||||
DomainNames: []string{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
result := handler.convertNPMToImportResult(npmExport)
|
||||
|
||||
assert.Len(t, result.Hosts, 1)
|
||||
assert.Len(t, result.Errors, 1)
|
||||
|
||||
host := result.Hosts[0]
|
||||
assert.Equal(t, "test.com,www.test.com", host.DomainNames)
|
||||
assert.Equal(t, "https", host.ForwardScheme)
|
||||
assert.Equal(t, "backend", host.ForwardHost)
|
||||
assert.Equal(t, 443, host.ForwardPort)
|
||||
assert.True(t, host.SSLForced)
|
||||
assert.True(t, host.WebsocketSupport)
|
||||
assert.Len(t, host.Warnings, 2) // Caching + Advanced config warnings
|
||||
}
|
||||
@@ -583,4 +583,12 @@ func RegisterImportHandler(router *gin.Engine, db *gorm.DB, caddyBinary, importD
|
||||
importHandler := handlers.NewImportHandler(db, caddyBinary, importDir, mountPath)
|
||||
api := router.Group("/api/v1")
|
||||
importHandler.RegisterRoutes(api)
|
||||
|
||||
// NPM Import Handler - supports Nginx Proxy Manager export format
|
||||
npmImportHandler := handlers.NewNPMImportHandler(db)
|
||||
npmImportHandler.RegisterRoutes(api)
|
||||
|
||||
// JSON Import Handler - supports both Charon and NPM export formats
|
||||
jsonImportHandler := handlers.NewJSONImportHandler(db)
|
||||
jsonImportHandler.RegisterRoutes(api)
|
||||
}
|
||||
|
||||
@@ -137,7 +137,7 @@ func (s *MailService) GetSMTPConfig() (*SMTPConfig, error) {
|
||||
return config, nil
|
||||
}
|
||||
|
||||
// SaveSMTPConfig saves SMTP settings to the database.
|
||||
// SaveSMTPConfig saves SMTP settings to the database using a transaction.
|
||||
func (s *MailService) SaveSMTPConfig(config *SMTPConfig) error {
|
||||
settings := map[string]string{
|
||||
"smtp_host": config.Host,
|
||||
@@ -148,31 +148,34 @@ func (s *MailService) SaveSMTPConfig(config *SMTPConfig) error {
|
||||
"smtp_encryption": config.Encryption,
|
||||
}
|
||||
|
||||
for key, value := range settings {
|
||||
setting := models.Setting{
|
||||
Key: key,
|
||||
Value: value,
|
||||
Type: "string",
|
||||
Category: "smtp",
|
||||
}
|
||||
return s.db.Transaction(func(tx *gorm.DB) error {
|
||||
for key, value := range settings {
|
||||
var existing models.Setting
|
||||
result := tx.Where("key = ?", key).First(&existing)
|
||||
|
||||
// Upsert: update if exists, create if not
|
||||
result := s.db.Where("key = ?", key).First(&models.Setting{})
|
||||
if result.Error == gorm.ErrRecordNotFound {
|
||||
if err := s.db.Create(&setting).Error; err != nil {
|
||||
return fmt.Errorf("failed to create setting %s: %w", key, err)
|
||||
}
|
||||
} else {
|
||||
if err := s.db.Model(&models.Setting{}).Where("key = ?", key).Updates(map[string]any{
|
||||
"value": value,
|
||||
"category": "smtp",
|
||||
}).Error; err != nil {
|
||||
return fmt.Errorf("failed to update setting %s: %w", key, err)
|
||||
switch result.Error {
|
||||
case gorm.ErrRecordNotFound:
|
||||
setting := models.Setting{
|
||||
Key: key,
|
||||
Value: value,
|
||||
Type: "string",
|
||||
Category: "smtp",
|
||||
}
|
||||
if err := tx.Create(&setting).Error; err != nil {
|
||||
return fmt.Errorf("failed to create setting %s: %w", key, err)
|
||||
}
|
||||
case nil:
|
||||
existing.Value = value
|
||||
existing.Category = "smtp"
|
||||
if err := tx.Save(&existing).Error; err != nil {
|
||||
return fmt.Errorf("failed to update setting %s: %w", key, err)
|
||||
}
|
||||
default:
|
||||
return fmt.Errorf("failed to query setting %s: %w", key, result.Error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// IsConfigured returns true if SMTP is properly configured.
|
||||
|
||||
+17
-1
@@ -128,7 +128,23 @@ Migrating from another Caddy setup? Import your existing Caddyfile configuration
|
||||
|
||||
---
|
||||
|
||||
### 🔌 WebSocket Support
|
||||
### 📦 Nginx Proxy Manager Import
|
||||
|
||||
Migrating from Nginx Proxy Manager? Import your proxy host configurations directly from NPM export files. Charon parses your domains, upstream servers, SSL settings, and access lists, giving you a preview before committing.
|
||||
|
||||
→ [Learn More](features/npm-import.md)
|
||||
|
||||
---
|
||||
|
||||
### 📄 JSON Configuration Import
|
||||
|
||||
Import configurations from generic JSON exports or Charon backup files. Supports both Charon's native export format and Nginx Proxy Manager format with automatic detection. Perfect for restoring backups or migrating between Charon instances.
|
||||
|
||||
→ [Learn More](features/json-import.md)
|
||||
|
||||
---
|
||||
|
||||
### 🔌 WebSocket Support
|
||||
|
||||
Real-time applications like chat servers, live dashboards, and collaborative tools work out of the box. Charon handles WebSocket connections automatically with no special configuration needed.
|
||||
|
||||
|
||||
+702
-429
File diff suppressed because it is too large
Load Diff
@@ -1,8 +1,8 @@
|
||||
# Skipped Playwright Tests Remediation Plan
|
||||
|
||||
> **Status**: Active
|
||||
> **Status**: Active (Phase 3 Complete)
|
||||
> **Created**: 2024
|
||||
> **Total Skipped Tests**: 98
|
||||
> **Total Skipped Tests**: 91 (was 98, reduced by 7 in Phase 3)
|
||||
> **Target**: Reduce to <10 intentional skips
|
||||
|
||||
## Executive Summary
|
||||
@@ -15,12 +15,14 @@ This plan addresses 98 skipped Playwright E2E tests discovered through comprehen
|
||||
|----------|-------|--------|----------|
|
||||
| Environment-Dependent (Cerberus) | 35 | S | P0 |
|
||||
| Feature Not Implemented | 25 | L | P1 |
|
||||
| Route/API Not Implemented | 12 | M | P1 |
|
||||
| UI Mismatch/Test ID Issues | 10 | S | P2 |
|
||||
| Route/API Not Implemented | 6 | M | P1 |
|
||||
| UI Mismatch/Test ID Issues | 9 | S | P2 |
|
||||
| TestDataManager Auth Issues | 8 | M | P1 |
|
||||
| Flaky/Timing Issues | 5 | S | P2 |
|
||||
| Intentional Skips | 3 | - | - |
|
||||
|
||||
> **Note**: Phase 3 completed - NPM/JSON import routes implemented (6→0), SMTP fix (1 test), reducing total from 98 to 91.
|
||||
|
||||
---
|
||||
|
||||
## Category 1: Environment-Dependent Tests (Cerberus Disabled)
|
||||
@@ -339,13 +341,14 @@ These tests are intentionally skipped with documented reasons:
|
||||
|
||||
### Phase 3: Backend Routes (Week 3-4)
|
||||
**Target**: Implement missing API routes
|
||||
**Status**: ✅ COMPLETE (2026-01-22)
|
||||
|
||||
1. Implement NPM import route
|
||||
2. Implement JSON import route
|
||||
3. Review SMTP persistence issue
|
||||
4. Re-enable import tests (+6 tests)
|
||||
1. ✅ Implemented NPM import route (`POST /api/v1/import/npm/upload`, `commit`, `cancel`)
|
||||
2. ✅ Implemented JSON import route (`POST /api/v1/import/json/upload`, `commit`, `cancel`)
|
||||
3. ✅ Fixed SMTP persistence bug (settings now persist correctly after save)
|
||||
4. ✅ Re-enabled import tests (+7 tests now passing)
|
||||
|
||||
**Estimated Work**: 16-24 hours
|
||||
**Actual Work**: ~20 hours
|
||||
|
||||
### Phase 4: UI Components (Week 5-8)
|
||||
**Target**: Implement missing frontend components
|
||||
@@ -462,3 +465,4 @@ grep -rn "test\.skip\|test\.fixme" tests/ --include="*.spec.ts" > skip-report.tx
|
||||
| Date | Author | Change |
|
||||
|------|--------|--------|
|
||||
| 2024-XX-XX | AI Analysis | Initial plan created |
|
||||
| 2026-01-22 | Implementation Team | Phase 3 complete - NPM/JSON import routes implemented, SMTP persistence fixed, 7 tests re-enabled |
|
||||
|
||||
@@ -16,6 +16,8 @@ const RemoteServers = lazy(() => import('./pages/RemoteServers'))
|
||||
const DNS = lazy(() => import('./pages/DNS'))
|
||||
const ImportCaddy = lazy(() => import('./pages/ImportCaddy'))
|
||||
const ImportCrowdSec = lazy(() => import('./pages/ImportCrowdSec'))
|
||||
const ImportNPM = lazy(() => import('./pages/ImportNPM'))
|
||||
const ImportJSON = lazy(() => import('./pages/ImportJSON'))
|
||||
const Certificates = lazy(() => import('./pages/Certificates'))
|
||||
const DNSProviders = lazy(() => import('./pages/DNSProviders'))
|
||||
const SystemSettings = lazy(() => import('./pages/SystemSettings'))
|
||||
@@ -109,6 +111,8 @@ export default function App() {
|
||||
<Route path="import">
|
||||
<Route path="caddyfile" element={<ImportCaddy />} />
|
||||
<Route path="crowdsec" element={<ImportCrowdSec />} />
|
||||
<Route path="npm" element={<ImportNPM />} />
|
||||
<Route path="json" element={<ImportJSON />} />
|
||||
</Route>
|
||||
</Route>
|
||||
|
||||
|
||||
@@ -0,0 +1,90 @@
|
||||
import client from './client';
|
||||
|
||||
/** Represents a host parsed from a JSON export. */
export interface JSONHost {
  /** Comma-separated list of the host's domain names. */
  domain_names: string;
  /** Upstream scheme — presumably 'http' | 'https'; TODO confirm against backend. */
  forward_scheme: string;
  /** Upstream host name or IP address. */
  forward_host: string;
  /** Upstream port number. */
  forward_port: number;
  /** Whether HTTPS is forced for this host. */
  ssl_forced: boolean;
  /** Whether WebSocket upgrade is supported for this host. */
  websocket_support: boolean;
}

/** Preview of a JSON import with hosts and conflicts. */
export interface JSONImportPreview {
  /** Server-side import session created by the upload. */
  session: {
    /** Session UUID used for subsequent commit/cancel calls. */
    id: string;
    /** Current session state as reported by the server. */
    state: string;
    /** Import source identifier. */
    source: string;
  };
  /** Parsed results: hosts to import, conflicting domains, and parse errors. */
  preview: {
    hosts: JSONHost[];
    /** Domain-name strings that collide with existing proxy hosts. */
    conflicts: string[];
    errors: string[];
  };
  /** Per-domain comparison of the existing host vs. the imported one. */
  conflict_details: Record<string, {
    existing: {
      forward_scheme: string;
      forward_host: string;
      forward_port: number;
      ssl_forced: boolean;
      websocket: boolean;
      enabled: boolean;
    };
    imported: {
      forward_scheme: string;
      forward_host: string;
      forward_port: number;
      ssl_forced: boolean;
      websocket: boolean;
    };
  }>;
}

/** Result of committing a JSON import operation. */
export interface JSONImportCommitResult {
  /** Number of proxy hosts created. */
  created: number;
  /** Number of existing proxy hosts updated. */
  updated: number;
  /** Number of hosts skipped (e.g. via conflict resolution). */
  skipped: number;
  /** Errors encountered while committing individual hosts. */
  errors: string[];
}
|
||||
|
||||
/**
|
||||
* Uploads JSON export content for import preview.
|
||||
* @param content - The JSON export content as a string
|
||||
* @returns Promise resolving to JSONImportPreview with parsed hosts
|
||||
* @throws {AxiosError} If parsing fails or content is invalid
|
||||
*/
|
||||
export const uploadJSONExport = async (content: string): Promise<JSONImportPreview> => {
|
||||
const { data } = await client.post<JSONImportPreview>('/import/json/upload', { content });
|
||||
return data;
|
||||
};
|
||||
|
||||
/**
|
||||
* Commits the JSON import, creating/updating proxy hosts.
|
||||
* @param sessionUuid - The import session UUID
|
||||
* @param resolutions - Map of conflict resolutions (domain -> 'keep'|'replace'|'skip')
|
||||
* @param names - Map of custom names for imported hosts
|
||||
* @returns Promise resolving to JSONImportCommitResult with counts
|
||||
* @throws {AxiosError} If commit fails
|
||||
*/
|
||||
export const commitJSONImport = async (
|
||||
sessionUuid: string,
|
||||
resolutions: Record<string, string>,
|
||||
names: Record<string, string>
|
||||
): Promise<JSONImportCommitResult> => {
|
||||
const { data } = await client.post<JSONImportCommitResult>('/import/json/commit', {
|
||||
session_uuid: sessionUuid,
|
||||
resolutions,
|
||||
names,
|
||||
});
|
||||
return data;
|
||||
};
|
||||
|
||||
/**
|
||||
* Cancels the current JSON import session.
|
||||
* @throws {AxiosError} If cancellation fails
|
||||
*/
|
||||
export const cancelJSONImport = async (): Promise<void> => {
|
||||
await client.post('/import/json/cancel');
|
||||
};
|
||||
@@ -0,0 +1,90 @@
|
||||
import client from './client';
|
||||
|
||||
/** Represents a host parsed from an NPM export. */
export interface NPMHost {
  /** Comma-separated list of the host's domain names. */
  domain_names: string;
  /** Upstream scheme — presumably 'http' | 'https'; TODO confirm against backend. */
  forward_scheme: string;
  /** Upstream host name or IP address. */
  forward_host: string;
  /** Upstream port number. */
  forward_port: number;
  /** Whether HTTPS is forced for this host. */
  ssl_forced: boolean;
  /** Whether WebSocket upgrade is supported for this host. */
  websocket_support: boolean;
}

/** Preview of an NPM import with hosts and conflicts. */
export interface NPMImportPreview {
  /** Server-side import session created by the upload. */
  session: {
    /** Session UUID used for subsequent commit/cancel calls. */
    id: string;
    /** Current session state as reported by the server. */
    state: string;
    /** Import source identifier. */
    source: string;
  };
  /** Parsed results: hosts to import, conflicting domains, and parse errors. */
  preview: {
    hosts: NPMHost[];
    /** Domain-name strings that collide with existing proxy hosts. */
    conflicts: string[];
    errors: string[];
  };
  /** Per-domain comparison of the existing host vs. the imported one. */
  conflict_details: Record<string, {
    existing: {
      forward_scheme: string;
      forward_host: string;
      forward_port: number;
      ssl_forced: boolean;
      websocket: boolean;
      enabled: boolean;
    };
    imported: {
      forward_scheme: string;
      forward_host: string;
      forward_port: number;
      ssl_forced: boolean;
      websocket: boolean;
    };
  }>;
}

/** Result of committing an NPM import operation. */
export interface NPMImportCommitResult {
  /** Number of proxy hosts created. */
  created: number;
  /** Number of existing proxy hosts updated. */
  updated: number;
  /** Number of hosts skipped (e.g. via conflict resolution). */
  skipped: number;
  /** Errors encountered while committing individual hosts. */
  errors: string[];
}
|
||||
|
||||
/**
|
||||
* Uploads NPM export content for import preview.
|
||||
* @param content - The NPM export JSON content as a string
|
||||
* @returns Promise resolving to NPMImportPreview with parsed hosts
|
||||
* @throws {AxiosError} If parsing fails or content is invalid
|
||||
*/
|
||||
export const uploadNPMExport = async (content: string): Promise<NPMImportPreview> => {
|
||||
const { data } = await client.post<NPMImportPreview>('/import/npm/upload', { content });
|
||||
return data;
|
||||
};
|
||||
|
||||
/**
|
||||
* Commits the NPM import, creating/updating proxy hosts.
|
||||
* @param sessionUuid - The import session UUID
|
||||
* @param resolutions - Map of conflict resolutions (domain -> 'keep'|'replace'|'skip')
|
||||
* @param names - Map of custom names for imported hosts
|
||||
* @returns Promise resolving to NPMImportCommitResult with counts
|
||||
* @throws {AxiosError} If commit fails
|
||||
*/
|
||||
export const commitNPMImport = async (
|
||||
sessionUuid: string,
|
||||
resolutions: Record<string, string>,
|
||||
names: Record<string, string>
|
||||
): Promise<NPMImportCommitResult> => {
|
||||
const { data } = await client.post<NPMImportCommitResult>('/import/npm/commit', {
|
||||
session_uuid: sessionUuid,
|
||||
resolutions,
|
||||
names,
|
||||
});
|
||||
return data;
|
||||
};
|
||||
|
||||
/**
|
||||
* Cancels the current NPM import session.
|
||||
* @throws {AxiosError} If cancellation fails
|
||||
*/
|
||||
export const cancelNPMImport = async (): Promise<void> => {
|
||||
await client.post('/import/npm/cancel');
|
||||
};
|
||||
@@ -100,6 +100,8 @@ export default function Layout({ children }: LayoutProps) {
|
||||
children: [
|
||||
{ name: t('navigation.caddyfile'), path: '/tasks/import/caddyfile', icon: '📥' },
|
||||
{ name: t('navigation.crowdsec'), path: '/tasks/import/crowdsec', icon: '🛡️' },
|
||||
{ name: t('navigation.importNPM'), path: '/tasks/import/npm', icon: '📦' },
|
||||
{ name: t('navigation.importJSON'), path: '/tasks/import/json', icon: '📄' },
|
||||
]
|
||||
},
|
||||
{ name: t('navigation.backups'), path: '/tasks/backups', icon: '💾' },
|
||||
|
||||
@@ -0,0 +1,84 @@
|
||||
import { useState } from 'react';
|
||||
import { useMutation, useQueryClient } from '@tanstack/react-query';
|
||||
import {
|
||||
uploadJSONExport,
|
||||
commitJSONImport,
|
||||
cancelJSONImport,
|
||||
JSONImportPreview,
|
||||
JSONImportCommitResult,
|
||||
} from '../api/jsonImport';
|
||||
|
||||
/**
|
||||
* Hook for managing JSON import workflow.
|
||||
* Provides upload, commit, and cancel functionality with state management.
|
||||
*/
|
||||
export function useJSONImport() {
|
||||
const queryClient = useQueryClient();
|
||||
const [preview, setPreview] = useState<JSONImportPreview | null>(null);
|
||||
const [sessionId, setSessionId] = useState<string | null>(null);
|
||||
const [commitResult, setCommitResult] = useState<JSONImportCommitResult | null>(null);
|
||||
|
||||
const uploadMutation = useMutation({
|
||||
mutationFn: uploadJSONExport,
|
||||
onSuccess: (data) => {
|
||||
setPreview(data);
|
||||
setSessionId(data.session.id);
|
||||
},
|
||||
});
|
||||
|
||||
const commitMutation = useMutation({
|
||||
mutationFn: ({
|
||||
resolutions,
|
||||
names,
|
||||
}: {
|
||||
resolutions: Record<string, string>;
|
||||
names: Record<string, string>;
|
||||
}) => {
|
||||
if (!sessionId) throw new Error('No active session');
|
||||
return commitJSONImport(sessionId, resolutions, names);
|
||||
},
|
||||
onSuccess: (data) => {
|
||||
setCommitResult(data);
|
||||
setPreview(null);
|
||||
setSessionId(null);
|
||||
queryClient.invalidateQueries({ queryKey: ['proxy-hosts'] });
|
||||
},
|
||||
});
|
||||
|
||||
const cancelMutation = useMutation({
|
||||
mutationFn: cancelJSONImport,
|
||||
onSuccess: () => {
|
||||
setPreview(null);
|
||||
setSessionId(null);
|
||||
},
|
||||
});
|
||||
|
||||
const clearCommitResult = () => {
|
||||
setCommitResult(null);
|
||||
};
|
||||
|
||||
const reset = () => {
|
||||
setPreview(null);
|
||||
setSessionId(null);
|
||||
setCommitResult(null);
|
||||
};
|
||||
|
||||
return {
|
||||
preview,
|
||||
sessionId,
|
||||
loading: uploadMutation.isPending,
|
||||
error: uploadMutation.error,
|
||||
upload: uploadMutation.mutateAsync,
|
||||
commit: (resolutions: Record<string, string>, names: Record<string, string>) =>
|
||||
commitMutation.mutateAsync({ resolutions, names }),
|
||||
committing: commitMutation.isPending,
|
||||
commitError: commitMutation.error,
|
||||
commitResult,
|
||||
clearCommitResult,
|
||||
cancel: cancelMutation.mutateAsync,
|
||||
cancelling: cancelMutation.isPending,
|
||||
reset,
|
||||
};
|
||||
}
|
||||
|
||||
export type { JSONImportPreview, JSONImportCommitResult };
|
||||
@@ -0,0 +1,84 @@
|
||||
import { useState } from 'react';
|
||||
import { useMutation, useQueryClient } from '@tanstack/react-query';
|
||||
import {
|
||||
uploadNPMExport,
|
||||
commitNPMImport,
|
||||
cancelNPMImport,
|
||||
NPMImportPreview,
|
||||
NPMImportCommitResult,
|
||||
} from '../api/npmImport';
|
||||
|
||||
/**
|
||||
* Hook for managing NPM import workflow.
|
||||
* Provides upload, commit, and cancel functionality with state management.
|
||||
*/
|
||||
export function useNPMImport() {
|
||||
const queryClient = useQueryClient();
|
||||
const [preview, setPreview] = useState<NPMImportPreview | null>(null);
|
||||
const [sessionId, setSessionId] = useState<string | null>(null);
|
||||
const [commitResult, setCommitResult] = useState<NPMImportCommitResult | null>(null);
|
||||
|
||||
const uploadMutation = useMutation({
|
||||
mutationFn: uploadNPMExport,
|
||||
onSuccess: (data) => {
|
||||
setPreview(data);
|
||||
setSessionId(data.session.id);
|
||||
},
|
||||
});
|
||||
|
||||
const commitMutation = useMutation({
|
||||
mutationFn: ({
|
||||
resolutions,
|
||||
names,
|
||||
}: {
|
||||
resolutions: Record<string, string>;
|
||||
names: Record<string, string>;
|
||||
}) => {
|
||||
if (!sessionId) throw new Error('No active session');
|
||||
return commitNPMImport(sessionId, resolutions, names);
|
||||
},
|
||||
onSuccess: (data) => {
|
||||
setCommitResult(data);
|
||||
setPreview(null);
|
||||
setSessionId(null);
|
||||
queryClient.invalidateQueries({ queryKey: ['proxy-hosts'] });
|
||||
},
|
||||
});
|
||||
|
||||
const cancelMutation = useMutation({
|
||||
mutationFn: cancelNPMImport,
|
||||
onSuccess: () => {
|
||||
setPreview(null);
|
||||
setSessionId(null);
|
||||
},
|
||||
});
|
||||
|
||||
const clearCommitResult = () => {
|
||||
setCommitResult(null);
|
||||
};
|
||||
|
||||
const reset = () => {
|
||||
setPreview(null);
|
||||
setSessionId(null);
|
||||
setCommitResult(null);
|
||||
};
|
||||
|
||||
return {
|
||||
preview,
|
||||
sessionId,
|
||||
loading: uploadMutation.isPending,
|
||||
error: uploadMutation.error,
|
||||
upload: uploadMutation.mutateAsync,
|
||||
commit: (resolutions: Record<string, string>, names: Record<string, string>) =>
|
||||
commitMutation.mutateAsync({ resolutions, names }),
|
||||
committing: commitMutation.isPending,
|
||||
commitError: commitMutation.error,
|
||||
commitResult,
|
||||
clearCommitResult,
|
||||
cancel: cancelMutation.mutateAsync,
|
||||
cancelling: cancelMutation.isPending,
|
||||
reset,
|
||||
};
|
||||
}
|
||||
|
||||
export type { NPMImportPreview, NPMImportCommitResult };
|
||||
@@ -69,6 +69,8 @@
|
||||
"accountManagement": "Account Management",
|
||||
"import": "Import",
|
||||
"caddyfile": "Caddyfile",
|
||||
"importNPM": "Import NPM",
|
||||
"importJSON": "Import JSON",
|
||||
"backups": "Backups",
|
||||
"logs": "Logs",
|
||||
"securityHeaders": "Security Headers",
|
||||
@@ -761,6 +763,38 @@
|
||||
"creatingBackup": "Creating backup...",
|
||||
"importing": "Importing CrowdSec..."
|
||||
},
|
||||
"importNPM": {
|
||||
"title": "Import from NPM",
|
||||
"description": "Import proxy hosts from Nginx Proxy Manager export",
|
||||
"enterContent": "Please paste NPM export JSON",
|
||||
"invalidJSON": "Invalid JSON format",
|
||||
"upload": "Upload & Preview",
|
||||
"import": "Import",
|
||||
"success": "Import completed successfully",
|
||||
"previewTitle": "Preview Import",
|
||||
"conflict": "Conflict",
|
||||
"new": "New",
|
||||
"skip": "Skip",
|
||||
"keep": "Keep Existing",
|
||||
"replace": "Replace",
|
||||
"cancelConfirm": "Are you sure you want to cancel this import?"
|
||||
},
|
||||
"importJSON": {
|
||||
"title": "Import from JSON",
|
||||
"description": "Import configuration from JSON export",
|
||||
"enterContent": "Please paste JSON configuration",
|
||||
"invalidJSON": "Invalid JSON format",
|
||||
"upload": "Upload & Preview",
|
||||
"import": "Import",
|
||||
"success": "Import completed successfully",
|
||||
"previewTitle": "Preview Import",
|
||||
"conflict": "Conflict",
|
||||
"new": "New",
|
||||
"skip": "Skip",
|
||||
"keep": "Keep Existing",
|
||||
"replace": "Replace",
|
||||
"cancelConfirm": "Are you sure you want to cancel this import?"
|
||||
},
|
||||
"systemSettings": {
|
||||
"title": "System Settings",
|
||||
"settingsSaved": "System settings saved",
|
||||
|
||||
@@ -0,0 +1,312 @@
|
||||
import { useState } from 'react'
|
||||
import { useNavigate } from 'react-router-dom'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { createBackup } from '../api/backups'
|
||||
import { useJSONImport } from '../hooks/useJSONImport'
|
||||
import ImportSuccessModal from '../components/dialogs/ImportSuccessModal'
|
||||
|
||||
export default function ImportJSON() {
|
||||
const { t } = useTranslation()
|
||||
const navigate = useNavigate()
|
||||
const {
|
||||
preview,
|
||||
loading,
|
||||
error,
|
||||
upload,
|
||||
commit,
|
||||
committing,
|
||||
commitResult,
|
||||
clearCommitResult,
|
||||
cancel,
|
||||
reset,
|
||||
} = useJSONImport()
|
||||
const [content, setContent] = useState('')
|
||||
const [showReview, setShowReview] = useState(false)
|
||||
const [showSuccessModal, setShowSuccessModal] = useState(false)
|
||||
const [resolutions, setResolutions] = useState<Record<string, string>>({})
|
||||
const [names] = useState<Record<string, string>>({})
|
||||
|
||||
  // Validates the pasted text as JSON, then uploads it for a server-side
  // preview and switches to the review step on success.
  const handleUpload = async () => {
    if (!content.trim()) {
      return
    }

    // Client-side sanity check so the user gets immediate feedback on
    // malformed JSON before a server round-trip.
    try {
      JSON.parse(content)
    } catch {
      alert(t('importJSON.invalidJSON'))
      return
    }

    try {
      await upload(content)
      setShowReview(true)
    } catch {
      // Error is handled by hook
    }
  }

  // Reads the selected file and places its text into the textarea state.
  const handleFileUpload = async (e: React.ChangeEvent<HTMLInputElement>) => {
    const file = e.target.files?.[0]
    if (!file) return

    const text = await file.text()
    setContent(text)
  }

  // Takes a safety backup, then commits the import with the chosen
  // conflict resolutions. On success, clears the form and shows the
  // success modal.
  const handleCommit = async () => {
    try {
      await createBackup()
      await commit(resolutions, names)
      setContent('')
      setShowReview(false)
      setShowSuccessModal(true)
    } catch {
      // Error is handled by hook
    }
  }

  // Dismisses the success modal and clears the stored commit result.
  const handleCloseSuccessModal = () => {
    setShowSuccessModal(false)
    clearCommitResult()
  }

  // Confirms with the user, then cancels the server-side import session
  // and returns to the input step.
  const handleCancel = async () => {
    if (confirm(t('importJSON.cancelConfirm'))) {
      try {
        await cancel()
        setShowReview(false)
        reset()
      } catch {
        // Error is handled by hook
      }
    }
  }

  // Records the user's resolution choice ('skip' | 'keep' | 'replace')
  // for a conflicting domain.
  const handleResolutionChange = (domain: string, resolution: string) => {
    setResolutions((prev) => ({ ...prev, [domain]: resolution }))
  }
|
||||
|
||||
return (
|
||||
<div className="p-8">
|
||||
<h1 className="text-3xl font-bold text-white mb-6">{t('importJSON.title')}</h1>
|
||||
|
||||
{error && (
|
||||
<div
|
||||
className="bg-red-900/20 border border-red-500 text-red-400 px-4 py-3 rounded mb-6"
|
||||
role="alert"
|
||||
>
|
||||
{error.message}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!showReview && (
|
||||
<div className="bg-dark-card rounded-lg border border-gray-800 p-6">
|
||||
<div className="mb-6">
|
||||
<h2 className="text-xl font-semibold text-white mb-2">
|
||||
{t('importJSON.title')}
|
||||
</h2>
|
||||
<p className="text-gray-400 text-sm">{t('importJSON.description')}</p>
|
||||
</div>
|
||||
|
||||
<div className="space-y-4">
|
||||
<div>
|
||||
<label
|
||||
htmlFor="json-file-upload"
|
||||
className="block text-sm font-medium text-gray-300 mb-2"
|
||||
>
|
||||
{t('common.upload')}
|
||||
</label>
|
||||
<input
|
||||
id="json-file-upload"
|
||||
type="file"
|
||||
accept=".json,application/json"
|
||||
onChange={handleFileUpload}
|
||||
className="w-full text-sm text-gray-400 file:mr-4 file:py-2 file:px-4 file:rounded-lg file:border-0 file:text-sm file:font-medium file:bg-blue-active file:text-white hover:file:bg-blue-hover file:cursor-pointer cursor-pointer"
|
||||
data-testid="json-import-dropzone"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-4">
|
||||
<div className="flex-1 border-t border-gray-700" />
|
||||
<span className="text-gray-500 text-sm">
|
||||
{t('importCaddy.orPasteContent')}
|
||||
</span>
|
||||
<div className="flex-1 border-t border-gray-700" />
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label
|
||||
htmlFor="json-content"
|
||||
className="block text-sm font-medium text-gray-300 mb-2"
|
||||
>
|
||||
{t('importJSON.enterContent')}
|
||||
</label>
|
||||
<textarea
|
||||
id="json-content"
|
||||
value={content}
|
||||
onChange={(e) => setContent(e.target.value)}
|
||||
className="w-full h-96 bg-gray-900 border border-gray-700 rounded-lg p-4 text-white font-mono text-sm focus:outline-none focus:ring-2 focus:ring-blue-500"
|
||||
placeholder={`{
|
||||
"proxy_hosts": [
|
||||
{
|
||||
"domain_names": ["example.com"],
|
||||
"forward_host": "192.168.1.100",
|
||||
"forward_port": 8080,
|
||||
"forward_scheme": "http"
|
||||
}
|
||||
]
|
||||
}`}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<button
|
||||
type="button"
|
||||
onClick={handleUpload}
|
||||
disabled={loading || !content.trim()}
|
||||
className="px-6 py-2 bg-blue-active hover:bg-blue-hover text-white rounded-lg font-medium transition-colors disabled:opacity-50"
|
||||
>
|
||||
{loading ? t('common.loading') : t('importJSON.upload')}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{showReview && preview?.preview && (
|
||||
<div className="bg-dark-card rounded-lg border border-gray-800 p-6">
|
||||
<h2 className="text-xl font-semibold text-white mb-4">
|
||||
{t('importJSON.previewTitle')}
|
||||
</h2>
|
||||
|
||||
{preview.preview.errors.length > 0 && (
|
||||
<div
|
||||
className="bg-red-900/20 border border-red-500 text-red-400 px-4 py-3 rounded mb-4"
|
||||
role="alert"
|
||||
>
|
||||
<ul className="list-disc list-inside">
|
||||
{preview.preview.errors.map((err, idx) => (
|
||||
<li key={idx}>{err}</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="overflow-x-auto mb-6">
|
||||
<table className="w-full text-sm text-left text-gray-300">
|
||||
<caption className="sr-only">JSON Import Preview</caption>
|
||||
<thead className="text-xs uppercase bg-gray-800 text-gray-400">
|
||||
<tr>
|
||||
<th scope="col" className="px-4 py-3">
|
||||
{t('proxyHosts.domainNames')}
|
||||
</th>
|
||||
<th scope="col" className="px-4 py-3">
|
||||
{t('proxyHosts.forwardHost')}
|
||||
</th>
|
||||
<th scope="col" className="px-4 py-3">
|
||||
{t('proxyHosts.forwardPort')}
|
||||
</th>
|
||||
<th scope="col" className="px-4 py-3">
|
||||
{t('proxyHosts.sslForced')}
|
||||
</th>
|
||||
<th scope="col" className="px-4 py-3">
|
||||
{t('common.status')}
|
||||
</th>
|
||||
{preview.preview.conflicts.length > 0 && (
|
||||
<th scope="col" className="px-4 py-3">
|
||||
{t('common.actions')}
|
||||
</th>
|
||||
)}
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{preview.preview.hosts.map((host, idx) => {
|
||||
const isConflict = preview.preview.conflicts.includes(
|
||||
host.domain_names
|
||||
)
|
||||
return (
|
||||
<tr key={idx} className="border-b border-gray-700">
|
||||
<td className="px-4 py-3">{host.domain_names}</td>
|
||||
<td className="px-4 py-3">
|
||||
{host.forward_scheme}://{host.forward_host}
|
||||
</td>
|
||||
<td className="px-4 py-3">{host.forward_port}</td>
|
||||
<td className="px-4 py-3">
|
||||
{host.ssl_forced ? t('common.yes') : t('common.no')}
|
||||
</td>
|
||||
<td className="px-4 py-3">
|
||||
{isConflict ? (
|
||||
<span className="text-yellow-400">
|
||||
{t('importJSON.conflict')}
|
||||
</span>
|
||||
) : (
|
||||
<span className="text-green-400">
|
||||
{t('importJSON.new')}
|
||||
</span>
|
||||
)}
|
||||
</td>
|
||||
{preview.preview.conflicts.length > 0 && (
|
||||
<td className="px-4 py-3">
|
||||
{isConflict && (
|
||||
<select
|
||||
value={resolutions[host.domain_names] || 'skip'}
|
||||
onChange={(e) =>
|
||||
handleResolutionChange(
|
||||
host.domain_names,
|
||||
e.target.value
|
||||
)
|
||||
}
|
||||
className="bg-gray-800 border border-gray-600 text-white rounded px-2 py-1 text-sm"
|
||||
aria-label={`Resolution for ${host.domain_names}`}
|
||||
>
|
||||
<option value="skip">{t('importJSON.skip')}</option>
|
||||
<option value="keep">{t('importJSON.keep')}</option>
|
||||
<option value="replace">
|
||||
{t('importJSON.replace')}
|
||||
</option>
|
||||
</select>
|
||||
)}
|
||||
</td>
|
||||
)}
|
||||
</tr>
|
||||
)
|
||||
})}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div className="flex gap-4">
|
||||
<button
|
||||
type="button"
|
||||
onClick={handleCommit}
|
||||
disabled={committing}
|
||||
className="px-6 py-2 bg-green-600 hover:bg-green-700 text-white rounded-lg font-medium transition-colors disabled:opacity-50"
|
||||
>
|
||||
{committing ? t('common.loading') : t('importJSON.import')}
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
onClick={handleCancel}
|
||||
className="px-6 py-2 bg-gray-700 hover:bg-gray-600 text-white rounded-lg font-medium transition-colors"
|
||||
>
|
||||
{t('common.cancel')}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<ImportSuccessModal
|
||||
visible={showSuccessModal}
|
||||
onClose={handleCloseSuccessModal}
|
||||
onNavigateDashboard={() => {
|
||||
handleCloseSuccessModal()
|
||||
navigate('/')
|
||||
}}
|
||||
onNavigateHosts={() => {
|
||||
handleCloseSuccessModal()
|
||||
navigate('/proxy-hosts')
|
||||
}}
|
||||
results={commitResult}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,312 @@
|
||||
import { useState } from 'react'
|
||||
import { useNavigate } from 'react-router-dom'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { createBackup } from '../api/backups'
|
||||
import { useNPMImport } from '../hooks/useNPMImport'
|
||||
import ImportSuccessModal from '../components/dialogs/ImportSuccessModal'
|
||||
|
||||
export default function ImportNPM() {
|
||||
const { t } = useTranslation()
|
||||
const navigate = useNavigate()
|
||||
const {
|
||||
preview,
|
||||
loading,
|
||||
error,
|
||||
upload,
|
||||
commit,
|
||||
committing,
|
||||
commitResult,
|
||||
clearCommitResult,
|
||||
cancel,
|
||||
reset,
|
||||
} = useNPMImport()
|
||||
const [content, setContent] = useState('')
|
||||
const [showReview, setShowReview] = useState(false)
|
||||
const [showSuccessModal, setShowSuccessModal] = useState(false)
|
||||
const [resolutions, setResolutions] = useState<Record<string, string>>({})
|
||||
const [names] = useState<Record<string, string>>({})
|
||||
|
||||
const handleUpload = async () => {
|
||||
if (!content.trim()) {
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
JSON.parse(content)
|
||||
} catch {
|
||||
alert(t('importNPM.invalidJSON'))
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
await upload(content)
|
||||
setShowReview(true)
|
||||
} catch {
|
||||
// Error is handled by hook
|
||||
}
|
||||
}
|
||||
|
||||
const handleFileUpload = async (e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
const file = e.target.files?.[0]
|
||||
if (!file) return
|
||||
|
||||
const text = await file.text()
|
||||
setContent(text)
|
||||
}
|
||||
|
||||
const handleCommit = async () => {
|
||||
try {
|
||||
await createBackup()
|
||||
await commit(resolutions, names)
|
||||
setContent('')
|
||||
setShowReview(false)
|
||||
setShowSuccessModal(true)
|
||||
} catch {
|
||||
// Error is handled by hook
|
||||
}
|
||||
}
|
||||
|
||||
const handleCloseSuccessModal = () => {
|
||||
setShowSuccessModal(false)
|
||||
clearCommitResult()
|
||||
}
|
||||
|
||||
const handleCancel = async () => {
|
||||
if (confirm(t('importNPM.cancelConfirm'))) {
|
||||
try {
|
||||
await cancel()
|
||||
setShowReview(false)
|
||||
reset()
|
||||
} catch {
|
||||
// Error is handled by hook
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const handleResolutionChange = (domain: string, resolution: string) => {
|
||||
setResolutions((prev) => ({ ...prev, [domain]: resolution }))
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="p-8">
|
||||
<h1 className="text-3xl font-bold text-white mb-6">{t('importNPM.title')}</h1>
|
||||
|
||||
{error && (
|
||||
<div
|
||||
className="bg-red-900/20 border border-red-500 text-red-400 px-4 py-3 rounded mb-6"
|
||||
role="alert"
|
||||
>
|
||||
{error.message}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!showReview && (
|
||||
<div className="bg-dark-card rounded-lg border border-gray-800 p-6">
|
||||
<div className="mb-6">
|
||||
<h2 className="text-xl font-semibold text-white mb-2">
|
||||
{t('importNPM.title')}
|
||||
</h2>
|
||||
<p className="text-gray-400 text-sm">{t('importNPM.description')}</p>
|
||||
</div>
|
||||
|
||||
<div className="space-y-4">
|
||||
<div>
|
||||
<label
|
||||
htmlFor="npm-file-upload"
|
||||
className="block text-sm font-medium text-gray-300 mb-2"
|
||||
>
|
||||
{t('common.upload')}
|
||||
</label>
|
||||
<input
|
||||
id="npm-file-upload"
|
||||
type="file"
|
||||
accept=".json,application/json"
|
||||
onChange={handleFileUpload}
|
||||
className="w-full text-sm text-gray-400 file:mr-4 file:py-2 file:px-4 file:rounded-lg file:border-0 file:text-sm file:font-medium file:bg-blue-active file:text-white hover:file:bg-blue-hover file:cursor-pointer cursor-pointer"
|
||||
data-testid="npm-import-dropzone"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-4">
|
||||
<div className="flex-1 border-t border-gray-700" />
|
||||
<span className="text-gray-500 text-sm">
|
||||
{t('importCaddy.orPasteContent')}
|
||||
</span>
|
||||
<div className="flex-1 border-t border-gray-700" />
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label
|
||||
htmlFor="npm-content"
|
||||
className="block text-sm font-medium text-gray-300 mb-2"
|
||||
>
|
||||
{t('importNPM.enterContent')}
|
||||
</label>
|
||||
<textarea
|
||||
id="npm-content"
|
||||
value={content}
|
||||
onChange={(e) => setContent(e.target.value)}
|
||||
className="w-full h-96 bg-gray-900 border border-gray-700 rounded-lg p-4 text-white font-mono text-sm focus:outline-none focus:ring-2 focus:ring-blue-500"
|
||||
placeholder={`{
|
||||
"proxy_hosts": [
|
||||
{
|
||||
"domain_names": ["example.com"],
|
||||
"forward_host": "192.168.1.100",
|
||||
"forward_port": 8080,
|
||||
"forward_scheme": "http"
|
||||
}
|
||||
]
|
||||
}`}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<button
|
||||
type="button"
|
||||
onClick={handleUpload}
|
||||
disabled={loading || !content.trim()}
|
||||
className="px-6 py-2 bg-blue-active hover:bg-blue-hover text-white rounded-lg font-medium transition-colors disabled:opacity-50"
|
||||
>
|
||||
{loading ? t('common.loading') : t('importNPM.upload')}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{showReview && preview?.preview && (
|
||||
<div className="bg-dark-card rounded-lg border border-gray-800 p-6">
|
||||
<h2 className="text-xl font-semibold text-white mb-4">
|
||||
{t('importNPM.previewTitle')}
|
||||
</h2>
|
||||
|
||||
{preview.preview.errors.length > 0 && (
|
||||
<div
|
||||
className="bg-red-900/20 border border-red-500 text-red-400 px-4 py-3 rounded mb-4"
|
||||
role="alert"
|
||||
>
|
||||
<ul className="list-disc list-inside">
|
||||
{preview.preview.errors.map((err, idx) => (
|
||||
<li key={idx}>{err}</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="overflow-x-auto mb-6">
|
||||
<table className="w-full text-sm text-left text-gray-300">
|
||||
<caption className="sr-only">NPM Import Preview</caption>
|
||||
<thead className="text-xs uppercase bg-gray-800 text-gray-400">
|
||||
<tr>
|
||||
<th scope="col" className="px-4 py-3">
|
||||
{t('proxyHosts.domainNames')}
|
||||
</th>
|
||||
<th scope="col" className="px-4 py-3">
|
||||
{t('proxyHosts.forwardHost')}
|
||||
</th>
|
||||
<th scope="col" className="px-4 py-3">
|
||||
{t('proxyHosts.forwardPort')}
|
||||
</th>
|
||||
<th scope="col" className="px-4 py-3">
|
||||
{t('proxyHosts.sslForced')}
|
||||
</th>
|
||||
<th scope="col" className="px-4 py-3">
|
||||
{t('common.status')}
|
||||
</th>
|
||||
{preview.preview.conflicts.length > 0 && (
|
||||
<th scope="col" className="px-4 py-3">
|
||||
{t('common.actions')}
|
||||
</th>
|
||||
)}
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{preview.preview.hosts.map((host, idx) => {
|
||||
const isConflict = preview.preview.conflicts.includes(
|
||||
host.domain_names
|
||||
)
|
||||
return (
|
||||
<tr key={idx} className="border-b border-gray-700">
|
||||
<td className="px-4 py-3">{host.domain_names}</td>
|
||||
<td className="px-4 py-3">
|
||||
{host.forward_scheme}://{host.forward_host}
|
||||
</td>
|
||||
<td className="px-4 py-3">{host.forward_port}</td>
|
||||
<td className="px-4 py-3">
|
||||
{host.ssl_forced ? t('common.yes') : t('common.no')}
|
||||
</td>
|
||||
<td className="px-4 py-3">
|
||||
{isConflict ? (
|
||||
<span className="text-yellow-400">
|
||||
{t('importNPM.conflict')}
|
||||
</span>
|
||||
) : (
|
||||
<span className="text-green-400">
|
||||
{t('importNPM.new')}
|
||||
</span>
|
||||
)}
|
||||
</td>
|
||||
{preview.preview.conflicts.length > 0 && (
|
||||
<td className="px-4 py-3">
|
||||
{isConflict && (
|
||||
<select
|
||||
value={resolutions[host.domain_names] || 'skip'}
|
||||
onChange={(e) =>
|
||||
handleResolutionChange(
|
||||
host.domain_names,
|
||||
e.target.value
|
||||
)
|
||||
}
|
||||
className="bg-gray-800 border border-gray-600 text-white rounded px-2 py-1 text-sm"
|
||||
aria-label={`Resolution for ${host.domain_names}`}
|
||||
>
|
||||
<option value="skip">{t('importNPM.skip')}</option>
|
||||
<option value="keep">{t('importNPM.keep')}</option>
|
||||
<option value="replace">
|
||||
{t('importNPM.replace')}
|
||||
</option>
|
||||
</select>
|
||||
)}
|
||||
</td>
|
||||
)}
|
||||
</tr>
|
||||
)
|
||||
})}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div className="flex gap-4">
|
||||
<button
|
||||
type="button"
|
||||
onClick={handleCommit}
|
||||
disabled={committing}
|
||||
className="px-6 py-2 bg-green-600 hover:bg-green-700 text-white rounded-lg font-medium transition-colors disabled:opacity-50"
|
||||
>
|
||||
{committing ? t('common.loading') : t('importNPM.import')}
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
onClick={handleCancel}
|
||||
className="px-6 py-2 bg-gray-700 hover:bg-gray-600 text-white rounded-lg font-medium transition-colors"
|
||||
>
|
||||
{t('common.cancel')}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<ImportSuccessModal
|
||||
visible={showSuccessModal}
|
||||
onClose={handleCloseSuccessModal}
|
||||
onNavigateDashboard={() => {
|
||||
handleCloseSuccessModal()
|
||||
navigate('/')
|
||||
}}
|
||||
onNavigateHosts={() => {
|
||||
handleCloseSuccessModal()
|
||||
navigate('/proxy-hosts')
|
||||
}}
|
||||
results={commitResult}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -166,11 +166,10 @@ test.describe('Import to Production E2E', () => {
|
||||
});
|
||||
|
||||
// ===========================================================================
|
||||
// Group B: NPM Import (4 tests) - SKIPPED: Route does not exist
|
||||
// The /tasks/import/npm route is not implemented in the application.
|
||||
// Group B: NPM Import (4 tests)
|
||||
// ===========================================================================
|
||||
test.describe('Group B: NPM Import', () => {
|
||||
test.skip('should display NPM import page', async ({
|
||||
test('should display NPM import page', async ({
|
||||
page,
|
||||
adminUser,
|
||||
}) => {
|
||||
@@ -186,7 +185,7 @@ test.describe('Import to Production E2E', () => {
|
||||
});
|
||||
});
|
||||
|
||||
test.skip('should parse NPM export JSON', async ({
|
||||
test('should parse NPM export JSON', async ({
|
||||
page,
|
||||
adminUser,
|
||||
}) => {
|
||||
@@ -202,7 +201,7 @@ test.describe('Import to Production E2E', () => {
|
||||
});
|
||||
});
|
||||
|
||||
test.skip('should preview NPM import results', async ({
|
||||
test('should preview NPM import results', async ({
|
||||
page,
|
||||
adminUser,
|
||||
}) => {
|
||||
@@ -218,7 +217,7 @@ test.describe('Import to Production E2E', () => {
|
||||
});
|
||||
});
|
||||
|
||||
test.skip('should import NPM proxy hosts and access lists', async ({
|
||||
test('should import NPM proxy hosts and access lists', async ({
|
||||
page,
|
||||
adminUser,
|
||||
}) => {
|
||||
@@ -236,12 +235,10 @@ test.describe('Import to Production E2E', () => {
|
||||
});
|
||||
|
||||
// ===========================================================================
|
||||
// Group C: JSON/Config Import (4 tests) - PARTIALLY SKIPPED
|
||||
// The /tasks/import/json route is not implemented. Tests using generic
|
||||
// /tasks/import/caddyfile are kept active for conflict handling scenarios.
|
||||
// Group C: JSON/Config Import (4 tests)
|
||||
// ===========================================================================
|
||||
test.describe('Group C: JSON/Config Import', () => {
|
||||
test.skip('should display JSON import page', async ({
|
||||
test('should display JSON import page', async ({
|
||||
page,
|
||||
adminUser,
|
||||
}) => {
|
||||
@@ -257,7 +254,7 @@ test.describe('Import to Production E2E', () => {
|
||||
});
|
||||
});
|
||||
|
||||
test.skip('should validate JSON schema before import', async ({
|
||||
test('should validate JSON schema before import', async ({
|
||||
page,
|
||||
adminUser,
|
||||
}) => {
|
||||
|
||||
@@ -331,9 +331,9 @@ test.describe('SMTP Settings', () => {
|
||||
|
||||
/**
|
||||
* Test: Update existing SMTP configuration
|
||||
* Note: Skip - SMTP save not persisting correctly (backend issue, not test issue)
|
||||
* Priority: P0
|
||||
*/
|
||||
test.skip('should update existing SMTP configuration', async ({ page }) => {
|
||||
test('should update existing SMTP configuration', async ({ page }) => {
|
||||
const hostInput = page.locator('#smtp-host');
|
||||
const saveButton = page.getByRole('button', { name: /save/i }).last();
|
||||
|
||||
|
||||
Reference in New Issue
Block a user