Merge pull request #785 from Wikid82/feature/beta-release

Save and Import Functions Hotfix
This commit is contained in:
Jeremy
2026-03-02 17:28:03 -05:00
committed by GitHub
50 changed files with 1761 additions and 98 deletions

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

34
.github/renovate.json vendored
View File

@@ -117,13 +117,45 @@
{
"customType": "regex",
"description": "Track GO_VERSION in Actions workflows",
"fileMatch": ["^\\.github/workflows/.*\\.yml$"],
"managerFilePatterns": ["/^\\.github/workflows/.*\\.yml$/"],
"matchStrings": [
"GO_VERSION: ['\"]?(?<currentValue>[\\d\\.]+)['\"]?"
],
"depNameTemplate": "golang/go",
"datasourceTemplate": "golang-version",
"versioningTemplate": "semver"
},
{
"customType": "regex",
"description": "Track Syft version in workflows and scripts",
"managerFilePatterns": [
"/^\\.github/workflows/nightly-build\\.yml$/",
"/^\\.github/skills/security-scan-docker-image-scripts/run\\.sh$/"
],
"matchStrings": [
"SYFT_VERSION=\\\"v(?<currentValue>[^\\\"\\s]+)\\\"",
"set_default_env \\\"SYFT_VERSION\\\" \\\"v(?<currentValue>[^\\\"]+)\\\""
],
"depNameTemplate": "anchore/syft",
"datasourceTemplate": "github-releases",
"versioningTemplate": "semver",
"extractVersionTemplate": "^v(?<version>.*)$"
},
{
"customType": "regex",
"description": "Track Grype version in workflows and scripts",
"managerFilePatterns": [
"/^\\.github/workflows/supply-chain-pr\\.yml$/",
"/^\\.github/skills/security-scan-docker-image-scripts/run\\.sh$/"
],
"matchStrings": [
"anchore/grype/main/install\\.sh \\| sh -s -- -b /usr/local/bin v(?<currentValue>[0-9]+\\.[0-9]+\\.[0-9]+)",
"set_default_env \\\"GRYPE_VERSION\\\" \\\"v(?<currentValue>[^\\\"]+)\\\""
],
"depNameTemplate": "anchore/grype",
"datasourceTemplate": "github-releases",
"versioningTemplate": "semver",
"extractVersionTemplate": "^v(?<version>.*)$"
}
],

View File

@@ -46,7 +46,7 @@ jobs:
run: bash scripts/ci/check-codeql-parity.sh
- name: Initialize CodeQL
uses: github/codeql-action/init@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4
uses: github/codeql-action/init@c793b717bc78562f491db7b0e93a3a178b099162 # v4
with:
languages: ${{ matrix.language }}
queries: security-and-quality
@@ -86,10 +86,10 @@ jobs:
run: mkdir -p sarif-results
- name: Autobuild
uses: github/codeql-action/autobuild@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4
uses: github/codeql-action/autobuild@c793b717bc78562f491db7b0e93a3a178b099162 # v4
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4
uses: github/codeql-action/analyze@c793b717bc78562f491db7b0e93a3a178b099162 # v4
with:
category: "/language:${{ matrix.language }}"
output: sarif-results/${{ matrix.language }}

View File

@@ -531,7 +531,7 @@ jobs:
- name: Run Trivy scan (table output)
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
with:
image-ref: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}
format: 'table'
@@ -542,7 +542,7 @@ jobs:
- name: Run Trivy vulnerability scanner (SARIF)
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.skip.outputs.is_feature_push != 'true'
id: trivy
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
with:
image-ref: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build-and-push.outputs.digest }}
format: 'sarif'
@@ -562,7 +562,7 @@ jobs:
- name: Upload Trivy results
if: env.TRIGGER_EVENT != 'pull_request' && steps.skip.outputs.skip_build != 'true' && steps.trivy-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4.32.5
with:
sarif_file: 'trivy-results.sarif'
category: '.github/workflows/docker-build.yml:build-and-push'
@@ -689,7 +689,7 @@ jobs:
echo "✅ Image freshness validated"
- name: Run Trivy scan on PR image (table output)
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
with:
image-ref: ${{ steps.pr-image.outputs.image_ref }}
format: 'table'
@@ -698,7 +698,7 @@ jobs:
- name: Run Trivy scan on PR image (SARIF - blocking)
id: trivy-scan
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
with:
image-ref: ${{ steps.pr-image.outputs.image_ref }}
format: 'sarif'
@@ -719,14 +719,14 @@ jobs:
- name: Upload Trivy scan results
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4.32.5
with:
sarif_file: 'trivy-pr-results.sarif'
category: 'docker-pr-image'
- name: Upload Trivy compatibility results (docker-build category)
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4.32.5
with:
sarif_file: 'trivy-pr-results.sarif'
category: '.github/workflows/docker-build.yml:build-and-push'
@@ -734,7 +734,7 @@ jobs:
- name: Upload Trivy compatibility results (docker-publish alias)
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4.32.5
with:
sarif_file: 'trivy-pr-results.sarif'
category: '.github/workflows/docker-publish.yml:build-and-push'
@@ -742,7 +742,7 @@ jobs:
- name: Upload Trivy compatibility results (nightly alias)
if: always() && steps.trivy-pr-check.outputs.exists == 'true'
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4.32.5
with:
sarif_file: 'trivy-pr-results.sarif'
category: 'trivy-nightly'

View File

@@ -396,14 +396,14 @@ jobs:
severity-cutoff: high
- name: Scan with Trivy
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
with:
image-ref: ${{ env.GHCR_REGISTRY }}/${{ env.IMAGE_NAME }}@${{ needs.build-and-push-nightly.outputs.digest }}
format: 'sarif'
output: 'trivy-nightly.sarif'
- name: Upload Trivy results
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4.32.5
with:
sarif_file: 'trivy-nightly.sarif'
category: 'trivy-nightly'

View File

@@ -4,6 +4,7 @@ on:
pull_request:
push:
branches:
- nightly
- main
concurrency:

View File

@@ -362,7 +362,7 @@ jobs:
- name: Run Trivy filesystem scan (SARIF output)
if: steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request'
# aquasecurity/trivy-action v0.33.1
uses: aquasecurity/trivy-action@4c61e6329bab9be735ca35291551614bc663dff3
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478
with:
scan-type: 'fs'
scan-ref: ${{ steps.extract.outputs.binary_path }}
@@ -385,7 +385,7 @@ jobs:
- name: Upload Trivy SARIF to GitHub Security
if: always() && steps.trivy-sarif-check.outputs.exists == 'true'
# github/codeql-action v4
uses: github/codeql-action/upload-sarif@0ec47d036c68ae0cf94c629009b1029407111281
uses: github/codeql-action/upload-sarif@b895512248b1b5b0089ac3c33ecf123c2cd6f373
with:
sarif_file: 'trivy-binary-results.sarif'
category: ${{ steps.pr-info.outputs.is_push == 'true' && format('security-scan-{0}', github.event_name == 'workflow_run' && github.event.workflow_run.head_branch || github.ref_name) || format('security-scan-pr-{0}', steps.pr-info.outputs.pr_number) }}
@@ -394,7 +394,7 @@ jobs:
- name: Run Trivy filesystem scan (fail on CRITICAL/HIGH)
if: steps.check-artifact.outputs.artifact_exists == 'true' || github.event_name == 'push' || github.event_name == 'pull_request'
# aquasecurity/trivy-action v0.33.1
uses: aquasecurity/trivy-action@4c61e6329bab9be735ca35291551614bc663dff3
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478
with:
scan-type: 'fs'
scan-ref: ${{ steps.extract.outputs.binary_path }}

View File

@@ -88,7 +88,7 @@ jobs:
BASE_IMAGE=${{ steps.base-image.outputs.digest }}
- name: Run Trivy vulnerability scanner (CRITICAL+HIGH)
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
with:
image-ref: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build.outputs.digest }}
format: 'table'
@@ -98,7 +98,7 @@ jobs:
- name: Run Trivy vulnerability scanner (SARIF)
id: trivy-sarif
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
with:
image-ref: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build.outputs.digest }}
format: 'sarif'
@@ -106,12 +106,12 @@ jobs:
severity: 'CRITICAL,HIGH,MEDIUM'
- name: Upload Trivy results to GitHub Security
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4.32.5
with:
sarif_file: 'trivy-weekly-results.sarif'
- name: Run Trivy vulnerability scanner (JSON for artifact)
uses: aquasecurity/trivy-action@e368e328979b113139d6f9068e03accaed98a518 # 0.34.1
uses: aquasecurity/trivy-action@97e0b3872f55f89b95b2f65b3dbab56962816478 # 0.34.2
with:
image-ref: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@${{ steps.build.outputs.digest }}
format: 'json'

View File

@@ -362,7 +362,7 @@ jobs:
- name: Upload SARIF to GitHub Security
if: steps.check-artifact.outputs.artifact_found == 'true'
uses: github/codeql-action/upload-sarif@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4
uses: github/codeql-action/upload-sarif@c793b717bc78562f491db7b0e93a3a178b099162 # v4
continue-on-error: true
with:
sarif_file: grype-results.sarif

View File

@@ -1 +1 @@
v0.19.1
v0.21.0

View File

@@ -85,10 +85,10 @@ require (
go.mongodb.org/mongo-driver/v2 v2.5.0 // indirect
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.65.0 // indirect
go.opentelemetry.io/otel v1.40.0 // indirect
go.opentelemetry.io/otel v1.41.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 // indirect
go.opentelemetry.io/otel/metric v1.40.0 // indirect
go.opentelemetry.io/otel/trace v1.40.0 // indirect
go.opentelemetry.io/otel/metric v1.41.0 // indirect
go.opentelemetry.io/otel/trace v1.41.0 // indirect
go.yaml.in/yaml/v2 v2.4.3 // indirect
golang.org/x/arch v0.24.0 // indirect
golang.org/x/sys v0.41.0 // indirect

View File

@@ -178,20 +178,20 @@ go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.65.0 h1:7iP2uCb7sGddAr30RRS6xjKy7AZ2JtTOPA3oolgVSw8=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.65.0/go.mod h1:c7hN3ddxs/z6q9xwvfLPk+UHlWRQyaeR1LdgfL/66l0=
go.opentelemetry.io/otel v1.40.0 h1:oA5YeOcpRTXq6NN7frwmwFR0Cn3RhTVZvXsP4duvCms=
go.opentelemetry.io/otel v1.40.0/go.mod h1:IMb+uXZUKkMXdPddhwAHm6UfOwJyh4ct1ybIlV14J0g=
go.opentelemetry.io/otel v1.41.0 h1:YlEwVsGAlCvczDILpUXpIpPSL/VPugt7zHThEMLce1c=
go.opentelemetry.io/otel v1.41.0/go.mod h1:Yt4UwgEKeT05QbLwbyHXEwhnjxNO6D8L5PQP51/46dE=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 h1:GqRJVj7UmLjCVyVJ3ZFLdPRmhDUp2zFmQe3RHIOsw24=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0/go.mod h1:ri3aaHSmCTVYu2AWv44YMauwAQc0aqI9gHKIcSbI1pU=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 h1:aTL7F04bJHUlztTsNGJ2l+6he8c+y/b//eR0jjjemT4=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0/go.mod h1:kldtb7jDTeol0l3ewcmd8SDvx3EmIE7lyvqbasU3QC4=
go.opentelemetry.io/otel/metric v1.40.0 h1:rcZe317KPftE2rstWIBitCdVp89A2HqjkxR3c11+p9g=
go.opentelemetry.io/otel/metric v1.40.0/go.mod h1:ib/crwQH7N3r5kfiBZQbwrTge743UDc7DTFVZrrXnqc=
go.opentelemetry.io/otel/metric v1.41.0 h1:rFnDcs4gRzBcsO9tS8LCpgR0dxg4aaxWlJxCno7JlTQ=
go.opentelemetry.io/otel/metric v1.41.0/go.mod h1:xPvCwd9pU0VN8tPZYzDZV/BMj9CM9vs00GuBjeKhJps=
go.opentelemetry.io/otel/sdk v1.40.0 h1:KHW/jUzgo6wsPh9At46+h4upjtccTmuZCFAc9OJ71f8=
go.opentelemetry.io/otel/sdk v1.40.0/go.mod h1:Ph7EFdYvxq72Y8Li9q8KebuYUr2KoeyHx0DRMKrYBUE=
go.opentelemetry.io/otel/sdk/metric v1.40.0 h1:mtmdVqgQkeRxHgRv4qhyJduP3fYJRMX4AtAlbuWdCYw=
go.opentelemetry.io/otel/sdk/metric v1.40.0/go.mod h1:4Z2bGMf0KSK3uRjlczMOeMhKU2rhUqdWNoKcYrtcBPg=
go.opentelemetry.io/otel/trace v1.40.0 h1:WA4etStDttCSYuhwvEa8OP8I5EWu24lkOzp+ZYblVjw=
go.opentelemetry.io/otel/trace v1.40.0/go.mod h1:zeAhriXecNGP/s2SEG3+Y8X9ujcJOTqQ5RgdEJcawiA=
go.opentelemetry.io/otel/trace v1.41.0 h1:Vbk2co6bhj8L59ZJ6/xFTskY+tGAbOnCtQGVVa9TIN0=
go.opentelemetry.io/otel/trace v1.41.0/go.mod h1:U1NU4ULCoxeDKc09yCWdWe+3QoyweJcISEVa1RBzOis=
go.opentelemetry.io/proto/otlp v1.7.1 h1:gTOMpGDb0WTBOP8JaO72iL3auEZhVmAQg4ipjOVAtj4=
go.opentelemetry.io/proto/otlp v1.7.1/go.mod h1:b2rVh6rfI/s2pHWNlB7ILJcRALpcNDzKhACevjI+ZnE=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=

View File

@@ -340,6 +340,11 @@ func TestCommitAndCancel_InvalidSessionUUID(t *testing.T) {
r.ServeHTTP(wCommit, reqCommit)
assert.Equal(t, http.StatusBadRequest, wCommit.Code)
wCancelMissing := httptest.NewRecorder()
reqCancelMissing, _ := http.NewRequest(http.MethodDelete, "/api/v1/import/cancel", http.NoBody)
r.ServeHTTP(wCancelMissing, reqCancelMissing)
assert.Equal(t, http.StatusBadRequest, wCancelMissing.Code)
wCancel := httptest.NewRecorder()
reqCancel, _ := http.NewRequest(http.MethodDelete, "/api/v1/import/cancel?session_uuid=.", http.NoBody)
r.ServeHTTP(wCancel, reqCancel)

View File

@@ -310,6 +310,11 @@ func (h *JSONImportHandler) Cancel(c *gin.Context) {
return
}
if strings.TrimSpace(req.SessionUUID) == "" {
c.JSON(http.StatusBadRequest, gin.H{"error": "session_uuid required"})
return
}
// Clean up session if it exists
jsonImportSessionsMu.Lock()
delete(jsonImportSessions, req.SessionUUID)

View File

@@ -497,6 +497,62 @@ func TestJSONImportHandler_ConflictDetection(t *testing.T) {
assert.Contains(t, conflictDetails, "conflict.com")
}
func TestJSONImportHandler_Cancel_RequiresValidJSONBody(t *testing.T) {
db := setupJSONTestDB(t)
handler := NewJSONImportHandler(db)
gin.SetMode(gin.TestMode)
router := gin.New()
api := router.Group("/api/v1")
handler.RegisterRoutes(api)
t.Run("missing body", func(t *testing.T) {
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/cancel", http.NoBody)
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
assert.Equal(t, http.StatusBadRequest, w.Code)
})
t.Run("invalid json", func(t *testing.T) {
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/cancel", bytes.NewBufferString("{"))
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
assert.Equal(t, http.StatusBadRequest, w.Code)
})
t.Run("empty object payload", func(t *testing.T) {
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/cancel", bytes.NewBufferString("{}"))
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
assert.Equal(t, http.StatusBadRequest, w.Code)
var resp map[string]string
err := json.Unmarshal(w.Body.Bytes(), &resp)
require.NoError(t, err)
assert.Equal(t, "session_uuid required", resp["error"])
})
t.Run("missing session_uuid payload", func(t *testing.T) {
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/json/cancel", bytes.NewBufferString(`{"foo":"bar"}`))
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
assert.Equal(t, http.StatusBadRequest, w.Code)
var resp map[string]string
err := json.Unmarshal(w.Body.Bytes(), &resp)
require.NoError(t, err)
assert.Equal(t, "session_uuid required", resp["error"])
})
}
func TestJSONImportHandler_IsCharonFormat(t *testing.T) {
db := setupJSONTestDB(t)
handler := NewJSONImportHandler(db)

View File

@@ -4,6 +4,7 @@ import (
"encoding/json"
"fmt"
"net/http"
"strings"
"sync"
"github.com/gin-gonic/gin"
@@ -293,6 +294,11 @@ func (h *NPMImportHandler) Cancel(c *gin.Context) {
return
}
if strings.TrimSpace(req.SessionUUID) == "" {
c.JSON(http.StatusBadRequest, gin.H{"error": "session_uuid required"})
return
}
// Clean up session if it exists
npmImportSessionsMu.Lock()
delete(npmImportSessions, req.SessionUUID)

View File

@@ -453,6 +453,62 @@ func TestNPMImportHandler_Cancel(t *testing.T) {
assert.Equal(t, http.StatusNotFound, commitW.Code)
}
func TestNPMImportHandler_Cancel_RequiresValidJSONBody(t *testing.T) {
db := setupNPMTestDB(t)
handler := NewNPMImportHandler(db)
gin.SetMode(gin.TestMode)
router := gin.New()
api := router.Group("/api/v1")
handler.RegisterRoutes(api)
t.Run("missing body", func(t *testing.T) {
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/cancel", http.NoBody)
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
assert.Equal(t, http.StatusBadRequest, w.Code)
})
t.Run("invalid json", func(t *testing.T) {
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/cancel", bytes.NewBufferString("{"))
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
assert.Equal(t, http.StatusBadRequest, w.Code)
})
t.Run("empty object payload", func(t *testing.T) {
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/cancel", bytes.NewBufferString("{}"))
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
assert.Equal(t, http.StatusBadRequest, w.Code)
var resp map[string]string
err := json.Unmarshal(w.Body.Bytes(), &resp)
require.NoError(t, err)
assert.Equal(t, "session_uuid required", resp["error"])
})
t.Run("missing session_uuid payload", func(t *testing.T) {
req := httptest.NewRequest(http.MethodPost, "/api/v1/import/npm/cancel", bytes.NewBufferString(`{"foo":"bar"}`))
req.Header.Set("Content-Type", "application/json")
w := httptest.NewRecorder()
router.ServeHTTP(w, req)
assert.Equal(t, http.StatusBadRequest, w.Code)
var resp map[string]string
err := json.Unmarshal(w.Body.Bytes(), &resp)
require.NoError(t, err)
assert.Equal(t, "session_uuid required", resp["error"])
})
}
func TestNPMImportHandler_ConvertNPMToImportResult(t *testing.T) {
db := setupNPMTestDB(t)
handler := NewNPMImportHandler(db)

View File

@@ -0,0 +1,209 @@
package routes_test
import (
"fmt"
"sort"
"strings"
"testing"
"github.com/gin-gonic/gin"
)
type endpointInventoryEntry struct {
Name string
Method string
Path string
Source string
}
func backendImportRouteMatrix() []endpointInventoryEntry {
return []endpointInventoryEntry{
{Name: "Import status", Method: "GET", Path: "/api/v1/import/status", Source: "backend/internal/api/handlers/import_handler.go"},
{Name: "Import preview", Method: "GET", Path: "/api/v1/import/preview", Source: "backend/internal/api/handlers/import_handler.go"},
{Name: "Import upload", Method: "POST", Path: "/api/v1/import/upload", Source: "backend/internal/api/handlers/import_handler.go"},
{Name: "Import upload multi", Method: "POST", Path: "/api/v1/import/upload-multi", Source: "backend/internal/api/handlers/import_handler.go"},
{Name: "Import detect imports", Method: "POST", Path: "/api/v1/import/detect-imports", Source: "backend/internal/api/handlers/import_handler.go"},
{Name: "Import commit", Method: "POST", Path: "/api/v1/import/commit", Source: "backend/internal/api/handlers/import_handler.go"},
{Name: "Import cancel", Method: "DELETE", Path: "/api/v1/import/cancel", Source: "backend/internal/api/handlers/import_handler.go"},
{Name: "NPM import upload", Method: "POST", Path: "/api/v1/import/npm/upload", Source: "backend/internal/api/handlers/npm_import_handler.go"},
{Name: "NPM import commit", Method: "POST", Path: "/api/v1/import/npm/commit", Source: "backend/internal/api/handlers/npm_import_handler.go"},
{Name: "NPM import cancel", Method: "POST", Path: "/api/v1/import/npm/cancel", Source: "backend/internal/api/handlers/npm_import_handler.go"},
{Name: "JSON import upload", Method: "POST", Path: "/api/v1/import/json/upload", Source: "backend/internal/api/handlers/json_import_handler.go"},
{Name: "JSON import commit", Method: "POST", Path: "/api/v1/import/json/commit", Source: "backend/internal/api/handlers/json_import_handler.go"},
{Name: "JSON import cancel", Method: "POST", Path: "/api/v1/import/json/cancel", Source: "backend/internal/api/handlers/json_import_handler.go"},
}
}
func frontendImportRouteMatrix() []endpointInventoryEntry {
return []endpointInventoryEntry{
{Name: "Import status", Method: "GET", Path: "/api/v1/import/status", Source: "frontend/src/api/import.ts"},
{Name: "Import preview", Method: "GET", Path: "/api/v1/import/preview", Source: "frontend/src/api/import.ts"},
{Name: "Import upload", Method: "POST", Path: "/api/v1/import/upload", Source: "frontend/src/api/import.ts"},
{Name: "Import upload multi", Method: "POST", Path: "/api/v1/import/upload-multi", Source: "frontend/src/api/import.ts"},
{Name: "Import commit", Method: "POST", Path: "/api/v1/import/commit", Source: "frontend/src/api/import.ts"},
{Name: "Import cancel", Method: "DELETE", Path: "/api/v1/import/cancel", Source: "frontend/src/api/import.ts"},
{Name: "NPM import upload", Method: "POST", Path: "/api/v1/import/npm/upload", Source: "frontend/src/api/npmImport.ts"},
{Name: "NPM import commit", Method: "POST", Path: "/api/v1/import/npm/commit", Source: "frontend/src/api/npmImport.ts"},
{Name: "NPM import cancel", Method: "POST", Path: "/api/v1/import/npm/cancel", Source: "frontend/src/api/npmImport.ts"},
{Name: "JSON import upload", Method: "POST", Path: "/api/v1/import/json/upload", Source: "frontend/src/api/jsonImport.ts"},
{Name: "JSON import commit", Method: "POST", Path: "/api/v1/import/json/commit", Source: "frontend/src/api/jsonImport.ts"},
{Name: "JSON import cancel", Method: "POST", Path: "/api/v1/import/json/cancel", Source: "frontend/src/api/jsonImport.ts"},
}
}
func saveRouteMatrixForImportWorkflows() []endpointInventoryEntry {
return []endpointInventoryEntry{
{Name: "Backup list", Method: "GET", Path: "/api/v1/backups", Source: "frontend/src/api/backups.ts"},
{Name: "Backup create", Method: "POST", Path: "/api/v1/backups", Source: "frontend/src/api/backups.ts"},
{Name: "Settings list", Method: "GET", Path: "/api/v1/settings", Source: "frontend/src/api/settings.ts"},
{Name: "Settings save", Method: "POST", Path: "/api/v1/settings", Source: "frontend/src/api/settings.ts"},
{Name: "Settings save patch", Method: "PATCH", Path: "/api/v1/settings", Source: "frontend/src/api/settings.ts"},
{Name: "Settings validate URL", Method: "POST", Path: "/api/v1/settings/validate-url", Source: "frontend/src/api/settings.ts"},
{Name: "Settings test URL", Method: "POST", Path: "/api/v1/settings/test-url", Source: "frontend/src/api/settings.ts"},
{Name: "SMTP get", Method: "GET", Path: "/api/v1/settings/smtp", Source: "frontend/src/api/smtp.ts"},
{Name: "SMTP save", Method: "POST", Path: "/api/v1/settings/smtp", Source: "frontend/src/api/smtp.ts"},
{Name: "Proxy host list", Method: "GET", Path: "/api/v1/proxy-hosts", Source: "frontend/src/api/proxyHosts.ts"},
{Name: "Proxy host create", Method: "POST", Path: "/api/v1/proxy-hosts", Source: "frontend/src/api/proxyHosts.ts"},
{Name: "Proxy host get", Method: "GET", Path: "/api/v1/proxy-hosts/:uuid", Source: "frontend/src/api/proxyHosts.ts"},
{Name: "Proxy host update", Method: "PUT", Path: "/api/v1/proxy-hosts/:uuid", Source: "frontend/src/api/proxyHosts.ts"},
{Name: "Proxy host delete", Method: "DELETE", Path: "/api/v1/proxy-hosts/:uuid", Source: "frontend/src/api/proxyHosts.ts"},
}
}
func backendImportSaveInventoryCanonical() []endpointInventoryEntry {
entries := append([]endpointInventoryEntry{}, backendImportRouteMatrix()...)
entries = append(entries, saveRouteMatrixForImportWorkflows()...)
return entries
}
func frontendObservedImportSaveInventory() []endpointInventoryEntry {
entries := append([]endpointInventoryEntry{}, frontendImportRouteMatrix()...)
entries = append(entries, saveRouteMatrixForImportWorkflows()...)
return entries
}
func routeKey(method, path string) string {
return method + " " + path
}
func buildRouteLookup(routes []gin.RouteInfo) (map[string]gin.RouteInfo, map[string]map[string]struct{}) {
byMethodAndPath := make(map[string]gin.RouteInfo, len(routes))
methodsByPath := make(map[string]map[string]struct{})
for _, route := range routes {
key := routeKey(route.Method, route.Path)
byMethodAndPath[key] = route
if _, exists := methodsByPath[route.Path]; !exists {
methodsByPath[route.Path] = map[string]struct{}{}
}
methodsByPath[route.Path][route.Method] = struct{}{}
}
return byMethodAndPath, methodsByPath
}
func methodList(methodSet map[string]struct{}) []string {
methods := make([]string, 0, len(methodSet))
for method := range methodSet {
methods = append(methods, method)
}
sort.Strings(methods)
return methods
}
func assertStrictMethodPathMatrix(t *testing.T, routes []gin.RouteInfo, expected []endpointInventoryEntry, matrixName string) {
t.Helper()
byMethodAndPath, methodsByPath := buildRouteLookup(routes)
seen := map[string]string{}
expectedMethodsByPath := map[string]map[string]struct{}{}
var failures []string
for _, endpoint := range expected {
key := routeKey(endpoint.Method, endpoint.Path)
if previous, duplicated := seen[key]; duplicated {
failures = append(failures, fmt.Sprintf("duplicate expected entry %q (%s and %s)", key, previous, endpoint.Name))
continue
}
seen[key] = endpoint.Name
if _, exists := expectedMethodsByPath[endpoint.Path]; !exists {
expectedMethodsByPath[endpoint.Path] = map[string]struct{}{}
}
expectedMethodsByPath[endpoint.Path][endpoint.Method] = struct{}{}
if _, exists := byMethodAndPath[key]; exists {
continue
}
if methodSet, pathExists := methodsByPath[endpoint.Path]; pathExists {
failures = append(
failures,
fmt.Sprintf("method drift for %s (%s): expected %s, registered methods=[%s]", endpoint.Name, endpoint.Path, endpoint.Method, strings.Join(methodList(methodSet), ", ")),
)
continue
}
failures = append(
failures,
fmt.Sprintf("missing route for %s: expected %s (source=%s)", endpoint.Name, key, endpoint.Source),
)
}
for path, expectedMethodSet := range expectedMethodsByPath {
actualMethodSet, exists := methodsByPath[path]
if !exists {
continue
}
extraMethods := make([]string, 0)
for method := range actualMethodSet {
if _, expectedMethod := expectedMethodSet[method]; !expectedMethod {
extraMethods = append(extraMethods, method)
}
}
if len(extraMethods) > 0 {
sort.Strings(extraMethods)
failures = append(
failures,
fmt.Sprintf(
"unexpected methods for %s: extra=[%s], expected=[%s], registered=[%s]",
path,
strings.Join(extraMethods, ", "),
strings.Join(methodList(expectedMethodSet), ", "),
strings.Join(methodList(actualMethodSet), ", "),
),
)
}
}
if len(failures) > 0 {
t.Fatalf("%s route matrix assertion failed:\n- %s", matrixName, strings.Join(failures, "\n- "))
}
}
func collectRouteMatrixDrift(routes []gin.RouteInfo, expected []endpointInventoryEntry) []string {
byMethodAndPath, methodsByPath := buildRouteLookup(routes)
failures := make([]string, 0)
for _, endpoint := range expected {
key := routeKey(endpoint.Method, endpoint.Path)
if _, exists := byMethodAndPath[key]; exists {
continue
}
if methodSet, pathExists := methodsByPath[endpoint.Path]; pathExists {
failures = append(
failures,
fmt.Sprintf("method drift for %s (%s): expected %s, registered methods=[%s]", endpoint.Name, endpoint.Path, endpoint.Method, strings.Join(methodList(methodSet), ", ")),
)
continue
}
failures = append(
failures,
fmt.Sprintf("missing route for %s: expected %s (source=%s)", endpoint.Name, key, endpoint.Source),
)
}
return failures
}

View File

@@ -0,0 +1,67 @@
package routes_test
import (
"testing"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"github.com/Wikid82/charon/backend/internal/api/routes"
"github.com/Wikid82/charon/backend/internal/config"
)
func TestEndpointInventory_FrontendCanonicalSaveImportContractsExistInBackend(t *testing.T) {
gin.SetMode(gin.TestMode)
db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared&_test_endpoint_inventory"), &gorm.Config{})
require.NoError(t, err)
router := gin.New()
require.NoError(t, routes.Register(router, db, config.Config{JWTSecret: "test-secret"}))
routes.RegisterImportHandler(router, db, config.Config{JWTSecret: "test-secret"}, "echo", "/tmp", "/import/Caddyfile")
assertStrictMethodPathMatrix(t, router.Routes(), backendImportSaveInventoryCanonical(), "backend canonical save/import inventory")
}
func TestEndpointInventory_FrontendParityMatchesCurrentContract(t *testing.T) {
gin.SetMode(gin.TestMode)
db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared&_test_endpoint_inventory_frontend_parity"), &gorm.Config{})
require.NoError(t, err)
router := gin.New()
require.NoError(t, routes.Register(router, db, config.Config{JWTSecret: "test-secret"}))
routes.RegisterImportHandler(router, db, config.Config{JWTSecret: "test-secret"}, "echo", "/tmp", "/import/Caddyfile")
assertStrictMethodPathMatrix(t, router.Routes(), frontendObservedImportSaveInventory(), "frontend observed save/import inventory")
}
func TestEndpointInventory_FrontendParityDetectsActualMismatch(t *testing.T) {
gin.SetMode(gin.TestMode)
db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared&_test_endpoint_inventory_frontend_parity_mismatch"), &gorm.Config{})
require.NoError(t, err)
router := gin.New()
require.NoError(t, routes.Register(router, db, config.Config{JWTSecret: "test-secret"}))
routes.RegisterImportHandler(router, db, config.Config{JWTSecret: "test-secret"}, "echo", "/tmp", "/import/Caddyfile")
contractWithMismatch := append([]endpointInventoryEntry{}, frontendObservedImportSaveInventory()...)
for i := range contractWithMismatch {
if contractWithMismatch[i].Path == "/api/v1/import/cancel" {
contractWithMismatch[i].Method = "POST"
break
}
}
drift := collectRouteMatrixDrift(router.Routes(), contractWithMismatch)
assert.Contains(
t,
drift,
"method drift for Import cancel (/api/v1/import/cancel): expected POST, registered methods=[DELETE]",
)
}

View File

@@ -0,0 +1,23 @@
package routes_test
import (
"path/filepath"
"testing"
"github.com/gin-gonic/gin"
"github.com/Wikid82/charon/backend/internal/api/routes"
"github.com/Wikid82/charon/backend/internal/config"
)
// TestRegisterImportHandler_StrictRouteMatrix wires only the import handler
// onto a fresh engine and asserts its registered method/path pairs exactly
// match the canonical backend import route matrix.
func TestRegisterImportHandler_StrictRouteMatrix(t *testing.T) {
	gin.SetMode(gin.TestMode)
	database := setupTestImportDB(t)
	// t.TempDir is cleaned up automatically; the Caddyfile lives under it.
	workDir := t.TempDir()
	caddyfilePath := filepath.Join(workDir, "import", "Caddyfile")
	engine := gin.New()
	routes.RegisterImportHandler(engine, database, config.Config{JWTSecret: "test-secret"}, "echo", workDir, caddyfilePath)
	assertStrictMethodPathMatrix(t, engine.Routes(), backendImportRouteMatrix(), "import")
}

View File

@@ -0,0 +1,25 @@
package routes_test
import (
"testing"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/require"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"github.com/Wikid82/charon/backend/internal/api/routes"
"github.com/Wikid82/charon/backend/internal/config"
)
// TestRegister_StrictSaveRouteMatrixUsedByImportWorkflows registers the full
// route set and verifies the save endpoints relied on by import workflows are
// present with exactly the expected methods and paths.
func TestRegister_StrictSaveRouteMatrixUsedByImportWorkflows(t *testing.T) {
	gin.SetMode(gin.TestMode)
	database, openErr := gorm.Open(sqlite.Open("file::memory:?cache=shared&_test_save_contract_matrix"), &gorm.Config{})
	require.NoError(t, openErr)
	engine := gin.New()
	require.NoError(t, routes.Register(engine, database, config.Config{JWTSecret: "test-secret"}))
	assertStrictMethodPathMatrix(t, engine.Routes(), saveRouteMatrixForImportWorkflows(), "save")
}

View File

@@ -1,7 +1,9 @@
## QA Report - PR #779
## QA Report — Import/Save Route Regression Test Suite
- Date: 2026-03-01
- Scope: Post-remediation merge-readiness gates after Caddy Import E2E fix
- Date: 2026-03-02
- Branch: `feature/beta-release` (HEAD `2f90d936`)
- Scope: Regression test coverage for import and save function routes
- Full report: [docs/reports/qa_report_import_save_regression.md](qa_report_import_save_regression.md)
## E2E Status

View File

@@ -0,0 +1,188 @@
## QA Report — Import/Save Route Regression Test Suite
- **Date**: 2026-03-02
- **Branch**: `feature/beta-release`
- **HEAD**: `2f90d936` — `fix(tests): simplify back/cancel button handling in cross-browser import tests`
- **Scope**: Regression test implementation for import and save function routes
---
## Summary
| DoD Gate | Result | Notes |
|---|---|---|
| Patch Coverage Preflight | ✅ PASS | 100% — 12/12 changed lines covered |
| Backend Unit Tests + Coverage | ✅ PASS | 87.9% statements (threshold: 87%) |
| Frontend Unit Tests + Coverage | ✅ PASS | 89.63% lines (threshold: 87%) |
| TypeScript Type Check | ✅ PASS | 0 type errors |
| Pre-commit Hooks | ✅ PASS | 17/17 hooks passed |
| GORM Security Scan | ⏭️ SKIP | No model files changed |
| Trivy FS Scan | ✅ PASS | 0 HIGH/CRITICAL in npm packages |
| Docker Image Scan | ✅ PASS | 0 HIGH/CRITICAL (13 LOW/MED total) |
| CodeQL Analysis | ✅ PASS | 1 pre-existing warning (not a regression) |
**Overall Verdict: PASS** — All gated checks passed. Two pre-existing items documented below.
---
## New Test Files
Eight test files were added as part of this feature:
| File | Type | Tests |
|---|---|---|
| `backend/internal/api/routes/routes_import_contract_test.go` | Backend unit | Route contract coverage |
| `backend/internal/api/routes/routes_save_contract_test.go` | Backend unit | Route contract coverage |
| `backend/internal/api/routes/endpoint_inventory_test.go` | Backend unit | Endpoint inventory/matrix |
| `frontend/src/api/__tests__/npmImport.test.ts` | Frontend unit | 6 tests |
| `frontend/src/api/__tests__/jsonImport.test.ts` | Frontend unit | 6 tests |
| `frontend/src/hooks/__tests__/useNPMImport.test.tsx` | Frontend unit | 5 tests |
| `frontend/src/hooks/__tests__/useJSONImport.test.tsx` | Frontend unit | 5 tests |
| `tests/integration/import-save-route-regression.spec.ts` | Integration | Route regression spec |
All 22 new frontend tests passed. Backend route package runs clean.
---
## Step 1 — Patch Coverage Preflight
- **Command**: `bash scripts/local-patch-report.sh`
- **Artifacts**: `test-results/local-patch-report.md`, `test-results/local-patch-report.json`
- **Result**: PASS
- **Metrics**:
- Overall patch coverage: 100% (12/12 changed lines)
- Backend changed lines: 8/8 covered (100%)
- Frontend changed lines: 4/4 covered (100%)
---
## Step 2 — Backend Unit Tests + Coverage
- **Command**: `bash scripts/go-test-coverage.sh`
- **Result**: PASS
- **Metrics**:
- Total statements: 87.9%
- `internal/api/routes` package: 87.8%
- Gate threshold: 87%
- **Package results**: 25/26 packages `ok`
- **Known exception**: `internal/api/handlers` — 1 test fails in full suite only
### Pre-existing Backend Failure
| Item | Detail |
|---|---|
| Test | `TestSecurityHandler_UpsertRuleSet_XSSInContent` |
| Package | `internal/api/handlers` |
| File | `security_handler_audit_test.go` |
| Behaviour | Fails in full suite (`FAIL: expected 200, got {"error":"failed to list rule sets"}`); passes in isolation |
| Cause | Parallel test state pollution — shared in-memory SQLite DB contaminated by another test in the same package |
| Introduced by this PR | No — file shows no git changes in this session |
| Regression | No |
---
## Step 3 — Frontend Unit Tests + Coverage
- **Command**: `bash scripts/frontend-test-coverage.sh`
- **Result**: PASS
- **Metrics**:
- Lines: 89.63% (threshold: 87%)
- Statements: 88.96%
- Functions: 86.06%
- Branches: 81.41%
- **Test counts**: 589 passed, 23 skipped, 0 failed, 24 test suites
### New Frontend Test Results
All four new test files passed explicitly:
```
✅ npmImport.test.ts 6 tests passed
✅ jsonImport.test.ts 6 tests passed
✅ useNPMImport.test.tsx 5 tests passed
✅ useJSONImport.test.tsx 5 tests passed
```
---
## Step 4 — TypeScript Type Check
- **Command**: `npm run type-check`
- **Result**: PASS — 0 errors, clean exit
---
## Step 5 — Pre-commit Hooks
- **Command**: `pre-commit run --all-files`
- **Result**: PASS — 17/17 hooks passed
Hooks verified include: `fix-end-of-files`, `trim-trailing-whitespace`, `check-yaml`, `shellcheck`, `actionlint`, `dockerfile-validation`, `go-vet`, `golangci-lint (Fast Linters - BLOCKING)`, `frontend-typecheck`, `frontend-lint`.
---
## Step 6 — GORM Security Scan
- **Result**: SKIPPED
- **Reason**: No files under `backend/internal/models/**` or GORM service/repository paths were modified in this session.
---
## Step 7 — Security Scans
### Trivy Filesystem Scan
- **Command**: `trivy fs . --severity HIGH,CRITICAL --exit-code 1 --skip-dirs .git,node_modules,...`
- **Result**: PASS — 0 HIGH/CRITICAL vulnerabilities
- **Scope**: `package-lock.json` (npm)
- **Report**: `trivy-report.json`
### Docker Image Scan
- **Command**: `.github/skills/scripts/skill-runner.sh security-scan-docker-image`
- **Result**: PASS — 0 HIGH/CRITICAL vulnerabilities
- **Total findings**: 13 (all LOW or MEDIUM severity)
- **Verdict**: Gate passed — no action required
### CodeQL Analysis
- **SARIF files**:
- `codeql-results-go.sarif` — generated 2026-03-02
- `codeql-results-javascript.sarif` — generated 2026-03-02
- **Go results**: 1 finding — `go/cookie-secure-not-set` (warning level)
- **JavaScript results**: 0 findings
- **Result**: PASS (no error-level findings)
#### Pre-existing CodeQL Finding
| Item | Detail |
|---|---|
| Rule | `go/cookie-secure-not-set` |
| File | `internal/api/handlers/auth_handler.go:151-159` |
| Severity | Warning (non-blocking) |
| Description | Cookie does not set `Secure` attribute to `true` |
| Context | Intentional design: `secure` flag defaults to `true`; set to `false` **only** for local loopback requests without TLS. This allows the management UI to function over HTTP on `localhost` during development. The code comment explicitly documents this decision: _"Secure: true for HTTPS; false only for local non-HTTPS loopback flows"_ |
| Introduced by this PR | No — `auth_handler.go` was last modified in commits predating HEAD (`e348b5b2`, `00349689`) |
| Regression | No |
| Action | None — accepted as intentional design trade-off for local-dev UX |
---
## Pre-existing Issues Register
| ID | Location | Nature | Regression? | Action |
|---|---|---|---|---|
| PE-001 | `handlers.TestSecurityHandler_UpsertRuleSet_XSSInContent` | Test isolation failure — parallel SQLite state pollution | No | Track separately; fix with test DB isolation |
| PE-002 | `auth_handler.go:151` — `go/cookie-secure-not-set` | CodeQL warning; intentional local-dev design | No | Accepted; document as acknowledged finding |
---
## Related Commits
| Hash | Message |
|---|---|
| `63e79664` | `test(routes): add strict route matrix tests for import and save workflows` |
| `077e3c1d` | `chore: add integration tests for import/save route regression coverage` |
| `f60a99d0` | `fix(tests): update route validation functions to ensure canonical success responses in import/save regression tests` |
| `b5fd5d57` | `fix(tests): update import handler test to use temporary directory for Caddyfile path` |
| `2f90d936` | `fix(tests): simplify back/cancel button handling in cross-browser import tests` |

View File

@@ -21,7 +21,7 @@
"date-fns": "^4.1.0",
"i18next": "^25.8.13",
"i18next-browser-languagedetector": "^8.2.1",
"lucide-react": "^0.575.0",
"lucide-react": "^0.576.0",
"react": "^19.2.4",
"react-dom": "^19.2.4",
"react-hook-form": "^7.71.2",
@@ -29,7 +29,7 @@
"react-i18next": "^16.5.4",
"react-router-dom": "^7.13.1",
"tailwind-merge": "^3.5.0",
"tldts": "^7.0.23"
"tldts": "^7.0.24"
},
"devDependencies": {
"@eslint/css": "^0.14.1",
@@ -579,9 +579,9 @@
}
},
"node_modules/@csstools/css-syntax-patches-for-csstree": {
"version": "1.0.28",
"resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.28.tgz",
"integrity": "sha512-1NRf1CUBjnr3K7hu8BLxjQrKCxEe8FP/xmPTenAxCRZWVLbmGotkFvG9mfNpjA6k7Bw1bw4BilZq9cu19RA5pg==",
"version": "1.0.29",
"resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.29.tgz",
"integrity": "sha512-jx9GjkkP5YHuTmko2eWAvpPnb0mB4mGRr2U7XwVNwevm8nlpobZEVk+GNmiYMk2VuA75v+plfXWyroWKmICZXg==",
"dev": true,
"funding": [
{
@@ -4543,13 +4543,13 @@
"license": "MIT"
},
"node_modules/cssstyle": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-6.1.0.tgz",
"integrity": "sha512-Ml4fP2UT2K3CUBQnVlbdV/8aFDdlY69E+YnwJM+3VUWl08S3J8c8aRuJqCkD9Py8DHZ7zNNvsfKl8psocHZEFg==",
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-6.2.0.tgz",
"integrity": "sha512-Fm5NvhYathRnXNVndkUsCCuR63DCLVVwGOOwQw782coXFi5HhkXdu289l59HlXZBawsyNccXfWRYvLzcDCdDig==",
"dev": true,
"license": "MIT",
"dependencies": {
"@asamuzakjp/css-color": "^5.0.0",
"@asamuzakjp/css-color": "^5.0.1",
"@csstools/css-syntax-patches-for-csstree": "^1.0.28",
"css-tree": "^3.1.0",
"lru-cache": "^11.2.6"
@@ -5305,9 +5305,9 @@
}
},
"node_modules/flatted": {
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz",
"integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==",
"version": "3.3.4",
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.4.tgz",
"integrity": "sha512-3+mMldrTAPdta5kjX2G2J7iX4zxtnwpdA8Tr2ZSjkyPSanvbZAcy6flmtnXbEybHrDcU9641lxrMfFuUxVz9vA==",
"dev": true,
"license": "ISC"
},
@@ -6343,9 +6343,9 @@
}
},
"node_modules/lucide-react": {
"version": "0.575.0",
"resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.575.0.tgz",
"integrity": "sha512-VuXgKZrk0uiDlWjGGXmKV6MSk9Yy4l10qgVvzGn2AWBx1Ylt0iBexKOAoA6I7JO3m+M9oeovJd3yYENfkUbOeg==",
"version": "0.576.0",
"resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.576.0.tgz",
"integrity": "sha512-koNxU14BXrxUfZQ9cUaP0ES1uyPZKYDjk31FQZB6dQ/x+tXk979sVAn9ppZ/pVeJJyOxVM8j1E+8QEuSc02Vug==",
"license": "ISC",
"peerDependencies": {
"react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0"
@@ -8297,21 +8297,21 @@
}
},
"node_modules/tldts": {
"version": "7.0.23",
"resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.23.tgz",
"integrity": "sha512-ASdhgQIBSay0R/eXggAkQ53G4nTJqTXqC2kbaBbdDwM7SkjyZyO0OaaN1/FH7U/yCeqOHDwFO5j8+Os/IS1dXw==",
"version": "7.0.24",
"resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.24.tgz",
"integrity": "sha512-1r6vQTTt1rUiJkI5vX7KG8PR342Ru/5Oh13kEQP2SMbRSZpOey9SrBe27IDxkoWulx8ShWu4K6C0BkctP8Z1bQ==",
"license": "MIT",
"dependencies": {
"tldts-core": "^7.0.23"
"tldts-core": "^7.0.24"
},
"bin": {
"tldts": "bin/cli.js"
}
},
"node_modules/tldts-core": {
"version": "7.0.23",
"resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.23.tgz",
"integrity": "sha512-0g9vrtDQLrNIiCj22HSe9d4mLVG3g5ph5DZ8zCKBr4OtrspmNB6ss7hVyzArAeE88ceZocIEGkyW1Ime7fxPtQ==",
"version": "7.0.24",
"resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.24.tgz",
"integrity": "sha512-pj7yygNMoMRqG7ML2SDQ0xNIOfN3IBDUcPVM2Sg6hP96oFNN2nqnzHreT3z9xLq85IWJyNTvD38O002DdOrPMw==",
"license": "MIT"
},
"node_modules/to-regex-range": {

View File

@@ -40,7 +40,7 @@
"date-fns": "^4.1.0",
"i18next": "^25.8.13",
"i18next-browser-languagedetector": "^8.2.1",
"lucide-react": "^0.575.0",
"lucide-react": "^0.576.0",
"react": "^19.2.4",
"react-dom": "^19.2.4",
"react-hook-form": "^7.71.2",
@@ -48,7 +48,7 @@
"react-i18next": "^16.5.4",
"react-router-dom": "^7.13.1",
"tailwind-merge": "^3.5.0",
"tldts": "^7.0.23"
"tldts": "^7.0.24"
},
"devDependencies": {
"@eslint/css": "^0.14.1",

View File

@@ -6,12 +6,14 @@ vi.mock('../client', () => ({
default: {
get: vi.fn(),
post: vi.fn(),
delete: vi.fn(),
},
}));
describe('import API', () => {
const mockedGet = vi.mocked(client.get);
const mockedPost = vi.mocked(client.post);
const mockedDelete = vi.mocked(client.delete);
beforeEach(() => {
vi.clearAllMocks();
@@ -71,11 +73,25 @@ describe('import API', () => {
expect(result).toEqual(mockResponse);
});
it('cancelImport posts cancel', async () => {
mockedPost.mockResolvedValue({});
it('cancelImport deletes cancel with required session_uuid query', async () => {
const sessionUUID = 'uuid-cancel-123';
mockedDelete.mockResolvedValue({});
await cancelImport();
expect(client.post).toHaveBeenCalledWith('/import/cancel');
await cancelImport(sessionUUID);
expect(client.delete).toHaveBeenCalledTimes(1);
expect(client.delete).toHaveBeenCalledWith('/import/cancel', {
params: {
session_uuid: sessionUUID,
},
});
const [, requestConfig] = mockedDelete.mock.calls[0];
expect(requestConfig).toEqual({
params: {
session_uuid: sessionUUID,
},
});
});
it('forwards commitImport errors', async () => {
@@ -87,9 +103,9 @@ describe('import API', () => {
it('forwards cancelImport errors', async () => {
const error = new Error('cancel failed');
mockedPost.mockRejectedValue(error);
mockedDelete.mockRejectedValue(error);
await expect(cancelImport()).rejects.toBe(error);
await expect(cancelImport('uuid-cancel-123')).rejects.toBe(error);
});
it('getImportStatus gets status', async () => {

View File

@@ -0,0 +1,96 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { uploadJSONExport, commitJSONImport, cancelJSONImport } from '../jsonImport';
import client from '../client';
vi.mock('../client', () => ({
default: {
post: vi.fn(),
},
}));
// Unit tests for the JSON import API wrappers (upload / commit / cancel).
// The HTTP client is mocked module-wide, so each test pins only the request
// shape (endpoint path + body) that the wrapper must produce — no network.
describe('jsonImport API', () => {
  const mockedPost = vi.mocked(client.post);

  beforeEach(() => {
    // Reset call history so each test's call-shape assertions are isolated.
    vi.clearAllMocks();
  });

  it('cancelJSONImport posts cancel endpoint with required session_uuid body', async () => {
    const sessionUUID = 'json-session-123';
    mockedPost.mockResolvedValue({});
    await cancelJSONImport(sessionUUID);
    // Cancel sends the session UUID in the POST body, not as a query param.
    expect(client.post).toHaveBeenCalledWith('/import/json/cancel', {
      session_uuid: sessionUUID,
    });
  });

  it('uploadJSONExport posts upload endpoint with content payload', async () => {
    const content = '{"proxy_hosts":[]}';
    // Minimal-but-complete shape of the backend's upload response.
    const mockResponse = {
      session: {
        id: 'json-session-456',
        state: 'reviewing',
        source: 'json',
      },
      preview: {
        hosts: [],
        conflicts: [],
        errors: [],
      },
      conflict_details: {},
    };
    mockedPost.mockResolvedValue({ data: mockResponse });
    const result = await uploadJSONExport(content);
    expect(client.post).toHaveBeenCalledWith('/import/json/upload', { content });
    // Wrapper must unwrap axios's `data` envelope and return the payload itself.
    expect(result).toEqual(mockResponse);
  });

  it('commitJSONImport posts commit endpoint with session_uuid, resolutions, and names body', async () => {
    const sessionUUID = 'json-session-789';
    const resolutions = { 'json.example.com': 'replace' };
    const names = { 'json.example.com': 'JSON Example' };
    const mockResponse = {
      created: 1,
      updated: 1,
      skipped: 0,
      errors: [],
    };
    mockedPost.mockResolvedValue({ data: mockResponse });
    const result = await commitJSONImport(sessionUUID, resolutions, names);
    // All three commit inputs travel together in one request body.
    expect(client.post).toHaveBeenCalledWith('/import/json/commit', {
      session_uuid: sessionUUID,
      resolutions,
      names,
    });
    expect(result).toEqual(mockResponse);
  });

  // The three wrappers must propagate transport errors unchanged (same Error
  // instance) so callers can inspect/handle the original axios failure.
  it('forwards uploadJSONExport errors', async () => {
    const error = new Error('upload failed');
    mockedPost.mockRejectedValue(error);
    await expect(uploadJSONExport('{"proxy_hosts":[]}')).rejects.toBe(error);
  });

  it('forwards commitJSONImport errors', async () => {
    const error = new Error('commit failed');
    mockedPost.mockRejectedValue(error);
    await expect(commitJSONImport('json-session-123', {}, {})).rejects.toBe(error);
  });

  it('forwards cancelJSONImport errors', async () => {
    const error = new Error('cancel failed');
    mockedPost.mockRejectedValue(error);
    await expect(cancelJSONImport('json-session-123')).rejects.toBe(error);
  });
});

View File

@@ -0,0 +1,96 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { uploadNPMExport, commitNPMImport, cancelNPMImport } from '../npmImport';
import client from '../client';
vi.mock('../client', () => ({
default: {
post: vi.fn(),
},
}));
// Unit tests for the NPM import API wrappers (upload / commit / cancel).
// Mirrors the jsonImport suite: the HTTP client is mocked module-wide, and
// each test pins the exact request shape (endpoint path + body).
describe('npmImport API', () => {
  const mockedPost = vi.mocked(client.post);

  beforeEach(() => {
    // Reset call history so each test's call-shape assertions are isolated.
    vi.clearAllMocks();
  });

  it('cancelNPMImport posts cancel endpoint with required session_uuid body', async () => {
    const sessionUUID = 'npm-session-123';
    mockedPost.mockResolvedValue({});
    await cancelNPMImport(sessionUUID);
    // Cancel sends the session UUID in the POST body, not as a query param.
    expect(client.post).toHaveBeenCalledWith('/import/npm/cancel', {
      session_uuid: sessionUUID,
    });
  });

  it('uploadNPMExport posts upload endpoint with content payload', async () => {
    const content = '{"proxy_hosts":[]}';
    // Minimal-but-complete shape of the backend's upload response.
    const mockResponse = {
      session: {
        id: 'npm-session-456',
        state: 'reviewing',
        source: 'npm',
      },
      preview: {
        hosts: [],
        conflicts: [],
        errors: [],
      },
      conflict_details: {},
    };
    mockedPost.mockResolvedValue({ data: mockResponse });
    const result = await uploadNPMExport(content);
    expect(client.post).toHaveBeenCalledWith('/import/npm/upload', { content });
    // Wrapper must unwrap axios's `data` envelope and return the payload itself.
    expect(result).toEqual(mockResponse);
  });

  it('commitNPMImport posts commit endpoint with session_uuid, resolutions, and names body', async () => {
    const sessionUUID = 'npm-session-789';
    const resolutions = { 'npm.example.com': 'replace' };
    const names = { 'npm.example.com': 'NPM Example' };
    const mockResponse = {
      created: 1,
      updated: 1,
      skipped: 0,
      errors: [],
    };
    mockedPost.mockResolvedValue({ data: mockResponse });
    const result = await commitNPMImport(sessionUUID, resolutions, names);
    // All three commit inputs travel together in one request body.
    expect(client.post).toHaveBeenCalledWith('/import/npm/commit', {
      session_uuid: sessionUUID,
      resolutions,
      names,
    });
    expect(result).toEqual(mockResponse);
  });

  // The three wrappers must propagate transport errors unchanged (same Error
  // instance) so callers can inspect/handle the original axios failure.
  it('forwards uploadNPMExport errors', async () => {
    const error = new Error('upload failed');
    mockedPost.mockRejectedValue(error);
    await expect(uploadNPMExport('{"proxy_hosts":[]}')).rejects.toBe(error);
  });

  it('forwards commitNPMImport errors', async () => {
    const error = new Error('commit failed');
    mockedPost.mockRejectedValue(error);
    await expect(commitNPMImport('npm-session-123', {}, {})).rejects.toBe(error);
  });

  it('forwards cancelNPMImport errors', async () => {
    const error = new Error('cancel failed');
    mockedPost.mockRejectedValue(error);
    await expect(cancelNPMImport('npm-session-123')).rejects.toBe(error);
  });
});

View File

@@ -110,10 +110,15 @@ export const commitImport = async (
/**
* Cancels the current import session.
* @param sessionUUID - The import session UUID
* @throws {AxiosError} If cancellation fails
*/
export const cancelImport = async (): Promise<void> => {
await client.post('/import/cancel');
export const cancelImport = async (sessionUUID: string): Promise<void> => {
await client.delete('/import/cancel', {
params: {
session_uuid: sessionUUID,
},
});
};
/**

View File

@@ -83,8 +83,11 @@ export const commitJSONImport = async (
/**
* Cancels the current JSON import session.
* @param sessionUuid - The import session UUID
* @throws {AxiosError} If cancellation fails
*/
export const cancelJSONImport = async (): Promise<void> => {
await client.post('/import/json/cancel');
export const cancelJSONImport = async (sessionUuid: string): Promise<void> => {
await client.post('/import/json/cancel', {
session_uuid: sessionUuid,
});
};

View File

@@ -83,8 +83,11 @@ export const commitNPMImport = async (
/**
* Cancels the current NPM import session.
* @param sessionUuid - The import session UUID
* @throws {AxiosError} If cancellation fails
*/
export const cancelNPMImport = async (): Promise<void> => {
await client.post('/import/npm/cancel');
export const cancelNPMImport = async (sessionUuid: string): Promise<void> => {
await client.post('/import/npm/cancel', {
session_uuid: sessionUuid,
});
};

View File

@@ -208,7 +208,7 @@ describe('useImport', () => {
await result.current.cancel()
})
expect(api.cancelImport).toHaveBeenCalled()
expect(api.cancelImport).toHaveBeenCalledWith('session-3')
await waitFor(() => {
expect(result.current.session).toBeNull()
})

View File

@@ -0,0 +1,190 @@
import { describe, it, expect, vi, beforeEach } from 'vitest'
import { renderHook, act, waitFor } from '@testing-library/react'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import React from 'react'
import { useJSONImport } from '../useJSONImport'
import * as api from '../../api/jsonImport'
vi.mock('../../api/jsonImport', () => ({
uploadJSONExport: vi.fn(),
commitJSONImport: vi.fn(),
cancelJSONImport: vi.fn(),
}))
// Builds a React Query provider wrapper for renderHook. Retries are disabled
// for both queries and mutations so failing calls reject immediately in tests
// instead of retrying with backoff.
const createWrapper = () => {
  const testQueryClient = new QueryClient({
    defaultOptions: {
      queries: { retry: false },
      mutations: { retry: false },
    },
  })
  const Wrapper = ({ children }: { children: React.ReactNode }) => (
    <QueryClientProvider client={testQueryClient}>{children}</QueryClientProvider>
  )
  return Wrapper
}
describe('useJSONImport', () => {
beforeEach(() => {
vi.clearAllMocks()
})
it('sets preview and sessionId after successful upload', async () => {
const uploadResponse = {
session: {
id: 'json-session-upload',
state: 'reviewing',
source: 'json',
},
preview: {
hosts: [],
conflicts: [],
errors: [],
},
conflict_details: {},
}
vi.mocked(api.uploadJSONExport).mockResolvedValue(uploadResponse)
const { result } = renderHook(() => useJSONImport(), { wrapper: createWrapper() })
await act(async () => {
await result.current.upload('{"proxy_hosts":[]}')
})
await waitFor(() => {
expect(result.current.sessionId).toBe('json-session-upload')
expect(result.current.preview).toEqual(uploadResponse)
})
})
it('commits active session and clears preview/session state', async () => {
const uploadResponse = {
session: {
id: 'json-session-commit',
state: 'reviewing',
source: 'json',
},
preview: {
hosts: [],
conflicts: [],
errors: [],
},
conflict_details: {},
}
const commitResponse = {
created: 1,
updated: 0,
skipped: 0,
errors: [],
}
vi.mocked(api.uploadJSONExport).mockResolvedValue(uploadResponse)
vi.mocked(api.commitJSONImport).mockResolvedValue(commitResponse)
const { result } = renderHook(() => useJSONImport(), { wrapper: createWrapper() })
await act(async () => {
await result.current.upload('{"proxy_hosts":[]}')
})
await waitFor(() => {
expect(result.current.sessionId).toBe('json-session-commit')
})
await act(async () => {
await result.current.commit({ 'json.example.com': 'replace' }, { 'json.example.com': 'JSON Example' })
})
expect(api.commitJSONImport).toHaveBeenCalledWith(
'json-session-commit',
{ 'json.example.com': 'replace' },
{ 'json.example.com': 'JSON Example' }
)
await waitFor(() => {
expect(result.current.sessionId).toBeNull()
expect(result.current.preview).toBeNull()
expect(result.current.commitResult).toEqual(commitResponse)
})
})
it('passes active session UUID to cancelJSONImport', async () => {
const sessionId = 'json-session-123'
vi.mocked(api.uploadJSONExport).mockResolvedValue({
session: {
id: sessionId,
state: 'reviewing',
source: 'json',
},
preview: {
hosts: [],
conflicts: [],
errors: [],
},
conflict_details: {},
})
vi.mocked(api.cancelJSONImport).mockResolvedValue(undefined)
const { result } = renderHook(() => useJSONImport(), { wrapper: createWrapper() })
await act(async () => {
await result.current.upload('{}')
})
await waitFor(() => {
expect(result.current.sessionId).toBe(sessionId)
})
await act(async () => {
await result.current.cancel()
})
expect(api.cancelJSONImport).toHaveBeenCalledWith(sessionId)
await waitFor(() => {
expect(result.current.sessionId).toBeNull()
})
})
// Cancelling without an active session must fail fast on the client and never
// reach the API. The previous test title said "returns No active session",
// but the hook rejects with that error — name the behavior accurately.
it('rejects with No active session and skips cancel API call when session is missing', async () => {
  const { result } = renderHook(() => useJSONImport(), { wrapper: createWrapper() })
  await expect(result.current.cancel()).rejects.toThrow('No active session')
  expect(api.cancelJSONImport).not.toHaveBeenCalled()
})
it('exposes commit error and preserves session on commit failure', async () => {
const uploadResponse = {
session: {
id: 'json-session-error',
state: 'reviewing',
source: 'json',
},
preview: {
hosts: [],
conflicts: [],
errors: [],
},
conflict_details: {},
}
const commitError = new Error('404 Not Found')
vi.mocked(api.uploadJSONExport).mockResolvedValue(uploadResponse)
vi.mocked(api.commitJSONImport).mockRejectedValue(commitError)
const { result } = renderHook(() => useJSONImport(), { wrapper: createWrapper() })
await act(async () => {
await result.current.upload('{"proxy_hosts":[]}')
})
await expect(result.current.commit({}, {})).rejects.toBe(commitError)
await waitFor(() => {
expect(result.current.commitError).toBe(commitError)
expect(result.current.sessionId).toBe('json-session-error')
expect(result.current.preview).not.toBeNull()
})
})
})

View File

@@ -0,0 +1,190 @@
import { describe, it, expect, vi, beforeEach } from 'vitest'
import { renderHook, act, waitFor } from '@testing-library/react'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import React from 'react'
import { useNPMImport } from '../useNPMImport'
import * as api from '../../api/npmImport'
vi.mock('../../api/npmImport', () => ({
uploadNPMExport: vi.fn(),
commitNPMImport: vi.fn(),
cancelNPMImport: vi.fn(),
}))
// Builds a React Query provider wrapper for renderHook. Retries are disabled
// for both queries and mutations so failing calls reject immediately in tests
// instead of retrying with backoff.
const createWrapper = () => {
  const testQueryClient = new QueryClient({
    defaultOptions: {
      queries: { retry: false },
      mutations: { retry: false },
    },
  })
  const Wrapper = ({ children }: { children: React.ReactNode }) => (
    <QueryClientProvider client={testQueryClient}>{children}</QueryClientProvider>
  )
  return Wrapper
}
describe('useNPMImport', () => {
beforeEach(() => {
vi.clearAllMocks()
})
it('sets preview and sessionId after successful upload', async () => {
const uploadResponse = {
session: {
id: 'npm-session-upload',
state: 'reviewing',
source: 'npm',
},
preview: {
hosts: [],
conflicts: [],
errors: [],
},
conflict_details: {},
}
vi.mocked(api.uploadNPMExport).mockResolvedValue(uploadResponse)
const { result } = renderHook(() => useNPMImport(), { wrapper: createWrapper() })
await act(async () => {
await result.current.upload('{"proxy_hosts":[]}')
})
await waitFor(() => {
expect(result.current.sessionId).toBe('npm-session-upload')
expect(result.current.preview).toEqual(uploadResponse)
})
})
it('commits active session and clears preview/session state', async () => {
const uploadResponse = {
session: {
id: 'npm-session-commit',
state: 'reviewing',
source: 'npm',
},
preview: {
hosts: [],
conflicts: [],
errors: [],
},
conflict_details: {},
}
const commitResponse = {
created: 1,
updated: 0,
skipped: 0,
errors: [],
}
vi.mocked(api.uploadNPMExport).mockResolvedValue(uploadResponse)
vi.mocked(api.commitNPMImport).mockResolvedValue(commitResponse)
const { result } = renderHook(() => useNPMImport(), { wrapper: createWrapper() })
await act(async () => {
await result.current.upload('{"proxy_hosts":[]}')
})
await waitFor(() => {
expect(result.current.sessionId).toBe('npm-session-commit')
})
await act(async () => {
await result.current.commit({ 'npm.example.com': 'replace' }, { 'npm.example.com': 'NPM Example' })
})
expect(api.commitNPMImport).toHaveBeenCalledWith(
'npm-session-commit',
{ 'npm.example.com': 'replace' },
{ 'npm.example.com': 'NPM Example' }
)
await waitFor(() => {
expect(result.current.sessionId).toBeNull()
expect(result.current.preview).toBeNull()
expect(result.current.commitResult).toEqual(commitResponse)
})
})
it('passes active session UUID to cancelNPMImport', async () => {
const sessionId = 'npm-session-123'
vi.mocked(api.uploadNPMExport).mockResolvedValue({
session: {
id: sessionId,
state: 'reviewing',
source: 'npm',
},
preview: {
hosts: [],
conflicts: [],
errors: [],
},
conflict_details: {},
})
vi.mocked(api.cancelNPMImport).mockResolvedValue(undefined)
const { result } = renderHook(() => useNPMImport(), { wrapper: createWrapper() })
await act(async () => {
await result.current.upload('{}')
})
await waitFor(() => {
expect(result.current.sessionId).toBe(sessionId)
})
await act(async () => {
await result.current.cancel()
})
expect(api.cancelNPMImport).toHaveBeenCalledWith(sessionId)
await waitFor(() => {
expect(result.current.sessionId).toBeNull()
})
})
// Cancelling without an active session must fail fast on the client and never
// reach the API. The previous test title said "returns No active session",
// but the hook rejects with that error — name the behavior accurately.
it('rejects with No active session and skips cancel API call when session is missing', async () => {
  const { result } = renderHook(() => useNPMImport(), { wrapper: createWrapper() })
  await expect(result.current.cancel()).rejects.toThrow('No active session')
  expect(api.cancelNPMImport).not.toHaveBeenCalled()
})
it('exposes commit error and preserves session on commit failure', async () => {
const uploadResponse = {
session: {
id: 'npm-session-error',
state: 'reviewing',
source: 'npm',
},
preview: {
hosts: [],
conflicts: [],
errors: [],
},
conflict_details: {},
}
const commitError = new Error('404 Not Found')
vi.mocked(api.uploadNPMExport).mockResolvedValue(uploadResponse)
vi.mocked(api.commitNPMImport).mockRejectedValue(commitError)
const { result } = renderHook(() => useNPMImport(), { wrapper: createWrapper() })
await act(async () => {
await result.current.upload('{"proxy_hosts":[]}')
})
await expect(result.current.commit({}, {})).rejects.toBe(commitError)
await waitFor(() => {
expect(result.current.commitError).toBe(commitError)
expect(result.current.sessionId).toBe('npm-session-error')
expect(result.current.preview).not.toBeNull()
})
})
})

View File

@@ -77,7 +77,11 @@ export function useImport() {
});
const cancelMutation = useMutation({
mutationFn: () => cancelImport(),
mutationFn: () => {
const sessionId = uploadPreview?.session?.id || statusQuery.data?.session?.id;
if (!sessionId) throw new Error('No active session');
return cancelImport(sessionId);
},
onSuccess: () => {
// Clear upload preview and remove query cache
setUploadPreview(null);

View File

@@ -46,7 +46,10 @@ export function useJSONImport() {
});
const cancelMutation = useMutation({
mutationFn: cancelJSONImport,
mutationFn: () => {
if (!sessionId) throw new Error('No active session');
return cancelJSONImport(sessionId);
},
onSuccess: () => {
setPreview(null);
setSessionId(null);

View File

@@ -46,7 +46,10 @@ export function useNPMImport() {
});
const cancelMutation = useMutation({
mutationFn: cancelNPMImport,
mutationFn: () => {
if (!sessionId) throw new Error('No active session');
return cancelNPMImport(sessionId);
},
onSuccess: () => {
setPreview(null);
setSessionId(null);

16
package-lock.json generated
View File

@@ -6,7 +6,7 @@
"": {
"dependencies": {
"@typescript/analyze-trace": "^0.10.1",
"tldts": "^7.0.23",
"tldts": "^7.0.24",
"type-check": "^0.4.0",
"typescript": "^5.9.3",
"vite": "^7.3.1"
@@ -3010,21 +3010,21 @@
}
},
"node_modules/tldts": {
"version": "7.0.23",
"resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.23.tgz",
"integrity": "sha512-ASdhgQIBSay0R/eXggAkQ53G4nTJqTXqC2kbaBbdDwM7SkjyZyO0OaaN1/FH7U/yCeqOHDwFO5j8+Os/IS1dXw==",
"version": "7.0.24",
"resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.24.tgz",
"integrity": "sha512-1r6vQTTt1rUiJkI5vX7KG8PR342Ru/5Oh13kEQP2SMbRSZpOey9SrBe27IDxkoWulx8ShWu4K6C0BkctP8Z1bQ==",
"license": "MIT",
"dependencies": {
"tldts-core": "^7.0.23"
"tldts-core": "^7.0.24"
},
"bin": {
"tldts": "bin/cli.js"
}
},
"node_modules/tldts-core": {
"version": "7.0.23",
"resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.23.tgz",
"integrity": "sha512-0g9vrtDQLrNIiCj22HSe9d4mLVG3g5ph5DZ8zCKBr4OtrspmNB6ss7hVyzArAeE88ceZocIEGkyW1Ime7fxPtQ==",
"version": "7.0.24",
"resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.24.tgz",
"integrity": "sha512-pj7yygNMoMRqG7ML2SDQ0xNIOfN3IBDUcPVM2Sg6hP96oFNN2nqnzHreT3z9xLq85IWJyNTvD38O002DdOrPMw==",
"license": "MIT"
},
"node_modules/to-regex-range": {

View File

@@ -11,7 +11,7 @@
},
"dependencies": {
"@typescript/analyze-trace": "^0.10.1",
"tldts": "^7.0.23",
"tldts": "^7.0.24",
"type-check": "^0.4.0",
"typescript": "^5.9.3",
"vite": "^7.3.1"

View File

@@ -19,7 +19,11 @@ if [ ! -f ".version" ]; then
fi
VERSION_FILE=$(cat .version | tr -d '\n' | tr -d '\r')
GIT_TAG="$(git describe --tags --abbrev=0 2>/dev/null || echo "")"
# Use the globally latest semver tag, not just tags reachable from HEAD.
# git describe --tags --abbrev=0 only finds tags in the current branch's
# ancestry, which breaks on feature branches where release tags were applied
# to main/nightly and haven't been merged back yet.
GIT_TAG="$(git tag --sort=-v:refname 2>/dev/null | grep -E '^v?[0-9]+\.[0-9]+' | head -1 || echo "")"
if [ -z "$GIT_TAG" ]; then
echo "No tags in repository; cannot validate .version against tag"

View File

@@ -160,8 +160,8 @@ async function setupImportMocks(
}
});
// Mock cancel endpoint
await page.route('**/api/v1/import/cancel', async (route) => {
// Mock cancel endpoint — pattern ends with * to match DELETE ?session_uuid=... query param
await page.route('**/api/v1/import/cancel*', async (route) => {
hasSession = false;
await route.fulfill({ status: 204 });
});

View File

@@ -0,0 +1,374 @@
import { test, expect, loginUser } from '../fixtures/auth-fixtures';
import { getStorageStateAuthHeaders } from '../utils/api-helpers';
// Minimal shape of an import upload response — these tests only ever read
// the session id, so everything else is left untyped/optional.
type SessionResponse = {
session?: {
id?: string;
};
};
// Smallest valid Caddyfile: one site block with a single reverse_proxy
// directive, enough for the Caddy importer to produce a session.
const SAMPLE_CADDYFILE = `example.com {
reverse_proxy localhost:8080
}`;
// Shared NPM/JSON export payload: one proxy host plus empty access-list and
// certificate arrays; both importers accept the same schema.
const SAMPLE_NPM_OR_JSON_EXPORT = JSON.stringify(
{
proxy_hosts: [
{
domain_names: ['route-regression.example.test'],
forward_host: 'localhost',
forward_port: 8080,
forward_scheme: 'http',
},
],
access_lists: [],
certificates: [],
},
null,
2
);
// Assert that a deliberately wrong method/path probe failed predictably:
// 404 (unknown route) or 405 (method not allowed), never a 2xx or 5xx.
function expectPredictableRouteMiss(status: number): void {
  const acceptableMisses = [404, 405];
  expect(acceptableMisses).toContain(status);
}
// Assert that a canonical-route call returned any 2xx status; the endpoint
// name is folded into the failure label for readable test output.
function expectCanonicalSuccess(status: number, endpoint: string): void {
  const label = `${endpoint} should return a 2xx success response`;
  expect(status, label).toBeGreaterThanOrEqual(200);
  expect(status, label).toBeLessThan(300);
}
// Extract the session id from an upload response body, failing the test if
// the server did not return one.
async function readSessionId(response: import('@playwright/test').APIResponse): Promise<string> {
  const body = (await response.json()) as SessionResponse;
  const id = body?.session?.id;
  expect(id).toBeTruthy();
  return id as string;
}
// Create a backup via the canonical POST route and record its filename so
// the caller's cleanup step can delete it afterwards.
async function createBackupAndTrack(
  page: import('@playwright/test').Page,
  headers: Record<string, string>,
  createdBackupFilenames: string[]
): Promise<void> {
  const response = await page.request.post('/api/v1/backups', {
    headers,
    data: {},
  });
  expectCanonicalSuccess(response.status(), 'POST /api/v1/backups');
  const body = (await response.json()) as { filename?: string };
  const createdName = body.filename;
  expect(createdName).toBeTruthy();
  createdBackupFilenames.push(createdName as string);
}
// Delete every backup created during a test run so repeated runs do not
// accumulate backup artifacts on the server.
async function cleanupCreatedBackups(
  page: import('@playwright/test').Page,
  headers: Record<string, string>,
  createdBackupFilenames: string[]
): Promise<void> {
  for (const filename of createdBackupFilenames) {
    const cleanup = await page.request.delete(`/api/v1/backups/${encodeURIComponent(filename)}`, { headers });
    // Interpolate the actual filename into the failure label. The previous
    // literal "$(unknown)" placeholder hid which backup failed to delete;
    // sibling assertions (e.g. the proxy-host cleanup) interpolate the id.
    expectCanonicalSuccess(cleanup.status(), `DELETE /api/v1/backups/${filename}`);
  }
}
// End-to-end regression coverage for import and save API routes. Each test
// first sends deliberately wrong methods/paths (expecting 404/405) and then
// drives the canonical route through the real backend, so a silent route
// rename or method change surfaces as a failure here.
test.describe('Import/Save Route Regression Coverage', () => {
// Caddyfile import: negative probes, then status -> upload -> cancel, then
// upload -> preview -> backup -> commit -> post-commit status.
test('Caddy import flow stages use canonical routes and reject route drift', async ({ page, adminUser }) => {
await loginUser(page, adminUser);
const headers = getStorageStateAuthHeaders();
// Filenames of backups created below; deleted in the finally block.
const createdBackupFilenames: string[] = [];
try {
await test.step('Open Caddy import page and validate route-negative probes', async () => {
await page.goto('/tasks/import/caddyfile', { waitUntil: 'domcontentloaded' });
await expect(page.getByRole('heading', { level: 1 })).toContainText(/import/i);
await expect(page.getByRole('button', { name: /parse|review/i })).toBeVisible();
// POST against the GET-only status route must miss predictably.
const wrongStatusMethod = await page.request.post('/api/v1/import/status', {
headers,
data: {},
});
expectPredictableRouteMiss(wrongStatusMethod.status());
// GET against the POST-only upload route.
const wrongUploadMethod = await page.request.get('/api/v1/import/upload', { headers });
expectPredictableRouteMiss(wrongUploadMethod.status());
// POST against the DELETE-only cancel route.
const wrongCancelMethod = await page.request.post('/api/v1/import/cancel', {
headers,
data: {},
});
expectPredictableRouteMiss(wrongCancelMethod.status());
});
await test.step('Run canonical Caddy import status/upload/cancel path', async () => {
const statusResponse = await page.request.get('/api/v1/import/status', { headers });
expectCanonicalSuccess(statusResponse.status(), 'GET /api/v1/import/status');
const uploadForCancel = await page.request.post('/api/v1/import/upload', {
headers,
data: { content: SAMPLE_CADDYFILE },
});
expectCanonicalSuccess(uploadForCancel.status(), 'POST /api/v1/import/upload');
const cancelSessionId = await readSessionId(uploadForCancel);
// Caddy cancel is a DELETE with the session id as a query parameter.
const cancelResponse = await page.request.delete('/api/v1/import/cancel', {
headers,
params: { session_uuid: cancelSessionId },
});
expectCanonicalSuccess(cancelResponse.status(), 'DELETE /api/v1/import/cancel');
});
await test.step('Run canonical Caddy preview/backup-before-commit/commit/post-state path', async () => {
const uploadForCommit = await page.request.post('/api/v1/import/upload', {
headers,
data: { content: SAMPLE_CADDYFILE },
});
expectCanonicalSuccess(uploadForCommit.status(), 'POST /api/v1/import/upload');
const commitSessionId = await readSessionId(uploadForCommit);
const previewResponse = await page.request.get('/api/v1/import/preview', { headers });
expectCanonicalSuccess(previewResponse.status(), 'GET /api/v1/import/preview');
// Take a backup before committing; its filename is tracked for cleanup.
await createBackupAndTrack(page, headers, createdBackupFilenames);
const commitResponse = await page.request.post('/api/v1/import/commit', {
headers,
data: {
session_uuid: commitSessionId,
resolutions: {},
names: {},
},
});
expectCanonicalSuccess(commitResponse.status(), 'POST /api/v1/import/commit');
// Status must still answer after the commit consumed the session.
const postState = await page.request.get('/api/v1/import/status', { headers });
expectCanonicalSuccess(postState.status(), 'GET /api/v1/import/status');
});
} finally {
await test.step('Cleanup created backup artifacts', async () => {
await cleanupCreatedBackups(page, headers, createdBackupFilenames);
});
}
});
// NPM and JSON importers share one payload; each gets a wrong-method and a
// wrong-path probe plus a full upload/cancel and upload/commit pass.
test('NPM and JSON import critical routes pass canonical methods and reject drift', async ({ page, adminUser }) => {
await loginUser(page, adminUser);
const headers = getStorageStateAuthHeaders();
await test.step('NPM import upload/commit/cancel with route-mismatch checks', async () => {
await page.goto('/tasks/import/npm', { waitUntil: 'domcontentloaded' });
await expect(page.getByRole('heading').filter({ hasText: /npm/i }).first()).toBeVisible();
await expect(page.getByRole('button', { name: /upload\s*&\s*preview/i })).toBeVisible();
const npmWrongMethod = await page.request.get('/api/v1/import/npm/upload', { headers });
expectPredictableRouteMiss(npmWrongMethod.status());
// Near-miss path: a plausible-but-wrong route name must 404/405.
const npmCancelWrongPath = await page.request.post('/api/v1/import/npm/cancel-session', {
headers,
data: {},
});
expectPredictableRouteMiss(npmCancelWrongPath.status());
const npmUploadForCancel = await page.request.post('/api/v1/import/npm/upload', {
headers,
data: { content: SAMPLE_NPM_OR_JSON_EXPORT },
});
expectCanonicalSuccess(npmUploadForCancel.status(), 'POST /api/v1/import/npm/upload');
const npmCancelSession = await readSessionId(npmUploadForCancel);
// NPM cancel is a POST with the session id in the body (unlike the
// Caddy importer's DELETE + query param above).
const npmCancel = await page.request.post('/api/v1/import/npm/cancel', {
headers,
data: { session_uuid: npmCancelSession },
});
expectCanonicalSuccess(npmCancel.status(), 'POST /api/v1/import/npm/cancel');
const npmUploadForCommit = await page.request.post('/api/v1/import/npm/upload', {
headers,
data: { content: SAMPLE_NPM_OR_JSON_EXPORT },
});
expectCanonicalSuccess(npmUploadForCommit.status(), 'POST /api/v1/import/npm/upload');
const npmCommitSession = await readSessionId(npmUploadForCommit);
const npmCommit = await page.request.post('/api/v1/import/npm/commit', {
headers,
data: {
session_uuid: npmCommitSession,
resolutions: {},
names: {},
},
});
expectCanonicalSuccess(npmCommit.status(), 'POST /api/v1/import/npm/commit');
});
await test.step('JSON import upload/commit/cancel with route-mismatch checks', async () => {
await page.goto('/tasks/import/json', { waitUntil: 'domcontentloaded' });
await expect(page.getByRole('heading').filter({ hasText: /json/i }).first()).toBeVisible();
await expect(page.getByRole('button', { name: /upload\s*&\s*preview/i })).toBeVisible();
const jsonWrongMethod = await page.request.get('/api/v1/import/json/upload', { headers });
expectPredictableRouteMiss(jsonWrongMethod.status());
// Near-miss path probe for the JSON commit route.
const jsonCommitWrongPath = await page.request.post('/api/v1/import/json/commit-now', {
headers,
data: {},
});
expectPredictableRouteMiss(jsonCommitWrongPath.status());
const jsonUploadForCancel = await page.request.post('/api/v1/import/json/upload', {
headers,
data: { content: SAMPLE_NPM_OR_JSON_EXPORT },
});
expectCanonicalSuccess(jsonUploadForCancel.status(), 'POST /api/v1/import/json/upload');
const jsonCancelSession = await readSessionId(jsonUploadForCancel);
const jsonCancel = await page.request.post('/api/v1/import/json/cancel', {
headers,
data: { session_uuid: jsonCancelSession },
});
expectCanonicalSuccess(jsonCancel.status(), 'POST /api/v1/import/json/cancel');
const jsonUploadForCommit = await page.request.post('/api/v1/import/json/upload', {
headers,
data: { content: SAMPLE_NPM_OR_JSON_EXPORT },
});
expectCanonicalSuccess(jsonUploadForCommit.status(), 'POST /api/v1/import/json/upload');
const jsonCommitSession = await readSessionId(jsonUploadForCommit);
const jsonCommit = await page.request.post('/api/v1/import/json/commit', {
headers,
data: {
session_uuid: jsonCommitSession,
resolutions: {},
names: {},
},
});
expectCanonicalSuccess(jsonCommit.status(), 'POST /api/v1/import/json/commit');
});
});
// Save flows: system-settings save/restore driven through the real UI, and
// proxy-host create/update via the API, each with wrong-method/path probes.
test('Save flow routes for settings and proxy-host paths detect 404 regressions', async ({ page, adminUser }) => {
await loginUser(page, adminUser);
const headers = getStorageStateAuthHeaders();
let createdProxyUUID = '';
try {
await test.step('System settings save path succeeds on canonical route', async () => {
await page.goto('/settings/system', { waitUntil: 'domcontentloaded' });
await expect(page.getByRole('heading', { name: /system settings/i })).toBeVisible();
const caddyApiInput = page.locator('#caddy-api');
await expect(caddyApiInput).toBeVisible();
// Remember the current value so it can be restored after the test.
const originalCaddyApi = await caddyApiInput.inputValue();
// Unique marker makes the saved value identifiable in the response wait.
const requestMarker = `route-regression-${Date.now()}-${Math.floor(Math.random() * 1000)}`;
const updatedCaddyApi = `http://localhost:2019?${requestMarker}=1`;
await caddyApiInput.fill(updatedCaddyApi);
const saveButton = page.getByRole('button', { name: /save settings/i }).first();
await expect(saveButton).toBeEnabled();
// Match only the request saving our exact marker value, so unrelated
// settings requests fired by the page do not satisfy the wait.
const saveResponsePromise = page.waitForResponse((response) => {
const request = response.request();
let payload: { key?: string; value?: string } | undefined;
try {
payload = request.postDataJSON() as { key?: string; value?: string };
} catch {
// Non-JSON bodies (or no body) can't be the settings save request.
return false;
}
return (
response.url().includes('/api/v1/settings') &&
request.method() === 'POST' &&
payload.key === 'caddy.admin_api' &&
payload.value === updatedCaddyApi
);
});
await saveButton.click();
const saveResponse = await saveResponsePromise;
expectCanonicalSuccess(saveResponse.status(), 'POST /api/v1/settings (caddy.admin_api)');
// Deterministic UI effect: reloading should preserve the value we just saved.
await page.reload({ waitUntil: 'domcontentloaded' });
await expect(page.locator('#caddy-api')).toHaveValue(updatedCaddyApi);
// Restore the original value using the same save path and matcher.
await caddyApiInput.fill(originalCaddyApi);
const restoreResponsePromise = page.waitForResponse((response) => {
const request = response.request();
let payload: { key?: string; value?: string } | undefined;
try {
payload = request.postDataJSON() as { key?: string; value?: string };
} catch {
return false;
}
return (
response.url().includes('/api/v1/settings') &&
request.method() === 'POST' &&
payload.key === 'caddy.admin_api' &&
payload.value === originalCaddyApi
);
});
await saveButton.click();
const restoreResponse = await restoreResponsePromise;
expectCanonicalSuccess(restoreResponse.status(), 'POST /api/v1/settings (restore caddy.admin_api)');
// DELETE is not a supported method on the settings collection.
const wrongSettingsMethod = await page.request.delete('/api/v1/settings', { headers });
expectPredictableRouteMiss(wrongSettingsMethod.status());
});
await test.step('Proxy-host save path succeeds on canonical route and rejects wrong method/path', async () => {
// Unique suffix keeps created hosts distinguishable across runs.
const unique = `${Date.now()}-${Math.floor(Math.random() * 1000)}`;
const createResponse = await page.request.post('/api/v1/proxy-hosts', {
headers,
data: {
name: `PR3 Route Regression ${unique}`,
domain_names: `pr3-route-${unique}.example.test`,
forward_host: 'localhost',
forward_port: 8080,
forward_scheme: 'http',
websocket_support: false,
enabled: true,
},
});
expectCanonicalSuccess(createResponse.status(), 'POST /api/v1/proxy-hosts');
expect([200, 201]).toContain(createResponse.status());
const created = (await createResponse.json()) as { uuid?: string };
createdProxyUUID = created.uuid || '';
expect(createdProxyUUID).toBeTruthy();
// Update via PUT on the item URL is the canonical save path.
const updateResponse = await page.request.put(`/api/v1/proxy-hosts/${createdProxyUUID}`, {
headers,
data: {
name: `PR3 Route Regression Updated ${unique}`,
domain_names: `pr3-route-${unique}.example.test`,
forward_host: 'localhost',
forward_port: 8081,
forward_scheme: 'http',
websocket_support: false,
enabled: true,
},
});
expectCanonicalSuccess(updateResponse.status(), `PUT /api/v1/proxy-hosts/${createdProxyUUID}`);
// POST against an item URL (update is PUT) must miss predictably.
const wrongProxyMethod = await page.request.post(`/api/v1/proxy-hosts/${createdProxyUUID}`, {
headers,
data: {},
});
expectPredictableRouteMiss(wrongProxyMethod.status());
// Singular path variant must not resolve.
const wrongProxyPath = await page.request.put('/api/v1/proxy-host', {
headers,
data: {},
});
expectPredictableRouteMiss(wrongProxyPath.status());
});
} finally {
if (createdProxyUUID) {
await test.step('Cleanup created proxy host', async () => {
const cleanup = await page.request.delete(`/api/v1/proxy-hosts/${createdProxyUUID}`, { headers });
expectCanonicalSuccess(cleanup.status(), `DELETE /api/v1/proxy-hosts/${createdProxyUUID}`);
});
}
}
});
});