chore: git cache cleanup

This commit is contained in:
GitHub Actions
2026-03-04 18:34:49 +00:00
parent c32cce2a88
commit 27c252600a
2001 changed files with 683185 additions and 0 deletions
+594
View File
@@ -0,0 +1,594 @@
package patchreport
import (
	"bufio"
	"fmt"
	"io"
	"math"
	"os"
	"path/filepath"
	"regexp"
	"sort"
	"strconv"
	"strings"
)
// LineSet is a set of 1-based line numbers.
type LineSet map[int]struct{}

// FileLineSet maps a repo-relative file path to the set of line numbers
// recorded for that file.
type FileLineSet map[string]LineSet

// CoverageData holds the per-file line sets parsed from a coverage report:
// Executable lists the instrumented lines, Covered the subset with hits > 0.
type CoverageData struct {
	Executable FileLineSet
	Covered    FileLineSet
}
// ScopeCoverage summarizes patch coverage for one scope (e.g. backend or
// frontend): how many changed executable lines exist, how many of those are
// covered, the resulting percentage, and a "pass"/"warn" status (Status is
// only populated by ApplyStatus).
type ScopeCoverage struct {
	ChangedLines     int     `json:"changed_lines"`
	CoveredLines     int     `json:"covered_lines"`
	PatchCoveragePct float64 `json:"patch_coverage_pct"`
	Status           string  `json:"status"`
}
// FileCoverageDetail describes one changed file flagged as needing more
// coverage: its path, its patch-coverage percentage, and the uncovered
// changed lines both as a count and as compact ranges (e.g. "3-7").
// NOTE(review): the range field name is singular while its JSON tag is
// plural ("uncovered_changed_line_ranges"); renaming the exported field
// would break callers, so the mismatch is only documented here.
type FileCoverageDetail struct {
	Path                      string   `json:"path"`
	PatchCoveragePct          float64  `json:"patch_coverage_pct"`
	UncoveredChangedLines     int      `json:"uncovered_changed_lines"`
	UncoveredChangedLineRange []string `json:"uncovered_changed_line_ranges,omitempty"`
}
// ThresholdResolution reports a resolved coverage threshold and where it
// came from: Source is "env" when a valid override was read from the
// environment and "default" otherwise; Warning is non-empty only when a
// present but invalid override was ignored.
type ThresholdResolution struct {
	Value   float64
	Source  string
	Warning string
}
// hunkPattern matches a unified-diff hunk header and captures the new-file
// start line, e.g. "@@ -10,2 +10,3 @@" captures "10".
var hunkPattern = regexp.MustCompile(`^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@`)

// maxScannerTokenSize bounds a single scanned line (2 MiB) so generated or
// minified content does not abort scanning with bufio.ErrTooLong.
const maxScannerTokenSize = 2 * 1024 * 1024
func newScannerWithLargeBuffer(input *strings.Reader) *bufio.Scanner {
scanner := bufio.NewScanner(input)
scanner.Buffer(make([]byte, 0, 64*1024), maxScannerTokenSize)
return scanner
}
// newFileScannerWithLargeBuffer wraps file in a line scanner whose maximum
// token size is raised to maxScannerTokenSize, mirroring
// newScannerWithLargeBuffer for on-disk coverage profiles.
func newFileScannerWithLargeBuffer(file *os.File) *bufio.Scanner {
	s := bufio.NewScanner(file)
	s.Buffer(make([]byte, 0, 64*1024), maxScannerTokenSize)
	return s
}
// ResolveThreshold reads a coverage threshold override from the environment
// variable envName via lookup (os.LookupEnv when lookup is nil).
//
// A present value must parse as a float in [0, 100]; anything else falls
// back to defaultValue with an explanatory Warning. NaN is rejected
// explicitly: strconv.ParseFloat accepts the spelling "NaN", and NaN
// compares false against both range bounds, so without the math.IsNaN
// check an env value of "NaN" would have been accepted as a threshold.
func ResolveThreshold(envName string, defaultValue float64, lookup func(string) (string, bool)) ThresholdResolution {
	if lookup == nil {
		lookup = os.LookupEnv
	}
	raw, ok := lookup(envName)
	if !ok {
		return ThresholdResolution{Value: defaultValue, Source: "default"}
	}
	raw = strings.TrimSpace(raw)
	value, err := strconv.ParseFloat(raw, 64)
	if err != nil || math.IsNaN(value) || value < 0 || value > 100 {
		return ThresholdResolution{
			Value:   defaultValue,
			Source:  "default",
			Warning: fmt.Sprintf("Ignoring invalid %s=%q; using default %.1f", envName, raw, defaultValue),
		}
	}
	return ThresholdResolution{Value: value, Source: "env"}
}
// ParseUnifiedDiffChangedLines scans a unified diff and returns the sets of
// added/modified line numbers (in new-file numbering) for files under
// backend/ and frontend/, in that order. Files outside those two prefixes
// are ignored, as are deletions ("+++ /dev/null").
//
// An error is returned only when the input cannot be scanned or a line that
// matched the hunk regexp has an unparsable start number (the latter should
// be unreachable since the regexp only captures digits).
func ParseUnifiedDiffChangedLines(diffContent string) (FileLineSet, FileLineSet, error) {
	backendChanged := make(FileLineSet)
	frontendChanged := make(FileLineSet)
	var currentFile string
	currentScope := ""
	currentNewLine := 0
	inHunk := false
	scanner := newScannerWithLargeBuffer(strings.NewReader(diffContent))
	for scanner.Scan() {
		line := scanner.Text()
		if strings.HasPrefix(line, "+++") {
			// New-file header: reset state, then track the file only when it
			// falls under one of the two monitored scopes.
			currentFile = ""
			currentScope = ""
			inHunk = false
			newFile := strings.TrimSpace(strings.TrimPrefix(line, "+++"))
			if newFile == "/dev/null" {
				continue
			}
			newFile = strings.TrimPrefix(newFile, "b/")
			newFile = normalizeRepoPath(newFile)
			if strings.HasPrefix(newFile, "backend/") {
				currentFile = newFile
				currentScope = "backend"
			} else if strings.HasPrefix(newFile, "frontend/") {
				currentFile = newFile
				currentScope = "frontend"
			}
			continue
		}
		if matches := hunkPattern.FindStringSubmatch(line); matches != nil {
			// Hunk header: reposition the new-file line counter.
			startLine, err := strconv.Atoi(matches[1])
			if err != nil {
				return nil, nil, fmt.Errorf("parse hunk start line: %w", err)
			}
			currentNewLine = startLine
			inHunk = true
			continue
		}
		if !inHunk || currentFile == "" || currentScope == "" || line == "" {
			continue
		}
		switch line[0] {
		case '+':
			// Defensive: "+++" headers are already consumed by the first
			// branch above, so this guard should never fire.
			if strings.HasPrefix(line, "+++") {
				continue
			}
			switch currentScope {
			case "backend":
				addLine(backendChanged, currentFile, currentNewLine)
			case "frontend":
				addLine(frontendChanged, currentFile, currentNewLine)
			}
			currentNewLine++
		case '-':
			// Removed line: exists only in the old file; the new-file
			// counter does not advance.
		case ' ':
			// Context line: present in both files; advance the counter.
			currentNewLine++
		case '\\':
			// "\ No newline at end of file" marker: not a file line.
		default:
			// Anything else (stray headers) is ignored without advancing.
		}
	}
	if err := scanner.Err(); err != nil {
		return nil, nil, fmt.Errorf("scan diff content: %w", err)
	}
	return backendChanged, frontendChanged, nil
}
// ParseGoCoverageProfile reads a Go coverage profile (as written by
// `go test -coverprofile`) and returns the executable and covered line sets,
// with file paths normalized to repo-relative backend/ paths.
//
// Malformed lines are skipped rather than failing the whole parse. The named
// error return lets the deferred Close surface a close failure when no
// earlier error occurred.
func ParseGoCoverageProfile(profilePath string) (data CoverageData, err error) {
	validatedPath, err := validateReadablePath(profilePath)
	if err != nil {
		return CoverageData{}, fmt.Errorf("validate go coverage profile path: %w", err)
	}
	// #nosec G304 -- validatedPath is cleaned and resolved to an absolute path by validateReadablePath.
	file, err := os.Open(validatedPath)
	if err != nil {
		return CoverageData{}, fmt.Errorf("open go coverage profile: %w", err)
	}
	defer func() {
		if closeErr := file.Close(); closeErr != nil && err == nil {
			err = fmt.Errorf("close go coverage profile: %w", closeErr)
		}
	}()
	data = CoverageData{
		Executable: make(FileLineSet),
		Covered:    make(FileLineSet),
	}
	scanner := newFileScannerWithLargeBuffer(file)
	firstLine := true
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if line == "" {
			continue
		}
		if firstLine {
			// The first non-blank line is normally "mode: set|count|atomic";
			// skip it. Anything else falls through and is parsed as data.
			firstLine = false
			if strings.HasPrefix(line, "mode:") {
				continue
			}
		}
		// Expected shape: "<file>:<start>.<col>,<end>.<col> <numStmts> <count>".
		fields := strings.Fields(line)
		if len(fields) != 3 {
			continue
		}
		count, err := strconv.Atoi(fields[2])
		if err != nil {
			continue
		}
		filePart, startLine, endLine, err := parseCoverageRange(fields[0])
		if err != nil {
			continue
		}
		normalizedFile := normalizeGoCoveragePath(filePart)
		if normalizedFile == "" {
			continue
		}
		// Every line of the block is executable; it is covered only when the
		// block was hit at least once.
		for lineNo := startLine; lineNo <= endLine; lineNo++ {
			addLine(data.Executable, normalizedFile, lineNo)
			if count > 0 {
				addLine(data.Covered, normalizedFile, lineNo)
			}
		}
	}
	if scanErr := scanner.Err(); scanErr != nil {
		return CoverageData{}, fmt.Errorf("scan go coverage profile: %w", scanErr)
	}
	return data, nil
}
// ParseLCOVProfile reads an LCOV tracefile and returns executable/covered
// line sets. Each SF: record's path is expanded into all of its normalized
// spellings (see normalizeFrontendCoveragePaths) so DA: hits are attributed
// under whichever variant a diff uses. Malformed DA: records are skipped.
// The named error return lets the deferred Close report a close failure
// when no earlier error occurred.
func ParseLCOVProfile(lcovPath string) (data CoverageData, err error) {
	validatedPath, err := validateReadablePath(lcovPath)
	if err != nil {
		return CoverageData{}, fmt.Errorf("validate lcov profile path: %w", err)
	}
	// #nosec G304 -- validatedPath is cleaned and resolved to an absolute path by validateReadablePath.
	file, err := os.Open(validatedPath)
	if err != nil {
		return CoverageData{}, fmt.Errorf("open lcov profile: %w", err)
	}
	defer func() {
		if closeErr := file.Close(); closeErr != nil && err == nil {
			err = fmt.Errorf("close lcov profile: %w", closeErr)
		}
	}()
	data = CoverageData{
		Executable: make(FileLineSet),
		Covered:    make(FileLineSet),
	}
	currentFiles := make([]string, 0, 2)
	scanner := newFileScannerWithLargeBuffer(file)
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		switch {
		case strings.HasPrefix(line, "SF:"):
			// New source-file section; subsequent DA: lines apply to these
			// path variants.
			sourceFile := strings.TrimSpace(strings.TrimPrefix(line, "SF:"))
			currentFiles = normalizeFrontendCoveragePaths(sourceFile)
		case strings.HasPrefix(line, "DA:"):
			// DA:<line>,<hits>[,<checksum>] — per-line execution data.
			if len(currentFiles) == 0 {
				continue
			}
			parts := strings.Split(strings.TrimPrefix(line, "DA:"), ",")
			if len(parts) < 2 {
				continue
			}
			lineNo, err := strconv.Atoi(strings.TrimSpace(parts[0]))
			if err != nil {
				continue
			}
			hits, err := strconv.Atoi(strings.TrimSpace(parts[1]))
			if err != nil {
				continue
			}
			for _, filePath := range currentFiles {
				addLine(data.Executable, filePath, lineNo)
				if hits > 0 {
					addLine(data.Covered, filePath, lineNo)
				}
			}
		case line == "end_of_record":
			// Close the section; keep the slice's capacity for reuse.
			currentFiles = currentFiles[:0]
		}
	}
	if scanErr := scanner.Err(); scanErr != nil {
		return CoverageData{}, fmt.Errorf("scan lcov profile: %w", scanErr)
	}
	return data, nil
}
// ComputeScopeCoverage intersects the changed lines with the coverage data
// and reports how many changed executable lines exist and how many of them
// are covered. When no changed line is instrumented the percentage is 100;
// Status is left empty (populated separately by ApplyStatus).
func ComputeScopeCoverage(changedLines FileLineSet, coverage CoverageData) ScopeCoverage {
	var changed, covered int
	for path, lineSet := range changedLines {
		instrumented, tracked := coverage.Executable[path]
		if !tracked {
			// File absent from the profile: none of its lines count.
			continue
		}
		hits := coverage.Covered[path]
		for line := range lineSet {
			if _, ok := instrumented[line]; !ok {
				continue
			}
			changed++
			if _, ok := hits[line]; ok {
				covered++
			}
		}
	}
	pct := 100.0
	if changed > 0 {
		pct = roundToOneDecimal(float64(covered) * 100 / float64(changed))
	}
	return ScopeCoverage{
		ChangedLines:     changed,
		CoveredLines:     covered,
		PatchCoveragePct: pct,
	}
}
// MergeScopeCoverage combines several scope summaries into one by summing
// their line counts and recomputing the percentage (100 when nothing
// changed). The merged Status is left empty.
func MergeScopeCoverage(scopes ...ScopeCoverage) ScopeCoverage {
	var total ScopeCoverage
	for _, s := range scopes {
		total.ChangedLines += s.ChangedLines
		total.CoveredLines += s.CoveredLines
	}
	total.PatchCoveragePct = 100.0
	if total.ChangedLines > 0 {
		total.PatchCoveragePct = roundToOneDecimal(float64(total.CoveredLines) * 100 / float64(total.ChangedLines))
	}
	return total
}
// ApplyStatus stamps the scope with "pass" when its patch coverage meets
// minThreshold and "warn" otherwise, returning the updated copy.
func ApplyStatus(scope ScopeCoverage, minThreshold float64) ScopeCoverage {
	// Written as the negated original comparison (rather than >=) so the
	// result is identical for every float input.
	if scope.PatchCoveragePct < minThreshold {
		scope.Status = "warn"
		return scope
	}
	scope.Status = "pass"
	return scope
}
// ComputeFilesNeedingCoverage returns per-file details for changed files
// whose patch coverage needs attention: any file with uncovered changed
// lines, plus — when minThreshold is above 100 — even fully covered files,
// since no percentage can then satisfy the threshold. Files with no changed
// executable lines are skipped entirely. Results are sorted worst coverage
// first, ties broken by path.
func ComputeFilesNeedingCoverage(changedLines FileLineSet, coverage CoverageData, minThreshold float64) []FileCoverageDetail {
	details := make([]FileCoverageDetail, 0, len(changedLines))
	for filePath, lines := range changedLines {
		executable, ok := coverage.Executable[filePath]
		if !ok {
			continue
		}
		coveredLines := coverage.Covered[filePath]
		executableChanged := 0
		coveredChanged := 0
		uncoveredLines := make([]int, 0, len(lines))
		for lineNo := range lines {
			// Only changed lines that are instrumented count toward the
			// file's patch coverage.
			if _, executableLine := executable[lineNo]; !executableLine {
				continue
			}
			executableChanged++
			if _, isCovered := coveredLines[lineNo]; isCovered {
				coveredChanged++
			} else {
				uncoveredLines = append(uncoveredLines, lineNo)
			}
		}
		if executableChanged == 0 {
			continue
		}
		patchCoveragePct := roundToOneDecimal(float64(coveredChanged) * 100 / float64(executableChanged))
		uncoveredCount := executableChanged - coveredChanged
		// Skip only files that are fully covered AND meet the threshold; a
		// threshold above 100 therefore reports even fully covered files.
		if uncoveredCount == 0 && patchCoveragePct >= minThreshold {
			continue
		}
		// Map iteration is random; sort so the rendered ranges are
		// deterministic.
		sort.Ints(uncoveredLines)
		details = append(details, FileCoverageDetail{
			Path:                      filePath,
			PatchCoveragePct:          patchCoveragePct,
			UncoveredChangedLines:     uncoveredCount,
			UncoveredChangedLineRange: formatLineRanges(uncoveredLines),
		})
	}
	sortFileCoverageDetails(details)
	return details
}
// MergeFileCoverageDetails flattens several detail groups into one slice and
// re-sorts it (worst coverage first, then by path).
func MergeFileCoverageDetails(groups ...[]FileCoverageDetail) []FileCoverageDetail {
	total := 0
	for _, g := range groups {
		total += len(g)
	}
	merged := make([]FileCoverageDetail, 0, total)
	for _, g := range groups {
		merged = append(merged, g...)
	}
	sortFileCoverageDetails(merged)
	return merged
}
// SortedWarnings drops blank and whitespace-only warnings and returns the
// remainder in lexicographic order. The input slice is not modified.
func SortedWarnings(warnings []string) []string {
	kept := make([]string, 0, len(warnings))
	for _, w := range warnings {
		if strings.TrimSpace(w) == "" {
			continue
		}
		kept = append(kept, w)
	}
	sort.Strings(kept)
	return kept
}
// parseCoverageRange splits a Go coverage block locator of the form
// "path:startLine.startCol,endLine.endCol" into the file path and the
// inclusive start/end line numbers. Column components are ignored. An error
// is returned for a missing colon or comma, non-numeric line numbers, or a
// non-positive or reversed line range.
func parseCoverageRange(rangePart string) (string, int, int, error) {
	filePart, rangeSpec, hasColon := strings.Cut(rangePart, ":")
	if !hasColon {
		return "", 0, 0, fmt.Errorf("invalid range format")
	}
	filePart = strings.TrimSpace(filePart)
	rangeSpec = strings.TrimSpace(rangeSpec)
	startSpec, endSpec, hasComma := strings.Cut(rangeSpec, ",")
	if !hasComma {
		return "", 0, 0, fmt.Errorf("invalid coordinate format")
	}
	// lineOf parses the line number before the first "." of a coordinate.
	lineOf := func(coord string) (int, error) {
		lineText, _, _ := strings.Cut(coord, ".")
		return strconv.Atoi(lineText)
	}
	startLine, err := lineOf(startSpec)
	if err != nil {
		return "", 0, 0, fmt.Errorf("parse start line: %w", err)
	}
	endLine, err := lineOf(endSpec)
	if err != nil {
		return "", 0, 0, fmt.Errorf("parse end line: %w", err)
	}
	if startLine <= 0 || endLine <= 0 || endLine < startLine {
		return "", 0, 0, fmt.Errorf("invalid line range")
	}
	return filePart, startLine, endLine, nil
}
// normalizeRepoPath trims whitespace, cleans the path, converts it to
// forward slashes, and drops a leading "./". Note that filepath.Clean maps
// an empty input to ".", so the result is never "".
func normalizeRepoPath(input string) string {
	trimmed := strings.TrimSpace(input)
	slashed := filepath.ToSlash(filepath.Clean(trimmed))
	return strings.TrimPrefix(slashed, "./")
}
// normalizeGoCoveragePath maps a coverage-profile path onto the repo-relative
// backend/ layout: paths already under backend/ (or containing /backend/)
// are reduced to that suffix, and well-known repo-relative prefixes gain a
// backend/ prefix. Anything else is returned cleaned but otherwise untouched.
func normalizeGoCoveragePath(input string) string {
	cleaned := normalizeRepoPath(input)
	if cleaned == "" {
		return ""
	}
	if strings.HasPrefix(cleaned, "backend/") {
		return cleaned
	}
	if idx := strings.Index(cleaned, "/backend/"); idx >= 0 {
		// Strip everything before the backend/ segment (e.g. an absolute
		// workspace prefix).
		return cleaned[idx+1:]
	}
	for _, prefix := range []string{"cmd/", "internal/", "pkg/", "api/", "integration/", "tools/"} {
		if strings.HasPrefix(cleaned, prefix) {
			return "backend/" + cleaned
		}
	}
	return cleaned
}
// normalizeFrontendCoveragePaths expands an LCOV SF: path into every
// deduplicated spelling a diff might use: the cleaned input itself, the
// frontend/-rooted form, and the form with the frontend/ prefix stripped.
func normalizeFrontendCoveragePaths(input string) []string {
	cleaned := normalizeRepoPath(input)
	if cleaned == "" {
		return nil
	}
	seen := make(map[string]struct{}, 3)
	variants := make([]string, 0, 3)
	record := func(candidate string) {
		candidate = normalizeRepoPath(candidate)
		if candidate == "" {
			return
		}
		if _, dup := seen[candidate]; dup {
			return
		}
		seen[candidate] = struct{}{}
		variants = append(variants, candidate)
	}
	record(cleaned)
	if idx := strings.Index(cleaned, "/frontend/"); idx >= 0 {
		// Absolute/workspace path containing a frontend/ segment: also emit
		// the rooted and prefix-stripped forms.
		rooted := cleaned[idx+1:]
		record(rooted)
		record(strings.TrimPrefix(rooted, "frontend/"))
	} else if strings.HasPrefix(cleaned, "frontend/") {
		record(strings.TrimPrefix(cleaned, "frontend/"))
	} else {
		record("frontend/" + cleaned)
	}
	return variants
}
// addLine records lineNo for filePath in set, lazily creating the per-file
// LineSet. Empty paths and non-positive line numbers are ignored.
func addLine(set FileLineSet, filePath string, lineNo int) {
	if filePath == "" || lineNo <= 0 {
		return
	}
	lines, ok := set[filePath]
	if !ok {
		lines = make(LineSet)
		set[filePath] = lines
	}
	lines[lineNo] = struct{}{}
}
// roundToOneDecimal rounds value to one decimal place, half away from zero.
//
// The previous hand-rolled int(value*10+0.5) trick mis-rounds negative
// values (it rounds toward zero instead of away) and overflows the int
// conversion for very large magnitudes; math.Round has neither problem.
func roundToOneDecimal(value float64) float64 {
	return math.Round(value*10) / 10
}
// formatLineRanges collapses a sorted slice of line numbers into compact
// human-readable ranges, e.g. [1,2,3,7] -> ["1-3", "7"]. Returns nil when
// there are no lines. (The single-range formatter is inlined here so the
// function is self-contained; it matches formatLineRange.)
func formatLineRanges(lines []int) []string {
	if len(lines) == 0 {
		return nil
	}
	render := func(start, end int) string {
		if start == end {
			return strconv.Itoa(start)
		}
		return fmt.Sprintf("%d-%d", start, end)
	}
	out := make([]string, 0, len(lines))
	start, end := lines[0], lines[0]
	for _, lineNo := range lines[1:] {
		if lineNo == end+1 {
			end = lineNo
			continue
		}
		out = append(out, render(start, end))
		start, end = lineNo, lineNo
	}
	return append(out, render(start, end))
}
// formatLineRange renders an inclusive line range, collapsing a single line
// to just its number ("7") and otherwise using "start-end" ("3-9").
func formatLineRange(start, end int) string {
	if start != end {
		return fmt.Sprintf("%d-%d", start, end)
	}
	return strconv.Itoa(start)
}
// sortFileCoverageDetails orders details in place: lowest patch coverage
// first, ties broken lexicographically by path for deterministic output.
func sortFileCoverageDetails(details []FileCoverageDetail) {
	sort.Slice(details, func(i, j int) bool {
		a, b := details[i], details[j]
		if a.PatchCoveragePct == b.PatchCoveragePct {
			return a.Path < b.Path
		}
		return a.PatchCoveragePct < b.PatchCoveragePct
	})
}
// validateReadablePath rejects blank paths and returns the cleaned,
// absolute form of rawPath. Despite the name it does not touch the
// filesystem; it only normalizes the path so callers can open it safely
// (see the #nosec notes at the call sites).
func validateReadablePath(rawPath string) (string, error) {
	trimmed := strings.TrimSpace(rawPath)
	if trimmed == "" {
		return "", fmt.Errorf("path is empty")
	}
	absolute, err := filepath.Abs(filepath.Clean(trimmed))
	if err != nil {
		return "", fmt.Errorf("resolve absolute path: %w", err)
	}
	return absolute, nil
}
@@ -0,0 +1,539 @@
package patchreport
import (
"os"
"path/filepath"
"strings"
"testing"
)
// TestResolveThreshold covers the env-override resolution table: absent env
// -> default, valid env -> env value, invalid or out-of-range env ->
// default plus a warning.
func TestResolveThreshold(t *testing.T) {
	t.Parallel()
	tests := []struct {
		name         string
		envValue     string
		envSet       bool
		defaultValue float64
		wantValue    float64
		wantSource   string
		wantWarning  bool
	}{
		{
			name:         "uses default when env is absent",
			envSet:       false,
			defaultValue: 90,
			wantValue:    90,
			wantSource:   "default",
			wantWarning:  false,
		},
		{
			name:         "uses env value when valid",
			envSet:       true,
			envValue:     "87.5",
			defaultValue: 85,
			wantValue:    87.5,
			wantSource:   "env",
			wantWarning:  false,
		},
		{
			name:         "falls back when env is invalid",
			envSet:       true,
			envValue:     "invalid",
			defaultValue: 85,
			wantValue:    85,
			wantSource:   "default",
			wantWarning:  true,
		},
		{
			name:         "falls back when env is out of range",
			envSet:       true,
			envValue:     "101",
			defaultValue: 85,
			wantValue:    85,
			wantSource:   "default",
			wantWarning:  true,
		},
	}
	for _, tt := range tests {
		tt := tt // capture range variable (pre-Go 1.22 loop semantics)
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			// Fake lookup also verifies the function queries the right key.
			lookup := func(name string) (string, bool) {
				if name != "TARGET" {
					t.Fatalf("unexpected env lookup key: %s", name)
				}
				if !tt.envSet {
					return "", false
				}
				return tt.envValue, true
			}
			resolved := ResolveThreshold("TARGET", tt.defaultValue, lookup)
			if resolved.Value != tt.wantValue {
				t.Fatalf("value mismatch: got %.1f want %.1f", resolved.Value, tt.wantValue)
			}
			if resolved.Source != tt.wantSource {
				t.Fatalf("source mismatch: got %s want %s", resolved.Source, tt.wantSource)
			}
			hasWarning := resolved.Warning != ""
			if hasWarning != tt.wantWarning {
				t.Fatalf("warning mismatch: got %v want %v (warning=%q)", hasWarning, tt.wantWarning, resolved.Warning)
			}
		})
	}
}
// TestResolveThreshold_WithNilLookupUsesOSLookupEnv verifies the nil-lookup
// fallback to os.LookupEnv. Uses t.Setenv, so it must not be parallel.
func TestResolveThreshold_WithNilLookupUsesOSLookupEnv(t *testing.T) {
	t.Setenv("PATCH_THRESHOLD_TEST", "91.2")
	resolved := ResolveThreshold("PATCH_THRESHOLD_TEST", 85.0, nil)
	if resolved.Value != 91.2 {
		t.Fatalf("expected env value 91.2, got %.1f", resolved.Value)
	}
	if resolved.Source != "env" {
		t.Fatalf("expected source env, got %s", resolved.Source)
	}
}
// TestParseUnifiedDiffChangedLines checks that added/modified new-file line
// numbers are attributed to the backend and frontend scopes. Context lines
// inside the raw-string fixture carry a leading space (unified-diff format);
// the fixture below preserves that.
func TestParseUnifiedDiffChangedLines(t *testing.T) {
	t.Parallel()
	diff := `diff --git a/backend/internal/app.go b/backend/internal/app.go
index 1111111..2222222 100644
--- a/backend/internal/app.go
+++ b/backend/internal/app.go
@@ -10,2 +10,3 @@ func example() {
 line10
-line11
+line11 changed
+line12 new
diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx
index 3333333..4444444 100644
--- a/frontend/src/App.tsx
+++ b/frontend/src/App.tsx
@@ -20,0 +21,2 @@ export default function App() {
+new frontend line
+another frontend line
`
	backendChanged, frontendChanged, err := ParseUnifiedDiffChangedLines(diff)
	if err != nil {
		t.Fatalf("ParseUnifiedDiffChangedLines returned error: %v", err)
	}
	assertHasLines(t, backendChanged, "backend/internal/app.go", []int{11, 12})
	assertHasLines(t, frontendChanged, "frontend/src/App.tsx", []int{21, 22})
}
// An invalid hunk header ("+abc") does not match hunkPattern, so the parser
// ignores it and reports no changed lines.
// NOTE(review): the test name says "ReturnsError" but the assertion is for
// graceful non-error handling; renaming would be clearer but is left as-is
// to keep the function's identifier stable.
func TestParseUnifiedDiffChangedLines_InvalidHunkStartReturnsError(t *testing.T) {
	t.Parallel()
	diff := `diff --git a/backend/internal/app.go b/backend/internal/app.go
index 1111111..2222222 100644
--- a/backend/internal/app.go
+++ b/backend/internal/app.go
@@ -1,1 +abc,2 @@
+line
`
	backendChanged, frontendChanged, err := ParseUnifiedDiffChangedLines(diff)
	if err != nil {
		t.Fatalf("expected graceful handling for invalid hunk, got error: %v", err)
	}
	if len(backendChanged) != 0 || len(frontendChanged) != 0 {
		t.Fatalf("expected no changed lines for invalid hunk, got backend=%v frontend=%v", backendChanged, frontendChanged)
	}
}
// TestBackendChangedLineCoverageComputation is an end-to-end check: parse a
// Go coverage profile from disk, intersect with changed lines (line 15 is
// not instrumented and must be ignored), and verify the scope numbers.
func TestBackendChangedLineCoverageComputation(t *testing.T) {
	t.Parallel()
	tempDir := t.TempDir()
	coverageFile := filepath.Join(tempDir, "coverage.txt")
	coverageContent := `mode: atomic
github.com/Wikid82/charon/backend/internal/service.go:10.1,10.20 1 1
github.com/Wikid82/charon/backend/internal/service.go:11.1,11.20 1 0
github.com/Wikid82/charon/backend/internal/service.go:12.1,12.20 1 1
`
	if err := os.WriteFile(coverageFile, []byte(coverageContent), 0o600); err != nil {
		t.Fatalf("failed to write temp coverage file: %v", err)
	}
	coverage, err := ParseGoCoverageProfile(coverageFile)
	if err != nil {
		t.Fatalf("ParseGoCoverageProfile returned error: %v", err)
	}
	changed := FileLineSet{
		"backend/internal/service.go": {10: {}, 11: {}, 15: {}},
	}
	scope := ComputeScopeCoverage(changed, coverage)
	if scope.ChangedLines != 2 {
		t.Fatalf("changed lines mismatch: got %d want 2", scope.ChangedLines)
	}
	if scope.CoveredLines != 1 {
		t.Fatalf("covered lines mismatch: got %d want 1", scope.CoveredLines)
	}
	if scope.PatchCoveragePct != 50.0 {
		t.Fatalf("coverage pct mismatch: got %.1f want 50.0", scope.PatchCoveragePct)
	}
}
// TestFrontendChangedLineCoverageComputationFromLCOV mirrors the backend
// test for the LCOV path: parse a tracefile, intersect with changed lines
// (line 13 is not instrumented), and verify the numbers plus the warn
// status under an 85% threshold.
func TestFrontendChangedLineCoverageComputationFromLCOV(t *testing.T) {
	t.Parallel()
	tempDir := t.TempDir()
	lcovFile := filepath.Join(tempDir, "lcov.info")
	lcovContent := `TN:
SF:frontend/src/App.tsx
DA:10,1
DA:11,0
DA:12,1
end_of_record
`
	if err := os.WriteFile(lcovFile, []byte(lcovContent), 0o600); err != nil {
		t.Fatalf("failed to write temp lcov file: %v", err)
	}
	coverage, err := ParseLCOVProfile(lcovFile)
	if err != nil {
		t.Fatalf("ParseLCOVProfile returned error: %v", err)
	}
	changed := FileLineSet{
		"frontend/src/App.tsx": {10: {}, 11: {}, 13: {}},
	}
	scope := ComputeScopeCoverage(changed, coverage)
	if scope.ChangedLines != 2 {
		t.Fatalf("changed lines mismatch: got %d want 2", scope.ChangedLines)
	}
	if scope.CoveredLines != 1 {
		t.Fatalf("covered lines mismatch: got %d want 1", scope.CoveredLines)
	}
	if scope.PatchCoveragePct != 50.0 {
		t.Fatalf("coverage pct mismatch: got %.1f want 50.0", scope.PatchCoveragePct)
	}
	status := ApplyStatus(scope, 85)
	if status.Status != "warn" {
		t.Fatalf("status mismatch: got %s want warn", status.Status)
	}
}
// A 128 KiB added line exceeds bufio.Scanner's default 64 KiB token limit;
// this verifies the enlarged scanner buffer handles it.
func TestParseUnifiedDiffChangedLines_AllowsLongLines(t *testing.T) {
	t.Parallel()
	longLine := strings.Repeat("x", 128*1024)
	diff := strings.Join([]string{
		"diff --git a/backend/internal/app.go b/backend/internal/app.go",
		"index 1111111..2222222 100644",
		"--- a/backend/internal/app.go",
		"+++ b/backend/internal/app.go",
		"@@ -1,1 +1,2 @@",
		" line1",
		"+" + longLine,
	}, "\n")
	backendChanged, _, err := ParseUnifiedDiffChangedLines(diff)
	if err != nil {
		t.Fatalf("ParseUnifiedDiffChangedLines returned error for long line: %v", err)
	}
	assertHasLines(t, backendChanged, "backend/internal/app.go", []int{2})
}
// Same long-line guarantee for the Go coverage profile parser: a 128 KiB
// path segment must not trip bufio.ErrTooLong.
func TestParseGoCoverageProfile_AllowsLongLines(t *testing.T) {
	t.Parallel()
	tempDir := t.TempDir()
	coverageFile := filepath.Join(tempDir, "coverage.txt")
	longSegment := strings.Repeat("a", 128*1024)
	coverageContent := "mode: atomic\n" +
		"github.com/Wikid82/charon/backend/internal/" + longSegment + ".go:10.1,10.20 1 1\n"
	if err := os.WriteFile(coverageFile, []byte(coverageContent), 0o600); err != nil {
		t.Fatalf("failed to write temp coverage file: %v", err)
	}
	_, err := ParseGoCoverageProfile(coverageFile)
	if err != nil {
		t.Fatalf("ParseGoCoverageProfile returned error for long line: %v", err)
	}
}
// Same long-line guarantee for the LCOV parser: a 128 KiB SF: path must not
// trip bufio.ErrTooLong.
func TestParseLCOVProfile_AllowsLongLines(t *testing.T) {
	t.Parallel()
	tempDir := t.TempDir()
	lcovFile := filepath.Join(tempDir, "lcov.info")
	longPath := strings.Repeat("a", 128*1024)
	lcovContent := strings.Join([]string{
		"TN:",
		"SF:frontend/src/" + longPath + ".tsx",
		"DA:10,1",
		"end_of_record",
	}, "\n")
	if err := os.WriteFile(lcovFile, []byte(lcovContent), 0o600); err != nil {
		t.Fatalf("failed to write temp lcov file: %v", err)
	}
	_, err := ParseLCOVProfile(lcovFile)
	if err != nil {
		t.Fatalf("ParseLCOVProfile returned error for long line: %v", err)
	}
}
// assertHasLines fails the test unless every expected line number is present
// for file in the changed-line set.
func assertHasLines(t *testing.T, changed FileLineSet, file string, expected []int) {
	t.Helper()
	lines, tracked := changed[file]
	if !tracked {
		t.Fatalf("file %s not found in changed lines", file)
	}
	for _, want := range expected {
		if _, found := lines[want]; !found {
			t.Fatalf("expected line %d in file %s", want, file)
		}
	}
}
// TestValidateReadablePath checks both branches: a whitespace-only path is
// rejected, and a relative dotted path resolves to an absolute one.
func TestValidateReadablePath(t *testing.T) {
	t.Parallel()
	t.Run("returns error for empty path", func(t *testing.T) {
		t.Parallel()
		_, err := validateReadablePath(" ")
		if err == nil {
			t.Fatal("expected error for empty path")
		}
	})
	t.Run("returns absolute cleaned path", func(t *testing.T) {
		t.Parallel()
		path, err := validateReadablePath("./backend/../backend/internal")
		if err != nil {
			t.Fatalf("expected no error, got %v", err)
		}
		if !filepath.IsAbs(path) {
			t.Fatalf("expected absolute path, got %q", path)
		}
	})
}
// Fully covered c.go is excluded; b.go (0%) sorts before a.go (50%), and the
// uncovered ranges are rendered deterministically.
func TestComputeFilesNeedingCoverage_IncludesUncoveredAndSortsDeterministically(t *testing.T) {
	t.Parallel()
	changed := FileLineSet{
		"backend/internal/b.go": {1: {}, 2: {}},
		"backend/internal/a.go": {1: {}, 2: {}},
		"backend/internal/c.go": {1: {}, 2: {}},
	}
	coverage := CoverageData{
		Executable: FileLineSet{
			"backend/internal/a.go": {1: {}, 2: {}},
			"backend/internal/b.go": {1: {}, 2: {}},
			"backend/internal/c.go": {1: {}, 2: {}},
		},
		Covered: FileLineSet{
			"backend/internal/a.go": {1: {}},
			"backend/internal/c.go": {1: {}, 2: {}},
		},
	}
	details := ComputeFilesNeedingCoverage(changed, coverage, 40)
	if len(details) != 2 {
		t.Fatalf("expected 2 files needing coverage, got %d", len(details))
	}
	if details[0].Path != "backend/internal/b.go" {
		t.Fatalf("expected first file to be backend/internal/b.go, got %s", details[0].Path)
	}
	if details[0].PatchCoveragePct != 0.0 {
		t.Fatalf("expected first file coverage 0.0, got %.1f", details[0].PatchCoveragePct)
	}
	if details[0].UncoveredChangedLines != 2 {
		t.Fatalf("expected first file uncovered lines 2, got %d", details[0].UncoveredChangedLines)
	}
	if strings.Join(details[0].UncoveredChangedLineRange, ",") != "1-2" {
		t.Fatalf("expected first file uncovered ranges 1-2, got %v", details[0].UncoveredChangedLineRange)
	}
	if details[1].Path != "backend/internal/a.go" {
		t.Fatalf("expected second file to be backend/internal/a.go, got %s", details[1].Path)
	}
	if details[1].PatchCoveragePct != 50.0 {
		t.Fatalf("expected second file coverage 50.0, got %.1f", details[1].PatchCoveragePct)
	}
	if details[1].UncoveredChangedLines != 1 {
		t.Fatalf("expected second file uncovered lines 1, got %d", details[1].UncoveredChangedLines)
	}
	if strings.Join(details[1].UncoveredChangedLineRange, ",") != "2" {
		t.Fatalf("expected second file uncovered range 2, got %v", details[1].UncoveredChangedLineRange)
	}
}
// A threshold above 100 can never be met, so even a fully covered file must
// appear in the report (the uncoveredCount==0 skip also requires pct >=
// threshold).
func TestComputeFilesNeedingCoverage_IncludesFullyCoveredWhenThresholdAbove100(t *testing.T) {
	t.Parallel()
	changed := FileLineSet{
		"backend/internal/fully.go": {10: {}, 11: {}},
	}
	coverage := CoverageData{
		Executable: FileLineSet{
			"backend/internal/fully.go": {10: {}, 11: {}},
		},
		Covered: FileLineSet{
			"backend/internal/fully.go": {10: {}, 11: {}},
		},
	}
	details := ComputeFilesNeedingCoverage(changed, coverage, 101)
	if len(details) != 1 {
		t.Fatalf("expected 1 file detail when threshold is 101, got %d", len(details))
	}
	if details[0].PatchCoveragePct != 100.0 {
		t.Fatalf("expected 100%% patch coverage detail, got %.1f", details[0].PatchCoveragePct)
	}
}
// Merging re-sorts across groups: worst percentage first, then path order
// within equal percentages.
func TestMergeFileCoverageDetails_SortsWorstCoverageThenPath(t *testing.T) {
	t.Parallel()
	merged := MergeFileCoverageDetails(
		[]FileCoverageDetail{
			{Path: "frontend/src/z.ts", PatchCoveragePct: 50.0},
			{Path: "frontend/src/a.ts", PatchCoveragePct: 50.0},
		},
		[]FileCoverageDetail{
			{Path: "backend/internal/w.go", PatchCoveragePct: 0.0},
		},
	)
	if len(merged) != 3 {
		t.Fatalf("expected 3 merged items, got %d", len(merged))
	}
	orderedPaths := []string{merged[0].Path, merged[1].Path, merged[2].Path}
	got := strings.Join(orderedPaths, ",")
	want := "backend/internal/w.go,frontend/src/a.ts,frontend/src/z.ts"
	if got != want {
		t.Fatalf("unexpected merged order: got %s want %s", got, want)
	}
}
// TestParseCoverageRange_ErrorBranches drives each malformed-input rejection
// path of parseCoverageRange through a small table.
func TestParseCoverageRange_ErrorBranches(t *testing.T) {
	t.Parallel()
	for _, tc := range []struct {
		input   string
		message string
	}{
		{"missing-colon", "expected error for missing colon"},
		{"file.go:10.1", "expected error for missing end coordinate"},
		{"file.go:bad.1,10.1", "expected error for bad start line"},
		{"file.go:10.1,9.1", "expected error for reversed range"},
	} {
		if _, _, _, err := parseCoverageRange(tc.input); err == nil {
			t.Fatal(tc.message)
		}
	}
}
// TestSortedWarnings_FiltersBlanksAndSorts verifies blank entries are
// dropped and the remainder is sorted lexicographically.
func TestSortedWarnings_FiltersBlanksAndSorts(t *testing.T) {
	t.Parallel()
	input := []string{"z warning", "", " ", "a warning"}
	want := "a warning,z warning"
	if got := strings.Join(SortedWarnings(input), ","); got != want {
		t.Fatalf("unexpected warnings ordering: got %q want %q", got, want)
	}
}
// TestNormalizePathsAndRanges spot-checks path normalization (backend prefix
// insertion and /backend/ extraction), frontend path expansion, and range
// formatting in one place.
func TestNormalizePathsAndRanges(t *testing.T) {
	t.Parallel()
	if got := normalizeGoCoveragePath("internal/service.go"); got != "backend/internal/service.go" {
		t.Fatalf("unexpected normalized go path: %s", got)
	}
	if got := normalizeGoCoveragePath("/tmp/work/backend/internal/service.go"); got != "backend/internal/service.go" {
		t.Fatalf("unexpected backend extraction path: %s", got)
	}
	frontend := normalizeFrontendCoveragePaths("/tmp/work/frontend/src/App.tsx")
	if len(frontend) == 0 {
		t.Fatal("expected frontend normalized paths")
	}
	ranges := formatLineRanges([]int{1, 2, 3, 7, 9, 10})
	gotRanges := strings.Join(ranges, ",")
	wantRanges := "1-3,7,9-10"
	if gotRanges != wantRanges {
		t.Fatalf("unexpected ranges: got %q want %q", gotRanges, wantRanges)
	}
}
// TestScopeCoverageMergeAndStatus: merging sums counts (an empty scope is a
// no-op) and recomputes the percentage; ApplyStatus passes at or above the
// threshold.
func TestScopeCoverageMergeAndStatus(t *testing.T) {
	t.Parallel()
	merged := MergeScopeCoverage(
		ScopeCoverage{ChangedLines: 4, CoveredLines: 3},
		ScopeCoverage{ChangedLines: 0, CoveredLines: 0},
	)
	if merged.ChangedLines != 4 || merged.CoveredLines != 3 || merged.PatchCoveragePct != 75.0 {
		t.Fatalf("unexpected merged scope: %+v", merged)
	}
	if status := ApplyStatus(merged, 70); status.Status != "pass" {
		t.Fatalf("expected pass status, got %s", status.Status)
	}
}
// TestParseCoverageProfiles_InvalidPath confirms both profile parsers reject
// whitespace-only paths via validateReadablePath before touching the disk.
func TestParseCoverageProfiles_InvalidPath(t *testing.T) {
	t.Parallel()
	if _, err := ParseGoCoverageProfile(" "); err == nil {
		t.Fatal("expected go profile path validation error")
	}
	if _, err := ParseLCOVProfile("\t"); err == nil {
		t.Fatal("expected lcov profile path validation error")
	}
}
// A blank SF: path normalizes to "." (filepath.Clean maps "" to "."), so
// fallback variants are still produced instead of an empty slice.
func TestNormalizeFrontendCoveragePaths_EmptyInput(t *testing.T) {
	t.Parallel()
	if paths := normalizeFrontendCoveragePaths(" "); len(paths) == 0 {
		t.Fatalf("expected normalized fallback paths, got %#v", paths)
	}
}
// TestAddLine_IgnoresInvalidInputs checks that addLine silently drops empty
// paths and non-positive line numbers without creating map entries.
func TestAddLine_IgnoresInvalidInputs(t *testing.T) {
	t.Parallel()
	set := make(FileLineSet)
	addLine(set, "", 10)                     // empty path ignored
	addLine(set, "backend/internal/x.go", 0) // non-positive line ignored
	if len(set) != 0 {
		t.Fatalf("expected no entries for invalid addLine input, got %#v", set)
	}
}