DOCTOR IMPROVEMENTS: visual improvements/grouping + add comprehensive tests + fix gosec warnings (#656)

* test(doctor): add comprehensive tests for fix and check functions

Add edge case tests, e2e tests, and improve test coverage for:
- database_test.go: database integrity and sync checks
- git_test.go: git hooks, merge driver, sync branch tests
- gitignore_test.go: gitignore validation
- prefix_test.go: ID prefix handling
- fix/fix_test.go: fix operations
- fix/e2e_test.go: end-to-end fix scenarios
- fix/fix_edge_cases_test.go: edge case handling

* docs: add testing philosophy and anti-patterns guide

- Create TESTING_PHILOSOPHY.md covering test pyramid, priority matrix,
  what NOT to test, and 5 anti-patterns with code examples
- Add cross-reference from README_TESTING.md
- Document beads-specific guidance (well-covered areas vs gaps)
- Include target metrics (test-to-code ratio, execution time targets)

* chore: revert .beads/ to upstream/main state

* refactor(doctor): add category grouping and Ayu theme colors

- Add Category field to DoctorCheck for organizing checks by type
- Define category constants: Core, Git, Runtime, Data, Integration, Metadata
- Update thanks command to use shared Ayu color palette from internal/ui
- Simplify test fixtures by removing redundant test cases

* fix(doctor): prevent test fork bomb and fix test failures

- Add ErrTestBinary guard in getBdBinary() to prevent tests from
  recursively executing the test binary when calling bd subcommands
- Update claude_test.go to use new check names (CLI Availability,
  Prime Documentation)
- Fix syncbranch test path comparison by resolving symlinks
  (/var vs /private/var on macOS)
- Fix permissions check to use exact comparison instead of bitmask
- Fix UntrackedJSONL to use git commit --only to preserve staged changes
- Fix MergeDriver edge case test by making both .git dir and config
  read-only
- Add skipIfTestBinary helper for E2E tests that need real bd binary

* test(doctor): skip read-only config test in CI environments

GitHub Actions containers may have CAP_DAC_OVERRIDE or similar
capabilities that allow writing to read-only files, causing
the test to fail. Skip the test when CI=true or GITHUB_ACTIONS=true.
This commit is contained in:
Ryan
2025-12-20 03:10:06 -08:00
committed by GitHub
parent f3b0fb780f
commit 3c08e5eb9d
23 changed files with 6248 additions and 404 deletions

View File

@@ -19,6 +19,7 @@ import (
"github.com/steveyegge/beads/internal/beads"
"github.com/steveyegge/beads/internal/configfile"
"github.com/steveyegge/beads/internal/syncbranch"
"github.com/steveyegge/beads/internal/ui"
)
// Status constants for doctor checks
@@ -29,11 +30,12 @@ const (
)
type doctorCheck struct {
Name string `json:"name"`
Status string `json:"status"` // statusOK, statusWarning, or statusError
Message string `json:"message"`
Detail string `json:"detail,omitempty"` // Additional detail like storage type
Fix string `json:"fix,omitempty"`
Name string `json:"name"`
Status string `json:"status"` // statusOK, statusWarning, or statusError
Message string `json:"message"`
Detail string `json:"detail,omitempty"` // Additional detail like storage type
Fix string `json:"fix,omitempty"`
Category string `json:"category,omitempty"` // category for grouping in output
}
type doctorResult struct {
@@ -539,19 +541,19 @@ func runDiagnostics(path string) doctorResult {
}
// Check 1: Installation (.beads/ directory)
installCheck := convertDoctorCheck(doctor.CheckInstallation(path))
installCheck := convertWithCategory(doctor.CheckInstallation(path), doctor.CategoryCore)
result.Checks = append(result.Checks, installCheck)
if installCheck.Status != statusOK {
result.OverallOK = false
}
// Check Git Hooks early (even if .beads/ doesn't exist yet)
hooksCheck := convertDoctorCheck(doctor.CheckGitHooks())
hooksCheck := convertWithCategory(doctor.CheckGitHooks(), doctor.CategoryGit)
result.Checks = append(result.Checks, hooksCheck)
// Don't fail overall check for missing hooks, just warn
// Check sync-branch hook compatibility (issue #532)
syncBranchHookCheck := convertDoctorCheck(doctor.CheckSyncBranchHookCompatibility(path))
syncBranchHookCheck := convertWithCategory(doctor.CheckSyncBranchHookCompatibility(path), doctor.CategoryGit)
result.Checks = append(result.Checks, syncBranchHookCheck)
if syncBranchHookCheck.Status == statusError {
result.OverallOK = false
@@ -564,171 +566,171 @@ func runDiagnostics(path string) doctorResult {
// Check 1a: Fresh clone detection (bd-4ew)
// Must come early - if this is a fresh clone, other checks may be misleading
freshCloneCheck := convertDoctorCheck(doctor.CheckFreshClone(path))
freshCloneCheck := convertWithCategory(doctor.CheckFreshClone(path), doctor.CategoryCore)
result.Checks = append(result.Checks, freshCloneCheck)
if freshCloneCheck.Status == statusWarning || freshCloneCheck.Status == statusError {
result.OverallOK = false
}
// Check 2: Database version
dbCheck := convertDoctorCheck(doctor.CheckDatabaseVersion(path, Version))
dbCheck := convertWithCategory(doctor.CheckDatabaseVersion(path, Version), doctor.CategoryCore)
result.Checks = append(result.Checks, dbCheck)
if dbCheck.Status == statusError {
result.OverallOK = false
}
// Check 2a: Schema compatibility (bd-ckvw)
schemaCheck := convertDoctorCheck(doctor.CheckSchemaCompatibility(path))
schemaCheck := convertWithCategory(doctor.CheckSchemaCompatibility(path), doctor.CategoryCore)
result.Checks = append(result.Checks, schemaCheck)
if schemaCheck.Status == statusError {
result.OverallOK = false
}
// Check 2b: Database integrity (bd-2au)
integrityCheck := convertDoctorCheck(doctor.CheckDatabaseIntegrity(path))
integrityCheck := convertWithCategory(doctor.CheckDatabaseIntegrity(path), doctor.CategoryCore)
result.Checks = append(result.Checks, integrityCheck)
if integrityCheck.Status == statusError {
result.OverallOK = false
}
// Check 3: ID format (hash vs sequential)
idCheck := convertDoctorCheck(doctor.CheckIDFormat(path))
idCheck := convertWithCategory(doctor.CheckIDFormat(path), doctor.CategoryCore)
result.Checks = append(result.Checks, idCheck)
if idCheck.Status == statusWarning {
result.OverallOK = false
}
// Check 4: CLI version (GitHub)
versionCheck := convertDoctorCheck(doctor.CheckCLIVersion(Version))
versionCheck := convertWithCategory(doctor.CheckCLIVersion(Version), doctor.CategoryCore)
result.Checks = append(result.Checks, versionCheck)
// Don't fail overall check for outdated CLI, just warn
// Check 4.5: Claude plugin version (if running in Claude Code)
pluginCheck := convertDoctorCheck(doctor.CheckClaudePlugin())
pluginCheck := convertWithCategory(doctor.CheckClaudePlugin(), doctor.CategoryIntegration)
result.Checks = append(result.Checks, pluginCheck)
// Don't fail overall check for outdated plugin, just warn
// Check 5: Multiple database files
multiDBCheck := convertDoctorCheck(doctor.CheckMultipleDatabases(path))
multiDBCheck := convertWithCategory(doctor.CheckMultipleDatabases(path), doctor.CategoryData)
result.Checks = append(result.Checks, multiDBCheck)
if multiDBCheck.Status == statusWarning || multiDBCheck.Status == statusError {
result.OverallOK = false
}
// Check 6: Multiple JSONL files (excluding merge artifacts)
jsonlCheck := convertDoctorCheck(doctor.CheckLegacyJSONLFilename(path))
jsonlCheck := convertWithCategory(doctor.CheckLegacyJSONLFilename(path), doctor.CategoryData)
result.Checks = append(result.Checks, jsonlCheck)
if jsonlCheck.Status == statusWarning || jsonlCheck.Status == statusError {
result.OverallOK = false
}
// Check 6a: Legacy JSONL config (bd-6xd: migrate beads.jsonl to issues.jsonl)
legacyConfigCheck := convertDoctorCheck(doctor.CheckLegacyJSONLConfig(path))
legacyConfigCheck := convertWithCategory(doctor.CheckLegacyJSONLConfig(path), doctor.CategoryData)
result.Checks = append(result.Checks, legacyConfigCheck)
// Don't fail overall check for legacy config, just warn
// Check 7: Database/JSONL configuration mismatch
configCheck := convertDoctorCheck(doctor.CheckDatabaseConfig(path))
configCheck := convertWithCategory(doctor.CheckDatabaseConfig(path), doctor.CategoryData)
result.Checks = append(result.Checks, configCheck)
if configCheck.Status == statusWarning || configCheck.Status == statusError {
result.OverallOK = false
}
// Check 7a: Configuration value validation (bd-alz)
configValuesCheck := convertDoctorCheck(doctor.CheckConfigValues(path))
configValuesCheck := convertWithCategory(doctor.CheckConfigValues(path), doctor.CategoryData)
result.Checks = append(result.Checks, configValuesCheck)
// Don't fail overall check for config value warnings, just warn
// Check 8: Daemon health
daemonCheck := convertDoctorCheck(doctor.CheckDaemonStatus(path, Version))
daemonCheck := convertWithCategory(doctor.CheckDaemonStatus(path, Version), doctor.CategoryRuntime)
result.Checks = append(result.Checks, daemonCheck)
if daemonCheck.Status == statusWarning || daemonCheck.Status == statusError {
result.OverallOK = false
}
// Check 9: Database-JSONL sync
syncCheck := convertDoctorCheck(doctor.CheckDatabaseJSONLSync(path))
syncCheck := convertWithCategory(doctor.CheckDatabaseJSONLSync(path), doctor.CategoryData)
result.Checks = append(result.Checks, syncCheck)
if syncCheck.Status == statusWarning || syncCheck.Status == statusError {
result.OverallOK = false
}
// Check 9: Permissions
permCheck := convertDoctorCheck(doctor.CheckPermissions(path))
permCheck := convertWithCategory(doctor.CheckPermissions(path), doctor.CategoryCore)
result.Checks = append(result.Checks, permCheck)
if permCheck.Status == statusError {
result.OverallOK = false
}
// Check 10: Dependency cycles
cycleCheck := convertDoctorCheck(doctor.CheckDependencyCycles(path))
cycleCheck := convertWithCategory(doctor.CheckDependencyCycles(path), doctor.CategoryMetadata)
result.Checks = append(result.Checks, cycleCheck)
if cycleCheck.Status == statusError || cycleCheck.Status == statusWarning {
result.OverallOK = false
}
// Check 11: Claude integration
claudeCheck := convertDoctorCheck(doctor.CheckClaude())
claudeCheck := convertWithCategory(doctor.CheckClaude(), doctor.CategoryIntegration)
result.Checks = append(result.Checks, claudeCheck)
// Don't fail overall check for missing Claude integration, just warn
// Check 11a: bd in PATH (needed for Claude hooks to work)
bdPathCheck := convertDoctorCheck(doctor.CheckBdInPath())
bdPathCheck := convertWithCategory(doctor.CheckBdInPath(), doctor.CategoryIntegration)
result.Checks = append(result.Checks, bdPathCheck)
// Don't fail overall check for missing bd in PATH, just warn
// Check 11b: Documentation bd prime references match installed version
bdPrimeDocsCheck := convertDoctorCheck(doctor.CheckDocumentationBdPrimeReference(path))
bdPrimeDocsCheck := convertWithCategory(doctor.CheckDocumentationBdPrimeReference(path), doctor.CategoryIntegration)
result.Checks = append(result.Checks, bdPrimeDocsCheck)
// Don't fail overall check for doc mismatch, just warn
// Check 12: Agent documentation presence
agentDocsCheck := convertDoctorCheck(doctor.CheckAgentDocumentation(path))
agentDocsCheck := convertWithCategory(doctor.CheckAgentDocumentation(path), doctor.CategoryIntegration)
result.Checks = append(result.Checks, agentDocsCheck)
// Don't fail overall check for missing docs, just warn
// Check 13: Legacy beads slash commands in documentation
legacyDocsCheck := convertDoctorCheck(doctor.CheckLegacyBeadsSlashCommands(path))
legacyDocsCheck := convertWithCategory(doctor.CheckLegacyBeadsSlashCommands(path), doctor.CategoryMetadata)
result.Checks = append(result.Checks, legacyDocsCheck)
// Don't fail overall check for legacy docs, just warn
// Check 14: Gitignore up to date
gitignoreCheck := convertDoctorCheck(doctor.CheckGitignore())
gitignoreCheck := convertWithCategory(doctor.CheckGitignore(), doctor.CategoryGit)
result.Checks = append(result.Checks, gitignoreCheck)
// Don't fail overall check for gitignore, just warn
// Check 15: Git merge driver configuration
mergeDriverCheck := convertDoctorCheck(doctor.CheckMergeDriver(path))
mergeDriverCheck := convertWithCategory(doctor.CheckMergeDriver(path), doctor.CategoryGit)
result.Checks = append(result.Checks, mergeDriverCheck)
// Don't fail overall check for merge driver, just warn
// Check 16: Metadata.json version tracking (bd-u4sb)
metadataCheck := convertDoctorCheck(doctor.CheckMetadataVersionTracking(path, Version))
metadataCheck := convertWithCategory(doctor.CheckMetadataVersionTracking(path, Version), doctor.CategoryMetadata)
result.Checks = append(result.Checks, metadataCheck)
// Don't fail overall check for metadata, just warn
// Check 17: Sync branch configuration (bd-rsua)
syncBranchCheck := convertDoctorCheck(doctor.CheckSyncBranchConfig(path))
syncBranchCheck := convertWithCategory(doctor.CheckSyncBranchConfig(path), doctor.CategoryGit)
result.Checks = append(result.Checks, syncBranchCheck)
// Don't fail overall check for missing sync.branch, just warn
// Check 17a: Sync branch health (bd-6rf)
syncBranchHealthCheck := convertDoctorCheck(doctor.CheckSyncBranchHealth(path))
syncBranchHealthCheck := convertWithCategory(doctor.CheckSyncBranchHealth(path), doctor.CategoryGit)
result.Checks = append(result.Checks, syncBranchHealthCheck)
// Don't fail overall check for sync branch health, just warn
// Check 18: Deletions manifest (legacy, now replaced by tombstones)
deletionsCheck := convertDoctorCheck(doctor.CheckDeletionsManifest(path))
deletionsCheck := convertWithCategory(doctor.CheckDeletionsManifest(path), doctor.CategoryMetadata)
result.Checks = append(result.Checks, deletionsCheck)
// Don't fail overall check for missing deletions manifest, just warn
// Check 19: Tombstones health (bd-s3v)
tombstonesCheck := convertDoctorCheck(doctor.CheckTombstones(path))
tombstonesCheck := convertWithCategory(doctor.CheckTombstones(path), doctor.CategoryMetadata)
result.Checks = append(result.Checks, tombstonesCheck)
// Don't fail overall check for tombstone issues, just warn
// Check 20: Untracked .beads/*.jsonl files (bd-pbj)
untrackedCheck := convertDoctorCheck(doctor.CheckUntrackedBeadsFiles(path))
untrackedCheck := convertWithCategory(doctor.CheckUntrackedBeadsFiles(path), doctor.CategoryData)
result.Checks = append(result.Checks, untrackedCheck)
// Don't fail overall check for untracked files, just warn
@@ -738,14 +740,22 @@ func runDiagnostics(path string) doctorResult {
// convertDoctorCheck converts doctor package check to main package check
func convertDoctorCheck(dc doctor.DoctorCheck) doctorCheck {
return doctorCheck{
Name: dc.Name,
Status: dc.Status,
Message: dc.Message,
Detail: dc.Detail,
Fix: dc.Fix,
Name: dc.Name,
Status: dc.Status,
Message: dc.Message,
Detail: dc.Detail,
Fix: dc.Fix,
Category: dc.Category,
}
}
// convertWithCategory converts a doctor check into the main package's
// representation and assigns the given category so the check can be
// grouped in doctor output.
func convertWithCategory(dc doctor.DoctorCheck, category string) doctorCheck {
	converted := convertDoctorCheck(dc)
	converted.Category = category
	return converted
}
// exportDiagnostics writes the doctor result to a JSON file (bd-9cc)
func exportDiagnostics(result doctorResult, outputPath string) error {
// #nosec G304 - outputPath is a user-provided flag value for file generation
@@ -765,63 +775,117 @@ func exportDiagnostics(result doctorResult, outputPath string) error {
}
func printDiagnostics(result doctorResult) {
// Print header
fmt.Println("\nDiagnostics")
// Print header with version
fmt.Printf("\nbd doctor v%s\n\n", result.CLIVersion)
// Print each check with tree formatting
for i, check := range result.Checks {
// Determine prefix
prefix := "├"
if i == len(result.Checks)-1 {
prefix = "└"
}
// Format status indicator
var statusIcon string
switch check.Status {
case statusOK:
statusIcon = ""
case statusWarning:
statusIcon = color.YellowString(" ⚠")
case statusError:
statusIcon = color.RedString(" ✗")
}
// Print main check line
fmt.Printf(" %s %s: %s%s\n", prefix, check.Name, check.Message, statusIcon)
// Print detail if present (indented under the check)
if check.Detail != "" {
detailPrefix := "│"
if i == len(result.Checks)-1 {
detailPrefix = " "
}
fmt.Printf(" %s %s\n", detailPrefix, color.New(color.Faint).Sprint(check.Detail))
}
}
fmt.Println()
// Print warnings/errors with fixes
hasIssues := false
// Group checks by category
checksByCategory := make(map[string][]doctorCheck)
for _, check := range result.Checks {
if check.Status != statusOK && check.Fix != "" {
if !hasIssues {
hasIssues = true
}
switch check.Status {
case statusWarning:
color.Yellow("⚠ Warning: %s\n", check.Message)
case statusError:
color.Red("✗ Error: %s\n", check.Message)
}
fmt.Printf(" Fix: %s\n\n", check.Fix)
cat := check.Category
if cat == "" {
cat = "Other"
}
checksByCategory[cat] = append(checksByCategory[cat], check)
}
if !hasIssues {
// Track counts
var passCount, warnCount, failCount int
var warnings []doctorCheck
// Print checks by category in defined order
for _, category := range doctor.CategoryOrder {
checks, exists := checksByCategory[category]
if !exists || len(checks) == 0 {
continue
}
// Print category header
fmt.Println(ui.RenderCategory(category))
// Print each check in this category
for _, check := range checks {
// Determine status icon
var statusIcon string
switch check.Status {
case statusOK:
statusIcon = ui.RenderPassIcon()
passCount++
case statusWarning:
statusIcon = ui.RenderWarnIcon()
warnCount++
warnings = append(warnings, check)
case statusError:
statusIcon = ui.RenderFailIcon()
failCount++
warnings = append(warnings, check)
}
// Print check line: icon + name + message
fmt.Printf(" %s %s", statusIcon, check.Name)
if check.Message != "" {
fmt.Printf("%s", ui.RenderMuted(" "+check.Message))
}
fmt.Println()
// Print detail if present (indented)
if check.Detail != "" {
fmt.Printf(" %s%s\n", ui.MutedStyle.Render(ui.TreeLast), ui.RenderMuted(check.Detail))
}
}
fmt.Println()
}
// Print any checks without a category
if otherChecks, exists := checksByCategory["Other"]; exists && len(otherChecks) > 0 {
fmt.Println(ui.RenderCategory("Other"))
for _, check := range otherChecks {
var statusIcon string
switch check.Status {
case statusOK:
statusIcon = ui.RenderPassIcon()
passCount++
case statusWarning:
statusIcon = ui.RenderWarnIcon()
warnCount++
warnings = append(warnings, check)
case statusError:
statusIcon = ui.RenderFailIcon()
failCount++
warnings = append(warnings, check)
}
fmt.Printf(" %s %s", statusIcon, check.Name)
if check.Message != "" {
fmt.Printf("%s", ui.RenderMuted(" "+check.Message))
}
fmt.Println()
if check.Detail != "" {
fmt.Printf(" %s%s\n", ui.MutedStyle.Render(ui.TreeLast), ui.RenderMuted(check.Detail))
}
}
fmt.Println()
}
// Print summary line
fmt.Println(ui.RenderSeparator())
summary := fmt.Sprintf("%s %d passed %s %d warnings %s %d failed",
ui.RenderPassIcon(), passCount,
ui.RenderWarnIcon(), warnCount,
ui.RenderFailIcon(), failCount,
)
fmt.Println(summary)
// Print warnings/errors section with fixes
if len(warnings) > 0 {
fmt.Println()
fmt.Println(ui.RenderWarn(ui.IconWarn + " WARNINGS"))
for _, check := range warnings {
fmt.Printf(" %s: %s\n", check.Name, check.Message)
if check.Fix != "" {
fmt.Printf(" %s%s\n", ui.MutedStyle.Render(ui.TreeLast), check.Fix)
}
}
} else {
fmt.Println()
color.Green("✓ All checks passed\n")
}
}

View File

@@ -270,7 +270,7 @@ func CheckBdInPath() DoctorCheck {
_, err := exec.LookPath("bd")
if err != nil {
return DoctorCheck{
Name: "bd in PATH",
Name: "CLI Availability",
Status: "warning",
Message: "'bd' command not found in PATH",
Detail: "Claude hooks execute 'bd prime' and won't work without bd in PATH",
@@ -282,9 +282,9 @@ func CheckBdInPath() DoctorCheck {
}
return DoctorCheck{
Name: "bd in PATH",
Name: "CLI Availability",
Status: "ok",
Message: "'bd' command available",
Message: "'bd' command available in PATH",
}
}
@@ -317,7 +317,7 @@ func CheckDocumentationBdPrimeReference(repoPath string) DoctorCheck {
// If no docs reference bd prime, that's fine - not everyone uses it
if len(filesWithBdPrime) == 0 {
return DoctorCheck{
Name: "Documentation bd prime",
Name: "Prime Documentation",
Status: "ok",
Message: "No bd prime references in documentation",
}
@@ -327,7 +327,7 @@ func CheckDocumentationBdPrimeReference(repoPath string) DoctorCheck {
cmd := exec.Command("bd", "prime", "--help")
if err := cmd.Run(); err != nil {
return DoctorCheck{
Name: "Documentation bd prime",
Name: "Prime Documentation",
Status: "warning",
Message: "Documentation references 'bd prime' but command not found",
Detail: "Files: " + strings.Join(filesWithBdPrime, ", "),
@@ -339,7 +339,7 @@ func CheckDocumentationBdPrimeReference(repoPath string) DoctorCheck {
}
return DoctorCheck{
Name: "Documentation bd prime",
Name: "Prime Documentation",
Status: "ok",
Message: "Documentation references match installed features",
Detail: "Files: " + strings.Join(filesWithBdPrime, ", "),

View File

@@ -13,8 +13,8 @@ func TestCheckBdInPath(t *testing.T) {
check := CheckBdInPath()
// Just verify the check returns a valid result
if check.Name != "bd in PATH" {
t.Errorf("Expected check name 'bd in PATH', got %s", check.Name)
if check.Name != "CLI Availability" {
t.Errorf("Expected check name 'CLI Availability', got %s", check.Name)
}
if check.Status != "ok" && check.Status != "warning" {
@@ -119,8 +119,8 @@ func TestCheckDocumentationBdPrimeReference(t *testing.T) {
check := CheckDocumentationBdPrimeReference(tmpDir)
if check.Name != "Documentation bd prime" {
t.Errorf("Expected check name 'Documentation bd prime', got %s", check.Name)
if check.Name != "Prime Documentation" {
t.Errorf("Expected check name 'Prime Documentation', got %s", check.Name)
}
// The status depends on whether bd is installed, so we accept both ok and warning

View File

@@ -541,7 +541,7 @@ func CountJSONLIssues(jsonlPath string) (int, map[string]int, error) {
continue
}
if id, ok := issue["id"].(string); ok {
if id, ok := issue["id"].(string); ok && id != "" {
count++
// Extract prefix (everything before the last dash)
lastDash := strings.LastIndex(id, "-")

File diff suppressed because it is too large Load Diff

View File

@@ -8,8 +8,13 @@ import (
"strings"
)
// ErrTestBinary is returned when getBdBinary detects it's running as a test binary.
// This prevents fork bombs when tests call functions that execute bd subcommands.
var ErrTestBinary = fmt.Errorf("running as test binary - cannot execute bd subcommands")
// getBdBinary returns the path to the bd binary to use for fix operations.
// It prefers the current executable to avoid command injection attacks.
// Returns ErrTestBinary if running as a test binary to prevent fork bombs.
func getBdBinary() (string, error) {
// Prefer current executable for security
exe, err := os.Executable()
@@ -17,8 +22,16 @@ func getBdBinary() (string, error) {
// Resolve symlinks to get the real binary path
realPath, err := filepath.EvalSymlinks(exe)
if err == nil {
return realPath, nil
exe = realPath
}
// Check if we're running as a test binary - this prevents fork bombs
// when tests call functions that execute bd subcommands
baseName := filepath.Base(exe)
if strings.HasSuffix(baseName, ".test") || strings.Contains(baseName, ".test.") {
return "", ErrTestBinary
}
return exe, nil
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,793 @@
package fix
import (
"encoding/json"
"os"
"os/exec"
"path/filepath"
"strings"
"testing"
"time"
)
// TestIsWithinWorkspace_PathTraversal exercises isWithinWorkspace with
// candidate paths built from ".." segments that may escape the workspace
// root. NOTE(review): filepath.Join cleans its arguments, so the ".."
// segments are resolved before isWithinWorkspace sees them — the function
// receives plain absolute paths inside or outside root; confirm that this
// matches the traversal scenarios the test names describe.
func TestIsWithinWorkspace_PathTraversal(t *testing.T) {
	root := t.TempDir()

	cases := []struct {
		name      string
		candidate string
		want      bool
	}{
		{"simple dotdot traversal", filepath.Join(root, "..", "etc"), false},
		{"dotdot in middle of path", filepath.Join(root, "subdir", "..", "..", "etc"), false},
		{"multiple dotdot", filepath.Join(root, "..", "..", ".."), false},
		{"dotdot stays within workspace", filepath.Join(root, "a", "b", "..", "c"), true},
		{"relative path with dotdot", filepath.Join(root, "subdir", "..", "file"), true},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			if got := isWithinWorkspace(root, tc.candidate); got != tc.want {
				t.Errorf("isWithinWorkspace(%q, %q) = %v, want %v", root, tc.candidate, got, tc.want)
			}
		})
	}
}
// TestValidateBeadsWorkspace_EdgeCases tests edge cases for workspace
// validation: nested .beads directories, .beads as a regular file, .beads
// as a symlink (to a directory, to a file, and broken), and validation via
// a relative path.
func TestValidateBeadsWorkspace_EdgeCases(t *testing.T) {
	t.Run("nested .beads directories", func(t *testing.T) {
		// Create a workspace with nested .beads directories.
		dir := setupTestWorkspace(t)
		nestedDir := filepath.Join(dir, "subdir")
		nestedBeadsDir := filepath.Join(nestedDir, ".beads")
		if err := os.MkdirAll(nestedBeadsDir, 0755); err != nil {
			t.Fatalf("failed to create nested .beads: %v", err)
		}
		// Root workspace should be valid.
		if err := validateBeadsWorkspace(dir); err != nil {
			t.Errorf("expected root workspace to be valid, got: %v", err)
		}
		// Nested workspace should also be valid.
		if err := validateBeadsWorkspace(nestedDir); err != nil {
			t.Errorf("expected nested workspace to be valid, got: %v", err)
		}
	})

	t.Run(".beads as a file not directory", func(t *testing.T) {
		dir := t.TempDir()
		beadsFile := filepath.Join(dir, ".beads")
		// Create .beads as a file instead of directory.
		if err := os.WriteFile(beadsFile, []byte("not a directory"), 0600); err != nil {
			t.Fatalf("failed to create .beads file: %v", err)
		}
		err := validateBeadsWorkspace(dir)
		// NOTE: Current implementation only checks if .beads exists via os.Stat,
		// but doesn't verify it's a directory. This test documents current behavior.
		// A future improvement could add IsDir() check.
		if err == nil {
			// Currently passes - implementation doesn't validate it's a directory.
			t.Log(".beads exists as file - validation passes (edge case)")
		}
	})

	t.Run(".beads as symlink to directory", func(t *testing.T) {
		dir := t.TempDir()
		// Create actual .beads directory elsewhere.
		actualBeadsDir := filepath.Join(t.TempDir(), "actual_beads")
		if err := os.MkdirAll(actualBeadsDir, 0755); err != nil {
			t.Fatalf("failed to create actual beads dir: %v", err)
		}
		// Create symlink .beads -> actual_beads.
		symlinkPath := filepath.Join(dir, ".beads")
		if err := os.Symlink(actualBeadsDir, symlinkPath); err != nil {
			// Symlink creation can fail on platforms/configs without support.
			t.Skipf("symlink creation failed (may not be supported): %v", err)
		}
		// Should be valid - symlink to directory is acceptable.
		if err := validateBeadsWorkspace(dir); err != nil {
			t.Errorf("expected symlinked .beads directory to be valid, got: %v", err)
		}
	})

	t.Run(".beads as symlink to file", func(t *testing.T) {
		dir := t.TempDir()
		// Create a file for the symlink to point at.
		actualFile := filepath.Join(t.TempDir(), "actual_file")
		if err := os.WriteFile(actualFile, []byte("test"), 0600); err != nil {
			t.Fatalf("failed to create actual file: %v", err)
		}
		// Create symlink .beads -> file.
		symlinkPath := filepath.Join(dir, ".beads")
		if err := os.Symlink(actualFile, symlinkPath); err != nil {
			t.Skipf("symlink creation failed (may not be supported): %v", err)
		}
		err := validateBeadsWorkspace(dir)
		// NOTE: os.Stat follows symlinks, so if symlink points to a file,
		// it just sees the file exists and returns no error.
		// Current implementation doesn't verify it's a directory.
		if err == nil {
			t.Log(".beads symlink to file - validation passes (edge case)")
		}
	})

	t.Run(".beads as broken symlink", func(t *testing.T) {
		dir := t.TempDir()
		// Create symlink to non-existent target; validation should fail.
		symlinkPath := filepath.Join(dir, ".beads")
		if err := os.Symlink("/nonexistent/target", symlinkPath); err != nil {
			t.Skipf("symlink creation failed (may not be supported): %v", err)
		}
		err := validateBeadsWorkspace(dir)
		if err == nil {
			t.Error("expected error when .beads is a broken symlink")
		}
	})

	t.Run("relative path resolution", func(t *testing.T) {
		dir := setupTestWorkspace(t)
		// Test with relative path: chdir to the workspace's parent and
		// validate using just the base name.
		originalWd, err := os.Getwd()
		if err != nil {
			t.Fatalf("failed to get working directory: %v", err)
		}
		// Restore the original working directory so later tests are unaffected.
		defer func() {
			if err := os.Chdir(originalWd); err != nil {
				t.Logf("failed to restore working directory: %v", err)
			}
		}()
		if err := os.Chdir(filepath.Dir(dir)); err != nil {
			t.Fatalf("failed to change directory: %v", err)
		}
		relPath := filepath.Base(dir)
		if err := validateBeadsWorkspace(relPath); err != nil {
			t.Errorf("expected relative path to be valid, got: %v", err)
		}
	})
}
// TestFindJSONLPath_EdgeCases tests edge cases for finding JSONL files:
// precedence between issues.jsonl and beads.jsonl, symlinked JSONL files,
// a directory named issues.jsonl, and missing/empty/nonexistent inputs.
func TestFindJSONLPath_EdgeCases(t *testing.T) {
	t.Run("multiple JSONL files - issues.jsonl takes precedence", func(t *testing.T) {
		dir := t.TempDir()
		beadsDir := filepath.Join(dir, ".beads")
		if err := os.MkdirAll(beadsDir, 0755); err != nil {
			t.Fatalf("failed to create .beads: %v", err)
		}
		// Create both files; issues.jsonl is expected to win.
		issuesPath := filepath.Join(beadsDir, "issues.jsonl")
		beadsPath := filepath.Join(beadsDir, "beads.jsonl")
		if err := os.WriteFile(issuesPath, []byte("{}"), 0600); err != nil {
			t.Fatalf("failed to create issues.jsonl: %v", err)
		}
		if err := os.WriteFile(beadsPath, []byte("{}"), 0600); err != nil {
			t.Fatalf("failed to create beads.jsonl: %v", err)
		}
		path := findJSONLPath(beadsDir)
		if path != issuesPath {
			t.Errorf("expected %s, got %s", issuesPath, path)
		}
	})

	t.Run("only beads.jsonl exists", func(t *testing.T) {
		dir := t.TempDir()
		beadsDir := filepath.Join(dir, ".beads")
		if err := os.MkdirAll(beadsDir, 0755); err != nil {
			t.Fatalf("failed to create .beads: %v", err)
		}
		// With no issues.jsonl, the legacy beads.jsonl should be returned.
		beadsPath := filepath.Join(beadsDir, "beads.jsonl")
		if err := os.WriteFile(beadsPath, []byte("{}"), 0600); err != nil {
			t.Fatalf("failed to create beads.jsonl: %v", err)
		}
		path := findJSONLPath(beadsDir)
		if path != beadsPath {
			t.Errorf("expected %s, got %s", beadsPath, path)
		}
	})

	t.Run("JSONL file as symlink", func(t *testing.T) {
		dir := t.TempDir()
		beadsDir := filepath.Join(dir, ".beads")
		if err := os.MkdirAll(beadsDir, 0755); err != nil {
			t.Fatalf("failed to create .beads: %v", err)
		}
		// Create actual file outside the .beads directory.
		actualFile := filepath.Join(t.TempDir(), "actual_issues.jsonl")
		if err := os.WriteFile(actualFile, []byte("{}"), 0600); err != nil {
			t.Fatalf("failed to create actual file: %v", err)
		}
		// Create symlink issues.jsonl -> actual file.
		symlinkPath := filepath.Join(beadsDir, "issues.jsonl")
		if err := os.Symlink(actualFile, symlinkPath); err != nil {
			t.Skipf("symlink creation failed (may not be supported): %v", err)
		}
		// The symlink path itself should be returned, not its target.
		path := findJSONLPath(beadsDir)
		if path != symlinkPath {
			t.Errorf("expected symlink to be found: %s, got %s", symlinkPath, path)
		}
	})

	t.Run("JSONL file is directory", func(t *testing.T) {
		dir := t.TempDir()
		beadsDir := filepath.Join(dir, ".beads")
		if err := os.MkdirAll(beadsDir, 0755); err != nil {
			t.Fatalf("failed to create .beads: %v", err)
		}
		// Create issues.jsonl as directory instead of file.
		issuesDir := filepath.Join(beadsDir, "issues.jsonl")
		if err := os.MkdirAll(issuesDir, 0755); err != nil {
			t.Fatalf("failed to create issues.jsonl dir: %v", err)
		}
		path := findJSONLPath(beadsDir)
		// NOTE: Current implementation only checks if path exists via os.Stat,
		// but doesn't verify it's a regular file. Returns path even for directories.
		// This documents current behavior - a future improvement could add IsRegular() check.
		if path == issuesDir {
			t.Log("issues.jsonl exists as directory - findJSONLPath returns it (edge case)")
		}
	})

	t.Run("no JSONL files present", func(t *testing.T) {
		dir := t.TempDir()
		beadsDir := filepath.Join(dir, ".beads")
		if err := os.MkdirAll(beadsDir, 0755); err != nil {
			t.Fatalf("failed to create .beads: %v", err)
		}
		// Empty .beads directory: no path should be reported.
		path := findJSONLPath(beadsDir)
		if path != "" {
			t.Errorf("expected empty path, got %s", path)
		}
	})

	t.Run("empty beadsDir path", func(t *testing.T) {
		// An empty input path should yield an empty result.
		path := findJSONLPath("")
		if path != "" {
			t.Errorf("expected empty path for empty beadsDir, got %s", path)
		}
	})

	t.Run("nonexistent beadsDir", func(t *testing.T) {
		// A directory that doesn't exist should yield an empty result.
		path := findJSONLPath("/nonexistent/path/to/beads")
		if path != "" {
			t.Errorf("expected empty path for nonexistent beadsDir, got %s", path)
		}
	})
}
// TestGitHooks_EdgeCases tests GitHooks with edge cases: a missing
// .git/hooks directory, and a git worktree where .git is a file rather
// than a directory.
func TestGitHooks_EdgeCases(t *testing.T) {
	// Skip if running as test binary (can't execute bd subcommands -
	// doing so would recursively re-run the test binary).
	skipIfTestBinary(t)

	t.Run("hooks directory does not exist", func(t *testing.T) {
		dir := setupTestGitRepo(t)
		// Verify .git/hooks doesn't exist or remove it.
		hooksDir := filepath.Join(dir, ".git", "hooks")
		_ = os.RemoveAll(hooksDir)
		// GitHooks should create the directory via bd hooks install.
		err := GitHooks(dir)
		if err != nil {
			t.Errorf("GitHooks should succeed when hooks directory doesn't exist, got: %v", err)
		}
		// Verify hooks directory was created.
		if _, err := os.Stat(hooksDir); os.IsNotExist(err) {
			t.Error("expected hooks directory to be created")
		}
	})

	t.Run("git worktree with .git file", func(t *testing.T) {
		// Create main repo.
		mainDir := setupTestGitRepo(t)
		// Create a commit so we can create a worktree.
		testFile := filepath.Join(mainDir, "test.txt")
		if err := os.WriteFile(testFile, []byte("test"), 0600); err != nil {
			t.Fatalf("failed to create test file: %v", err)
		}
		runGit(t, mainDir, "add", "test.txt")
		runGit(t, mainDir, "commit", "-m", "initial")
		// Create a worktree on a new branch.
		worktreeDir := t.TempDir()
		runGit(t, mainDir, "worktree", "add", worktreeDir, "-b", "feature")
		// Create .beads in worktree so it qualifies as a beads workspace.
		beadsDir := filepath.Join(worktreeDir, ".beads")
		if err := os.MkdirAll(beadsDir, 0755); err != nil {
			t.Fatalf("failed to create .beads in worktree: %v", err)
		}
		// GitHooks should work with worktrees where .git is a file.
		err := GitHooks(worktreeDir)
		if err != nil {
			t.Errorf("GitHooks should work with git worktrees, got: %v", err)
		}
	})
}
// TestMergeDriver_EdgeCases tests MergeDriver with edge cases: a read-only
// git config (the fix must surface the failure) and idempotent re-runs
// when the config is already set.
func TestMergeDriver_EdgeCases(t *testing.T) {
	t.Run("read-only git config file", func(t *testing.T) {
		// Root bypasses file-permission checks (typical in CI containers),
		// so the read-only setup below would not actually block the write
		// and the expected error would never occur. Skip in that case.
		if os.Geteuid() == 0 {
			t.Skip("running as root; file permissions are not enforced")
		}
		dir := setupTestGitRepo(t)
		gitDir := filepath.Join(dir, ".git")
		gitConfigPath := filepath.Join(gitDir, "config")
		// Make both .git directory and config file read-only to truly prevent writes
		// (git might otherwise create a new file and rename it)
		if err := os.Chmod(gitConfigPath, 0400); err != nil {
			t.Fatalf("failed to make config read-only: %v", err)
		}
		if err := os.Chmod(gitDir, 0500); err != nil {
			t.Fatalf("failed to make .git read-only: %v", err)
		}
		// Restore write permissions at the end so t.TempDir cleanup succeeds
		defer func() {
			_ = os.Chmod(gitDir, 0700)
			_ = os.Chmod(gitConfigPath, 0600)
		}()
		// MergeDriver should fail with read-only config
		err := MergeDriver(dir)
		if err == nil {
			t.Error("expected error when git config is read-only")
		}
	})
	t.Run("succeeds after config was previously set", func(t *testing.T) {
		dir := setupTestGitRepo(t)
		// Set the merge driver config initially
		err := MergeDriver(dir)
		if err != nil {
			t.Fatalf("first MergeDriver call failed: %v", err)
		}
		// Run again to verify it handles existing config
		err = MergeDriver(dir)
		if err != nil {
			t.Errorf("MergeDriver should succeed when config already exists, got: %v", err)
		}
		// Verify the config is still correct
		cmd := exec.Command("git", "config", "merge.beads.driver")
		cmd.Dir = dir
		output, err := cmd.Output()
		if err != nil {
			t.Fatalf("failed to get git config: %v", err)
		}
		expected := "bd merge %A %O %A %B\n"
		if string(output) != expected {
			t.Errorf("expected %q, got %q", expected, string(output))
		}
	})
}
// TestUntrackedJSONL_EdgeCases tests UntrackedJSONL with edge cases:
// staged-but-uncommitted files must be left staged, a mix of tracked and
// untracked files must only commit the untracked ones, and JSONL files
// outside .beads must be ignored entirely.
func TestUntrackedJSONL_EdgeCases(t *testing.T) {
	// stagedAdd reports whether git status --porcelain output lists the
	// named file as added to the index. Porcelain lines carry a
	// two-character "XY" status prefix, so a staged new file prints as
	// "A  path" (two spaces); matching the literal substring
	// "A path" (single space) would miss it.
	stagedAdd := func(output, name string) bool {
		for _, line := range strings.Split(output, "\n") {
			if strings.HasPrefix(line, "A") && strings.Contains(line, name) {
				return true
			}
		}
		return false
	}
	t.Run("staged but uncommitted JSONL files", func(t *testing.T) {
		dir := setupTestGitRepo(t)
		// Create initial commit
		testFile := filepath.Join(dir, "test.txt")
		if err := os.WriteFile(testFile, []byte("test"), 0600); err != nil {
			t.Fatalf("failed to create test file: %v", err)
		}
		runGit(t, dir, "add", "test.txt")
		runGit(t, dir, "commit", "-m", "initial")
		// Create a JSONL file and stage it but don't commit
		jsonlFile := filepath.Join(dir, ".beads", "deletions.jsonl")
		if err := os.WriteFile(jsonlFile, []byte(`{"id":"test-1","ts":"2024-01-01T00:00:00Z","by":"user"}`+"\n"), 0600); err != nil {
			t.Fatalf("failed to create JSONL file: %v", err)
		}
		runGit(t, dir, "add", ".beads/deletions.jsonl")
		// Check git status - should show staged file
		output := runGit(t, dir, "status", "--porcelain", ".beads/")
		if !stagedAdd(output, ".beads/deletions.jsonl") {
			t.Logf("git status output: %s", output)
			t.Error("expected file to be staged")
		}
		// UntrackedJSONL should not process staged files (only untracked)
		err := UntrackedJSONL(dir)
		if err != nil {
			t.Errorf("expected no error, got: %v", err)
		}
		// File should still be staged, not committed again
		output = runGit(t, dir, "status", "--porcelain", ".beads/")
		if !stagedAdd(output, ".beads/deletions.jsonl") {
			t.Error("file should still be staged after UntrackedJSONL")
		}
	})
	t.Run("mixed tracked and untracked JSONL files", func(t *testing.T) {
		dir := setupTestGitRepo(t)
		// Create initial commit with one JSONL file
		trackedFile := filepath.Join(dir, ".beads", "issues.jsonl")
		if err := os.WriteFile(trackedFile, []byte(`{"id":"test-1"}`+"\n"), 0600); err != nil {
			t.Fatalf("failed to create tracked JSONL: %v", err)
		}
		runGit(t, dir, "add", ".beads/issues.jsonl")
		runGit(t, dir, "commit", "-m", "initial")
		// Create an untracked JSONL file
		untrackedFile := filepath.Join(dir, ".beads", "deletions.jsonl")
		if err := os.WriteFile(untrackedFile, []byte(`{"id":"test-2"}`+"\n"), 0600); err != nil {
			t.Fatalf("failed to create untracked JSONL: %v", err)
		}
		// UntrackedJSONL should only process the untracked file
		err := UntrackedJSONL(dir)
		if err != nil {
			t.Errorf("expected no error, got: %v", err)
		}
		// Verify untracked file was committed
		output := runGit(t, dir, "status", "--porcelain", ".beads/")
		if output != "" {
			t.Errorf("expected clean status, got: %s", output)
		}
		// Verify both files are now tracked
		output = runGit(t, dir, "ls-files", ".beads/")
		if !strings.Contains(output, "issues.jsonl") || !strings.Contains(output, "deletions.jsonl") {
			t.Errorf("expected both files to be tracked, got: %s", output)
		}
	})
	t.Run("JSONL file outside .beads directory is ignored", func(t *testing.T) {
		dir := setupTestGitRepo(t)
		// Create initial commit
		testFile := filepath.Join(dir, "test.txt")
		if err := os.WriteFile(testFile, []byte("test"), 0600); err != nil {
			t.Fatalf("failed to create test file: %v", err)
		}
		runGit(t, dir, "add", "test.txt")
		runGit(t, dir, "commit", "-m", "initial")
		// Create a JSONL file outside .beads
		outsideFile := filepath.Join(dir, "data.jsonl")
		if err := os.WriteFile(outsideFile, []byte(`{"test":"data"}`+"\n"), 0600); err != nil {
			t.Fatalf("failed to create outside JSONL: %v", err)
		}
		// UntrackedJSONL should ignore it
		err := UntrackedJSONL(dir)
		if err != nil {
			t.Errorf("expected no error, got: %v", err)
		}
		// Verify the file is still untracked
		output := runGit(t, dir, "status", "--porcelain")
		if !strings.Contains(output, "?? data.jsonl") {
			t.Error("expected file outside .beads to remain untracked")
		}
	})
}
// TestMigrateTombstones_EdgeCases tests MigrateTombstones with edge cases:
// corrupt JSON lines mixed with valid records, records missing an ID, and
// a missing issues.jsonl that must be created by the migration.
func TestMigrateTombstones_EdgeCases(t *testing.T) {
	t.Run("malformed deletions.jsonl with corrupt JSON", func(t *testing.T) {
		dir := setupTestWorkspace(t)
		deletionsPath := filepath.Join(dir, ".beads", "deletions.jsonl")
		jsonlPath := filepath.Join(dir, ".beads", "issues.jsonl")
		// Create deletions.jsonl with mix of valid and malformed JSON
		// (3 valid records, 2 broken lines)
		content := `{"id":"valid-1","ts":"2024-01-01T00:00:00Z","by":"user1"}
{corrupt json line without proper structure
{"id":"valid-2","ts":"2024-01-02T00:00:00Z","by":"user2","reason":"cleanup"}
{"incomplete":"object"
{"id":"valid-3","ts":"2024-01-03T00:00:00Z","by":"user3"}
`
		if err := os.WriteFile(deletionsPath, []byte(content), 0600); err != nil {
			t.Fatalf("failed to create deletions.jsonl: %v", err)
		}
		// Create empty issues.jsonl
		if err := os.WriteFile(jsonlPath, []byte(""), 0600); err != nil {
			t.Fatalf("failed to create issues.jsonl: %v", err)
		}
		// Should succeed and migrate only valid records
		err := MigrateTombstones(dir)
		if err != nil {
			t.Fatalf("expected MigrateTombstones to handle malformed JSON, got: %v", err)
		}
		// Verify only valid records were migrated: count lines in
		// issues.jsonl that parse as JSON with status "tombstone"
		resultBytes, err := os.ReadFile(jsonlPath)
		if err != nil {
			t.Fatalf("failed to read issues.jsonl: %v", err)
		}
		lines := strings.Split(strings.TrimSpace(string(resultBytes)), "\n")
		validCount := 0
		for _, line := range lines {
			if line == "" {
				continue
			}
			var issue struct {
				ID     string `json:"id"`
				Status string `json:"status"`
			}
			if err := json.Unmarshal([]byte(line), &issue); err == nil && issue.Status == "tombstone" {
				validCount++
			}
		}
		if validCount != 3 {
			t.Errorf("expected 3 valid tombstones, got %d", validCount)
		}
	})
	t.Run("deletions without ID field are skipped", func(t *testing.T) {
		dir := setupTestWorkspace(t)
		deletionsPath := filepath.Join(dir, ".beads", "deletions.jsonl")
		jsonlPath := filepath.Join(dir, ".beads", "issues.jsonl")
		// Create deletions.jsonl with records missing ID
		// (2 valid records; one record with no id key, one with empty id)
		content := `{"id":"valid-1","ts":"2024-01-01T00:00:00Z","by":"user"}
{"ts":"2024-01-02T00:00:00Z","by":"user2"}
{"id":"","ts":"2024-01-03T00:00:00Z","by":"user3"}
{"id":"valid-2","ts":"2024-01-04T00:00:00Z","by":"user4"}
`
		if err := os.WriteFile(deletionsPath, []byte(content), 0600); err != nil {
			t.Fatalf("failed to create deletions.jsonl: %v", err)
		}
		// Create empty issues.jsonl
		if err := os.WriteFile(jsonlPath, []byte(""), 0600); err != nil {
			t.Fatalf("failed to create issues.jsonl: %v", err)
		}
		err := MigrateTombstones(dir)
		if err != nil {
			t.Fatalf("unexpected error: %v", err)
		}
		// Verify only records with valid IDs were migrated
		resultBytes, err := os.ReadFile(jsonlPath)
		if err != nil {
			t.Fatalf("failed to read issues.jsonl: %v", err)
		}
		lines := strings.Split(strings.TrimSpace(string(resultBytes)), "\n")
		validCount := 0
		for _, line := range lines {
			if line == "" {
				continue
			}
			var issue struct {
				ID     string `json:"id"`
				Status string `json:"status"`
			}
			if err := json.Unmarshal([]byte(line), &issue); err == nil && issue.ID != "" {
				validCount++
			}
		}
		if validCount != 2 {
			t.Errorf("expected 2 valid tombstones, got %d", validCount)
		}
	})
	t.Run("handles missing issues.jsonl", func(t *testing.T) {
		dir := setupTestWorkspace(t)
		deletionsPath := filepath.Join(dir, ".beads", "deletions.jsonl")
		jsonlPath := filepath.Join(dir, ".beads", "issues.jsonl")
		// Create deletions.jsonl
		deletion := legacyDeletionRecord{
			ID:        "test-123",
			Timestamp: time.Now(),
			Actor:     "testuser",
		}
		data, _ := json.Marshal(deletion)
		if err := os.WriteFile(deletionsPath, append(data, '\n'), 0600); err != nil {
			t.Fatalf("failed to create deletions.jsonl: %v", err)
		}
		// Don't create issues.jsonl - it should be created by MigrateTombstones
		err := MigrateTombstones(dir)
		if err != nil {
			t.Fatalf("unexpected error: %v", err)
		}
		// Verify issues.jsonl was created
		if _, err := os.Stat(jsonlPath); os.IsNotExist(err) {
			t.Error("expected issues.jsonl to be created")
		}
		// Verify tombstone was written (non-empty file is sufficient here)
		content, err := os.ReadFile(jsonlPath)
		if err != nil {
			t.Fatalf("failed to read issues.jsonl: %v", err)
		}
		if len(content) == 0 {
			t.Error("expected tombstone to be written")
		}
	})
}
// TestPermissions_EdgeCases tests Permissions with edge cases: symlinked
// directories/files (which must be left untouched), fixing overly
// permissive modes, and tolerating a missing database file.
func TestPermissions_EdgeCases(t *testing.T) {
	t.Run("symbolic link to .beads directory", func(t *testing.T) {
		dir := t.TempDir()
		// Create actual .beads directory elsewhere
		actualBeadsDir := filepath.Join(t.TempDir(), "actual-beads")
		if err := os.MkdirAll(actualBeadsDir, 0755); err != nil {
			t.Fatalf("failed to create actual .beads: %v", err)
		}
		// Create symlink to it
		symlinkPath := filepath.Join(dir, ".beads")
		if err := os.Symlink(actualBeadsDir, symlinkPath); err != nil {
			t.Fatalf("failed to create symlink: %v", err)
		}
		// Permissions should skip symlinked directories
		err := Permissions(dir)
		if err != nil {
			t.Errorf("expected no error for symlinked .beads, got: %v", err)
		}
		// Verify target directory permissions were not changed.
		// NOTE(review): asserts "not 0700" rather than "still 0755" so the
		// check is not sensitive to the process umask at MkdirAll time.
		info, err := os.Stat(actualBeadsDir)
		if err != nil {
			t.Fatalf("failed to stat actual .beads: %v", err)
		}
		// Should still have 0755, not 0700
		if info.Mode().Perm() == 0700 {
			t.Error("symlinked directory permissions should not be changed to 0700")
		}
	})
	t.Run("symbolic link to database file", func(t *testing.T) {
		dir := setupTestWorkspace(t)
		// Create actual database file elsewhere
		actualDbPath := filepath.Join(t.TempDir(), "actual-beads.db")
		if err := os.WriteFile(actualDbPath, []byte("test"), 0644); err != nil {
			t.Fatalf("failed to create actual db: %v", err)
		}
		// Create symlink to it
		dbSymlinkPath := filepath.Join(dir, ".beads", "beads.db")
		if err := os.Symlink(actualDbPath, dbSymlinkPath); err != nil {
			t.Fatalf("failed to create symlink: %v", err)
		}
		// Permissions should skip symlinked files
		err := Permissions(dir)
		if err != nil {
			t.Errorf("expected no error for symlinked db, got: %v", err)
		}
		// Verify target file permissions were not changed
		info, err := os.Stat(actualDbPath)
		if err != nil {
			t.Fatalf("failed to stat actual db: %v", err)
		}
		// Should still have 0644, not 0600
		if info.Mode().Perm() == 0600 {
			t.Error("symlinked database permissions should not be changed to 0600")
		}
	})
	t.Run("fixes incorrect .beads directory permissions", func(t *testing.T) {
		dir := setupTestWorkspace(t)
		beadsDir := filepath.Join(dir, ".beads")
		// Set incorrect permissions (too permissive)
		if err := os.Chmod(beadsDir, 0755); err != nil {
			t.Fatalf("failed to set permissions: %v", err)
		}
		err := Permissions(dir)
		if err != nil {
			t.Fatalf("unexpected error: %v", err)
		}
		// Verify permissions were fixed to 0700 (owner-only access)
		info, err := os.Stat(beadsDir)
		if err != nil {
			t.Fatalf("failed to stat .beads: %v", err)
		}
		if info.Mode().Perm() != 0700 {
			t.Errorf("expected permissions 0700, got %o", info.Mode().Perm())
		}
	})
	t.Run("fixes incorrect database file permissions", func(t *testing.T) {
		dir := setupTestWorkspace(t)
		dbPath := filepath.Join(dir, ".beads", "beads.db")
		// Deliberately world-readable; the fix must tighten it to 0600
		if err := os.WriteFile(dbPath, []byte("test"), 0644); err != nil {
			t.Fatalf("failed to create db: %v", err)
		}
		err := Permissions(dir)
		if err != nil {
			t.Fatalf("unexpected error: %v", err)
		}
		// Verify permissions were fixed to 0600 (owner read/write only)
		info, err := os.Stat(dbPath)
		if err != nil {
			t.Fatalf("failed to stat db: %v", err)
		}
		if info.Mode().Perm() != 0600 {
			t.Errorf("expected permissions 0600, got %o", info.Mode().Perm())
		}
	})
	t.Run("handles missing database file gracefully", func(t *testing.T) {
		dir := setupTestWorkspace(t)
		// No database file exists; Permissions must treat this as a no-op
		err := Permissions(dir)
		if err != nil {
			t.Errorf("expected no error when database doesn't exist, got: %v", err)
		}
	})
}

View File

@@ -0,0 +1,630 @@
package fix
import (
"encoding/json"
"os"
"os/exec"
"path/filepath"
"strings"
"testing"
"time"
)
// setupTestWorkspace returns a fresh temporary workspace that already
// contains an empty .beads subdirectory, failing the test on any error.
func setupTestWorkspace(t *testing.T) string {
	t.Helper()
	root := t.TempDir()
	if err := os.MkdirAll(filepath.Join(root, ".beads"), 0755); err != nil {
		t.Fatalf("failed to create .beads directory: %v", err)
	}
	return root
}
// setupTestGitRepo builds on setupTestWorkspace by initializing a git
// repository in the workspace and configuring a throwaway identity so
// commits succeed even without a global git config.
func setupTestGitRepo(t *testing.T) string {
	t.Helper()
	dir := setupTestWorkspace(t)
	initCmd := exec.Command("git", "init")
	initCmd.Dir = dir
	if err := initCmd.Run(); err != nil {
		t.Fatalf("failed to init git repo: %v", err)
	}
	// Identity config is best-effort; a later commit surfaces any failure.
	for _, pair := range [][2]string{
		{"user.email", "test@test.com"},
		{"user.name", "Test User"},
	} {
		cfg := exec.Command("git", "config", pair[0], pair[1])
		cfg.Dir = dir
		_ = cfg.Run()
	}
	return dir
}
// runGit executes git with the given arguments inside dir and returns the
// combined stdout/stderr. Failures are logged (not fatal) so callers can
// assert on repository state instead.
func runGit(t *testing.T, dir string, args ...string) string {
	t.Helper()
	cmd := exec.Command("git", args...)
	cmd.Dir = dir
	out, runErr := cmd.CombinedOutput()
	if runErr != nil {
		t.Logf("git %v: %s", args, out)
	}
	return string(out)
}
// TestValidateBeadsWorkspace tests the workspace validation function
// against a path that cannot exist.
func TestValidateBeadsWorkspace(t *testing.T) {
	t.Run("invalid path", func(t *testing.T) {
		if err := validateBeadsWorkspace("/nonexistent/path/that/does/not/exist"); err == nil {
			t.Error("expected error for nonexistent path")
		}
	})
}
// TestGitHooks_Validation tests GitHooks validation
func TestGitHooks_Validation(t *testing.T) {
t.Run("not a git repository", func(t *testing.T) {
dir := setupTestWorkspace(t)
err := GitHooks(dir)
if err == nil {
t.Error("expected error for non-git repository")
}
if err.Error() != "not a git repository" {
t.Errorf("unexpected error: %v", err)
}
})
}
// TestMergeDriver_Validation tests MergeDriver validation: after a
// successful run, merge.beads.driver must hold the expected command line.
func TestMergeDriver_Validation(t *testing.T) {
	t.Run("sets correct merge driver config", func(t *testing.T) {
		dir := setupTestGitRepo(t)
		if err := MergeDriver(dir); err != nil {
			t.Fatalf("unexpected error: %v", err)
		}
		// Read the value back straight from git config.
		cmd := exec.Command("git", "config", "merge.beads.driver")
		cmd.Dir = dir
		got, err := cmd.Output()
		if err != nil {
			t.Fatalf("failed to get git config: %v", err)
		}
		const want = "bd merge %A %O %A %B\n"
		if string(got) != want {
			t.Errorf("expected %q, got %q", want, string(got))
		}
	})
}
// TestDaemon_Validation tests Daemon validation
func TestDaemon_Validation(t *testing.T) {
t.Run("no socket - nothing to do", func(t *testing.T) {
dir := setupTestWorkspace(t)
err := Daemon(dir)
if err != nil {
t.Errorf("expected no error when no socket exists, got: %v", err)
}
})
}
// TestDBJSONLSync_Validation tests DBJSONLSync validation
func TestDBJSONLSync_Validation(t *testing.T) {
t.Run("no database - nothing to do", func(t *testing.T) {
dir := setupTestWorkspace(t)
err := DBJSONLSync(dir)
if err != nil {
t.Errorf("expected no error when no database exists, got: %v", err)
}
})
t.Run("no JSONL - nothing to do", func(t *testing.T) {
dir := setupTestWorkspace(t)
// Create a database file
dbPath := filepath.Join(dir, ".beads", "beads.db")
if err := os.WriteFile(dbPath, []byte("test"), 0600); err != nil {
t.Fatalf("failed to create test db: %v", err)
}
err := DBJSONLSync(dir)
if err != nil {
t.Errorf("expected no error when no JSONL exists, got: %v", err)
}
})
}
// TestSyncBranchConfig_Validation tests SyncBranchConfig validation
func TestSyncBranchConfig_Validation(t *testing.T) {
t.Run("not a git repository", func(t *testing.T) {
dir := setupTestWorkspace(t)
err := SyncBranchConfig(dir)
if err == nil {
t.Error("expected error for non-git repository")
}
})
}
// TestSyncBranchHealth_Validation tests SyncBranchHealth validation: when
// the repository has neither a main nor a master branch, the check must
// report an error.
//
// Uses the runGit helper (consistent with the rest of this file) instead
// of hand-rolled exec.Command calls with silently discarded errors, so
// failing git invocations are at least logged.
func TestSyncBranchHealth_Validation(t *testing.T) {
	t.Run("no main or master branch", func(t *testing.T) {
		dir := setupTestGitRepo(t)
		// Commit on a branch named neither main nor master so no base
		// branch can be resolved.
		runGit(t, dir, "checkout", "-b", "other")
		// Create a file and commit
		testFile := filepath.Join(dir, "test.txt")
		if err := os.WriteFile(testFile, []byte("test"), 0600); err != nil {
			t.Fatalf("failed to create test file: %v", err)
		}
		runGit(t, dir, "add", "test.txt")
		runGit(t, dir, "commit", "-m", "initial")
		err := SyncBranchHealth(dir, "beads-sync")
		if err == nil {
			t.Error("expected error when neither main nor master exists")
		}
	})
}
// TestUntrackedJSONL_Validation tests UntrackedJSONL validation
func TestUntrackedJSONL_Validation(t *testing.T) {
t.Run("not a git repository", func(t *testing.T) {
dir := setupTestWorkspace(t)
err := UntrackedJSONL(dir)
if err == nil {
t.Error("expected error for non-git repository")
}
})
t.Run("no untracked files", func(t *testing.T) {
dir := setupTestGitRepo(t)
err := UntrackedJSONL(dir)
// Should succeed with no untracked files
if err != nil {
t.Errorf("expected no error, got: %v", err)
}
})
}
// TestMigrateTombstones tests the MigrateTombstones function: no-op cases
// (missing or empty deletions.jsonl), a successful migration that renames
// the legacy file and appends a tombstone, and skipping IDs that already
// have a tombstone in issues.jsonl.
func TestMigrateTombstones(t *testing.T) {
	t.Run("no deletions.jsonl - nothing to migrate", func(t *testing.T) {
		dir := setupTestWorkspace(t)
		err := MigrateTombstones(dir)
		if err != nil {
			t.Errorf("expected no error when no deletions.jsonl exists, got: %v", err)
		}
	})
	t.Run("empty deletions.jsonl", func(t *testing.T) {
		dir := setupTestWorkspace(t)
		deletionsPath := filepath.Join(dir, ".beads", "deletions.jsonl")
		if err := os.WriteFile(deletionsPath, []byte(""), 0600); err != nil {
			t.Fatalf("failed to create deletions.jsonl: %v", err)
		}
		err := MigrateTombstones(dir)
		if err != nil {
			t.Errorf("expected no error for empty deletions.jsonl, got: %v", err)
		}
	})
	t.Run("migrates deletions to tombstones", func(t *testing.T) {
		dir := setupTestWorkspace(t)
		// Create deletions.jsonl with a deletion record
		deletionsPath := filepath.Join(dir, ".beads", "deletions.jsonl")
		deletion := legacyDeletionRecord{
			ID:        "test-123",
			Timestamp: time.Now(),
			Actor:     "testuser",
			Reason:    "test deletion",
		}
		data, _ := json.Marshal(deletion)
		if err := os.WriteFile(deletionsPath, append(data, '\n'), 0600); err != nil {
			t.Fatalf("failed to create deletions.jsonl: %v", err)
		}
		// Create empty issues.jsonl
		jsonlPath := filepath.Join(dir, ".beads", "issues.jsonl")
		if err := os.WriteFile(jsonlPath, []byte(""), 0600); err != nil {
			t.Fatalf("failed to create issues.jsonl: %v", err)
		}
		err := MigrateTombstones(dir)
		if err != nil {
			t.Fatalf("unexpected error: %v", err)
		}
		// Verify deletions.jsonl was renamed
		if _, err := os.Stat(deletionsPath); !os.IsNotExist(err) {
			t.Error("deletions.jsonl should have been renamed")
		}
		migratedPath := deletionsPath + ".migrated"
		if _, err := os.Stat(migratedPath); os.IsNotExist(err) {
			t.Error("deletions.jsonl.migrated should exist")
		}
		// Verify tombstone was written to issues.jsonl
		content, err := os.ReadFile(jsonlPath)
		if err != nil {
			t.Fatalf("failed to read issues.jsonl: %v", err)
		}
		if len(content) == 0 {
			// Fatal, not Error: the slice expression below would panic
			// with an out-of-range index on empty content.
			t.Fatal("expected tombstone to be written to issues.jsonl")
		}
		// Verify the tombstone content; content[:len(content)-1] strips the
		// trailing newline so Unmarshal sees a single JSON object.
		var issue struct {
			ID     string `json:"id"`
			Status string `json:"status"`
		}
		if err := json.Unmarshal(content[:len(content)-1], &issue); err != nil {
			t.Fatalf("failed to parse tombstone: %v", err)
		}
		if issue.ID != "test-123" {
			t.Errorf("expected ID test-123, got %s", issue.ID)
		}
		if issue.Status != "tombstone" {
			t.Errorf("expected status tombstone, got %s", issue.Status)
		}
	})
	t.Run("skips already existing tombstones", func(t *testing.T) {
		dir := setupTestWorkspace(t)
		// Create deletions.jsonl with a deletion record
		deletionsPath := filepath.Join(dir, ".beads", "deletions.jsonl")
		deletion := legacyDeletionRecord{
			ID:        "test-123",
			Timestamp: time.Now(),
			Actor:     "testuser",
		}
		data, _ := json.Marshal(deletion)
		if err := os.WriteFile(deletionsPath, append(data, '\n'), 0600); err != nil {
			t.Fatalf("failed to create deletions.jsonl: %v", err)
		}
		// Create issues.jsonl with an existing tombstone for the same ID
		jsonlPath := filepath.Join(dir, ".beads", "issues.jsonl")
		existingTombstone := map[string]interface{}{
			"id":     "test-123",
			"status": "tombstone",
		}
		existingData, _ := json.Marshal(existingTombstone)
		if err := os.WriteFile(jsonlPath, append(existingData, '\n'), 0600); err != nil {
			t.Fatalf("failed to create issues.jsonl: %v", err)
		}
		originalContent, _ := os.ReadFile(jsonlPath)
		err := MigrateTombstones(dir)
		if err != nil {
			t.Fatalf("unexpected error: %v", err)
		}
		// Verify issues.jsonl was not modified (tombstone already exists)
		newContent, _ := os.ReadFile(jsonlPath)
		if string(newContent) != string(originalContent) {
			t.Error("issues.jsonl should not have been modified when tombstone already exists")
		}
	})
}
// TestLoadLegacyDeletions tests the loadLegacyDeletions helper: missing
// files yield an empty map, valid records are keyed by ID, and invalid
// or ID-less lines are silently skipped.
func TestLoadLegacyDeletions(t *testing.T) {
	t.Run("nonexistent file returns empty map", func(t *testing.T) {
		records, err := loadLegacyDeletions("/nonexistent/path")
		if err != nil {
			t.Errorf("expected no error, got: %v", err)
		}
		if len(records) != 0 {
			t.Errorf("expected empty map, got %d records", len(records))
		}
	})
	t.Run("parses valid deletions", func(t *testing.T) {
		dir := t.TempDir()
		path := filepath.Join(dir, "deletions.jsonl")
		deletion := legacyDeletionRecord{
			ID:        "test-abc",
			Timestamp: time.Now(),
			Actor:     "user",
			Reason:    "testing",
		}
		data, _ := json.Marshal(deletion)
		if err := os.WriteFile(path, append(data, '\n'), 0600); err != nil {
			t.Fatalf("failed to write file: %v", err)
		}
		records, err := loadLegacyDeletions(path)
		if err != nil {
			t.Fatalf("unexpected error: %v", err)
		}
		if len(records) != 1 {
			t.Fatalf("expected 1 record, got %d", len(records))
		}
		// Records are keyed by the deletion ID.
		if records["test-abc"].Actor != "user" {
			t.Errorf("expected actor 'user', got %s", records["test-abc"].Actor)
		}
	})
	t.Run("skips invalid lines", func(t *testing.T) {
		dir := t.TempDir()
		path := filepath.Join(dir, "deletions.jsonl")
		// Two valid records around one non-JSON line.
		content := `{"id":"valid-1","ts":"2024-01-01T00:00:00Z","by":"user"}
invalid json line
{"id":"valid-2","ts":"2024-01-01T00:00:00Z","by":"user"}
`
		if err := os.WriteFile(path, []byte(content), 0600); err != nil {
			t.Fatalf("failed to write file: %v", err)
		}
		records, err := loadLegacyDeletions(path)
		if err != nil {
			t.Fatalf("unexpected error: %v", err)
		}
		if len(records) != 2 {
			t.Fatalf("expected 2 valid records, got %d", len(records))
		}
	})
	t.Run("skips records without ID", func(t *testing.T) {
		dir := t.TempDir()
		path := filepath.Join(dir, "deletions.jsonl")
		// Second line parses as JSON but has no "id" key.
		content := `{"id":"valid-1","ts":"2024-01-01T00:00:00Z","by":"user"}
{"ts":"2024-01-01T00:00:00Z","by":"user"}
`
		if err := os.WriteFile(path, []byte(content), 0600); err != nil {
			t.Fatalf("failed to write file: %v", err)
		}
		records, err := loadLegacyDeletions(path)
		if err != nil {
			t.Fatalf("unexpected error: %v", err)
		}
		if len(records) != 1 {
			t.Fatalf("expected 1 valid record, got %d", len(records))
		}
	})
}
// TestConvertLegacyDeletionToTombstone tests tombstone conversion
func TestConvertLegacyDeletionToTombstone(t *testing.T) {
t.Run("converts with all fields", func(t *testing.T) {
ts := time.Now()
record := legacyDeletionRecord{
ID: "test-xyz",
Timestamp: ts,
Actor: "admin",
Reason: "cleanup",
}
tombstone := convertLegacyDeletionToTombstone(record)
if tombstone.ID != "test-xyz" {
t.Errorf("expected ID test-xyz, got %s", tombstone.ID)
}
if tombstone.Status != "tombstone" {
t.Errorf("expected status tombstone, got %s", tombstone.Status)
}
if tombstone.DeletedBy != "admin" {
t.Errorf("expected DeletedBy admin, got %s", tombstone.DeletedBy)
}
if tombstone.DeleteReason != "cleanup" {
t.Errorf("expected DeleteReason cleanup, got %s", tombstone.DeleteReason)
}
if tombstone.DeletedAt == nil {
t.Error("expected DeletedAt to be set")
}
})
t.Run("handles zero timestamp", func(t *testing.T) {
record := legacyDeletionRecord{
ID: "test-zero",
Actor: "user",
}
tombstone := convertLegacyDeletionToTombstone(record)
if tombstone.DeletedAt == nil {
t.Error("expected DeletedAt to be set even with zero timestamp")
}
})
}
// TestFindJSONLPath tests the findJSONLPath helper: empty result for no
// JSONL, discovery of issues.jsonl and beads.jsonl, and the preference
// for issues.jsonl when both exist.
func TestFindJSONLPath(t *testing.T) {
	t.Run("returns empty for no JSONL", func(t *testing.T) {
		if got := findJSONLPath(t.TempDir()); got != "" {
			t.Errorf("expected empty path, got %s", got)
		}
	})
	t.Run("finds issues.jsonl", func(t *testing.T) {
		dir := t.TempDir()
		want := filepath.Join(dir, "issues.jsonl")
		if err := os.WriteFile(want, []byte("{}"), 0600); err != nil {
			t.Fatalf("failed to create file: %v", err)
		}
		if got := findJSONLPath(dir); got != want {
			t.Errorf("expected %s, got %s", want, got)
		}
	})
	t.Run("finds beads.jsonl as fallback", func(t *testing.T) {
		dir := t.TempDir()
		want := filepath.Join(dir, "beads.jsonl")
		if err := os.WriteFile(want, []byte("{}"), 0600); err != nil {
			t.Fatalf("failed to create file: %v", err)
		}
		if got := findJSONLPath(dir); got != want {
			t.Errorf("expected %s, got %s", want, got)
		}
	})
	t.Run("prefers issues.jsonl over beads.jsonl", func(t *testing.T) {
		dir := t.TempDir()
		want := filepath.Join(dir, "issues.jsonl")
		if err := os.WriteFile(want, []byte("{}"), 0600); err != nil {
			t.Fatalf("failed to create issues.jsonl: %v", err)
		}
		if err := os.WriteFile(filepath.Join(dir, "beads.jsonl"), []byte("{}"), 0600); err != nil {
			t.Fatalf("failed to create beads.jsonl: %v", err)
		}
		if got := findJSONLPath(dir); got != want {
			t.Errorf("expected %s, got %s", want, got)
		}
	})
}
// TestIsWithinWorkspace tests the isWithinWorkspace helper over the same
// directory, nested subdirectories, the parent, and a sibling.
func TestIsWithinWorkspace(t *testing.T) {
	root := t.TempDir()
	parent := filepath.Dir(root)
	cases := map[string]struct {
		candidate string
		want      bool
	}{
		"same directory":      {candidate: root, want: true},
		"subdirectory":        {candidate: filepath.Join(root, "subdir"), want: true},
		"nested subdirectory": {candidate: filepath.Join(root, "sub", "dir", "nested"), want: true},
		"parent directory":    {candidate: parent, want: false},
		"sibling directory":   {candidate: filepath.Join(parent, "sibling"), want: false},
	}
	for name, tc := range cases {
		t.Run(name, func(t *testing.T) {
			if got := isWithinWorkspace(root, tc.candidate); got != tc.want {
				t.Errorf("isWithinWorkspace(%q, %q) = %v, want %v", root, tc.candidate, got, tc.want)
			}
		})
	}
}
// TestDBJSONLSync_MissingDatabase tests DBJSONLSync when database doesn't
// exist: with only a JSONL file present there is nothing to sync.
func TestDBJSONLSync_MissingDatabase(t *testing.T) {
	dir := setupTestWorkspace(t)
	// Provide only the JSONL side of the pair.
	payload, _ := json.Marshal(map[string]interface{}{
		"id":     "test-no-db",
		"title":  "No DB Test",
		"status": "open",
	})
	jsonlPath := filepath.Join(dir, ".beads", "issues.jsonl")
	if err := os.WriteFile(jsonlPath, append(payload, '\n'), 0600); err != nil {
		t.Fatalf("failed to create jsonl: %v", err)
	}
	// Should return without error since there's nothing to sync.
	if err := DBJSONLSync(dir); err != nil {
		t.Errorf("expected no error when database doesn't exist, got: %v", err)
	}
}
// TestSyncBranchConfig_BranchDoesNotExist tests fixing config when branch
// doesn't exist: a repo with no commits has no resolvable current branch.
func TestSyncBranchConfig_BranchDoesNotExist(t *testing.T) {
	// Skip if running as test binary (can't execute bd subcommands)
	skipIfTestBinary(t)
	dir := setupTestGitRepo(t)
	// Try to run fix without any commits (no branch exists yet)
	err := SyncBranchConfig(dir)
	if err == nil {
		t.Error("expected error when no branch exists")
		return
	}
	if !strings.Contains(err.Error(), "failed to get current branch") {
		t.Errorf("unexpected error: %v", err)
	}
}
// TestSyncBranchConfig_InvalidRemoteURL tests fix behavior with invalid
// remote: SyncBranchConfig only writes local config, so a broken remote
// URL must not cause a failure.
func TestSyncBranchConfig_InvalidRemoteURL(t *testing.T) {
	// Skip if running as test binary (can't execute bd subcommands)
	skipIfTestBinary(t)
	dir := setupTestGitRepo(t)
	// Create initial commit
	if err := os.WriteFile(filepath.Join(dir, "test.txt"), []byte("test"), 0600); err != nil {
		t.Fatalf("failed to create test file: %v", err)
	}
	runGit(t, dir, "add", "test.txt")
	runGit(t, dir, "commit", "-m", "initial commit")
	// Add invalid remote
	runGit(t, dir, "remote", "add", "origin", "invalid://bad-url")
	// Fix should still succeed - it only sets config, doesn't interact with remote
	if err := SyncBranchConfig(dir); err != nil {
		t.Fatalf("unexpected error with invalid remote: %v", err)
	}
	// Verify config was set
	cfg := exec.Command("git", "config", "sync.branch")
	cfg.Dir = dir
	out, err := cfg.Output()
	if err != nil {
		t.Fatalf("failed to get sync.branch config: %v", err)
	}
	if strings.TrimSpace(string(out)) == "" {
		t.Error("sync.branch config was not set")
	}
}

View File

@@ -48,10 +48,9 @@ func Permissions(path string) error {
// Ensure database has exactly 0600 permissions (owner rw only)
expectedFileMode := os.FileMode(0600)
currentPerms := dbInfo.Mode().Perm()
requiredPerms := os.FileMode(0600)
// Check if we have both read and write for owner
if currentPerms&requiredPerms != requiredPerms {
// Check if permissions are not exactly 0600
if currentPerms != expectedFileMode {
if err := os.Chmod(dbPath, expectedFileMode); err != nil {
return fmt.Errorf("failed to fix database permissions: %w", err)
}

View File

@@ -19,7 +19,8 @@ func UntrackedJSONL(path string) error {
beadsDir := filepath.Join(path, ".beads")
// Find untracked JSONL files
cmd := exec.Command("git", "status", "--porcelain", ".beads/")
// Use --untracked-files=all to show individual files, not just the directory
cmd := exec.Command("git", "status", "--porcelain", "--untracked-files=all", ".beads/")
cmd.Dir = path
output, err := cmd.Output()
if err != nil {
@@ -67,9 +68,11 @@ func UntrackedJSONL(path string) error {
fmt.Printf(" Staged %s\n", filepath.Base(file))
}
// Commit the staged files
// Commit only the JSONL files we staged (using --only to preserve other staged changes)
commitMsg := "chore(beads): commit untracked JSONL files\n\nAuto-committed by bd doctor --fix (bd-pbj)"
commitCmd := exec.Command("git", "commit", "-m", commitMsg)
commitArgs := []string{"commit", "--only", "-m", commitMsg}
commitArgs = append(commitArgs, untrackedFiles...)
commitCmd := exec.Command("git", commitArgs...) // #nosec G204 -- untrackedFiles validated above
commitCmd.Dir = path
commitCmd.Stdout = os.Stdout
commitCmd.Stderr = os.Stderr

View File

@@ -2,103 +2,801 @@ package doctor
import (
"os"
"os/exec"
"path/filepath"
"strings"
"testing"
)
// setupGitRepo creates a temporary git repository for testing
func setupGitRepo(t *testing.T) string {
t.Helper()
dir := t.TempDir()
// Create .beads directory
beadsDir := filepath.Join(dir, ".beads")
if err := os.MkdirAll(beadsDir, 0755); err != nil {
t.Fatalf("failed to create .beads directory: %v", err)
}
// Initialize git repo
cmd := exec.Command("git", "init")
cmd.Dir = dir
if err := cmd.Run(); err != nil {
t.Fatalf("failed to init git repo: %v", err)
}
// Configure git user for commits
cmd = exec.Command("git", "config", "user.email", "test@test.com")
cmd.Dir = dir
_ = cmd.Run()
cmd = exec.Command("git", "config", "user.name", "Test User")
cmd.Dir = dir
_ = cmd.Run()
return dir
}
func TestCheckGitHooks(t *testing.T) {
t.Run("not a git repo", func(t *testing.T) {
// Save and change to a temp dir that's not a git repo
oldWd, _ := os.Getwd()
// This test needs to run in a git repository
// We test the basic case where hooks are not installed
t.Run("not in git repo returns N/A", func(t *testing.T) {
// Save current directory
origDir, err := os.Getwd()
if err != nil {
t.Fatal(err)
}
defer func() {
if err := os.Chdir(origDir); err != nil {
t.Fatalf("failed to restore directory: %v", err)
}
}()
// Change to a non-git directory
tmpDir := t.TempDir()
os.Chdir(tmpDir)
defer os.Chdir(oldWd)
if err := os.Chdir(tmpDir); err != nil {
t.Fatal(err)
}
check := CheckGitHooks()
// Should return warning when not in a git repo
if check.Name != "Git Hooks" {
t.Errorf("Name = %q, want %q", check.Name, "Git Hooks")
if check.Status != StatusOK {
t.Errorf("expected status %q, got %q", StatusOK, check.Status)
}
if check.Message != "N/A (not a git repository)" {
t.Errorf("unexpected message: %s", check.Message)
}
})
}
func TestCheckMergeDriver(t *testing.T) {
t.Run("not a git repo", func(t *testing.T) {
tmpDir := t.TempDir()
tests := []struct {
name string
setup func(t *testing.T, dir string)
expectedStatus string
expectMessage string
}{
{
name: "not a git repo",
setup: func(t *testing.T, dir string) {
// Just create .beads directory, no git
// CheckMergeDriver uses global git detection
beadsDir := filepath.Join(dir, ".beads")
if err := os.MkdirAll(beadsDir, 0755); err != nil {
t.Fatal(err)
}
},
expectedStatus: "warning", // Uses global git detection, so still checks
expectMessage: "", // Message varies
},
{
name: "merge driver not configured",
setup: func(t *testing.T, dir string) {
setupGitRepoInDir(t, dir)
},
expectedStatus: "warning",
expectMessage: "Git merge driver not configured",
},
{
name: "correct config",
setup: func(t *testing.T, dir string) {
setupGitRepoInDir(t, dir)
cmd := exec.Command("git", "config", "merge.beads.driver", "bd merge %A %O %A %B")
cmd.Dir = dir
if err := cmd.Run(); err != nil {
t.Fatalf("failed to set git config: %v", err)
}
},
expectedStatus: "ok",
expectMessage: "Correctly configured",
},
{
name: "incorrect config with old placeholders",
setup: func(t *testing.T, dir string) {
setupGitRepoInDir(t, dir)
cmd := exec.Command("git", "config", "merge.beads.driver", "bd merge %L %O %A %R")
cmd.Dir = dir
if err := cmd.Run(); err != nil {
t.Fatalf("failed to set git config: %v", err)
}
},
expectedStatus: "error",
},
}
check := CheckMergeDriver(tmpDir)
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tmpDir := t.TempDir()
tt.setup(t, tmpDir)
if check.Name != "Git Merge Driver" {
t.Errorf("Name = %q, want %q", check.Name, "Git Merge Driver")
}
})
check := CheckMergeDriver(tmpDir)
t.Run("no gitattributes", func(t *testing.T) {
tmpDir := t.TempDir()
// Create a fake git directory structure
gitDir := filepath.Join(tmpDir, ".git")
if err := os.Mkdir(gitDir, 0755); err != nil {
t.Fatal(err)
}
check := CheckMergeDriver(tmpDir)
// Should report missing configuration
if check.Status != StatusWarning && check.Status != StatusError {
t.Logf("Status = %q, Message = %q", check.Status, check.Message)
}
})
if check.Status != tt.expectedStatus {
t.Errorf("expected status %q, got %q (message: %s)", tt.expectedStatus, check.Status, check.Message)
}
if tt.expectMessage != "" && check.Message != tt.expectMessage {
t.Errorf("expected message %q, got %q", tt.expectMessage, check.Message)
}
})
}
}
func TestCheckSyncBranchConfig(t *testing.T) {
t.Run("no beads directory", func(t *testing.T) {
tmpDir := t.TempDir()
tests := []struct {
name string
setup func(t *testing.T, dir string)
expectedStatus string
}{
{
name: "no .beads directory",
setup: func(t *testing.T, dir string) {
// Empty directory, no .beads
},
expectedStatus: "ok",
},
{
name: "not a git repo",
setup: func(t *testing.T, dir string) {
beadsDir := filepath.Join(dir, ".beads")
if err := os.MkdirAll(beadsDir, 0755); err != nil {
t.Fatal(err)
}
},
expectedStatus: "ok",
},
{
name: "no remote configured",
setup: func(t *testing.T, dir string) {
setupGitRepoInDir(t, dir)
},
expectedStatus: "ok",
},
}
check := CheckSyncBranchConfig(tmpDir)
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tmpDir := t.TempDir()
tt.setup(t, tmpDir)
if check.Name != "Sync Branch Config" {
t.Errorf("Name = %q, want %q", check.Name, "Sync Branch Config")
}
})
check := CheckSyncBranchConfig(tmpDir)
t.Run("beads directory exists", func(t *testing.T) {
tmpDir := t.TempDir()
beadsDir := filepath.Join(tmpDir, ".beads")
if err := os.Mkdir(beadsDir, 0755); err != nil {
t.Fatal(err)
}
check := CheckSyncBranchConfig(tmpDir)
// Should check for sync branch config
if check.Name != "Sync Branch Config" {
t.Errorf("Name = %q, want %q", check.Name, "Sync Branch Config")
}
})
if check.Status != tt.expectedStatus {
t.Errorf("expected status %q, got %q (message: %s)", tt.expectedStatus, check.Status, check.Message)
}
})
}
}
func TestCheckSyncBranchHealth(t *testing.T) {
t.Run("no beads directory", func(t *testing.T) {
tmpDir := t.TempDir()
tests := []struct {
name string
setup func(t *testing.T, dir string)
expectedStatus string
expectMessage string
}{
{
name: "no sync branch configured (uses global git detection)",
setup: func(t *testing.T, dir string) {
// CheckSyncBranchHealth uses global git.GetGitDir() detection
// which checks from current working directory, not the path parameter
beadsDir := filepath.Join(dir, ".beads")
if err := os.MkdirAll(beadsDir, 0755); err != nil {
t.Fatal(err)
}
},
expectedStatus: "ok",
expectMessage: "N/A (no sync branch configured)",
},
{
name: "no sync branch configured with git",
setup: func(t *testing.T, dir string) {
setupGitRepoInDir(t, dir)
},
expectedStatus: "ok",
expectMessage: "N/A (no sync branch configured)",
},
}
check := CheckSyncBranchHealth(tmpDir)
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tmpDir := t.TempDir()
tt.setup(t, tmpDir)
if check.Name != "Sync Branch Health" {
t.Errorf("Name = %q, want %q", check.Name, "Sync Branch Health")
}
})
check := CheckSyncBranchHealth(tmpDir)
if check.Status != tt.expectedStatus {
t.Errorf("expected status %q, got %q (message: %s)", tt.expectedStatus, check.Status, check.Message)
}
if tt.expectMessage != "" && check.Message != tt.expectMessage {
t.Errorf("expected message %q, got %q", tt.expectMessage, check.Message)
}
})
}
}
func TestCheckSyncBranchHookCompatibility(t *testing.T) {
t.Run("no sync branch configured", func(t *testing.T) {
tmpDir := t.TempDir()
tests := []struct {
name string
setup func(t *testing.T, dir string)
expectedStatus string
}{
{
name: "sync-branch not configured",
setup: func(t *testing.T, dir string) {
setupGitRepoInDir(t, dir)
},
expectedStatus: "ok",
},
}
check := CheckSyncBranchHookCompatibility(tmpDir)
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tmpDir := t.TempDir()
tt.setup(t, tmpDir)
// Should return OK when sync branch not configured
if check.Status != StatusOK {
t.Errorf("Status = %q, want %q", check.Status, StatusOK)
}
})
check := CheckSyncBranchHookCompatibility(tmpDir)
if check.Status != tt.expectedStatus {
t.Errorf("expected status %q, got %q", tt.expectedStatus, check.Status)
}
})
}
}
// setupGitRepoInDir initializes a git repository with a .beads directory in
// the given directory. Fatal on failures that would break every caller;
// identity configuration is best-effort, matching the original behavior.
func setupGitRepoInDir(t *testing.T, dir string) {
	t.Helper()

	// The doctor checks expect a .beads directory to exist.
	if err := os.MkdirAll(filepath.Join(dir, ".beads"), 0755); err != nil {
		t.Fatalf("failed to create .beads directory: %v", err)
	}

	// Initialize the repository.
	initCmd := exec.Command("git", "init")
	initCmd.Dir = dir
	if err := initCmd.Run(); err != nil {
		t.Fatalf("failed to init git repo: %v", err)
	}

	// Configure a git identity so commits succeed. Errors are deliberately
	// ignored here, exactly as in the original helper.
	for _, pair := range [][2]string{
		{"user.email", "test@test.com"},
		{"user.name", "Test User"},
	} {
		cfg := exec.Command("git", "config", pair[0], pair[1])
		cfg.Dir = dir
		_ = cfg.Run()
	}
}
// Edge case tests for CheckGitHooks.
//
// TestCheckGitHooks_CorruptedHookFiles documents that CheckGitHooks only
// checks hook existence via os.Stat: a directory, a non-executable file, an
// empty file, or binary garbage at a hook path all count as "installed".
func TestCheckGitHooks_CorruptedHookFiles(t *testing.T) {
	// Setup helpers that fail the test on error instead of silently dropping
	// it (the previous version ignored os.MkdirAll/os.WriteFile errors).
	mustMkdir := func(t *testing.T, path string) {
		t.Helper()
		if err := os.MkdirAll(path, 0755); err != nil {
			t.Fatal(err)
		}
	}
	mustWrite := func(t *testing.T, path string, data []byte, perm os.FileMode) {
		t.Helper()
		if err := os.WriteFile(path, data, perm); err != nil {
			t.Fatal(err)
		}
	}
	tests := []struct {
		name           string
		setup          func(t *testing.T, dir string)
		expectedStatus string
		expectInMsg    string
	}{
		{
			name: "pre-commit hook is directory instead of file",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				hooksDir := filepath.Join(dir, ".git", "hooks")
				mustMkdir(t, hooksDir)
				// create pre-commit as directory instead of file
				mustMkdir(t, filepath.Join(hooksDir, "pre-commit"))
				// create valid post-merge and pre-push hooks
				mustWrite(t, filepath.Join(hooksDir, "post-merge"), []byte("#!/bin/sh\nbd sync\n"), 0755)
				mustWrite(t, filepath.Join(hooksDir, "pre-push"), []byte("#!/bin/sh\nbd sync\n"), 0755)
			},
			// os.Stat reports directories as existing, so CheckGitHooks sees it as installed
			expectedStatus: "ok",
			expectInMsg:    "All recommended hooks installed",
		},
		{
			name: "hook file with no execute permissions",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				hooksDir := filepath.Join(dir, ".git", "hooks")
				mustMkdir(t, hooksDir)
				// create hooks but with no execute permissions
				mustWrite(t, filepath.Join(hooksDir, "pre-commit"), []byte("#!/bin/sh\nbd sync\n"), 0644)
				mustWrite(t, filepath.Join(hooksDir, "post-merge"), []byte("#!/bin/sh\nbd sync\n"), 0644)
				mustWrite(t, filepath.Join(hooksDir, "pre-push"), []byte("#!/bin/sh\nbd sync\n"), 0644)
			},
			expectedStatus: "ok",
			expectInMsg:    "All recommended hooks installed",
		},
		{
			name: "empty hook file",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				hooksDir := filepath.Join(dir, ".git", "hooks")
				mustMkdir(t, hooksDir)
				// create empty hook files
				mustWrite(t, filepath.Join(hooksDir, "pre-commit"), []byte(""), 0755)
				mustWrite(t, filepath.Join(hooksDir, "post-merge"), []byte(""), 0755)
				mustWrite(t, filepath.Join(hooksDir, "pre-push"), []byte(""), 0755)
			},
			expectedStatus: "ok",
			expectInMsg:    "All recommended hooks installed",
		},
		{
			name: "hook file with binary content",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				hooksDir := filepath.Join(dir, ".git", "hooks")
				mustMkdir(t, hooksDir)
				// create hooks with binary content
				binaryContent := []byte{0x00, 0x01, 0x02, 0xFF, 0xFE, 0xFD}
				mustWrite(t, filepath.Join(hooksDir, "pre-commit"), binaryContent, 0755)
				mustWrite(t, filepath.Join(hooksDir, "post-merge"), binaryContent, 0755)
				mustWrite(t, filepath.Join(hooksDir, "pre-push"), binaryContent, 0755)
			},
			expectedStatus: "ok",
			expectInMsg:    "All recommended hooks installed",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tmpDir := t.TempDir()
			tt.setup(t, tmpDir)
			origDir, err := os.Getwd()
			if err != nil {
				t.Fatal(err)
			}
			// Restore the working directory and surface any failure to do so
			// (previously the returned error was ignored).
			defer func() {
				if err := os.Chdir(origDir); err != nil {
					t.Errorf("failed to restore directory: %v", err)
				}
			}()
			if err := os.Chdir(tmpDir); err != nil {
				t.Fatal(err)
			}
			check := CheckGitHooks()
			if check.Status != tt.expectedStatus {
				t.Errorf("expected status %q, got %q (message: %s)", tt.expectedStatus, check.Status, check.Message)
			}
			if tt.expectInMsg != "" && !strings.Contains(check.Message, tt.expectInMsg) {
				t.Errorf("expected message to contain %q, got %q", tt.expectInMsg, check.Message)
			}
		})
	}
}
// Edge case tests for CheckMergeDriver.
//
// TestCheckMergeDriver_PartiallyConfigured covers merge.beads.driver values
// that are missing, empty, padded with whitespace, or otherwise deviate from
// the canonical "bd merge %A %O %A %B" command line. Expected messages are
// matched as substrings of the check output.
func TestCheckMergeDriver_PartiallyConfigured(t *testing.T) {
	tests := []struct {
		name           string
		setup          func(t *testing.T, dir string)
		expectedStatus string
		expectInMsg    string
	}{
		{
			// Only the human-readable name is set; the driver command itself
			// is absent, so the check treats the driver as unconfigured.
			name: "only merge.beads.name configured",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				cmd := exec.Command("git", "config", "merge.beads.name", "Beads merge driver")
				cmd.Dir = dir
				if err := cmd.Run(); err != nil {
					t.Fatalf("failed to set git config: %v", err)
				}
			},
			expectedStatus: "warning",
			expectInMsg:    "not configured",
		},
		{
			name: "empty merge driver config",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				cmd := exec.Command("git", "config", "merge.beads.driver", "")
				cmd.Dir = dir
				if err := cmd.Run(); err != nil {
					t.Fatalf("failed to set git config: %v", err)
				}
			},
			// git config trims to empty string, which is non-standard
			expectedStatus: "warning",
			expectInMsg:    "Non-standard",
		},
		{
			name: "merge driver with extra spaces",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				cmd := exec.Command("git", "config", "merge.beads.driver", "  bd merge %A %O %A %B  ")
				cmd.Dir = dir
				if err := cmd.Run(); err != nil {
					t.Fatalf("failed to set git config: %v", err)
				}
			},
			// git config stores the value with spaces, but the code trims it
			expectedStatus: "ok",
			expectInMsg:    "Correctly configured",
		},
		{
			// An absolute path to bd is functional but not the canonical
			// command, so the check flags it rather than accepting it.
			name: "merge driver with wrong bd path",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				cmd := exec.Command("git", "config", "merge.beads.driver", "/usr/local/bin/bd merge %A %O %A %B")
				cmd.Dir = dir
				if err := cmd.Run(); err != nil {
					t.Fatalf("failed to set git config: %v", err)
				}
			},
			expectedStatus: "warning",
			expectInMsg:    "Non-standard",
		},
		{
			// Too few placeholders: the driver would be invoked with missing
			// arguments, so the value is reported as non-standard.
			name: "merge driver with only two placeholders",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				cmd := exec.Command("git", "config", "merge.beads.driver", "bd merge %A %O")
				cmd.Dir = dir
				if err := cmd.Run(); err != nil {
					t.Fatalf("failed to set git config: %v", err)
				}
			},
			expectedStatus: "warning",
			expectInMsg:    "Non-standard",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tmpDir := t.TempDir()
			tt.setup(t, tmpDir)
			check := CheckMergeDriver(tmpDir)
			if check.Status != tt.expectedStatus {
				t.Errorf("expected status %q, got %q (message: %s)", tt.expectedStatus, check.Status, check.Message)
			}
			if tt.expectInMsg != "" && !strings.Contains(check.Message, tt.expectInMsg) {
				t.Errorf("expected message to contain %q, got %q", tt.expectInMsg, check.Message)
			}
		})
	}
}
// Edge case tests for CheckSyncBranchConfig.
//
// TestCheckSyncBranchConfig_MultipleRemotes covers repositories with zero,
// one, or multiple remotes, and the error case of being checked out on the
// sync branch itself. BEADS_SYNC_BRANCH is set via t.Setenv, which restores
// any pre-existing value on cleanup (the previous os.Setenv/os.Unsetenv pair
// would clobber a value set in the caller's environment).
func TestCheckSyncBranchConfig_MultipleRemotes(t *testing.T) {
	tests := []struct {
		name           string
		setup          func(t *testing.T, dir string)
		expectedStatus string
		expectInMsg    string
	}{
		{
			name: "multiple remotes without sync-branch",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				// add multiple remotes
				cmd := exec.Command("git", "remote", "add", "origin", "https://github.com/user/repo.git")
				cmd.Dir = dir
				_ = cmd.Run()
				cmd = exec.Command("git", "remote", "add", "upstream", "https://github.com/upstream/repo.git")
				cmd.Dir = dir
				_ = cmd.Run()
			},
			expectedStatus: "warning",
			expectInMsg:    "not configured",
		},
		{
			name: "multiple remotes with sync-branch configured via env",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				// add multiple remotes
				cmd := exec.Command("git", "remote", "add", "origin", "https://github.com/user/repo.git")
				cmd.Dir = dir
				_ = cmd.Run()
				cmd = exec.Command("git", "remote", "add", "upstream", "https://github.com/upstream/repo.git")
				cmd.Dir = dir
				_ = cmd.Run()
				// use env var to configure sync-branch since config package reads from cwd;
				// t.Setenv restores the prior value automatically on cleanup
				t.Setenv("BEADS_SYNC_BRANCH", "beads-sync")
			},
			expectedStatus: "ok",
			expectInMsg:    "Configured",
		},
		{
			name: "no remotes at all",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
			},
			expectedStatus: "ok",
			expectInMsg:    "no remote configured",
		},
		{
			name: "on sync branch itself via env (error case)",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				// create and checkout sync branch
				cmd := exec.Command("git", "checkout", "-b", "beads-sync")
				cmd.Dir = dir
				_ = cmd.Run()
				// use env var to configure sync-branch
				t.Setenv("BEADS_SYNC_BRANCH", "beads-sync")
			},
			expectedStatus: "error",
			expectInMsg:    "On sync branch",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tmpDir := t.TempDir()
			tt.setup(t, tmpDir)
			check := CheckSyncBranchConfig(tmpDir)
			if check.Status != tt.expectedStatus {
				t.Errorf("expected status %q, got %q (message: %s)", tt.expectedStatus, check.Status, check.Message)
			}
			if tt.expectInMsg != "" && !strings.Contains(check.Message, tt.expectInMsg) {
				t.Errorf("expected message to contain %q, got %q", tt.expectInMsg, check.Message)
			}
		})
	}
}
// Edge case tests for CheckSyncBranchHealth.
//
// TestCheckSyncBranchHealth_DetachedHEAD covers detached-HEAD states and a
// sync branch without a matching remote. BEADS_SYNC_BRANCH is set via
// t.Setenv, which restores any pre-existing value on cleanup (the previous
// os.Setenv/os.Unsetenv pair clobbered it). Fixture-write errors are now
// checked instead of silently ignored.
func TestCheckSyncBranchHealth_DetachedHEAD(t *testing.T) {
	// createInitialCommit seeds the repo with one commit so branches and
	// detached HEAD states can be created.
	createInitialCommit := func(t *testing.T, dir string) {
		t.Helper()
		testFile := filepath.Join(dir, "test.txt")
		if err := os.WriteFile(testFile, []byte("test"), 0644); err != nil {
			t.Fatal(err)
		}
		cmd := exec.Command("git", "add", "test.txt")
		cmd.Dir = dir
		_ = cmd.Run()
		cmd = exec.Command("git", "commit", "-m", "initial commit")
		cmd.Dir = dir
		_ = cmd.Run()
	}
	tests := []struct {
		name           string
		setup          func(t *testing.T, dir string)
		expectedStatus string
		expectInMsg    string
	}{
		{
			name: "detached HEAD without sync-branch",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				createInitialCommit(t, dir)
				// detach HEAD
				cmd := exec.Command("git", "checkout", "HEAD^0")
				cmd.Dir = dir
				_ = cmd.Run()
			},
			expectedStatus: "ok",
			expectInMsg:    "no sync branch configured",
		},
		{
			name: "detached HEAD with sync-branch configured via env",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				createInitialCommit(t, dir)
				// create sync branch
				cmd := exec.Command("git", "branch", "beads-sync")
				cmd.Dir = dir
				_ = cmd.Run()
				// detach HEAD
				cmd = exec.Command("git", "checkout", "HEAD^0")
				cmd.Dir = dir
				_ = cmd.Run()
				// use env var to configure sync-branch
				t.Setenv("BEADS_SYNC_BRANCH", "beads-sync")
			},
			expectedStatus: "ok",
			expectInMsg:    "remote", // no remote configured, so returns "N/A (remote ... not found)"
		},
		{
			name: "sync branch exists but remote doesn't",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				createInitialCommit(t, dir)
				// create sync branch
				cmd := exec.Command("git", "branch", "beads-sync")
				cmd.Dir = dir
				_ = cmd.Run()
				// use env var to configure sync-branch
				t.Setenv("BEADS_SYNC_BRANCH", "beads-sync")
			},
			expectedStatus: "ok",
			expectInMsg:    "remote",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tmpDir := t.TempDir()
			tt.setup(t, tmpDir)
			check := CheckSyncBranchHealth(tmpDir)
			if check.Status != tt.expectedStatus {
				t.Errorf("expected status %q, got %q (message: %s)", tt.expectedStatus, check.Status, check.Message)
			}
			if tt.expectInMsg != "" && !strings.Contains(check.Message, tt.expectInMsg) {
				t.Errorf("expected message to contain %q, got %q", tt.expectInMsg, check.Message)
			}
		})
	}
}
// Edge case tests for CheckSyncBranchHookCompatibility.
//
// TestCheckSyncBranchHookCompatibility_OldHookFormat covers pre-push hooks
// with missing, old, current, malformed, and empty bd-hooks-version markers,
// plus a hook located via core.hooksPath. BEADS_SYNC_BRANCH is set via
// t.Setenv, which restores any pre-existing value on cleanup (the previous
// os.Setenv/os.Unsetenv pair clobbered it). Fixture-write errors are now
// checked instead of silently ignored.
func TestCheckSyncBranchHookCompatibility_OldHookFormat(t *testing.T) {
	// writePrePushHook installs a pre-push hook in .git/hooks, failing the
	// test on any setup error.
	writePrePushHook := func(t *testing.T, dir, content string) {
		t.Helper()
		hooksDir := filepath.Join(dir, ".git", "hooks")
		if err := os.MkdirAll(hooksDir, 0755); err != nil {
			t.Fatal(err)
		}
		if err := os.WriteFile(filepath.Join(hooksDir, "pre-push"), []byte(content), 0755); err != nil {
			t.Fatal(err)
		}
	}
	tests := []struct {
		name           string
		setup          func(t *testing.T, dir string)
		expectedStatus string
		expectInMsg    string
	}{
		{
			name: "old hook without version marker",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				// create old-style pre-push hook without version
				writePrePushHook(t, dir, "#!/bin/sh\n# Old hook without version\nbd sync\n")
				t.Setenv("BEADS_SYNC_BRANCH", "beads-sync")
			},
			expectedStatus: "warning",
			expectInMsg:    "not a bd hook",
		},
		{
			name: "hook with version 0.28.0 (old format)",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				writePrePushHook(t, dir, "#!/bin/sh\n# bd-hooks-version: 0.28.0\nbd sync\n")
				t.Setenv("BEADS_SYNC_BRANCH", "beads-sync")
			},
			expectedStatus: "error",
			expectInMsg:    "incompatible",
		},
		{
			name: "hook with version 0.29.0 (compatible)",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				writePrePushHook(t, dir, "#!/bin/sh\n# bd-hooks-version: 0.29.0\nbd sync\n")
				t.Setenv("BEADS_SYNC_BRANCH", "beads-sync")
			},
			expectedStatus: "ok",
			expectInMsg:    "compatible",
		},
		{
			name: "hook with malformed version",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				writePrePushHook(t, dir, "#!/bin/sh\n# bd-hooks-version: invalid\nbd sync\n")
				t.Setenv("BEADS_SYNC_BRANCH", "beads-sync")
			},
			expectedStatus: "error",
			expectInMsg:    "incompatible",
		},
		{
			name: "hook with version marker but no value",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				writePrePushHook(t, dir, "#!/bin/sh\n# bd-hooks-version:\nbd sync\n")
				t.Setenv("BEADS_SYNC_BRANCH", "beads-sync")
			},
			expectedStatus: "warning",
			expectInMsg:    "Could not determine",
		},
		{
			name: "hook in shared hooks directory (core.hooksPath)",
			setup: func(t *testing.T, dir string) {
				setupGitRepoInDir(t, dir)
				// create shared hooks directory
				sharedHooksDir := filepath.Join(dir, ".git-hooks")
				if err := os.MkdirAll(sharedHooksDir, 0755); err != nil {
					t.Fatal(err)
				}
				hookContent := "#!/bin/sh\n# bd-hooks-version: 0.29.0\nbd sync\n"
				if err := os.WriteFile(filepath.Join(sharedHooksDir, "pre-push"), []byte(hookContent), 0755); err != nil {
					t.Fatal(err)
				}
				// configure core.hooksPath
				cmd := exec.Command("git", "config", "core.hooksPath", ".git-hooks")
				cmd.Dir = dir
				_ = cmd.Run()
				t.Setenv("BEADS_SYNC_BRANCH", "beads-sync")
			},
			expectedStatus: "ok",
			expectInMsg:    "compatible",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tmpDir := t.TempDir()
			tt.setup(t, tmpDir)
			check := CheckSyncBranchHookCompatibility(tmpDir)
			if check.Status != tt.expectedStatus {
				t.Errorf("expected status %q, got %q (message: %s)", tt.expectedStatus, check.Status, check.Message)
			}
			if tt.expectInMsg != "" && !strings.Contains(check.Message, tt.expectInMsg) {
				t.Errorf("expected message to contain %q, got %q", tt.expectInMsg, check.Message)
			}
		})
	}
}

View File

@@ -4,6 +4,7 @@ import (
"os"
"path/filepath"
"runtime"
"strings"
"testing"
)
@@ -354,3 +355,783 @@ daemon.log
})
}
}
// TestFixGitignore_PartialPatterns verifies that FixGitignore always rewrites
// .beads/.gitignore with the canonical GitignoreTemplate, regardless of what
// partial, outdated, conflicting, or already-correct content was there before.
// Note: FixGitignore reads/writes relative to the current working directory,
// so each case chdirs into its own temp dir.
func TestFixGitignore_PartialPatterns(t *testing.T) {
	tests := []struct {
		name              string
		initialContent    string
		expectAllPatterns bool
		description       string
	}{
		{
			name: "partial patterns - missing some merge artifacts",
			initialContent: `# SQLite databases
*.db
*.db-journal
daemon.log
# Has some merge artifacts but not all
beads.base.jsonl
beads.left.jsonl
`,
			expectAllPatterns: true,
			description:       "should add missing merge artifact patterns",
		},
		{
			name: "partial patterns - has db wildcards but missing specific ones",
			initialContent: `*.db
daemon.log
beads.base.jsonl
beads.left.jsonl
beads.right.jsonl
beads.base.meta.json
beads.left.meta.json
beads.right.meta.json
`,
			expectAllPatterns: true,
			description:       "should add missing *.db?* pattern",
		},
		{
			name: "outdated pattern syntax - old db patterns",
			initialContent: `# Old style database patterns
*.sqlite
*.sqlite3
daemon.log
# Missing modern patterns
`,
			expectAllPatterns: true,
			description:       "should replace outdated patterns with current template",
		},
		{
			name: "conflicting patterns - has negation without base pattern",
			initialContent: `# Conflicting setup
!issues.jsonl
!metadata.json
# Missing the actual ignore patterns
`,
			expectAllPatterns: true,
			description:       "should fix by using canonical template",
		},
		{
			name:              "empty gitignore",
			initialContent:    "",
			expectAllPatterns: true,
			description:       "should add all required patterns to empty file",
		},
		{
			// Fixture is the template itself; the fix must be idempotent.
			name:              "already correct gitignore",
			initialContent:    GitignoreTemplate,
			expectAllPatterns: true,
			description:       "should preserve correct template unchanged",
		},
		{
			name: "has all required patterns but different formatting",
			initialContent: `*.db
*.db?*
*.db-journal
daemon.log
beads.base.jsonl
beads.left.jsonl
beads.right.jsonl
beads.base.meta.json
beads.left.meta.json
beads.right.meta.json
`,
			expectAllPatterns: true,
			description:       "FixGitignore replaces with canonical template",
		},
		{
			name: "partial patterns with user comments",
			initialContent: `# My custom comment
*.db
daemon.log
# User added this
custom-pattern.txt
`,
			expectAllPatterns: true,
			description:       "FixGitignore replaces entire file, user comments will be lost",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tmpDir := t.TempDir()
			// FixGitignore operates on the cwd, so move into the fixture dir
			// and restore the original directory when the subtest ends.
			oldDir, err := os.Getwd()
			if err != nil {
				t.Fatal(err)
			}
			if err := os.Chdir(tmpDir); err != nil {
				t.Fatal(err)
			}
			defer func() {
				if err := os.Chdir(oldDir); err != nil {
					t.Error(err)
				}
			}()
			beadsDir := filepath.Join(tmpDir, ".beads")
			if err := os.Mkdir(beadsDir, 0750); err != nil {
				t.Fatal(err)
			}
			// Relative path: resolved against tmpDir because of the chdir above.
			gitignorePath := filepath.Join(".beads", ".gitignore")
			if err := os.WriteFile(gitignorePath, []byte(tt.initialContent), 0600); err != nil {
				t.Fatal(err)
			}
			err = FixGitignore()
			if err != nil {
				t.Fatalf("FixGitignore failed: %v", err)
			}
			content, err := os.ReadFile(gitignorePath)
			if err != nil {
				t.Fatalf("Failed to read gitignore after fix: %v", err)
			}
			contentStr := string(content)
			// Verify all required patterns are present
			if tt.expectAllPatterns {
				for _, pattern := range requiredPatterns {
					if !strings.Contains(contentStr, pattern) {
						t.Errorf("Missing required pattern after fix: %s\nContent:\n%s", pattern, contentStr)
					}
				}
			}
			// Verify content matches template exactly (FixGitignore always writes the template)
			if contentStr != GitignoreTemplate {
				t.Errorf("Content does not match GitignoreTemplate.\nExpected:\n%s\n\nGot:\n%s", GitignoreTemplate, contentStr)
			}
		})
	}
}
// TestFixGitignore_PreservesNothing documents that FixGitignore does NOT
// preserve custom patterns: it always overwrites .beads/.gitignore with the
// canonical template, discarding user additions.
func TestFixGitignore_PreservesNothing(t *testing.T) {
	workDir := t.TempDir()

	// FixGitignore operates on the cwd; move there and restore afterwards.
	startDir, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}
	if err := os.Chdir(workDir); err != nil {
		t.Fatal(err)
	}
	defer func() {
		if restoreErr := os.Chdir(startDir); restoreErr != nil {
			t.Error(restoreErr)
		}
	}()

	if err := os.Mkdir(filepath.Join(workDir, ".beads"), 0750); err != nil {
		t.Fatal(err)
	}

	// A gitignore that mixes user-specific entries with the required ones.
	customContent := `# User custom patterns
custom-file.txt
*.backup
# Required patterns
*.db
*.db?*
daemon.log
beads.base.jsonl
beads.left.jsonl
beads.right.jsonl
beads.base.meta.json
beads.left.meta.json
beads.right.meta.json
`
	gitignorePath := filepath.Join(".beads", ".gitignore")
	if err := os.WriteFile(gitignorePath, []byte(customContent), 0600); err != nil {
		t.Fatal(err)
	}

	if err := FixGitignore(); err != nil {
		t.Fatalf("FixGitignore failed: %v", err)
	}

	raw, err := os.ReadFile(gitignorePath)
	if err != nil {
		t.Fatalf("Failed to read gitignore: %v", err)
	}
	got := string(raw)

	// The user's custom entries must be gone after the fix.
	if strings.Contains(got, "custom-file.txt") {
		t.Error("Custom pattern 'custom-file.txt' should not be preserved")
	}
	if strings.Contains(got, "*.backup") {
		t.Error("Custom pattern '*.backup' should not be preserved")
	}
	// And the file must now be exactly the canonical template.
	if got != GitignoreTemplate {
		t.Error("Content should match GitignoreTemplate exactly after fix")
	}
}
// TestFixGitignore_Symlink verifies behavior when .beads/.gitignore is a
// symlink: os.WriteFile follows the link, so the symlink itself survives and
// the target file receives the template content and 0600 permissions.
func TestFixGitignore_Symlink(t *testing.T) {
	// Skip on Windows as symlink creation requires elevated privileges
	if runtime.GOOS == "windows" {
		t.Skip("Skipping symlink test on Windows")
	}
	tmpDir := t.TempDir()
	// FixGitignore operates on the cwd; move there and restore afterwards.
	oldDir, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}
	if err := os.Chdir(tmpDir); err != nil {
		t.Fatal(err)
	}
	defer func() {
		if err := os.Chdir(oldDir); err != nil {
			t.Error(err)
		}
	}()
	beadsDir := filepath.Join(tmpDir, ".beads")
	if err := os.Mkdir(beadsDir, 0750); err != nil {
		t.Fatal(err)
	}
	// Create a target file that the symlink will point to
	targetPath := filepath.Join(tmpDir, "target_gitignore")
	if err := os.WriteFile(targetPath, []byte("old content"), 0600); err != nil {
		t.Fatal(err)
	}
	// Create symlink at .beads/.gitignore pointing to target
	gitignorePath := filepath.Join(".beads", ".gitignore")
	if err := os.Symlink(targetPath, gitignorePath); err != nil {
		t.Fatal(err)
	}
	// Run FixGitignore - it should write through the symlink
	// (os.WriteFile follows symlinks, it doesn't replace them)
	err = FixGitignore()
	if err != nil {
		t.Fatalf("FixGitignore failed: %v", err)
	}
	// Verify it's still a symlink (os.WriteFile follows symlinks);
	// Lstat inspects the link itself rather than its target.
	info, err := os.Lstat(gitignorePath)
	if err != nil {
		t.Fatalf("Failed to stat .gitignore: %v", err)
	}
	if info.Mode()&os.ModeSymlink == 0 {
		t.Error("Expected symlink to be preserved (os.WriteFile follows symlinks)")
	}
	// Verify content is correct (reading through symlink)
	content, err := os.ReadFile(gitignorePath)
	if err != nil {
		t.Fatalf("Failed to read .gitignore: %v", err)
	}
	if string(content) != GitignoreTemplate {
		t.Error("Content doesn't match GitignoreTemplate")
	}
	// Verify target file was updated with correct content
	targetContent, err := os.ReadFile(targetPath)
	if err != nil {
		t.Fatalf("Failed to read target file: %v", err)
	}
	if string(targetContent) != GitignoreTemplate {
		t.Error("Target file content doesn't match GitignoreTemplate")
	}
	// Note: permissions are set on the target file, not the symlink itself
	targetInfo, err := os.Stat(targetPath)
	if err != nil {
		t.Fatalf("Failed to stat target file: %v", err)
	}
	if targetInfo.Mode().Perm() != 0600 {
		t.Errorf("Expected target file permissions 0600, got %o", targetInfo.Mode().Perm())
	}
}
// TestFixGitignore_NonASCIICharacters verifies that FixGitignore replaces
// pre-existing .gitignore files containing non-ASCII content (UTF-8 comments,
// emoji, Cyrillic text) with the canonical ASCII template.
func TestFixGitignore_NonASCIICharacters(t *testing.T) {
	cases := []struct {
		name           string
		initialContent string
		description    string
	}{
		{
			name: "UTF-8 characters in comments",
			initialContent: `# SQLite databases 数据库
*.db
# Daemon files 守护进程文件
daemon.log
`,
			description: "handles UTF-8 characters in comments",
		},
		{
			name: "emoji in content",
			initialContent: `# 🚀 Database files
*.db
# 📝 Logs
daemon.log
`,
			description: "handles emoji characters",
		},
		{
			name: "mixed unicode patterns",
			initialContent: `# файлы базы данных
*.db
# Arquivos de registro
daemon.log
`,
			description: "handles Cyrillic and Latin-based unicode",
		},
		{
			name: "unicode patterns with required content",
			initialContent: `# Unicode comment ñ é ü
*.db
*.db?*
daemon.log
beads.base.jsonl
beads.left.jsonl
beads.right.jsonl
beads.base.meta.json
beads.left.meta.json
beads.right.meta.json
`,
			description: "replaces file even when required patterns present with unicode",
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			tmpDir := t.TempDir()
			origDir, err := os.Getwd()
			if err != nil {
				t.Fatal(err)
			}
			if err := os.Chdir(tmpDir); err != nil {
				t.Fatal(err)
			}
			defer func() {
				// Restore the working directory so later tests are unaffected.
				if err := os.Chdir(origDir); err != nil {
					t.Error(err)
				}
			}()
			if err := os.Mkdir(filepath.Join(tmpDir, ".beads"), 0750); err != nil {
				t.Fatal(err)
			}
			gitignorePath := filepath.Join(".beads", ".gitignore")
			if err := os.WriteFile(gitignorePath, []byte(tc.initialContent), 0600); err != nil {
				t.Fatal(err)
			}
			if err := FixGitignore(); err != nil {
				t.Fatalf("FixGitignore failed: %v", err)
			}
			// Verify content is replaced with template (ASCII only)
			got, err := os.ReadFile(gitignorePath)
			if err != nil {
				t.Fatalf("Failed to read .gitignore: %v", err)
			}
			if string(got) != GitignoreTemplate {
				t.Errorf("Content doesn't match GitignoreTemplate\nExpected:\n%s\n\nGot:\n%s", GitignoreTemplate, string(got))
			}
		})
	}
}
// TestFixGitignore_VeryLongLines verifies that FixGitignore replaces
// .gitignore files containing extremely long lines (up to 100KB) with the
// canonical template.
//
// The original table carried an always-true expectSuccess flag (making its
// failure branch dead code) and setup funcs that ignored both parameters;
// the table now uses a simple content-builder field instead.
func TestFixGitignore_VeryLongLines(t *testing.T) {
	tests := []struct {
		name        string
		makeContent func() string // builds the pre-existing .gitignore content
		description string
	}{
		{
			name: "single very long line (10KB)",
			makeContent: func() string {
				// Create a 10KB line
				return "# Comment\n" + strings.Repeat("x", 10*1024) + "\n*.db\n"
			},
			description: "handles 10KB single line",
		},
		{
			name: "multiple long lines",
			makeContent: func() string {
				line1 := "# " + strings.Repeat("a", 5000)
				line2 := "# " + strings.Repeat("b", 5000)
				line3 := "# " + strings.Repeat("c", 5000)
				return line1 + "\n" + line2 + "\n" + line3 + "\n*.db\n"
			},
			description: "handles multiple long lines",
		},
		{
			name: "very long pattern line",
			makeContent: func() string {
				// Create a pattern with extremely long filename
				longPattern := strings.Repeat("very_long_filename_", 500) + ".db"
				return "# Comment\n" + longPattern + "\n*.db\n"
			},
			description: "handles very long pattern names",
		},
		{
			name: "100KB single line",
			makeContent: func() string {
				// Create a 100KB line
				return "# Comment\n" + strings.Repeat("y", 100*1024) + "\n*.db\n"
			},
			description: "handles 100KB single line",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tmpDir := t.TempDir()
			oldDir, err := os.Getwd()
			if err != nil {
				t.Fatal(err)
			}
			if err := os.Chdir(tmpDir); err != nil {
				t.Fatal(err)
			}
			defer func() {
				// Restore the working directory so later tests are unaffected.
				if err := os.Chdir(oldDir); err != nil {
					t.Error(err)
				}
			}()
			if err := os.Mkdir(filepath.Join(tmpDir, ".beads"), 0750); err != nil {
				t.Fatal(err)
			}
			gitignorePath := filepath.Join(".beads", ".gitignore")
			if err := os.WriteFile(gitignorePath, []byte(tt.makeContent()), 0600); err != nil {
				t.Fatal(err)
			}
			if err := FixGitignore(); err != nil {
				t.Fatalf("FixGitignore failed: %v", err)
			}
			// Verify content is replaced with template
			content, err := os.ReadFile(gitignorePath)
			if err != nil {
				t.Fatalf("Failed to read .gitignore: %v", err)
			}
			if string(content) != GitignoreTemplate {
				t.Error("Content doesn't match GitignoreTemplate")
			}
		})
	}
}
// TestCheckGitignore_VariousStatuses exercises CheckGitignore across the full
// range of on-disk states: missing .beads directory, missing .gitignore file,
// an exact template match, partially-missing required patterns, empty and
// comment-only files, and a symlinked .gitignore.
func TestCheckGitignore_VariousStatuses(t *testing.T) {
	tests := []struct {
		name           string
		setupFunc      func(t *testing.T, tmpDir string) // prepares tmpDir before CheckGitignore runs
		expectedStatus string                            // expected check.Status
		expectedFix    string                            // substring expected in check.Fix; "" means no fix message at all
		description    string
	}{
		{
			name: "missing .beads directory",
			setupFunc: func(t *testing.T, tmpDir string) {
				// Don't create .beads directory
			},
			expectedStatus: StatusWarning,
			expectedFix:    "Run: bd init (safe to re-run) or bd doctor --fix",
			description:    "returns warning when .beads directory doesn't exist",
		},
		{
			name: "missing .gitignore file",
			setupFunc: func(t *testing.T, tmpDir string) {
				beadsDir := filepath.Join(tmpDir, ".beads")
				if err := os.Mkdir(beadsDir, 0750); err != nil {
					t.Fatal(err)
				}
			},
			expectedStatus: StatusWarning,
			expectedFix:    "Run: bd init (safe to re-run) or bd doctor --fix",
			description:    "returns warning when .gitignore doesn't exist",
		},
		{
			name: "perfect gitignore",
			setupFunc: func(t *testing.T, tmpDir string) {
				beadsDir := filepath.Join(tmpDir, ".beads")
				if err := os.Mkdir(beadsDir, 0750); err != nil {
					t.Fatal(err)
				}
				gitignorePath := filepath.Join(beadsDir, ".gitignore")
				if err := os.WriteFile(gitignorePath, []byte(GitignoreTemplate), 0600); err != nil {
					t.Fatal(err)
				}
			},
			expectedStatus: StatusOK,
			expectedFix:    "",
			description:    "returns ok when gitignore matches template",
		},
		{
			name: "missing one merge artifact pattern",
			setupFunc: func(t *testing.T, tmpDir string) {
				beadsDir := filepath.Join(tmpDir, ".beads")
				if err := os.Mkdir(beadsDir, 0750); err != nil {
					t.Fatal(err)
				}
				// Everything present except beads.right.jsonl.
				content := `*.db
*.db?*
daemon.log
beads.base.jsonl
beads.left.jsonl
beads.base.meta.json
beads.left.meta.json
beads.right.meta.json
`
				gitignorePath := filepath.Join(beadsDir, ".gitignore")
				if err := os.WriteFile(gitignorePath, []byte(content), 0600); err != nil {
					t.Fatal(err)
				}
			},
			expectedStatus: StatusWarning,
			expectedFix:    "Run: bd doctor --fix or bd init (safe to re-run)",
			description:    "returns warning when missing beads.right.jsonl",
		},
		{
			name: "missing multiple required patterns",
			setupFunc: func(t *testing.T, tmpDir string) {
				beadsDir := filepath.Join(tmpDir, ".beads")
				if err := os.Mkdir(beadsDir, 0750); err != nil {
					t.Fatal(err)
				}
				content := `*.db
daemon.log
`
				gitignorePath := filepath.Join(beadsDir, ".gitignore")
				if err := os.WriteFile(gitignorePath, []byte(content), 0600); err != nil {
					t.Fatal(err)
				}
			},
			expectedStatus: StatusWarning,
			expectedFix:    "Run: bd doctor --fix or bd init (safe to re-run)",
			description:    "returns warning when missing multiple patterns",
		},
		{
			name: "empty gitignore file",
			setupFunc: func(t *testing.T, tmpDir string) {
				beadsDir := filepath.Join(tmpDir, ".beads")
				if err := os.Mkdir(beadsDir, 0750); err != nil {
					t.Fatal(err)
				}
				gitignorePath := filepath.Join(beadsDir, ".gitignore")
				if err := os.WriteFile(gitignorePath, []byte(""), 0600); err != nil {
					t.Fatal(err)
				}
			},
			expectedStatus: StatusWarning,
			expectedFix:    "Run: bd doctor --fix or bd init (safe to re-run)",
			description:    "returns warning for empty file",
		},
		{
			name: "gitignore with only comments",
			setupFunc: func(t *testing.T, tmpDir string) {
				beadsDir := filepath.Join(tmpDir, ".beads")
				if err := os.Mkdir(beadsDir, 0750); err != nil {
					t.Fatal(err)
				}
				content := `# Comment 1
# Comment 2
# Comment 3
`
				gitignorePath := filepath.Join(beadsDir, ".gitignore")
				if err := os.WriteFile(gitignorePath, []byte(content), 0600); err != nil {
					t.Fatal(err)
				}
			},
			expectedStatus: StatusWarning,
			expectedFix:    "Run: bd doctor --fix or bd init (safe to re-run)",
			description:    "returns warning for comments-only file",
		},
		{
			name: "gitignore as symlink pointing to valid file",
			setupFunc: func(t *testing.T, tmpDir string) {
				// Symlink creation on Windows requires elevated privileges.
				if runtime.GOOS == "windows" {
					t.Skip("Skipping symlink test on Windows")
				}
				beadsDir := filepath.Join(tmpDir, ".beads")
				if err := os.Mkdir(beadsDir, 0750); err != nil {
					t.Fatal(err)
				}
				targetPath := filepath.Join(tmpDir, "target_gitignore")
				if err := os.WriteFile(targetPath, []byte(GitignoreTemplate), 0600); err != nil {
					t.Fatal(err)
				}
				gitignorePath := filepath.Join(beadsDir, ".gitignore")
				if err := os.Symlink(targetPath, gitignorePath); err != nil {
					t.Fatal(err)
				}
			},
			expectedStatus: StatusOK,
			expectedFix:    "",
			description:    "follows symlink and checks content",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tmpDir := t.TempDir()
			oldDir, err := os.Getwd()
			if err != nil {
				t.Fatal(err)
			}
			// CheckGitignore operates on the current working directory, so
			// chdir into the fixture and restore afterwards.
			if err := os.Chdir(tmpDir); err != nil {
				t.Fatal(err)
			}
			defer func() {
				if err := os.Chdir(oldDir); err != nil {
					t.Error(err)
				}
			}()
			tt.setupFunc(t, tmpDir)
			check := CheckGitignore()
			if check.Status != tt.expectedStatus {
				t.Errorf("Expected status %s, got %s", tt.expectedStatus, check.Status)
			}
			// Substring match on the fix message; exact-empty when no fix is expected.
			if tt.expectedFix != "" && !strings.Contains(check.Fix, tt.expectedFix) {
				t.Errorf("Expected fix to contain %q, got %q", tt.expectedFix, check.Fix)
			}
			if tt.expectedFix == "" && check.Fix != "" {
				t.Errorf("Expected no fix message, got: %s", check.Fix)
			}
		})
	}
}
// TestFixGitignore_SubdirectoryGitignore verifies that FixGitignore rewrites
// only .beads/.gitignore and leaves the root and subdirectory .gitignore
// files untouched.
func TestFixGitignore_SubdirectoryGitignore(t *testing.T) {
	tmpDir := t.TempDir()
	origWD, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}
	if err := os.Chdir(tmpDir); err != nil {
		t.Fatal(err)
	}
	defer func() {
		if err := os.Chdir(origWD); err != nil {
			t.Error(err)
		}
	}()
	// Lay out .beads/ and subdir/ under the temp root.
	if err := os.Mkdir(filepath.Join(tmpDir, ".beads"), 0750); err != nil {
		t.Fatal(err)
	}
	if err := os.Mkdir(filepath.Join(tmpDir, "subdir"), 0750); err != nil {
		t.Fatal(err)
	}
	beadsGitignorePath := filepath.Join(".beads", ".gitignore")
	subGitignorePath := filepath.Join(tmpDir, "subdir", ".gitignore")
	rootGitignorePath := filepath.Join(tmpDir, ".gitignore")
	subGitignoreContent := "subdirectory gitignore content"
	rootGitignoreContent := "root gitignore content"
	// Seed each .gitignore with distinct sentinel content.
	seeds := map[string]string{
		beadsGitignorePath: "old beads content",
		subGitignorePath:   subGitignoreContent,
		rootGitignorePath:  rootGitignoreContent,
	}
	for path, content := range seeds {
		if err := os.WriteFile(path, []byte(content), 0600); err != nil {
			t.Fatal(err)
		}
	}
	if err := FixGitignore(); err != nil {
		t.Fatalf("FixGitignore failed: %v", err)
	}
	// Only .beads/.gitignore should now carry the template.
	beadsContent, err := os.ReadFile(beadsGitignorePath)
	if err != nil {
		t.Fatalf("Failed to read .beads/.gitignore: %v", err)
	}
	if string(beadsContent) != GitignoreTemplate {
		t.Error(".beads/.gitignore should be updated to template")
	}
	subContent, err := os.ReadFile(subGitignorePath)
	if err != nil {
		t.Fatalf("Failed to read subdir/.gitignore: %v", err)
	}
	if string(subContent) != subGitignoreContent {
		t.Error("subdirectory .gitignore should not be modified")
	}
	rootContent, err := os.ReadFile(rootGitignorePath)
	if err != nil {
		t.Fatalf("Failed to read root .gitignore: %v", err)
	}
	if string(rootContent) != rootGitignoreContent {
		t.Error("root .gitignore should not be modified")
	}
}

View File

@@ -18,20 +18,6 @@ func TestCheckInstallation(t *testing.T) {
t.Errorf("expected name 'Installation', got %s", check.Name)
}
})
t.Run("beads directory exists", func(t *testing.T) {
tmpDir := t.TempDir()
beadsDir := filepath.Join(tmpDir, ".beads")
if err := os.Mkdir(beadsDir, 0755); err != nil {
t.Fatal(err)
}
check := CheckInstallation(tmpDir)
if check.Status != StatusOK {
t.Errorf("expected StatusOK, got %s", check.Status)
}
})
}
func TestCheckMultipleDatabases(t *testing.T) {

View File

@@ -6,129 +6,81 @@ import (
"testing"
)
func TestCheckIDFormat(t *testing.T) {
t.Run("no beads directory", func(t *testing.T) {
tmpDir := t.TempDir()
check := CheckIDFormat(tmpDir)
// TestIntegrityChecks_NoBeadsDir verifies all integrity check functions handle
// missing .beads directories gracefully. This replaces 4 individual subtests.
func TestIntegrityChecks_NoBeadsDir(t *testing.T) {
checks := []struct {
name string
fn func(string) DoctorCheck
wantName string
}{
{"IDFormat", CheckIDFormat, "Issue IDs"},
{"DependencyCycles", CheckDependencyCycles, "Dependency Cycles"},
{"Tombstones", CheckTombstones, "Tombstones"},
{"DeletionsManifest", CheckDeletionsManifest, "Deletions Manifest"},
}
// Should handle missing .beads gracefully
if check.Name != "Issue IDs" {
t.Errorf("Name = %q, want %q", check.Name, "Issue IDs")
}
})
for _, tc := range checks {
t.Run(tc.name, func(t *testing.T) {
tmpDir := t.TempDir()
result := tc.fn(tmpDir)
t.Run("no database file", func(t *testing.T) {
tmpDir := t.TempDir()
beadsDir := filepath.Join(tmpDir, ".beads")
if err := os.Mkdir(beadsDir, 0755); err != nil {
t.Fatal(err)
}
check := CheckIDFormat(tmpDir)
// Should report "will use hash-based IDs" for new install
if check.Status != StatusOK {
t.Errorf("Status = %q, want %q", check.Status, StatusOK)
}
})
if result.Name != tc.wantName {
t.Errorf("Name = %q, want %q", result.Name, tc.wantName)
}
})
}
}
func TestCheckDependencyCycles(t *testing.T) {
t.Run("no beads directory", func(t *testing.T) {
tmpDir := t.TempDir()
check := CheckDependencyCycles(tmpDir)
// TestIntegrityChecks_EmptyBeadsDir verifies all integrity check functions return OK
// when .beads directory exists but is empty (no database/files to check).
func TestIntegrityChecks_EmptyBeadsDir(t *testing.T) {
checks := []struct {
name string
fn func(string) DoctorCheck
}{
{"IDFormat", CheckIDFormat},
{"DependencyCycles", CheckDependencyCycles},
{"Tombstones", CheckTombstones},
{"DeletionsManifest", CheckDeletionsManifest},
}
// Should handle missing directory gracefully
if check.Name != "Dependency Cycles" {
t.Errorf("Name = %q, want %q", check.Name, "Dependency Cycles")
}
})
for _, tc := range checks {
t.Run(tc.name, func(t *testing.T) {
tmpDir := t.TempDir()
beadsDir := filepath.Join(tmpDir, ".beads")
if err := os.Mkdir(beadsDir, 0755); err != nil {
t.Fatal(err)
}
t.Run("no database", func(t *testing.T) {
tmpDir := t.TempDir()
beadsDir := filepath.Join(tmpDir, ".beads")
if err := os.Mkdir(beadsDir, 0755); err != nil {
t.Fatal(err)
}
result := tc.fn(tmpDir)
check := CheckDependencyCycles(tmpDir)
// Should return OK when no database (nothing to check)
if check.Status != StatusOK {
t.Errorf("Status = %q, want %q", check.Status, StatusOK)
}
})
if result.Status != StatusOK {
t.Errorf("Status = %q, want %q", result.Status, StatusOK)
}
})
}
}
func TestCheckTombstones(t *testing.T) {
t.Run("no beads directory", func(t *testing.T) {
tmpDir := t.TempDir()
check := CheckTombstones(tmpDir)
// TestCheckDeletionsManifest_LegacyFile tests the specific case where a legacy
// deletions.jsonl file exists and should trigger a warning.
func TestCheckDeletionsManifest_LegacyFile(t *testing.T) {
tmpDir := t.TempDir()
beadsDir := filepath.Join(tmpDir, ".beads")
if err := os.Mkdir(beadsDir, 0755); err != nil {
t.Fatal(err)
}
// Should handle missing directory
if check.Name != "Tombstones" {
t.Errorf("Name = %q, want %q", check.Name, "Tombstones")
}
})
// Create a deletions.jsonl file
deletionsPath := filepath.Join(beadsDir, "deletions.jsonl")
if err := os.WriteFile(deletionsPath, []byte(`{"id":"test-1"}`), 0644); err != nil {
t.Fatal(err)
}
t.Run("empty beads directory", func(t *testing.T) {
tmpDir := t.TempDir()
beadsDir := filepath.Join(tmpDir, ".beads")
if err := os.Mkdir(beadsDir, 0755); err != nil {
t.Fatal(err)
}
check := CheckDeletionsManifest(tmpDir)
check := CheckTombstones(tmpDir)
// Should return OK when no tombstones file
if check.Status != StatusOK {
t.Errorf("Status = %q, want %q", check.Status, StatusOK)
}
})
}
func TestCheckDeletionsManifest(t *testing.T) {
t.Run("no beads directory", func(t *testing.T) {
tmpDir := t.TempDir()
check := CheckDeletionsManifest(tmpDir)
if check.Name != "Deletions Manifest" {
t.Errorf("Name = %q, want %q", check.Name, "Deletions Manifest")
}
})
t.Run("no deletions file", func(t *testing.T) {
tmpDir := t.TempDir()
beadsDir := filepath.Join(tmpDir, ".beads")
if err := os.Mkdir(beadsDir, 0755); err != nil {
t.Fatal(err)
}
check := CheckDeletionsManifest(tmpDir)
// Should return OK when no deletions.jsonl (nothing to migrate)
if check.Status != StatusOK {
t.Errorf("Status = %q, want %q", check.Status, StatusOK)
}
})
t.Run("has deletions file", func(t *testing.T) {
tmpDir := t.TempDir()
beadsDir := filepath.Join(tmpDir, ".beads")
if err := os.Mkdir(beadsDir, 0755); err != nil {
t.Fatal(err)
}
// Create a deletions.jsonl file
deletionsPath := filepath.Join(beadsDir, "deletions.jsonl")
if err := os.WriteFile(deletionsPath, []byte(`{"id":"test-1"}`), 0644); err != nil {
t.Fatal(err)
}
check := CheckDeletionsManifest(tmpDir)
// Should warn about legacy deletions file
if check.Status != StatusWarning {
t.Errorf("Status = %q, want %q", check.Status, StatusWarning)
}
})
// Should warn about legacy deletions file
if check.Status != StatusWarning {
t.Errorf("Status = %q, want %q", check.Status, StatusWarning)
}
}

View File

@@ -43,14 +43,14 @@ func CheckLegacyBeadsSlashCommands(repoPath string) DoctorCheck {
if len(filesWithLegacyCommands) == 0 {
return DoctorCheck{
Name: "Documentation",
Name: "Legacy Commands",
Status: "ok",
Message: "No legacy beads slash commands detected",
}
}
return DoctorCheck{
Name: "Integration Pattern",
Name: "Legacy Commands",
Status: "warning",
Message: fmt.Sprintf("Old beads integration detected in %s", strings.Join(filesWithLegacyCommands, ", ")),
Detail: "Found: /beads:* slash command references (deprecated)\n" +

View File

@@ -0,0 +1,692 @@
package doctor
import (
"fmt"
"os"
"path/filepath"
"testing"
)
// TestPrefixDetection_MultiplePrefixes tests CountJSONLIssues with mixed prefixes:
// single, evenly-split, majority, multi-dash, dash-less, and tombstone-bearing
// IDs. Each case asserts both the total count and the per-prefix tallies.
func TestPrefixDetection_MultiplePrefixes(t *testing.T) {
	tests := []struct {
		name             string
		content          string         // raw JSONL fed to CountJSONLIssues
		expectedCount    int            // expected total issue count
		expectedPrefixes map[string]int // expected per-prefix counts (exact set)
	}{
		{
			name: "single prefix",
			content: `{"id":"bd-1","title":"Issue 1"}
{"id":"bd-2","title":"Issue 2"}
{"id":"bd-3","title":"Issue 3"}
`,
			expectedCount: 3,
			expectedPrefixes: map[string]int{
				"bd": 3,
			},
		},
		{
			name: "two prefixes evenly distributed",
			content: `{"id":"bd-1","title":"Issue 1"}
{"id":"proj-2","title":"Issue 2"}
{"id":"bd-3","title":"Issue 3"}
{"id":"proj-4","title":"Issue 4"}
`,
			expectedCount: 4,
			expectedPrefixes: map[string]int{
				"bd":   2,
				"proj": 2,
			},
		},
		{
			name: "three prefixes after merge",
			content: `{"id":"bd-1","title":"Issue 1"}
{"id":"proj-2","title":"Issue 2"}
{"id":"beads-3","title":"Issue 3"}
{"id":"bd-4","title":"Issue 4"}
{"id":"proj-5","title":"Issue 5"}
`,
			expectedCount: 5,
			expectedPrefixes: map[string]int{
				"bd":    2,
				"proj":  2,
				"beads": 1,
			},
		},
		{
			name: "multiple prefixes with clear majority",
			content: `{"id":"bd-1","title":"Issue 1"}
{"id":"bd-2","title":"Issue 2"}
{"id":"bd-3","title":"Issue 3"}
{"id":"bd-4","title":"Issue 4"}
{"id":"bd-5","title":"Issue 5"}
{"id":"bd-6","title":"Issue 6"}
{"id":"bd-7","title":"Issue 7"}
{"id":"proj-8","title":"Issue 8"}
{"id":"beads-9","title":"Issue 9"}
`,
			expectedCount: 9,
			expectedPrefixes: map[string]int{
				"bd":    7,
				"proj":  1,
				"beads": 1,
			},
		},
		{
			name: "prefix mismatch scenario after branch merge",
			content: `{"id":"feature-1","title":"Feature branch issue"}
{"id":"feature-2","title":"Feature branch issue"}
{"id":"main-3","title":"Main branch issue"}
{"id":"main-4","title":"Main branch issue"}
{"id":"main-5","title":"Main branch issue"}
`,
			expectedCount: 5,
			expectedPrefixes: map[string]int{
				"feature": 2,
				"main":    3,
			},
		},
		{
			name: "legacy and new prefixes mixed",
			content: `{"id":"beads-1","title":"Old prefix"}
{"id":"beads-2","title":"Old prefix"}
{"id":"bd-3","title":"New prefix"}
{"id":"bd-4","title":"New prefix"}
{"id":"bd-5","title":"New prefix"}
{"id":"bd-6","title":"New prefix"}
`,
			expectedCount: 6,
			expectedPrefixes: map[string]int{
				"beads": 2,
				"bd":    4,
			},
		},
		{
			// Prefix is everything up to the LAST dash, so "my-project-123"
			// yields prefix "my-project".
			name: "prefix with multiple dashes",
			content: `{"id":"my-project-123","title":"Issue 1"}
{"id":"my-project-456","title":"Issue 2"}
{"id":"other-proj-789","title":"Issue 3"}
`,
			expectedCount: 3,
			expectedPrefixes: map[string]int{
				"my-project": 2,
				"other-proj": 1,
			},
		},
		{
			// IDs without a dash are treated as their own prefix.
			name: "issue IDs without dashes",
			content: `{"id":"abc123","title":"No dash ID"}
{"id":"def456","title":"No dash ID"}
{"id":"bd-1","title":"Normal ID"}
`,
			expectedCount: 3,
			expectedPrefixes: map[string]int{
				"abc123": 1,
				"def456": 1,
				"bd":     1,
			},
		},
		{
			name: "empty lines and whitespace",
			content: `{"id":"bd-1","title":"Issue 1"}
{"id":"bd-2","title":"Issue 2"}
{"id":"proj-3","title":"Issue 3"}
`,
			expectedCount: 3,
			expectedPrefixes: map[string]int{
				"bd":   2,
				"proj": 1,
			},
		},
		{
			// Tombstones are counted like any other record here.
			name: "tombstones mixed with regular issues",
			content: `{"id":"bd-1","title":"Issue 1","status":"open"}
{"id":"bd-2","title":"Issue 2","status":"tombstone"}
{"id":"proj-3","title":"Issue 3","status":"closed"}
{"id":"bd-4","title":"Issue 4","status":"tombstone"}
`,
			expectedCount: 4,
			expectedPrefixes: map[string]int{
				"bd":   3,
				"proj": 1,
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tmpDir := t.TempDir()
			jsonlPath := filepath.Join(tmpDir, "issues.jsonl")
			if err := os.WriteFile(jsonlPath, []byte(tt.content), 0600); err != nil {
				t.Fatalf("failed to create JSONL: %v", err)
			}
			count, prefixes, err := CountJSONLIssues(jsonlPath)
			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}
			if count != tt.expectedCount {
				t.Errorf("expected count %d, got %d", tt.expectedCount, count)
			}
			// Exact prefix-set match: same number of unique prefixes...
			if len(prefixes) != len(tt.expectedPrefixes) {
				t.Errorf("expected %d unique prefixes, got %d", len(tt.expectedPrefixes), len(prefixes))
			}
			// ...and each expected prefix present with the expected tally.
			for expectedPrefix, expectedCount := range tt.expectedPrefixes {
				actualCount, found := prefixes[expectedPrefix]
				if !found {
					t.Errorf("expected prefix %q not found in results", expectedPrefix)
					continue
				}
				if actualCount != expectedCount {
					t.Errorf("prefix %q: expected count %d, got %d", expectedPrefix, expectedCount, actualCount)
				}
			}
		})
	}
}
// TestPrefixDetection_MalformedJSON tests handling of malformed JSON with prefix detection:
// invalid lines should surface an error while valid records are still counted,
// and records with missing/non-string/empty IDs are skipped without error.
func TestPrefixDetection_MalformedJSON(t *testing.T) {
	cases := []struct {
		name          string
		content       string
		expectedCount int
		expectError   bool
	}{
		{
			name: "some invalid lines",
			content: `{"id":"bd-1","title":"Valid"}
invalid json line
{"id":"bd-2","title":"Valid"}
not-json
{"id":"proj-3","title":"Valid"}
`,
			expectedCount: 3,
			expectError:   true,
		},
		{
			name: "missing id field",
			content: `{"id":"bd-1","title":"Valid"}
{"title":"No ID field"}
{"id":"bd-2","title":"Valid"}
`,
			expectedCount: 2,
			expectError:   false,
		},
		{
			name: "id field is not string",
			content: `{"id":"bd-1","title":"Valid"}
{"id":123,"title":"Numeric ID"}
{"id":"bd-2","title":"Valid"}
`,
			expectedCount: 2,
			expectError:   false,
		},
		{
			name: "empty id field",
			content: `{"id":"bd-1","title":"Valid"}
{"id":"","title":"Empty ID"}
{"id":"bd-2","title":"Valid"}
`,
			expectedCount: 2,
			expectError:   false,
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			jsonlPath := filepath.Join(t.TempDir(), "issues.jsonl")
			if err := os.WriteFile(jsonlPath, []byte(tc.content), 0600); err != nil {
				t.Fatalf("failed to create JSONL: %v", err)
			}
			count, _, err := CountJSONLIssues(jsonlPath)
			switch {
			case tc.expectError && err == nil:
				t.Error("expected error, got nil")
			case !tc.expectError && err != nil:
				t.Errorf("unexpected error: %v", err)
			}
			if count != tc.expectedCount {
				t.Errorf("expected count %d, got %d", tc.expectedCount, count)
			}
		})
	}
}
// TestPrefixDetection_MostCommonPrefix tests the logic for detecting the most
// common prefix, including tie-breaking (lexicographically smallest wins).
func TestPrefixDetection_MostCommonPrefix(t *testing.T) {
	cases := []struct {
		name                    string
		content                 string
		expectedMostCommon      string
		expectedMostCommonCount int
	}{
		{
			name: "clear majority",
			content: `{"id":"bd-1"}
{"id":"bd-2"}
{"id":"bd-3"}
{"id":"bd-4"}
{"id":"proj-5"}
`,
			expectedMostCommon:      "bd",
			expectedMostCommonCount: 4,
		},
		{
			name: "tied prefixes - first alphabetically",
			content: `{"id":"alpha-1"}
{"id":"alpha-2"}
{"id":"beta-3"}
{"id":"beta-4"}
`,
			expectedMostCommon:      "alpha",
			expectedMostCommonCount: 2,
		},
		{
			name: "three-way split with clear leader",
			content: `{"id":"primary-1"}
{"id":"primary-2"}
{"id":"primary-3"}
{"id":"secondary-4"}
{"id":"tertiary-5"}
`,
			expectedMostCommon:      "primary",
			expectedMostCommonCount: 3,
		},
		{
			name: "after merge conflict resolution",
			content: `{"id":"main-1"}
{"id":"main-2"}
{"id":"main-3"}
{"id":"main-4"}
{"id":"main-5"}
{"id":"feature-6"}
{"id":"feature-7"}
{"id":"hotfix-8"}
`,
			expectedMostCommon:      "main",
			expectedMostCommonCount: 5,
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			jsonlPath := filepath.Join(t.TempDir(), "issues.jsonl")
			if err := os.WriteFile(jsonlPath, []byte(tc.content), 0600); err != nil {
				t.Fatalf("failed to create JSONL: %v", err)
			}
			_, prefixes, err := CountJSONLIssues(jsonlPath)
			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}
			// Pick the highest-count prefix; ties resolve to the
			// lexicographically smallest prefix.
			best, bestCount := "", 0
			for prefix, n := range prefixes {
				switch {
				case n > bestCount:
					best, bestCount = prefix, n
				case n == bestCount && prefix < best:
					best = prefix
				}
			}
			if best != tc.expectedMostCommon {
				t.Errorf("expected most common prefix %q, got %q", tc.expectedMostCommon, best)
			}
			if bestCount != tc.expectedMostCommonCount {
				t.Errorf("expected most common count %d, got %d", tc.expectedMostCommonCount, bestCount)
			}
		})
	}
}
// TestPrefixMismatchDetection tests detection of prefix mismatches that should trigger warnings.
//
// NOTE(review): the shouldWarn expression below re-implements the warning
// heuristic (most common JSONL prefix differs from the DB prefix AND holds a
// strict majority of issues) — confirm it stays in sync with the production check.
func TestPrefixMismatchDetection(t *testing.T) {
	tests := []struct {
		name         string
		jsonlContent string // raw JSONL fed to CountJSONLIssues
		dbPrefix     string // prefix the database is configured with
		shouldWarn   bool   // whether the mismatch heuristic should fire
		description  string
	}{
		{
			name: "perfect match",
			jsonlContent: `{"id":"bd-1"}
{"id":"bd-2"}
{"id":"bd-3"}
`,
			dbPrefix:    "bd",
			shouldWarn:  false,
			description: "all issues use database prefix",
		},
		{
			name: "complete mismatch",
			jsonlContent: `{"id":"proj-1"}
{"id":"proj-2"}
{"id":"proj-3"}
`,
			dbPrefix:    "bd",
			shouldWarn:  true,
			description: "no issues use database prefix",
		},
		{
			name: "majority mismatch",
			jsonlContent: `{"id":"proj-1"}
{"id":"proj-2"}
{"id":"proj-3"}
{"id":"proj-4"}
{"id":"bd-5"}
`,
			dbPrefix:    "bd",
			shouldWarn:  true,
			description: "80% of issues use wrong prefix",
		},
		{
			name: "minority mismatch",
			jsonlContent: `{"id":"bd-1"}
{"id":"bd-2"}
{"id":"bd-3"}
{"id":"bd-4"}
{"id":"proj-5"}
`,
			dbPrefix:    "bd",
			shouldWarn:  false,
			description: "only 20% use wrong prefix, not majority",
		},
		{
			name: "exactly half mismatch",
			jsonlContent: `{"id":"bd-1"}
{"id":"bd-2"}
{"id":"proj-3"}
{"id":"proj-4"}
`,
			dbPrefix:    "bd",
			shouldWarn:  false,
			description: "50-50 split should not warn",
		},
		{
			name: "just over majority threshold",
			jsonlContent: `{"id":"bd-1"}
{"id":"bd-2"}
{"id":"proj-3"}
{"id":"proj-4"}
{"id":"proj-5"}
`,
			dbPrefix:    "bd",
			shouldWarn:  true,
			description: "60% use wrong prefix, should warn",
		},
		{
			name: "multiple wrong prefixes",
			jsonlContent: `{"id":"proj-1"}
{"id":"feature-2"}
{"id":"hotfix-3"}
{"id":"bd-4"}
`,
			dbPrefix:    "bd",
			shouldWarn:  false, // no single prefix has majority, so no warning
			description: "75% use various wrong prefixes but no single majority",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tmpDir := t.TempDir()
			jsonlPath := filepath.Join(tmpDir, "issues.jsonl")
			if err := os.WriteFile(jsonlPath, []byte(tt.jsonlContent), 0600); err != nil {
				t.Fatalf("failed to create JSONL: %v", err)
			}
			jsonlCount, jsonlPrefixes, err := CountJSONLIssues(jsonlPath)
			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}
			// Find the most common prefix. No tie-break: map iteration order
			// decides ties, but every tied case above yields the same verdict.
			var mostCommonPrefix string
			maxCount := 0
			for prefix, count := range jsonlPrefixes {
				if count > maxCount {
					maxCount = count
					mostCommonPrefix = prefix
				}
			}
			// Warn only when the dominant prefix differs from the DB prefix
			// and is a strict majority (> half, integer division) of all issues.
			shouldWarn := mostCommonPrefix != tt.dbPrefix && maxCount > jsonlCount/2
			if shouldWarn != tt.shouldWarn {
				t.Errorf("%s: expected shouldWarn=%v, got %v (most common: %s with %d/%d issues)",
					tt.description, tt.shouldWarn, shouldWarn, mostCommonPrefix, maxCount, jsonlCount)
			}
		})
	}
}
// TestPrefixRenaming_SimulatedMergeConflict tests prefix handling in merge
// conflict scenarios: issues carrying different prefixes land in one JSONL
// file after a merge, and all prefixes must be tallied correctly.
func TestPrefixRenaming_SimulatedMergeConflict(t *testing.T) {
	t.Run("merge from branch with different prefix", func(t *testing.T) {
		jsonlPath := filepath.Join(t.TempDir(), "issues.jsonl")
		records := `{"id":"main-1","title":"Main branch issue"}
{"id":"main-2","title":"Main branch issue"}
{"id":"main-3","title":"Main branch issue"}
{"id":"feature-4","title":"Feature branch issue - added in merge"}
{"id":"feature-5","title":"Feature branch issue - added in merge"}
`
		if err := os.WriteFile(jsonlPath, []byte(records), 0600); err != nil {
			t.Fatalf("failed to create JSONL: %v", err)
		}
		count, prefixes, err := CountJSONLIssues(jsonlPath)
		if err != nil {
			t.Fatalf("unexpected error: %v", err)
		}
		if count != 5 {
			t.Errorf("expected 5 issues, got %d", count)
		}
		if got := prefixes["main"]; got != 3 {
			t.Errorf("expected 3 'main' prefix issues, got %d", got)
		}
		if got := prefixes["feature"]; got != 2 {
			t.Errorf("expected 2 'feature' prefix issues, got %d", got)
		}
	})
	t.Run("three-way merge with different prefixes", func(t *testing.T) {
		jsonlPath := filepath.Join(t.TempDir(), "issues.jsonl")
		records := `{"id":"main-1","title":"Main"}
{"id":"feature-a-2","title":"Feature A"}
{"id":"feature-b-3","title":"Feature B"}
{"id":"main-4","title":"Main"}
{"id":"feature-a-5","title":"Feature A"}
`
		if err := os.WriteFile(jsonlPath, []byte(records), 0600); err != nil {
			t.Fatalf("failed to create JSONL: %v", err)
		}
		count, prefixes, err := CountJSONLIssues(jsonlPath)
		if err != nil {
			t.Fatalf("unexpected error: %v", err)
		}
		if count != 5 {
			t.Errorf("expected 5 issues, got %d", count)
		}
		want := map[string]int{
			"main":      2,
			"feature-a": 2,
			"feature-b": 1,
		}
		for prefix, wantCount := range want {
			if prefixes[prefix] != wantCount {
				t.Errorf("prefix %q: expected %d, got %d", prefix, wantCount, prefixes[prefix])
			}
		}
	})
}
// TestPrefixExtraction_EdgeCases tests edge cases in prefix extraction logic:
// multi-part prefixes, missing/leading/trailing dashes, numeric and
// mixed-case prefixes, and very long prefixes.
func TestPrefixExtraction_EdgeCases(t *testing.T) {
	cases := []struct {
		name           string
		issueID        string
		expectedPrefix string
	}{
		{name: "standard format", issueID: "bd-123", expectedPrefix: "bd"},
		{name: "multi-part prefix", issueID: "my-project-abc", expectedPrefix: "my-project"},
		{name: "no dash", issueID: "abc123", expectedPrefix: "abc123"},
		{name: "trailing dash", issueID: "bd-", expectedPrefix: "bd"},
		{name: "leading dash", issueID: "-123", expectedPrefix: ""},
		{name: "multiple consecutive dashes", issueID: "bd--123", expectedPrefix: "bd-"},
		{name: "numeric prefix", issueID: "2024-123", expectedPrefix: "2024"},
		{name: "mixed case", issueID: "BD-123", expectedPrefix: "BD"},
		{name: "very long prefix", issueID: "this-is-a-very-long-project-prefix-name-123", expectedPrefix: "this-is-a-very-long-project-prefix-name"},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			jsonlPath := filepath.Join(t.TempDir(), "issues.jsonl")
			record := `{"id":"` + tc.issueID + `","title":"Test"}` + "\n"
			if err := os.WriteFile(jsonlPath, []byte(record), 0600); err != nil {
				t.Fatalf("failed to create JSONL: %v", err)
			}
			_, prefixes, err := CountJSONLIssues(jsonlPath)
			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}
			// An empty expected prefix ("leading dash" case) only asserts
			// that parsing does not error; skip the presence lookup.
			if tc.expectedPrefix == "" {
				return
			}
			if _, found := prefixes[tc.expectedPrefix]; !found {
				t.Errorf("expected prefix %q not found, got prefixes: %v", tc.expectedPrefix, prefixes)
			}
		})
	}
}
// TestPrefixDetection_LargeScale tests prefix detection with larger datasets
// (1000 issues, single and mixed prefixes).
//
// Improvements over the original: fmt.Fprintf write errors are no longer
// silently ignored, and the file is closed (with the close error checked)
// BEFORE CountJSONLIssues reads it, instead of a deferred close that only ran
// after the read.
func TestPrefixDetection_LargeScale(t *testing.T) {
	t.Run("1000 issues single prefix", func(t *testing.T) {
		tmpDir := t.TempDir()
		jsonlPath := filepath.Join(tmpDir, "issues.jsonl")
		f, err := os.Create(jsonlPath)
		if err != nil {
			t.Fatalf("failed to create JSONL: %v", err)
		}
		for i := 1; i <= 1000; i++ {
			if _, err := fmt.Fprintf(f, `{"id":"bd-%d","title":"Issue"}`+"\n", i); err != nil {
				f.Close()
				t.Fatalf("failed to write JSONL: %v", err)
			}
		}
		if err := f.Close(); err != nil {
			t.Fatalf("failed to close JSONL: %v", err)
		}
		count, prefixes, err := CountJSONLIssues(jsonlPath)
		if err != nil {
			t.Fatalf("unexpected error: %v", err)
		}
		if count != 1000 {
			t.Errorf("expected 1000 issues, got %d", count)
		}
		if prefixes["bd"] != 1000 {
			t.Errorf("expected 1000 'bd' prefix issues, got %d", prefixes["bd"])
		}
	})
	t.Run("1000 issues mixed prefixes", func(t *testing.T) {
		tmpDir := t.TempDir()
		jsonlPath := filepath.Join(tmpDir, "issues.jsonl")
		f, err := os.Create(jsonlPath)
		if err != nil {
			t.Fatalf("failed to create JSONL: %v", err)
		}
		// writeN appends n records using the given ID format string.
		writeN := func(format string, n int) {
			for i := 1; i <= n; i++ {
				if _, err := fmt.Fprintf(f, format+"\n", i); err != nil {
					f.Close()
					t.Fatalf("failed to write JSONL: %v", err)
				}
			}
		}
		writeN(`{"id":"bd-%d"}`, 700)
		writeN(`{"id":"proj-%d"}`, 200)
		writeN(`{"id":"feat-%d"}`, 100)
		if err := f.Close(); err != nil {
			t.Fatalf("failed to close JSONL: %v", err)
		}
		count, prefixes, err := CountJSONLIssues(jsonlPath)
		if err != nil {
			t.Fatalf("unexpected error: %v", err)
		}
		if count != 1000 {
			t.Errorf("expected 1000 issues, got %d", count)
		}
		expected := map[string]int{
			"bd":   700,
			"proj": 200,
			"feat": 100,
		}
		for prefix, expectedCount := range expected {
			if prefixes[prefix] != expectedCount {
				t.Errorf("prefix %q: expected %d, got %d", prefix, expectedCount, prefixes[prefix])
			}
		}
	})
}

View File

@@ -7,14 +7,35 @@ const (
StatusError = "error"
)
// Category constants for grouping doctor checks
const (
CategoryCore = "Core System"
CategoryGit = "Git Integration"
CategoryRuntime = "Runtime"
CategoryData = "Data & Config"
CategoryIntegration = "Integrations"
CategoryMetadata = "Metadata"
)
// CategoryOrder defines the display order for categories
var CategoryOrder = []string{
CategoryCore,
CategoryData,
CategoryGit,
CategoryRuntime,
CategoryIntegration,
CategoryMetadata,
}
// MinSyncBranchHookVersion is the minimum hook version that supports sync-branch bypass (issue #532)
const MinSyncBranchHookVersion = "0.29.0"
// DoctorCheck represents a single diagnostic check result
type DoctorCheck struct {
Name string `json:"name"`
Status string `json:"status"` // StatusOK, StatusWarning, or StatusError
Message string `json:"message"`
Detail string `json:"detail,omitempty"`
Fix string `json:"fix,omitempty"`
Name string `json:"name"`
Status string `json:"status"` // StatusOK, StatusWarning, or StatusError
Message string `json:"message"`
Detail string `json:"detail,omitempty"`
Fix string `json:"fix,omitempty"`
Category string `json:"category,omitempty"` // category for grouping in output
}

View File

@@ -6,38 +6,39 @@ import (
"github.com/charmbracelet/lipgloss"
"github.com/spf13/cobra"
"github.com/steveyegge/beads/internal/ui"
)
// lipgloss styles for the thanks page
// lipgloss styles for the thanks page using Ayu theme
var (
thanksTitleStyle = lipgloss.NewStyle().
Bold(true).
Foreground(lipgloss.Color("7")) // white, bold
Foreground(ui.ColorWarn)
thanksSubtitleStyle = lipgloss.NewStyle().
Faint(true)
Foreground(ui.ColorMuted)
thanksSectionStyle = lipgloss.NewStyle().
Foreground(lipgloss.Color("6")). // cyan for section headers
Foreground(ui.ColorAccent).
Bold(true)
thanksNameStyle = lipgloss.NewStyle().
Foreground(lipgloss.Color("2")) // green
Foreground(ui.ColorPass)
thanksLabelStyle = lipgloss.NewStyle().
Foreground(lipgloss.Color("3")) // yellow
Foreground(ui.ColorWarn)
thanksDimStyle = lipgloss.NewStyle().
Faint(true)
Foreground(ui.ColorMuted)
)
// thanksBoxStyle returns a box style with dynamic width
func thanksBoxStyle(width int) lipgloss.Style {
return lipgloss.NewStyle().
BorderStyle(lipgloss.DoubleBorder()).
BorderForeground(lipgloss.Color("7")).
BorderForeground(ui.ColorMuted).
Padding(1, 4).
Width(width - 4). // account for border
Width(width - 4).
Align(lipgloss.Center)
}

View File

@@ -2,6 +2,8 @@
This project uses a two-tier testing approach to balance speed and thoroughness.
> **Testing Philosophy**: For guidance on what to test, anti-patterns to avoid, and target metrics, see [TESTING_PHILOSOPHY.md](TESTING_PHILOSOPHY.md).
## Test Categories
### Fast Tests (Unit Tests)

260
docs/TESTING_PHILOSOPHY.md Normal file
View File

@@ -0,0 +1,260 @@
# Testing Philosophy
This document covers **what to test** and **what not to test**. For how to run tests, see [TESTING.md](TESTING.md).
## The Test Pyramid
```
┌─────────────────┐
│ E2E Tests │ ← PR/Deploy only (slow, expensive)
│ ~5% of tests │
└────────┬────────┘
┌──────────────┴──────────────┐
│ Integration Tests │ ← PR gate (moderate)
│ ~15% of tests │
└──────────────┬──────────────┘
┌────────────────────────┴────────────────────────┐
│ Unit Tests (Fast) │ ← Every save/commit
│ ~80% of tests │
└─────────────────────────────────────────────────┘
```
### Tier 1: Fast Tests (< 5 seconds total)
**When**: Every file save, pre-commit hooks, continuous during development
- Pure function tests (no I/O)
- In-memory data structure tests
- Business logic validation
- Mock all external dependencies
**In beads**: Core logic tests using `newTestStore()` with in-memory SQLite
### Tier 2: Integration Tests (< 30 seconds)
**When**: Pre-push, PR checks
- Real file system operations
- Git operations with temp repos
- Config file parsing
- CLI argument handling
**In beads**: Tests tagged with `//go:build integration`, daemon tests
### Tier 3: E2E / Smoke Tests (1-5 minutes)
**When**: PR merge, pre-deploy, nightly
- Full `bd init``bd doctor``bd doctor --fix` workflow
- Real API calls (to staging)
- Cross-platform verification
---
## What Makes a Test "Right"
A good test:
1. **Catches a bug you'd actually ship** - not theoretical edge cases
2. **Documents expected behavior** - serves as living documentation
3. **Runs fast enough to not skip** - slow tests get disabled
4. **Isn't duplicated elsewhere** - tests one thing, one way
---
## What to Test (Priority Matrix)
| Priority | What | Why | Examples in beads |
|----------|------|-----|-------------------|
| **High** | Core business logic | This is what users depend on | `sync`, `doctor`, `export`, `import` |
| **High** | Error paths that could corrupt data | Data loss is catastrophic | Config handling, git operations, JSONL integrity |
| **Medium** | Edge cases from production bugs | Discovered through real issues | Orphan handling, ID collision detection |
| **Low** | Display/formatting | Visual output, can be manually verified | Table formatting, color output |
---
## What NOT to Test Extensively
### Simple utility functions
Trust the language and its standard library. Don't write tests proving that `strings.TrimSpace` works — it already has its own.
### Every permutation of inputs
Use table-driven tests with representative cases instead of exhaustive permutations.
```go
// BAD: 10 separate test functions
func TestPriority0(t *testing.T) { ... }
func TestPriority1(t *testing.T) { ... }
func TestPriority2(t *testing.T) { ... }
// GOOD: One table-driven test
func TestPriorityMapping(t *testing.T) {
cases := []struct{ in, want int }{
{0, 4}, {1, 0}, {5, 3}, // includes boundary
}
for _, tc := range cases {
t.Run(fmt.Sprintf("priority_%d", tc.in), func(t *testing.T) {
got := mapPriority(tc.in)
if got != tc.want { t.Errorf(...) }
})
}
}
```
### Obvious behavior
Don't test "if file exists, return true" - trust the implementation.
### Same logic through different entry points
If you test a function directly, don't also test it through every caller.
---
## Anti-Patterns to Avoid
### 1. Trivial Assertions
Testing obvious happy paths that would pass with trivial implementations.
```go
// BAD: What bug would this catch?
func TestValidateBeadsWorkspace(t *testing.T) {
dir := setupTestWorkspace(t)
if err := validateBeadsWorkspace(dir); err != nil {
t.Errorf("expected no error, got: %v", err)
}
}
// GOOD: Test the interesting error cases
func TestValidateBeadsWorkspace(t *testing.T) {
cases := []struct{
name string
setup func(t *testing.T) string
wantErr string
}{
{"missing .beads dir", setupNoBeadsDir, "not a beads workspace"},
{"corrupted db", setupCorruptDB, "database is corrupted"},
{"permission denied", setupNoReadAccess, "permission denied"},
}
// ...
}
```
### 2. Duplicate Error Path Testing
Testing the same logic multiple ways instead of once with table-driven tests.
```go
// BAD: Repetitive individual assertions
if config.PriorityMap["0"] != 4 { t.Errorf(...) }
if config.PriorityMap["1"] != 0 { t.Errorf(...) }
if config.PriorityMap["2"] != 1 { t.Errorf(...) }
// GOOD: Table-driven
for k, want := range expectedMap {
if got := config.PriorityMap[k]; got != want {
t.Errorf("PriorityMap[%q] = %d, want %d", k, got, want)
}
}
```
### 3. I/O Heavy Tests Without Mocking
Unit tests that execute real commands or heavy I/O when they could mock.
```go
// BAD: Actually executes bd killall in unit test
func TestDaemonFix(t *testing.T) {
exec.Command("bd", "killall").Run()
// ...
}
// GOOD: Mock the execution or use integration test tag
func TestDaemonFix(t *testing.T) {
executor := &mockExecutor{}
fix := NewDaemonFix(executor)
// ...
}
```
### 4. Testing Implementation, Not Behavior
Tests that break when you refactor, even though behavior is unchanged.
```go
// BAD: Tests internal state
if len(daemon.connectionPool) != 3 { t.Error(...) }
// GOOD: Tests observable behavior
if resp, err := daemon.HandleRequest(req); err != nil { t.Error(...) }
```
### 5. Missing Boundary Tests
Testing known-good values but not boundaries and invalid inputs.
```go
// BAD: Only tests middle values
TestPriority(1) // works
TestPriority(2) // works
// GOOD: Tests boundaries and invalid
TestPriority(-1) // invalid - expect error
TestPriority(0) // boundary - min valid
TestPriority(4) // boundary - max valid
TestPriority(5) // boundary - first invalid
```
---
## Target Metrics
| Metric | Target | Current (beads) | Status |
|--------|--------|-----------------|--------|
| Test-to-code ratio | 0.5:1 - 1.5:1 | 0.85:1 | Healthy |
| Fast test suite | < 5 seconds | 3.8 seconds | Good |
| Integration tests | < 30 seconds | ~15 seconds | Good |
| Compilation overhead | Minimize | 180 seconds | Bottleneck |
### Interpretation
- **0.5:1** - Light coverage, fast iteration (acceptable for utilities)
- **1:1** - Solid coverage for most projects (our target)
- **1.5:1** - Heavy coverage for critical systems
- **2:1+** - Over-engineered, maintenance burden
---
## Beads-Specific Guidance
### Well-Covered (Maintain)
| Area | Why It's Well-Tested |
|------|---------------------|
| Sync/Export/Import | Data integrity critical - comprehensive edge cases |
| SQLite transactions | Rollback safety, atomicity guarantees |
| Merge operations | 3-way merge with conflict resolution |
| Daemon locking | Prevents corruption from multiple instances |
### Needs Attention
| Area | Gap | Priority |
|------|-----|----------|
| Daemon lifecycle | Shutdown/signal handling | Medium |
| Concurrent operations | Stress testing under load | Medium |
| Boundary validation | Edge inputs in mapping functions | Low |
### Skip These
- Display formatting tests (manually verify)
- Simple getters/setters
- Tests that duplicate SQLite's guarantees
---
## Related Docs
- [TESTING.md](TESTING.md) - How to run tests
- [README_TESTING.md](README_TESTING.md) - Fast vs integration test strategy
- [dev-notes/TEST_SUITE_AUDIT.md](dev-notes/TEST_SUITE_AUDIT.md) - Test refactoring progress

View File

@@ -107,6 +107,13 @@ func TestGetBeadsWorktreePath(t *testing.T) {
// Should point to the main repo's .git/beads-worktrees, not the worktree's
mainGitDir := filepath.Join(mainRepoPath, ".git")
expectedPath := filepath.Join(mainGitDir, "beads-worktrees", "beads-sync")
// Resolve symlinks for comparison (on macOS, /var -> /private/var)
resolvedExpected, err := filepath.EvalSymlinks(mainRepoPath)
if err == nil {
expectedPath = filepath.Join(resolvedExpected, ".git", "beads-worktrees", "beads-sync")
}
if path != expectedPath {
t.Errorf("Expected path %q, got %q", expectedPath, path)
}

130
internal/ui/styles.go Normal file
View File

@@ -0,0 +1,130 @@
// Package ui provides terminal styling for beads CLI output.
// Uses the Ayu color theme with adaptive light/dark mode support.
package ui
import (
"strings"
"github.com/charmbracelet/lipgloss"
)
// Ayu theme color palette
// Dark: https://terminalcolors.com/themes/ayu/dark/
// Light: https://terminalcolors.com/themes/ayu/light/
var (
	// Semantic status colors (Ayu theme - adaptive light/dark)

	// ColorPass is the green used for successful checks and positive output.
	ColorPass = lipgloss.AdaptiveColor{
		Light: "#86b300", // ayu light bright green
		Dark:  "#c2d94c", // ayu dark bright green
	}
	// ColorWarn is the yellow used for warnings and attention-needed output.
	ColorWarn = lipgloss.AdaptiveColor{
		Light: "#f2ae49", // ayu light bright yellow
		Dark:  "#ffb454", // ayu dark bright yellow
	}
	// ColorFail is the red used for errors and failed checks.
	ColorFail = lipgloss.AdaptiveColor{
		Light: "#f07171", // ayu light bright red
		Dark:  "#f07178", // ayu dark bright red
	}
	// ColorMuted is the gray used for de-emphasized/secondary text.
	ColorMuted = lipgloss.AdaptiveColor{
		Light: "#828c99", // ayu light muted
		Dark:  "#6c7680", // ayu dark muted
	}
	// ColorAccent is the blue used for headers and highlighted elements.
	ColorAccent = lipgloss.AdaptiveColor{
		Light: "#399ee6", // ayu light bright blue
		Dark:  "#59c2ff", // ayu dark bright blue
	}
)
// Status styles - consistent across all commands.
// Each style wraps one of the semantic Ayu colors declared above.
var (
	PassStyle   = lipgloss.NewStyle().Foreground(ColorPass)   // success text
	WarnStyle   = lipgloss.NewStyle().Foreground(ColorWarn)   // warning text
	FailStyle   = lipgloss.NewStyle().Foreground(ColorFail)   // error text
	MutedStyle  = lipgloss.NewStyle().Foreground(ColorMuted)  // secondary text
	AccentStyle = lipgloss.NewStyle().Foreground(ColorAccent) // highlighted text
)

// CategoryStyle for section headers - bold with accent color.
var CategoryStyle = lipgloss.NewStyle().Bold(true).Foreground(ColorAccent)
// Status icons - consistent semantic indicators for check results.
const (
	IconPass = "✓" // check passed
	IconWarn = "⚠" // check passed with a warning
	IconFail = "✗" // check failed
	IconSkip = "-" // check skipped / not applicable
	// IconInfo was declared as the empty string, which made RenderInfoIcon
	// produce no visible output; restore the Unicode information symbol.
	IconInfo = "ℹ" // informational note
)
// Tree characters for hierarchical display
const (
	TreeChild  = "⎿ " // child indicator
	TreeLast   = "└─ " // last child / detail line
	TreeIndent = " " // 2-space indent per level
	// NOTE(review): TreeIndent's literal appears to hold a single space while
	// its comment says two-space; whitespace may have been collapsed — confirm
	// against the intended rendering.
)

// Separators - fixed-width horizontal rules for section breaks.
const (
	SeparatorLight = "──────────────────────────────────────────"
	SeparatorHeavy = "══════════════════════════════════════════"
)
// RenderPass returns text styled as passing (green).
func RenderPass(text string) string { return PassStyle.Render(text) }

// RenderWarn returns text styled as a warning (yellow).
func RenderWarn(text string) string { return WarnStyle.Render(text) }

// RenderFail returns text styled as a failure (red).
func RenderFail(text string) string { return FailStyle.Render(text) }

// RenderMuted returns text styled as de-emphasized (gray).
func RenderMuted(text string) string { return MutedStyle.Render(text) }

// RenderAccent returns text styled with the accent color (blue).
func RenderAccent(text string) string { return AccentStyle.Render(text) }
// RenderCategory renders a category header: uppercased, styled with
// CategoryStyle (bold, accent color).
func RenderCategory(header string) string {
	upper := strings.ToUpper(header)
	return CategoryStyle.Render(upper)
}

// RenderSeparator renders the light horizontal rule in the muted color.
func RenderSeparator() string { return MutedStyle.Render(SeparatorLight) }
// RenderPassIcon returns the pass icon in the pass (green) style.
func RenderPassIcon() string { return PassStyle.Render(IconPass) }

// RenderWarnIcon returns the warning icon in the warn (yellow) style.
func RenderWarnIcon() string { return WarnStyle.Render(IconWarn) }

// RenderFailIcon returns the fail icon in the fail (red) style.
func RenderFailIcon() string { return FailStyle.Render(IconFail) }

// RenderSkipIcon returns the skip icon in the muted (gray) style.
func RenderSkipIcon() string { return MutedStyle.Render(IconSkip) }

// RenderInfoIcon returns the info icon in the accent (blue) style.
func RenderInfoIcon() string { return AccentStyle.Render(IconInfo) }