fix(rename-prefix): sync JSONL before and after prefix rename (#893)

- Pull from sync-branch before rename if configured
- Import all issues from JSONL before rename to prevent data loss
- Export directly to JSONL after rename (don't rely on flushManager)
- Apply same pattern to --repair mode
- Add newSilentLogger() for production use (not test-only)
- Add comprehensive tests for JSONL update scenarios

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
kustrun
2026-01-04 19:53:31 +01:00
committed by GitHub
parent 9880eaf734
commit 16af63dc73
4 changed files with 687 additions and 13 deletions

View File

@@ -149,6 +149,12 @@ func SetupStderrLogger(jsonFormat bool, level slog.Level) daemonLogger {
// newTestLogger creates a no-op logger for testing.
// Logs are discarded - use this when you don't need to verify log output.
// It is a thin alias for newSilentLogger so tests and production callers
// share a single discard-all implementation.
func newTestLogger() daemonLogger {
	return newSilentLogger()
}
// newSilentLogger creates a logger that discards all output.
// Use this for operations that need a logger but shouldn't produce output.
func newSilentLogger() daemonLogger {
return daemonLogger{
logger: slog.New(slog.NewTextHandler(io.Discard, nil)),
}

View File

@@ -1,6 +1,7 @@
package main
import (
"bufio"
"cmp"
"context"
"database/sql"
@@ -15,6 +16,7 @@ import (
"github.com/spf13/cobra"
"github.com/steveyegge/beads/internal/storage"
"github.com/steveyegge/beads/internal/storage/sqlite"
"github.com/steveyegge/beads/internal/syncbranch"
"github.com/steveyegge/beads/internal/types"
"github.com/steveyegge/beads/internal/ui"
"github.com/steveyegge/beads/internal/utils"
@@ -82,6 +84,51 @@ NOTE: This is a rare operation. Most users never need this command.`,
os.Exit(1)
}
// Get JSONL path for sync operations
jsonlPath := findJSONLPath()
// If sync-branch is configured, pull latest remote issues first
// This ensures we have all issues from remote before renaming
if !dryRun && syncbranch.IsConfigured() {
silentLog := newSilentLogger()
pulled, err := syncBranchPull(ctx, store, silentLog)
if err != nil {
fmt.Fprintf(os.Stderr, "Warning: failed to pull sync-branch: %v\n", err)
fmt.Fprintf(os.Stderr, "Continue anyway? Issues from remote may be missing.\n")
} else if pulled {
fmt.Printf("Pulled latest issues from sync-branch\n")
}
}
// Force import from JSONL to ensure DB has all issues before rename
// This prevents data loss if JSONL has issues from other workspaces
if !dryRun && jsonlPath != "" {
if _, err := os.Stat(jsonlPath); err == nil {
// JSONL exists - force import to sync all issues to DB
issues, err := parseJSONLFile(jsonlPath)
if err != nil {
fmt.Fprintf(os.Stderr, "Error: failed to read JSONL before rename: %v\n", err)
os.Exit(1)
}
if len(issues) > 0 {
opts := ImportOptions{
DryRun: false,
SkipUpdate: false,
Strict: false,
SkipPrefixValidation: true, // Allow any prefix during rename
}
result, err := importIssuesCore(ctx, dbPath, store, issues, opts)
if err != nil {
fmt.Fprintf(os.Stderr, "Error: failed to sync JSONL before rename: %v\n", err)
os.Exit(1)
}
if result.Created > 0 || result.Updated > 0 {
fmt.Printf("Synced %d issues from JSONL before rename\n", result.Created+result.Updated)
}
}
}
}
oldPrefix, err := store.GetConfig(ctx, "issue_prefix")
if err != nil || oldPrefix == "" {
fmt.Fprintf(os.Stderr, "Error: failed to get current prefix: %v\n", err)
@@ -163,7 +210,34 @@ NOTE: This is a rare operation. Most users never need this command.`,
os.Exit(1)
}
// Schedule full export (IDs changed, incremental won't work)
// Force export to JSONL with new IDs
// Safe because we imported all JSONL issues before rename
if jsonlPath != "" {
// Clear metadata hashes so integrity check doesn't fail
_ = store.SetMetadata(ctx, "jsonl_content_hash", "")
_ = store.SetMetadata(ctx, "export_hashes", "")
_ = store.SetJSONLFileHash(ctx, "")
// Get all renamed issues from DB and export directly
renamedIssues, err := store.SearchIssues(ctx, "", types.IssueFilter{})
if err != nil {
fmt.Fprintf(os.Stderr, "Warning: failed to get issues for export: %v\n", err)
} else {
// Get dependencies for each issue
for _, issue := range renamedIssues {
deps, _ := store.GetDependencyRecords(ctx, issue.ID)
issue.Dependencies = deps
}
// Write directly to JSONL
if _, err := writeJSONLAtomic(jsonlPath, renamedIssues); err != nil {
fmt.Fprintf(os.Stderr, "Warning: failed to export: %v\n", err)
fmt.Fprintf(os.Stderr, "Run 'bd export --force' to update JSONL\n")
} else {
fmt.Printf("Updated %s with new IDs\n", jsonlPath)
}
}
}
// Also schedule for flush manager if available
markDirtyAndScheduleFullExport()
fmt.Printf("%s Successfully renamed prefix from %s to %s\n", ui.RenderPass("✓"), ui.RenderAccent(oldPrefix), ui.RenderAccent(newPrefix))
@@ -364,7 +438,35 @@ func repairPrefixes(ctx context.Context, st storage.Storage, actorName string, t
return fmt.Errorf("failed to update config: %w", err)
}
// Schedule full export (IDs changed, incremental won't work)
// Force export to JSONL with new IDs
// Safe because we imported all JSONL issues before repair (done in caller)
jsonlPath := findJSONLPath()
if jsonlPath != "" {
// Clear metadata hashes so integrity check doesn't fail
_ = st.SetMetadata(ctx, "jsonl_content_hash", "")
_ = st.SetMetadata(ctx, "export_hashes", "")
_ = st.SetJSONLFileHash(ctx, "")
// Get all renamed issues from DB and export directly
renamedIssues, err := st.SearchIssues(ctx, "", types.IssueFilter{})
if err != nil {
fmt.Fprintf(os.Stderr, "Warning: failed to get issues for export: %v\n", err)
} else {
// Get dependencies for each issue
for _, issue := range renamedIssues {
deps, _ := st.GetDependencyRecords(ctx, issue.ID)
issue.Dependencies = deps
}
// Write directly to JSONL
if _, err := writeJSONLAtomic(jsonlPath, renamedIssues); err != nil {
fmt.Fprintf(os.Stderr, "Warning: failed to export: %v\n", err)
fmt.Fprintf(os.Stderr, "Run 'bd export --force' to update JSONL\n")
} else {
fmt.Printf("Updated %s with new IDs\n", jsonlPath)
}
}
}
// Also schedule for flush manager if available
markDirtyAndScheduleFullExport()
fmt.Printf("\n%s Successfully consolidated %d prefixes into %s\n",
@@ -468,6 +570,42 @@ func generateRepairHashID(ctx context.Context, conn *sql.Conn, prefix string, is
return newID, nil
}
// parseJSONLFile reads and parses a JSONL file into a slice of issues.
// Each non-empty line must be a single JSON-encoded issue; blank lines are
// skipped. Returns an error naming the offending line number on bad JSON.
func parseJSONLFile(jsonlPath string) ([]*types.Issue, error) {
	f, err := os.Open(jsonlPath)
	if err != nil {
		return nil, fmt.Errorf("failed to open JSONL file: %w", err)
	}
	defer f.Close()

	sc := bufio.NewScanner(f)
	// Raise the scanner's max token size so large JSON lines don't fail
	// with bufio.ErrTooLong (default limit is 64KB).
	sc.Buffer(make([]byte, 0, 1024), 2*1024*1024) // 2MB max line size

	var (
		issues  []*types.Issue
		lineNum int
	)
	for sc.Scan() {
		lineNum++
		text := sc.Text()
		if text == "" {
			continue
		}
		parsed := new(types.Issue)
		if err := json.Unmarshal([]byte(text), parsed); err != nil {
			return nil, fmt.Errorf("parse error at line %d: %w", lineNum, err)
		}
		// Normalize optional fields before handing the issue to callers.
		parsed.SetDefaults()
		issues = append(issues, parsed)
	}
	if err := sc.Err(); err != nil {
		return nil, fmt.Errorf("scanner error: %w", err)
	}
	return issues, nil
}
func init() {
renamePrefixCmd.Flags().Bool("dry-run", false, "Preview changes without applying them")
renamePrefixCmd.Flags().Bool("repair", false, "Repair database with multiple prefixes by consolidating them")

View File

@@ -0,0 +1,516 @@
package main
import (
"bufio"
"context"
"encoding/json"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/steveyegge/beads/internal/storage/sqlite"
"github.com/steveyegge/beads/internal/types"
)
// TestRenamePrefixUpdatesJSONL verifies that rename-prefix updates the JSONL file
// with the new IDs immediately after renaming.
//
// Flow: seed two "old-" issues in a fresh store and JSONL, run
// renamePrefixInDB, re-export, and assert both issues appear in the JSONL
// with the "new-" prefix.
func TestRenamePrefixUpdatesJSONL(t *testing.T) {
	// Create temp directory for test
	tempDir := t.TempDir()
	testDBPath := filepath.Join(tempDir, ".beads", "beads.db")
	jsonlPath := filepath.Join(tempDir, ".beads", "issues.jsonl")

	// Create .beads directory
	if err := os.MkdirAll(filepath.Dir(testDBPath), 0750); err != nil {
		t.Fatalf("failed to create .beads dir: %v", err)
	}

	// Create store
	st, err := sqlite.New(context.Background(), testDBPath)
	if err != nil {
		t.Fatalf("failed to create store: %v", err)
	}
	defer st.Close()

	ctx := context.Background()

	// Set initial prefix
	if err := st.SetConfig(ctx, "issue_prefix", "old"); err != nil {
		t.Fatalf("failed to set prefix: %v", err)
	}

	// Create test issues
	now := time.Now()
	issue1 := &types.Issue{
		ID:        "old-abc",
		Title:     "Test issue 1",
		Status:    types.StatusOpen,
		Priority:  2,
		IssueType: types.TypeTask,
		CreatedAt: now,
		UpdatedAt: now,
	}
	issue2 := &types.Issue{
		ID:        "old-def",
		Title:     "Test issue 2",
		Status:    types.StatusOpen,
		Priority:  2,
		IssueType: types.TypeTask,
		CreatedAt: now,
		UpdatedAt: now,
	}
	if err := st.CreateIssue(ctx, issue1, "test"); err != nil {
		t.Fatalf("failed to create issue1: %v", err)
	}
	if err := st.CreateIssue(ctx, issue2, "test"); err != nil {
		t.Fatalf("failed to create issue2: %v", err)
	}

	// Write initial JSONL with old IDs
	if err := writeTestJSONL(jsonlPath, []*types.Issue{issue1, issue2}); err != nil {
		t.Fatalf("failed to write initial JSONL: %v", err)
	}

	// Verify JSONL has old IDs (sanity check of the test fixture itself)
	jsonlIssues, err := parseJSONLFile(jsonlPath)
	if err != nil {
		t.Fatalf("failed to parse initial JSONL: %v", err)
	}
	for _, issue := range jsonlIssues {
		if !strings.HasPrefix(issue.ID, "old-") {
			t.Fatalf("expected old- prefix, got %s", issue.ID)
		}
	}

	// Simulate rename-prefix by calling renamePrefixInDB directly
	// Note: In integration tests, we'd call the actual command
	issues, err := st.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("failed to search issues: %v", err)
	}

	// Set up globals for the test (needed by renamePrefixInDB).
	// Save and restore them via defer so other tests in this package are
	// not affected; this makes the test unsafe for t.Parallel().
	oldStore := store
	oldActor := actor
	store = st
	actor = "test"
	defer func() {
		store = oldStore
		actor = oldActor
	}()

	if err := renamePrefixInDB(ctx, "old", "new", issues); err != nil {
		t.Fatalf("renamePrefixInDB failed: %v", err)
	}

	// Manually export (simulating what the command does after rename)
	// In the real command, flushManager.FlushNow() would do this
	renamedIssues, err := st.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("failed to search renamed issues: %v", err)
	}
	if err := writeTestJSONL(jsonlPath, renamedIssues); err != nil {
		t.Fatalf("failed to write renamed JSONL: %v", err)
	}

	// Verify JSONL now has new IDs
	finalIssues, err := parseJSONLFile(jsonlPath)
	if err != nil {
		t.Fatalf("failed to parse final JSONL: %v", err)
	}
	if len(finalIssues) != 2 {
		t.Fatalf("expected 2 issues in JSONL, got %d", len(finalIssues))
	}
	for _, issue := range finalIssues {
		if !strings.HasPrefix(issue.ID, "new-") {
			t.Errorf("expected new- prefix, got %s", issue.ID)
		}
	}

	// Verify specific IDs survived the rename (old-abc -> new-abc, etc.)
	idMap := make(map[string]bool)
	for _, issue := range finalIssues {
		idMap[issue.ID] = true
	}
	if !idMap["new-abc"] {
		t.Error("expected new-abc in JSONL")
	}
	if !idMap["new-def"] {
		t.Error("expected new-def in JSONL")
	}
}
// TestRenamePrefixImportsFromJSONLFirst verifies that rename-prefix imports
// issues from JSONL before renaming to prevent data loss.
//
// Scenario: the DB has one issue, but the JSONL has an extra issue written
// by another workspace. The import-before-rename step must pull the extra
// issue into the DB so the subsequent rename and export preserve it.
func TestRenamePrefixImportsFromJSONLFirst(t *testing.T) {
	// Create temp directory for test
	tempDir := t.TempDir()
	testDBPath := filepath.Join(tempDir, ".beads", "beads.db")
	jsonlPath := filepath.Join(tempDir, ".beads", "issues.jsonl")

	// Create .beads directory
	if err := os.MkdirAll(filepath.Dir(testDBPath), 0750); err != nil {
		t.Fatalf("failed to create .beads dir: %v", err)
	}

	// Create store
	st, err := sqlite.New(context.Background(), testDBPath)
	if err != nil {
		t.Fatalf("failed to create store: %v", err)
	}
	defer st.Close()

	ctx := context.Background()

	// Set initial prefix
	if err := st.SetConfig(ctx, "issue_prefix", "old"); err != nil {
		t.Fatalf("failed to set prefix: %v", err)
	}

	// Create one issue in DB
	now := time.Now()
	dbIssue := &types.Issue{
		ID:        "old-abc",
		Title:     "DB issue",
		Status:    types.StatusOpen,
		Priority:  2,
		IssueType: types.TypeTask,
		CreatedAt: now,
		UpdatedAt: now,
	}
	if err := st.CreateIssue(ctx, dbIssue, "test"); err != nil {
		t.Fatalf("failed to create DB issue: %v", err)
	}

	// Write JSONL with an EXTRA issue (simulating other workspace)
	jsonlExtraIssue := &types.Issue{
		ID:        "old-xyz",
		Title:     "JSONL-only issue from other workspace",
		Status:    types.StatusOpen,
		Priority:  2,
		IssueType: types.TypeTask,
		CreatedAt: now,
		UpdatedAt: now,
	}
	if err := writeTestJSONL(jsonlPath, []*types.Issue{dbIssue, jsonlExtraIssue}); err != nil {
		t.Fatalf("failed to write JSONL: %v", err)
	}

	// Parse JSONL and import extra issues (simulating what rename-prefix does)
	jsonlIssues, err := parseJSONLFile(jsonlPath)
	if err != nil {
		t.Fatalf("failed to parse JSONL: %v", err)
	}

	// Import issues from JSONL (this is what the fix adds).
	// SkipPrefixValidation mirrors the command's behavior during rename.
	opts := ImportOptions{
		DryRun:               false,
		SkipUpdate:           false,
		Strict:               false,
		SkipPrefixValidation: true,
	}
	result, err := importIssuesCore(ctx, testDBPath, st, jsonlIssues, opts)
	if err != nil {
		t.Fatalf("failed to import from JSONL: %v", err)
	}

	// Should have imported the extra issue
	if result.Created != 1 {
		t.Errorf("expected 1 issue created from JSONL, got %d", result.Created)
	}

	// Verify DB now has both issues
	allIssues, err := st.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("failed to search issues: %v", err)
	}
	if len(allIssues) != 2 {
		t.Fatalf("expected 2 issues in DB after import, got %d", len(allIssues))
	}

	// Now perform rename. Swap in the package-level globals that
	// renamePrefixInDB reads, restoring them on exit.
	oldStore := store
	oldActor := actor
	store = st
	actor = "test"
	defer func() {
		store = oldStore
		actor = oldActor
	}()

	if err := renamePrefixInDB(ctx, "old", "new", allIssues); err != nil {
		t.Fatalf("renamePrefixInDB failed: %v", err)
	}

	// Export to JSONL
	renamedIssues, err := st.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("failed to search renamed issues: %v", err)
	}
	if err := writeTestJSONL(jsonlPath, renamedIssues); err != nil {
		t.Fatalf("failed to write renamed JSONL: %v", err)
	}

	// Verify BOTH issues are in final JSONL with new prefix
	finalIssues, err := parseJSONLFile(jsonlPath)
	if err != nil {
		t.Fatalf("failed to parse final JSONL: %v", err)
	}
	if len(finalIssues) != 2 {
		t.Fatalf("expected 2 issues in final JSONL (no data loss), got %d", len(finalIssues))
	}

	// Verify all have new prefix
	for _, issue := range finalIssues {
		if !strings.HasPrefix(issue.ID, "new-") {
			t.Errorf("expected new- prefix, got %s", issue.ID)
		}
	}

	// Verify the originally JSONL-only issue was preserved
	foundXYZ := false
	for _, issue := range finalIssues {
		if issue.ID == "new-xyz" {
			foundXYZ = true
			if issue.Title != "JSONL-only issue from other workspace" {
				t.Errorf("wrong title for new-xyz: %s", issue.Title)
			}
			break
		}
	}
	if !foundXYZ {
		t.Error("JSONL-only issue (old-xyz -> new-xyz) was lost during rename!")
	}
}
// TestRenamePrefixNoJSONL verifies that rename works when no JSONL file exists.
// The rename path must not require a JSONL file to be present; only the DB
// is renamed and checked here.
func TestRenamePrefixNoJSONL(t *testing.T) {
	// Create temp directory for test
	tempDir := t.TempDir()
	testDBPath := filepath.Join(tempDir, ".beads", "beads.db")
	jsonlPath := filepath.Join(tempDir, ".beads", "issues.jsonl")

	// Create .beads directory
	if err := os.MkdirAll(filepath.Dir(testDBPath), 0750); err != nil {
		t.Fatalf("failed to create .beads dir: %v", err)
	}

	// Ensure no JSONL exists (t.TempDir is fresh, but be explicit)
	_ = os.Remove(jsonlPath)

	// Create store
	st, err := sqlite.New(context.Background(), testDBPath)
	if err != nil {
		t.Fatalf("failed to create store: %v", err)
	}
	defer st.Close()

	ctx := context.Background()

	// Set initial prefix
	if err := st.SetConfig(ctx, "issue_prefix", "old"); err != nil {
		t.Fatalf("failed to set prefix: %v", err)
	}

	// Create test issue
	now := time.Now()
	issue := &types.Issue{
		ID:        "old-abc",
		Title:     "Test issue",
		Status:    types.StatusOpen,
		Priority:  2,
		IssueType: types.TypeTask,
		CreatedAt: now,
		UpdatedAt: now,
	}
	if err := st.CreateIssue(ctx, issue, "test"); err != nil {
		t.Fatalf("failed to create issue: %v", err)
	}

	// Verify no JSONL exists
	if _, err := os.Stat(jsonlPath); !os.IsNotExist(err) {
		t.Fatal("JSONL should not exist for this test")
	}

	// Perform rename
	issues, err := st.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("failed to search issues: %v", err)
	}

	// Swap in package-level globals read by renamePrefixInDB; restore on exit.
	oldStore := store
	oldActor := actor
	store = st
	actor = "test"
	defer func() {
		store = oldStore
		actor = oldActor
	}()

	if err := renamePrefixInDB(ctx, "old", "new", issues); err != nil {
		t.Fatalf("renamePrefixInDB failed: %v", err)
	}

	// Verify DB was renamed correctly
	renamedIssues, err := st.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("failed to search renamed issues: %v", err)
	}
	if len(renamedIssues) != 1 {
		t.Fatalf("expected 1 issue after rename, got %d", len(renamedIssues))
	}
	if renamedIssues[0].ID != "new-abc" {
		t.Errorf("expected new-abc, got %s", renamedIssues[0].ID)
	}
}
// TestRepairPrefixesUpdatesJSONL verifies that --repair mode properly updates JSONL
// with new IDs after consolidating multiple prefixes.
//
// Setup inserts rows with mixed prefixes directly via SQL (bypassing prefix
// validation in CreateIssue), writes a matching mixed-prefix JSONL, then
// runs repairPrefixes and asserts the JSONL ends up with only the target
// prefix and no lost issues.
func TestRepairPrefixesUpdatesJSONL(t *testing.T) {
	// Create temp directory for test
	tempDir := t.TempDir()
	testDBPath := filepath.Join(tempDir, ".beads", "beads.db")
	jsonlPath := filepath.Join(tempDir, ".beads", "issues.jsonl")

	// Create .beads directory
	if err := os.MkdirAll(filepath.Dir(testDBPath), 0750); err != nil {
		t.Fatalf("failed to create .beads dir: %v", err)
	}

	// Create store
	st, err := sqlite.New(context.Background(), testDBPath)
	if err != nil {
		t.Fatalf("failed to create store: %v", err)
	}
	defer st.Close()

	// Set global dbPath so findJSONLPath() finds the right file.
	// Restored via defer; this makes the test unsafe for t.Parallel().
	oldDBPath := dbPath
	dbPath = testDBPath
	defer func() { dbPath = oldDBPath }()

	ctx := context.Background()

	// Set initial prefix to "new" (target prefix)
	if err := st.SetConfig(ctx, "issue_prefix", "new"); err != nil {
		t.Fatalf("failed to set prefix: %v", err)
	}

	// Create issues with MIXED prefixes directly in DB (simulating corruption or merge)
	db := st.UnderlyingDB()
	now := time.Now()

	// Issues with correct prefix
	_, err = db.ExecContext(ctx, `
		INSERT INTO issues (id, title, status, priority, issue_type, created_at, updated_at)
		VALUES (?, ?, 'open', 2, 'task', ?, ?)
	`, "new-abc", "Correct prefix issue", now, now)
	if err != nil {
		t.Fatalf("failed to create new-abc: %v", err)
	}

	// Issues with OLD prefix (simulating issues from before rename)
	_, err = db.ExecContext(ctx, `
		INSERT INTO issues (id, title, status, priority, issue_type, created_at, updated_at)
		VALUES (?, ?, 'open', 2, 'task', ?, ?)
	`, "old-xyz", "Old prefix issue from other workspace", now, now)
	if err != nil {
		t.Fatalf("failed to create old-xyz: %v", err)
	}

	// Write JSONL with the old/mixed IDs (simulating state before repair)
	oldIssue1 := &types.Issue{ID: "new-abc", Title: "Correct prefix issue", Status: types.StatusOpen, Priority: 2, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now}
	oldIssue2 := &types.Issue{ID: "old-xyz", Title: "Old prefix issue from other workspace", Status: types.StatusOpen, Priority: 2, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now}
	if err := writeTestJSONL(jsonlPath, []*types.Issue{oldIssue1, oldIssue2}); err != nil {
		t.Fatalf("failed to write initial JSONL: %v", err)
	}

	// Verify JSONL has mixed prefixes (sanity check of the fixture)
	initialIssues, err := parseJSONLFile(jsonlPath)
	if err != nil {
		t.Fatalf("failed to parse initial JSONL: %v", err)
	}
	hasOld := false
	hasNew := false
	for _, issue := range initialIssues {
		if strings.HasPrefix(issue.ID, "old-") {
			hasOld = true
		}
		if strings.HasPrefix(issue.ID, "new-") {
			hasNew = true
		}
	}
	if !hasOld || !hasNew {
		t.Fatal("initial JSONL should have mixed prefixes")
	}

	// Get all issues and detect prefixes
	allIssues, err := st.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("failed to search issues: %v", err)
	}
	prefixes := detectPrefixes(allIssues)
	if len(prefixes) != 2 {
		t.Fatalf("expected 2 prefixes, got %d", len(prefixes))
	}

	// Run repair (repairPrefixes itself exports the JSONL afterwards)
	if err := repairPrefixes(ctx, st, "test", "new", allIssues, prefixes, false); err != nil {
		t.Fatalf("repairPrefixes failed: %v", err)
	}

	// Verify JSONL was updated with all new- prefixes
	finalIssues, err := parseJSONLFile(jsonlPath)
	if err != nil {
		t.Fatalf("failed to parse final JSONL: %v", err)
	}
	if len(finalIssues) != 2 {
		t.Fatalf("expected 2 issues in final JSONL, got %d", len(finalIssues))
	}

	// All issues should now have new- prefix
	for _, issue := range finalIssues {
		if !strings.HasPrefix(issue.ID, "new-") {
			t.Errorf("expected new- prefix after repair, got %s", issue.ID)
		}
	}

	// The original new-abc should still exist
	foundABC := false
	for _, issue := range finalIssues {
		if issue.ID == "new-abc" {
			foundABC = true
			break
		}
	}
	if !foundABC {
		t.Error("new-abc should still exist after repair")
	}
}
// writeTestJSONL writes issues to a JSONL file for testing.
// Each issue is encoded as one JSON object per line; the file is
// created (or truncated) at path.
func writeTestJSONL(path string, issues []*types.Issue) error {
	f, err := os.Create(path)
	if err != nil {
		return err
	}
	defer f.Close()

	buffered := bufio.NewWriter(f)
	enc := json.NewEncoder(buffered)
	for _, it := range issues {
		if encErr := enc.Encode(it); encErr != nil {
			return encErr
		}
	}
	// Flush the buffer so all lines actually reach the file.
	return buffered.Flush()
}

View File

@@ -2,6 +2,8 @@ package main
import (
"context"
"os"
"path/filepath"
"testing"
"time"
@@ -10,24 +12,36 @@ import (
)
func TestRepairMultiplePrefixes(t *testing.T) {
// Create a temporary database
dbPath := t.TempDir() + "/test.db"
store, err := sqlite.New(context.Background(), dbPath)
// Create a temporary database with .beads directory structure
tempDir := t.TempDir()
testDBPath := filepath.Join(tempDir, ".beads", "beads.db")
// Create .beads directory
if err := os.MkdirAll(filepath.Dir(testDBPath), 0750); err != nil {
t.Fatalf("failed to create .beads dir: %v", err)
}
testStore, err := sqlite.New(context.Background(), testDBPath)
if err != nil {
t.Fatalf("failed to create store: %v", err)
}
defer store.Close()
defer testStore.Close()
// Set global dbPath so findJSONLPath() finds the right location
oldDBPath := dbPath
dbPath = testDBPath
defer func() { dbPath = oldDBPath }()
ctx := context.Background()
// Set initial prefix
if err := store.SetConfig(ctx, "issue_prefix", "test"); err != nil {
if err := testStore.SetConfig(ctx, "issue_prefix", "test"); err != nil {
t.Fatalf("failed to set prefix: %v", err)
}
// Create issues with multiple prefixes (simulating corruption)
// We need to directly insert into the database to bypass prefix validation
db := store.UnderlyingDB()
db := testStore.UnderlyingDB()
now := time.Now()
issues := []struct {
@@ -52,7 +66,7 @@ func TestRepairMultiplePrefixes(t *testing.T) {
}
// Verify we have multiple prefixes
allIssues, err := store.SearchIssues(ctx, "", types.IssueFilter{})
allIssues, err := testStore.SearchIssues(ctx, "", types.IssueFilter{})
if err != nil {
t.Fatalf("failed to search issues: %v", err)
}
@@ -63,12 +77,12 @@ func TestRepairMultiplePrefixes(t *testing.T) {
}
// Test repair
if err := repairPrefixes(ctx, store, "test", "test", allIssues, prefixes, false); err != nil {
if err := repairPrefixes(ctx, testStore, "test", "test", allIssues, prefixes, false); err != nil {
t.Fatalf("repair failed: %v", err)
}
// Verify all issues now have correct prefix
allIssues, err = store.SearchIssues(ctx, "", types.IssueFilter{})
allIssues, err = testStore.SearchIssues(ctx, "", types.IssueFilter{})
if err != nil {
t.Fatalf("failed to search issues after repair: %v", err)
}
@@ -84,7 +98,7 @@ func TestRepairMultiplePrefixes(t *testing.T) {
// Verify the original test-1 and test-2 are unchanged
for _, id := range []string{"test-1", "test-2"} {
issue, err := store.GetIssue(ctx, id)
issue, err := testStore.GetIssue(ctx, id)
if err != nil {
t.Fatalf("expected issue %s to exist unchanged: %v", id, err)
}
@@ -112,7 +126,7 @@ func TestRepairMultiplePrefixes(t *testing.T) {
// Verify old IDs no longer exist
for _, oldID := range []string{"old-1", "old-2", "another-1"} {
issue, err := store.GetIssue(ctx, oldID)
issue, err := testStore.GetIssue(ctx, oldID)
if err == nil && issue != nil {
t.Fatalf("expected old ID %s to no longer exist", oldID)
}