refactor: remove all deletions.jsonl code (bd-fom)

Complete removal of the legacy deletions.jsonl manifest system.
Tombstones are now the sole deletion mechanism.

Removed:
- internal/deletions/ - entire package
- cmd/bd/deleted.go - deleted command
- cmd/bd/doctor/fix/deletions.go - HydrateDeletionsManifest
- Tests for all removed functionality

Cleaned:
- cmd/bd/sync.go - removed sanitize, auto-compact
- cmd/bd/delete.go - removed dual-writes
- cmd/bd/doctor.go - removed checkDeletionsManifest
- internal/importer/importer.go - removed deletions checks
- internal/syncbranch/worktree.go - removed deletions merge
- cmd/bd/integrity.go - updated validation (warn-only on decrease)

Files removed: 12
Lines removed: ~7500

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Steve Yegge
2025-12-16 14:20:32 -08:00
parent e0528de590
commit 9f76cfda01
32 changed files with 298 additions and 7534 deletions
+2 -445
View File
@@ -1,18 +1,12 @@
package importer
import (
"bytes"
"context"
"fmt"
"os"
"os/exec"
"path/filepath"
"regexp"
"sort"
"strings"
"time"
"github.com/steveyegge/beads/internal/deletions"
"github.com/steveyegge/beads/internal/storage"
"github.com/steveyegge/beads/internal/storage/sqlite"
"github.com/steveyegge/beads/internal/types"
@@ -41,10 +35,8 @@ type Options struct {
RenameOnImport bool // Rename imported issues to match database prefix
SkipPrefixValidation bool // Skip prefix validation (for auto-import)
OrphanHandling OrphanHandling // How to handle missing parent issues (default: allow)
ClearDuplicateExternalRefs bool // Clear duplicate external_ref values instead of erroring
NoGitHistory bool // Skip git history backfill for deletions (prevents spurious deletion during JSONL migrations)
IgnoreDeletions bool // Import issues even if they're in the deletions manifest
ProtectLocalExportIDs map[string]bool // IDs from left snapshot to protect from git-history-backfill (bd-sync-deletion fix)
ClearDuplicateExternalRefs bool // Clear duplicate external_ref values instead of erroring
ProtectLocalExportIDs map[string]bool // IDs from left snapshot to protect from deletion (bd-sync-deletion fix)
}
// Result contains statistics about the import operation
@@ -60,14 +52,6 @@ type Result struct {
ExpectedPrefix string // Database configured prefix
MismatchPrefixes map[string]int // Map of mismatched prefixes to count
SkippedDependencies []string // Dependencies skipped due to FK constraint violations
Purged int // Issues purged from DB (found in deletions manifest)
PurgedIDs []string // IDs that were purged
SkippedDeleted int // Issues skipped because they're in deletions manifest
SkippedDeletedIDs []string // IDs that were skipped due to deletions manifest
ConvertedToTombstone int // Legacy deletions.jsonl entries converted to tombstones (bd-wucl)
ConvertedTombstoneIDs []string // IDs that were converted to tombstones
PreservedLocalExport int // Issues preserved because they were in local export (bd-sync-deletion fix)
PreservedLocalIDs []string // IDs that were preserved from local export
}
// ImportIssues handles the core import logic used by both manual and auto-import.
@@ -122,63 +106,6 @@ func ImportIssues(ctx context.Context, dbPath string, store storage.Storage, iss
opts.OrphanHandling = sqliteStore.GetOrphanHandling(ctx)
}
// Handle deletions manifest and tombstones (bd-dve)
//
// Phase 1 (Dual-Write):
// - Tombstones in JSONL are imported as-is (they're issues with status=tombstone)
// - Legacy deletions.jsonl entries are converted to tombstones
// - Non-tombstone issues in deletions manifest are skipped (backwards compat)
//
// Note: Tombstones from JSONL take precedence over legacy deletions.jsonl
if !opts.IgnoreDeletions && dbPath != "" {
beadsDir := filepath.Dir(dbPath)
deletionsPath := deletions.DefaultPath(beadsDir)
loadResult, err := deletions.LoadDeletions(deletionsPath)
if err == nil && len(loadResult.Records) > 0 {
// Build a map of existing tombstones from JSONL for quick lookup
tombstoneIDs := make(map[string]bool)
for _, issue := range issues {
if issue.IsTombstone() {
tombstoneIDs[issue.ID] = true
}
}
var filteredIssues []*types.Issue
for _, issue := range issues {
// Tombstones are always imported (they represent deletions in the new format)
if issue.IsTombstone() {
filteredIssues = append(filteredIssues, issue)
continue
}
if _, found := loadResult.Records[issue.ID]; found {
// Non-tombstone issue is in deletions manifest - skip it
// (this maintains backward compatibility during transition)
// Note: Individual skip messages removed (bd-wsqt) - caller shows summary
result.SkippedDeleted++
result.SkippedDeletedIDs = append(result.SkippedDeletedIDs, issue.ID)
} else {
filteredIssues = append(filteredIssues, issue)
}
}
// Convert legacy deletions.jsonl entries to tombstones if not already in JSONL
for id, del := range loadResult.Records {
if tombstoneIDs[id] {
// Already have a tombstone for this ID in JSONL, skip
continue
}
// Convert this deletion record to a tombstone (bd-wucl)
tombstone := convertDeletionToTombstone(id, del)
filteredIssues = append(filteredIssues, tombstone)
result.ConvertedToTombstone++
result.ConvertedTombstoneIDs = append(result.ConvertedTombstoneIDs, id)
}
issues = filteredIssues
}
}
// Check and handle prefix mismatches
issues, err = handlePrefixMismatch(ctx, sqliteStore, issues, opts, result)
if err != nil {
@@ -219,15 +146,6 @@ func ImportIssues(ctx context.Context, dbPath string, store storage.Storage, iss
return nil, err
}
// Purge deleted issues from DB based on deletions manifest
// Issues that are in the manifest but not in JSONL should be deleted from DB
if !opts.DryRun {
if err := purgeDeletedIssues(ctx, sqliteStore, dbPath, issues, opts, result); err != nil {
// Non-fatal - just log warning
fmt.Fprintf(os.Stderr, "Warning: failed to purge deleted issues: %v\n", err)
}
}
// Checkpoint WAL to ensure data persistence and reduce WAL file size
if err := sqliteStore.CheckpointWAL(ctx); err != nil {
// Non-fatal - just log warning
@@ -922,367 +840,6 @@ func importComments(ctx context.Context, sqliteStore *sqlite.SQLiteStorage, issu
return nil
}
// purgeDeletedIssues converts DB issues to tombstones if they are in the deletions
// manifest but not in the incoming JSONL. This enables deletion propagation across clones.
// Also uses git history fallback for deletions that were pruned from the manifest,
// unless opts.NoGitHistory is set (useful during JSONL filename migrations).
//
// Note (bd-dve): With inline tombstones, most deletions are now handled during import
// via convertDeletionToTombstone. This function primarily handles:
// 1. DB-only issues that need to be tombstoned (not in JSONL at all)
// 2. Git history fallback for pruned deletions
//
// dbPath locates the database; the deletions manifest is expected to live in the
// same directory. jsonlIssues is the full set of incoming issues. Statistics are
// accumulated into result (Purged/PurgedIDs, PreservedLocalExport/PreservedLocalIDs).
// Failures on individual issues are reported to stderr and skipped; only a failure
// to load the manifest or to list DB issues returns a non-nil error.
func purgeDeletedIssues(ctx context.Context, sqliteStore *sqlite.SQLiteStorage, dbPath string, jsonlIssues []*types.Issue, opts Options, result *Result) error {
	// Get deletions manifest path (same directory as database)
	beadsDir := filepath.Dir(dbPath)
	deletionsPath := deletions.DefaultPath(beadsDir)
	// Load deletions manifest (gracefully handles missing/empty file)
	loadResult, err := deletions.LoadDeletions(deletionsPath)
	if err != nil {
		return fmt.Errorf("failed to load deletions manifest: %w", err)
	}
	// Log any warnings from loading
	for _, warning := range loadResult.Warnings {
		fmt.Fprintf(os.Stderr, "Warning: %s\n", warning)
	}
	// Build set of IDs in the incoming JSONL for O(1) lookup
	jsonlIDs := make(map[string]bool, len(jsonlIssues))
	for _, issue := range jsonlIssues {
		jsonlIDs[issue.ID] = true
	}
	// Get all DB issues (exclude existing tombstones - they're already deleted)
	dbIssues, err := sqliteStore.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		return fmt.Errorf("failed to get DB issues: %w", err)
	}
	// Collect IDs that need git history check (not in JSONL, not in manifest)
	var needGitCheck []string
	// Find DB issues that:
	// 1. Are NOT in the JSONL (not synced from remote)
	// 2. ARE in the deletions manifest (were deleted elsewhere)
	// 3. Are NOT already tombstones
	for _, dbIssue := range dbIssues {
		if jsonlIDs[dbIssue.ID] {
			// Issue is in JSONL, keep it (tombstone or not)
			continue
		}
		if del, found := loadResult.Records[dbIssue.ID]; found {
			// SAFETY GUARD (bd-k92d): Prevent deletion of open/in_progress issues without explicit warning
			// This protects against data loss from:
			// 1. Repo ID mismatches causing incorrect deletions
			// 2. Race conditions during daemon sync
			// 3. Accidental deletion of active work
			if dbIssue.Status == types.StatusOpen || dbIssue.Status == types.StatusInProgress {
				fmt.Fprintf(os.Stderr, "⚠️ WARNING: Refusing to delete %s with status=%s\n", dbIssue.ID, dbIssue.Status)
				fmt.Fprintf(os.Stderr, " Title: %s\n", dbIssue.Title)
				fmt.Fprintf(os.Stderr, " This issue is in deletions.jsonl but still open/in_progress in your database.\n")
				fmt.Fprintf(os.Stderr, " This may indicate:\n")
				fmt.Fprintf(os.Stderr, " - A repo ID mismatch (check with 'bd migrate --update-repo-id')\n")
				fmt.Fprintf(os.Stderr, " - A sync race condition with unpushed local changes\n")
				fmt.Fprintf(os.Stderr, " - Accidental deletion on another clone\n")
				fmt.Fprintf(os.Stderr, " To force deletion: bd delete %s\n", dbIssue.ID)
				fmt.Fprintf(os.Stderr, " To keep this issue: remove it from .beads/deletions.jsonl\n\n")
				continue
			}
			// Issue is in deletions manifest - convert to tombstone (bd-dve)
			if err := sqliteStore.CreateTombstone(ctx, dbIssue.ID, del.Actor, del.Reason); err != nil {
				// Non-fatal: report and move on to the next candidate.
				fmt.Fprintf(os.Stderr, "Warning: failed to create tombstone for %s: %v\n", dbIssue.ID, err)
				continue
			}
			// Log the tombstone creation with metadata
			fmt.Fprintf(os.Stderr, "Tombstoned %s (deleted %s by %s", dbIssue.ID, del.Timestamp.Format("2006-01-02 15:04:05"), del.Actor)
			if del.Reason != "" {
				fmt.Fprintf(os.Stderr, ", reason: %s", del.Reason)
			}
			fmt.Fprintf(os.Stderr, ")\n")
			result.Purged++
			result.PurgedIDs = append(result.PurgedIDs, dbIssue.ID)
		} else {
			// Not in JSONL and not in deletions manifest
			// This could be:
			// 1. Local work (new issue not yet exported)
			// 2. Deletion was pruned from manifest (check git history)
			// 3. Issue was in local export but lost during pull/merge (bd-sync-deletion fix)
			// Check if this issue was in our local export (left snapshot)
			// If so, it's local work that got lost during merge - preserve it!
			if opts.ProtectLocalExportIDs != nil && opts.ProtectLocalExportIDs[dbIssue.ID] {
				fmt.Fprintf(os.Stderr, "Preserving %s (was in local export, lost during merge)\n", dbIssue.ID)
				result.PreservedLocalExport++
				result.PreservedLocalIDs = append(result.PreservedLocalIDs, dbIssue.ID)
				continue
			}
			needGitCheck = append(needGitCheck, dbIssue.ID)
		}
	}
	// Git history fallback for potential pruned deletions
	// Skip if --no-git-history flag is set (prevents spurious deletions during JSONL migrations)
	if len(needGitCheck) > 0 && !opts.NoGitHistory {
		deletedViaGit := checkGitHistoryForDeletions(beadsDir, needGitCheck)
		// Safety guard (bd-21a): Prevent mass deletion when JSONL appears reset
		// If git-history-backfill would delete a large percentage of issues,
		// this likely indicates the JSONL was reset (git reset, branch switch, etc.)
		// rather than intentional deletions
		totalDBIssues := len(dbIssues)
		deleteCount := len(deletedViaGit)
		if deleteCount > 0 && totalDBIssues > 0 {
			deletePercent := float64(deleteCount) / float64(totalDBIssues) * 100
			// Abort if would delete >50% of issues - this is almost certainly a reset
			if deletePercent > 50 {
				fmt.Fprintf(os.Stderr, "Warning: git-history-backfill would tombstone %d of %d issues (%.1f%%) - aborting\n",
					deleteCount, totalDBIssues, deletePercent)
				fmt.Fprintf(os.Stderr, "This usually means the JSONL was reset (git reset, branch switch, etc.)\n")
				fmt.Fprintf(os.Stderr, "If these are legitimate deletions, add them to deletions.jsonl manually\n")
				// Don't delete anything - abort the backfill
				deleteCount = 0
				deletedViaGit = nil
			} else if deleteCount > 10 {
				// Warn (but proceed) if deleting >10 issues
				fmt.Fprintf(os.Stderr, "Warning: git-history-backfill will tombstone %d issues (%.1f%% of %d total)\n",
					deleteCount, deletePercent, totalDBIssues)
			}
		}
		for _, id := range deletedViaGit {
			// SAFETY GUARD (bd-k92d): Check if this is an open/in_progress issue before deleting
			// Get the issue from database to check its status
			issue, err := sqliteStore.GetIssue(ctx, id)
			if err == nil && issue != nil {
				if issue.Status == types.StatusOpen || issue.Status == types.StatusInProgress {
					fmt.Fprintf(os.Stderr, "⚠️ WARNING: git-history-backfill refusing to delete %s with status=%s\n", id, issue.Status)
					fmt.Fprintf(os.Stderr, " Title: %s\n", issue.Title)
					fmt.Fprintf(os.Stderr, " This issue was found in git history but is still open/in_progress.\n")
					fmt.Fprintf(os.Stderr, " This may indicate:\n")
					fmt.Fprintf(os.Stderr, " - A repo ID mismatch between clones\n")
					fmt.Fprintf(os.Stderr, " - The issue was re-created after being deleted\n")
					fmt.Fprintf(os.Stderr, " - Local uncommitted work that conflicts with remote history\n")
					fmt.Fprintf(os.Stderr, " To force deletion: bd delete %s\n", id)
					fmt.Fprintf(os.Stderr, " To prevent git-history checks: use --no-git-history flag\n\n")
					continue
				}
			}
			// Backfill the deletions manifest (self-healing)
			// bd-ffr9: Skip writing to deletions.jsonl if tombstone migration is complete
			if !deletions.IsTombstoneMigrationComplete(beadsDir) {
				backfillRecord := deletions.DeletionRecord{
					ID: id,
					Timestamp: time.Now().UTC(),
					Actor: "git-history-backfill",
					Reason: "recovered from git history (pruned from manifest)",
				}
				if err := deletions.AppendDeletion(deletionsPath, backfillRecord); err != nil {
					// Non-fatal: the tombstone below is still created.
					fmt.Fprintf(os.Stderr, "Warning: failed to backfill deletion record for %s: %v\n", id, err)
				}
			}
			// Convert to tombstone (bd-dve)
			if err := sqliteStore.CreateTombstone(ctx, id, "git-history-backfill", "recovered from git history (pruned from manifest)"); err != nil {
				fmt.Fprintf(os.Stderr, "Warning: failed to create tombstone for %s (git-recovered): %v\n", id, err)
				continue
			}
			fmt.Fprintf(os.Stderr, "Tombstoned %s (recovered from git history, pruned from manifest)\n", id)
			result.Purged++
			result.PurgedIDs = append(result.PurgedIDs, id)
		}
	} else if len(needGitCheck) > 0 && opts.NoGitHistory {
		// Log that we skipped git history check due to flag
		fmt.Fprintf(os.Stderr, "Skipped git history check for %d issue(s) (--no-git-history flag set)\n", len(needGitCheck))
	}
	return nil
}
// checkGitHistoryForDeletions checks if IDs were ever in the JSONL history.
// Returns the IDs that were found in git history (meaning they were deleted,
// and the deletion record was pruned from the manifest).
//
// Uses batched git log search for efficiency when checking multiple IDs.
func checkGitHistoryForDeletions(beadsDir string, ids []string) []string {
if len(ids) == 0 {
return nil
}
// Find the actual git repo root using git rev-parse (bd-bhd)
// This handles monorepos and nested projects where .beads isn't at repo root
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
cmd := exec.CommandContext(ctx, "git", "rev-parse", "--show-toplevel")
cmd.Dir = beadsDir
output, err := cmd.Output()
if err != nil {
// Not in a git repo or git not available - can't do history check
return nil
}
repoRoot := strings.TrimSpace(string(output))
// Compute relative path from repo root to issues.jsonl
// beadsDir is absolute, compute its path relative to repoRoot
absBeadsDir, err := filepath.Abs(beadsDir)
if err != nil {
return nil
}
relBeadsDir, err := filepath.Rel(repoRoot, absBeadsDir)
if err != nil {
return nil
}
// Build JSONL path relative to repo root (bd-6xd: issues.jsonl is canonical)
jsonlPath := filepath.Join(relBeadsDir, "issues.jsonl")
var deleted []string
// For efficiency, batch IDs into a single git command when possible
// We use git log with -S to search for string additions/removals
if len(ids) <= 10 {
// Small batch: check each ID individually for accuracy
for _, id := range ids {
if wasEverInJSONL(repoRoot, jsonlPath, id) {
deleted = append(deleted, id)
}
}
} else {
// Large batch: use grep pattern for efficiency
// This may have some false positives, but is much faster
deleted = batchCheckGitHistory(repoRoot, jsonlPath, ids)
}
return deleted
}
// gitHistoryTimeout is the maximum time to wait for git history searches.
// Prevents hangs on large repositories (bd-f0n). Applied per git invocation
// in wasEverInJSONL and batchCheckGitHistory.
const gitHistoryTimeout = 30 * time.Second
// wasEverInJSONL reports whether a single issue ID ever appeared in the JSONL
// according to git history: a true result means some commit added or removed
// the ID string. The caller must separately confirm the ID is NOT currently in
// the JSONL to conclude it was deleted (vs still present).
func wasEverInJSONL(repoRoot, jsonlPath, id string) bool {
	// Equivalent to:
	//   git log --all -S "\"id\":\"bd-xxx\"" --oneline -- .beads/issues.jsonl
	// -S (pickaxe) does literal matching, so the ID needs no escaping.
	needle := fmt.Sprintf(`"id":"%s"`, id)
	// Bound the search so large repositories cannot hang us (bd-f0n).
	ctx, cancel := context.WithTimeout(context.Background(), gitHistoryTimeout)
	defer cancel()
	var buf bytes.Buffer
	// #nosec G204 - needle is constructed from validated issue IDs
	search := exec.CommandContext(ctx, "git", "log", "--all", "-S", needle, "--oneline", "--", jsonlPath)
	search.Dir = repoRoot
	search.Stdout = &buf
	search.Stderr = nil // diagnostics from git are not interesting here
	if err := search.Run(); err != nil {
		// Shallow clone, timeout, not a repo, etc. Be conservative: treat the
		// issue as local work and do not delete it.
		return false
	}
	// Any non-blank output means at least one commit touched this ID, i.e. it
	// was once in the JSONL.
	return len(bytes.TrimSpace(buf.Bytes())) > 0
}
// batchCheckGitHistory searches git history for many IDs with a single
// `git log -G` invocation and returns the subset of ids that were found.
// If the batched command fails it degrades to per-ID checks.
func batchCheckGitHistory(repoRoot, jsonlPath string, ids []string) []string {
	// Build an alternation regex ("id":"bd-xxx"|"id":"bd-yyy"|...), quoting
	// each ID so regex metacharacters cannot malform the pattern (bd-bgs).
	alternatives := make([]string, 0, len(ids))
	for _, id := range ids {
		alternatives = append(alternatives, fmt.Sprintf(`"id":"%s"`, regexp.QuoteMeta(id)))
	}
	pattern := strings.Join(alternatives, "|")
	// Bounded so large repositories cannot hang the search (bd-f0n).
	ctx, cancel := context.WithTimeout(context.Background(), gitHistoryTimeout)
	defer cancel()
	// git log -G performs regex matching against patch content.
	// #nosec G204 - pattern is constructed from validated issue IDs
	search := exec.CommandContext(ctx, "git", "log", "--all", "-G", pattern, "-p", "--", jsonlPath)
	search.Dir = repoRoot
	var buf bytes.Buffer
	search.Stdout = &buf
	search.Stderr = nil // diagnostics from git are not interesting here
	if err := search.Run(); err != nil {
		// Batched search failed (timeout, shallow clone, etc.) — fall back to
		// individual checks, which carry their own timeout protection.
		var found []string
		for _, id := range ids {
			if wasEverInJSONL(repoRoot, jsonlPath, id) {
				found = append(found, id)
			}
		}
		return found
	}
	text := buf.String()
	if text == "" {
		return nil
	}
	// Confirm which IDs actually occur in the patch output rather than
	// trusting the alternation match blindly.
	var found []string
	for _, id := range ids {
		if strings.Contains(text, fmt.Sprintf(`"id":"%s"`, id)) {
			found = append(found, id)
		}
	}
	return found
}
// Helper functions
// convertDeletionToTombstone turns a legacy DeletionRecord into a tombstone
// Issue, used during import to migrate deletions.jsonl entries to inline
// tombstones (bd-dve). Priority is left at zero to signal "unknown" (bd-9auw);
// IssueType must pass validation, so TypeTask is used as a neutral default.
func convertDeletionToTombstone(id string, del deletions.DeletionRecord) *types.Issue {
	ts := del.Timestamp
	tombstone := &types.Issue{
		ID:           id,
		Title:        "(deleted)",
		Description:  "",
		Status:       types.StatusTombstone,
		Priority:     0,              // 0 = unset; distinguishes from user-chosen priorities
		IssueType:    types.TypeTask, // default; must be a valid type for validation
		CreatedAt:    ts,
		UpdatedAt:    ts,
		DeletedAt:    &ts,
		DeletedBy:    del.Actor,
		DeleteReason: del.Reason,
		OriginalType: "", // legacy deletions.jsonl never recorded the original type
	}
	return tombstone
}
func GetPrefixList(prefixes map[string]int) []string {
var result []string
keys := make([]string, 0, len(prefixes))
@@ -1,728 +0,0 @@
package importer
import (
"context"
"os"
"os/exec"
"path/filepath"
"testing"
"time"
"github.com/steveyegge/beads/internal/deletions"
"github.com/steveyegge/beads/internal/storage/sqlite"
"github.com/steveyegge/beads/internal/types"
)
// TestAutoImportPurgesBugBd4pv tests that auto-import doesn't incorrectly purge
// issues due to git history backfill finding them in old commits.
// This is a reproduction test for bd-4pv.
//
// Setup: a real git repo with 5 committed issues, then an import into an empty
// DB with NoGitHistory=false (the buggy configuration). All 5 issues must
// survive; with the bug, git history "finds" them and purges them.
func TestAutoImportPurgesBugBd4pv(t *testing.T) {
	// Create a temp directory for a test git repo
	tmpDir := t.TempDir()
	repoDir := filepath.Join(tmpDir, "test-repo")
	beadsDir := filepath.Join(repoDir, ".beads")
	// Initialize git repo
	if err := os.MkdirAll(repoDir, 0755); err != nil {
		t.Fatalf("failed to create repo dir: %v", err)
	}
	cmd := exec.Command("git", "init")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to init git repo: %v\n%s", err, out)
	}
	// Configure git user for commits (required for git commit to succeed)
	cmd = exec.Command("git", "config", "user.email", "test@test.com")
	cmd.Dir = repoDir
	if err := cmd.Run(); err != nil {
		t.Fatalf("failed to config git email: %v", err)
	}
	cmd = exec.Command("git", "config", "user.name", "Test User")
	cmd.Dir = repoDir
	if err := cmd.Run(); err != nil {
		t.Fatalf("failed to config git name: %v", err)
	}
	// Create .beads directory
	if err := os.MkdirAll(beadsDir, 0755); err != nil {
		t.Fatalf("failed to create .beads dir: %v", err)
	}
	// Create initial issues.jsonl with 5 issues
	jsonlPath := filepath.Join(beadsDir, "issues.jsonl")
	initialContent := `{"id":"bd-abc1","title":"Issue 1","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-abc2","title":"Issue 2","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-abc3","title":"Issue 3","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-abc4","title":"Issue 4","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-abc5","title":"Issue 5","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
`
	if err := os.WriteFile(jsonlPath, []byte(initialContent), 0644); err != nil {
		t.Fatalf("failed to write initial JSONL: %v", err)
	}
	// Commit the initial state so the IDs are visible in git history
	cmd = exec.Command("git", "add", ".beads/issues.jsonl")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to git add: %v\n%s", err, out)
	}
	cmd = exec.Command("git", "commit", "-m", "Initial issues")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to git commit: %v\n%s", err, out)
	}
	// Now simulate what happens during auto-import:
	// 1. Database is empty
	// 2. Auto-import detects issues in git and imports them
	ctx := context.Background()
	dbPath := filepath.Join(beadsDir, "beads.db")
	store, err := sqlite.New(ctx, dbPath)
	if err != nil {
		t.Fatalf("failed to create store: %v", err)
	}
	defer store.Close()
	// Set up prefix
	if err := store.SetConfig(ctx, "issue_prefix", "bd"); err != nil {
		t.Fatalf("failed to set prefix: %v", err)
	}
	// Parse the JSONL issues (same IDs as the committed JSONL above)
	now := time.Now()
	issues := []*types.Issue{
		{ID: "bd-abc1", Title: "Issue 1", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-abc2", Title: "Issue 2", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-abc3", Title: "Issue 3", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-abc4", Title: "Issue 4", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-abc5", Title: "Issue 5", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
	}
	// Do the import WITHOUT NoGitHistory (the buggy behavior)
	opts := Options{
		DryRun: false,
		SkipUpdate: false,
		SkipPrefixValidation: true,
		NoGitHistory: false, // Bug: should be true for auto-import
	}
	result, err := ImportIssues(ctx, dbPath, store, issues, opts)
	if err != nil {
		t.Fatalf("import failed: %v", err)
	}
	// Check how many issues are in the database
	allIssues, err := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("failed to search issues: %v", err)
	}
	// With the bug, some or all issues might be purged
	// because git history finds them in the commit and thinks they were "deleted"
	t.Logf("Import result: created=%d, updated=%d, purged=%d, purgedIDs=%v",
		result.Created, result.Updated, result.Purged, result.PurgedIDs)
	t.Logf("Issues in DB after import: %d", len(allIssues))
	// The correct behavior is 5 issues in DB
	// The bug would result in fewer (potentially 0) due to incorrect purging
	if len(allIssues) != 5 {
		t.Errorf("Expected 5 issues in DB, got %d. This is the bd-4pv bug!", len(allIssues))
		t.Logf("Purged IDs: %v", result.PurgedIDs)
	}
}
// TestGitHistoryBackfillPurgesLocalIssues tests the scenario where git history
// backfill incorrectly purges issues that exist locally but were never in the remote JSONL.
// This is another aspect of the bd-4pv bug.
//
// Setup: a git repo whose committed JSONL contains one shared issue, plus an
// empty deletions.jsonl; the DB holds that shared issue and 4 local-only ones.
// An import of just the shared issue (NoGitHistory=false) must not purge the
// local issues: they're absent from both the manifest and git history.
func TestGitHistoryBackfillPurgesLocalIssues(t *testing.T) {
	// Create a temp directory for a test git repo
	tmpDir := t.TempDir()
	repoDir := filepath.Join(tmpDir, "test-repo")
	beadsDir := filepath.Join(repoDir, ".beads")
	// Initialize git repo
	if err := os.MkdirAll(repoDir, 0755); err != nil {
		t.Fatalf("failed to create repo dir: %v", err)
	}
	cmd := exec.Command("git", "init")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to init git repo: %v\n%s", err, out)
	}
	// Configure git user for commits (required for git commit to succeed)
	cmd = exec.Command("git", "config", "user.email", "test@test.com")
	cmd.Dir = repoDir
	if err := cmd.Run(); err != nil {
		t.Fatalf("failed to config git email: %v", err)
	}
	cmd = exec.Command("git", "config", "user.name", "Test User")
	cmd.Dir = repoDir
	if err := cmd.Run(); err != nil {
		t.Fatalf("failed to config git name: %v", err)
	}
	// Create .beads directory
	if err := os.MkdirAll(beadsDir, 0755); err != nil {
		t.Fatalf("failed to create .beads dir: %v", err)
	}
	// Create initial issues.jsonl with 1 issue
	jsonlPath := filepath.Join(beadsDir, "issues.jsonl")
	initialContent := `{"id":"bd-shared1","title":"Shared Issue","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
`
	if err := os.WriteFile(jsonlPath, []byte(initialContent), 0644); err != nil {
		t.Fatalf("failed to write initial JSONL: %v", err)
	}
	// Create empty deletions.jsonl (no recorded deletions)
	deletionsPath := deletions.DefaultPath(beadsDir)
	if err := os.WriteFile(deletionsPath, []byte(""), 0644); err != nil {
		t.Fatalf("failed to write deletions: %v", err)
	}
	// Commit the initial state so bd-shared1 is visible in git history
	cmd = exec.Command("git", "add", ".beads/")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to git add: %v\n%s", err, out)
	}
	cmd = exec.Command("git", "commit", "-m", "Initial issues")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to git commit: %v\n%s", err, out)
	}
	// Create database with the shared issue AND local issues
	ctx := context.Background()
	dbPath := filepath.Join(beadsDir, "beads.db")
	store, err := sqlite.New(ctx, dbPath)
	if err != nil {
		t.Fatalf("failed to create store: %v", err)
	}
	defer store.Close()
	// Set up prefix
	if err := store.SetConfig(ctx, "issue_prefix", "bd"); err != nil {
		t.Fatalf("failed to set prefix: %v", err)
	}
	// Create issues in DB: 1 shared (in JSONL) + 4 local-only
	now := time.Now()
	dbIssues := []*types.Issue{
		{ID: "bd-shared1", Title: "Shared Issue", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-local1", Title: "Local Issue 1", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-local2", Title: "Local Issue 2", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-local3", Title: "Local Issue 3", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-local4", Title: "Local Issue 4", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
	}
	for _, issue := range dbIssues {
		issue.ContentHash = issue.ComputeContentHash()
		if err := store.CreateIssue(ctx, issue, "test"); err != nil {
			t.Fatalf("failed to create issue %s: %v", issue.ID, err)
		}
	}
	// Verify DB has 5 issues (sanity check before the import under test)
	allBefore, err := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("failed to search issues: %v", err)
	}
	if len(allBefore) != 5 {
		t.Fatalf("Expected 5 issues before import, got %d", len(allBefore))
	}
	// Now import from JSONL (only has 1 issue: bd-shared1)
	// WITHOUT NoGitHistory - this is the bug
	incomingIssues := []*types.Issue{
		{ID: "bd-shared1", Title: "Shared Issue", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
	}
	opts := Options{
		DryRun: false,
		SkipUpdate: false,
		SkipPrefixValidation: true,
		NoGitHistory: false, // Bug: local issues might be purged if they appear in git history
	}
	result, err := ImportIssues(ctx, dbPath, store, incomingIssues, opts)
	if err != nil {
		t.Fatalf("import failed: %v", err)
	}
	t.Logf("Import result: created=%d, updated=%d, unchanged=%d, purged=%d, purgedIDs=%v",
		result.Created, result.Updated, result.Unchanged, result.Purged, result.PurgedIDs)
	// Check how many issues are in the database
	allAfter, err := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("failed to search issues: %v", err)
	}
	t.Logf("Issues in DB after import: %d", len(allAfter))
	for _, issue := range allAfter {
		t.Logf(" - %s: %s", issue.ID, issue.Title)
	}
	// Expected: bd-shared1 + bd-local1..4 = 5 issues
	// The local issues should NOT be purged because:
	// 1. They're not in the deletions manifest
	// 2. They were never in git history (they're local-only)
	// 3. NoGitHistory=false but git history check shouldn't find bd-local* in history
	if len(allAfter) != 5 {
		t.Errorf("Expected 5 issues in DB, got %d. Local issues may have been incorrectly purged!", len(allAfter))
	}
	// Should have no purges (bd-local* were never in git history)
	if result.Purged != 0 {
		t.Errorf("Expected 0 purged issues, got %d (IDs: %v)", result.Purged, result.PurgedIDs)
	}
}
// TestNoGitHistoryPreventsIncorrectPurge tests that setting NoGitHistory prevents
// the purge of issues that exist in the DB but not in JSONL during auto-import.
// This is the fix for bd-4pv - auto-import should NOT run git history backfill.
//
// Scenario: the committed JSONL contains only bd-shared1 while the database
// also holds four local-only issues; with NoGitHistory=true all five must
// survive the import.
func TestNoGitHistoryPreventsIncorrectPurge(t *testing.T) {
	// Create a temp directory for a test git repo
	tmpDir := t.TempDir()
	repoDir := filepath.Join(tmpDir, "test-repo")
	beadsDir := filepath.Join(repoDir, ".beads")
	// Initialize git repo
	if err := os.MkdirAll(repoDir, 0755); err != nil {
		t.Fatalf("failed to create repo dir: %v", err)
	}
	cmd := exec.Command("git", "init")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to init git repo: %v\n%s", err, out)
	}
	// Configure git user for commits (required for `git commit` below).
	cmd = exec.Command("git", "config", "user.email", "test@test.com")
	cmd.Dir = repoDir
	if err := cmd.Run(); err != nil {
		t.Fatalf("failed to config git email: %v", err)
	}
	cmd = exec.Command("git", "config", "user.name", "Test User")
	cmd.Dir = repoDir
	if err := cmd.Run(); err != nil {
		t.Fatalf("failed to config git name: %v", err)
	}
	// Create .beads directory
	if err := os.MkdirAll(beadsDir, 0755); err != nil {
		t.Fatalf("failed to create .beads dir: %v", err)
	}
	// Create issues.jsonl with 1 issue — a strict subset of the DB contents.
	jsonlPath := filepath.Join(beadsDir, "issues.jsonl")
	initialContent := `{"id":"bd-shared1","title":"Shared Issue","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
`
	if err := os.WriteFile(jsonlPath, []byte(initialContent), 0644); err != nil {
		t.Fatalf("failed to write initial JSONL: %v", err)
	}
	// Create empty deletions.jsonl so no issue is flagged as deleted.
	deletionsPath := deletions.DefaultPath(beadsDir)
	if err := os.WriteFile(deletionsPath, []byte(""), 0644); err != nil {
		t.Fatalf("failed to write deletions: %v", err)
	}
	// Commit the JSONL so it is part of git history.
	cmd = exec.Command("git", "add", ".beads/")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to git add: %v\n%s", err, out)
	}
	cmd = exec.Command("git", "commit", "-m", "Initial issues")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to git commit: %v\n%s", err, out)
	}
	// Create database with 5 issues (1 shared + 4 local-only)
	ctx := context.Background()
	dbPath := filepath.Join(beadsDir, "beads.db")
	store, err := sqlite.New(ctx, dbPath)
	if err != nil {
		t.Fatalf("failed to create store: %v", err)
	}
	defer store.Close()
	// Set up prefix
	if err := store.SetConfig(ctx, "issue_prefix", "bd"); err != nil {
		t.Fatalf("failed to set prefix: %v", err)
	}
	// Create all 5 issues in DB
	now := time.Now()
	dbIssues := []*types.Issue{
		{ID: "bd-shared1", Title: "Shared Issue", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-local1", Title: "Local Issue 1", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-local2", Title: "Local Issue 2", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-local3", Title: "Local Issue 3", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-local4", Title: "Local Issue 4", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
	}
	for _, issue := range dbIssues {
		issue.ContentHash = issue.ComputeContentHash()
		if err := store.CreateIssue(ctx, issue, "test"); err != nil {
			t.Fatalf("failed to create issue %s: %v", issue.ID, err)
		}
	}
	// Import from JSONL (only has 1 issue) WITH NoGitHistory=true (the fix)
	incomingIssues := []*types.Issue{
		{ID: "bd-shared1", Title: "Shared Issue", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
	}
	opts := Options{
		DryRun:               false,
		SkipUpdate:           false,
		SkipPrefixValidation: true,
		NoGitHistory:         true, // Fix: skip git history backfill during auto-import
	}
	result, err := ImportIssues(ctx, dbPath, store, incomingIssues, opts)
	if err != nil {
		t.Fatalf("import failed: %v", err)
	}
	t.Logf("Import result: created=%d, updated=%d, unchanged=%d, purged=%d, purgedIDs=%v",
		result.Created, result.Updated, result.Unchanged, result.Purged, result.PurgedIDs)
	// Check how many issues are in the database
	allAfter, err := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("failed to search issues: %v", err)
	}
	t.Logf("Issues in DB after import: %d", len(allAfter))
	for _, issue := range allAfter {
		t.Logf("  - %s: %s", issue.ID, issue.Title)
	}
	// With NoGitHistory=true, the 4 local issues should NOT be purged
	// because we skip git history backfill entirely during auto-import.
	// This is the correct behavior for auto-import - local work should be preserved.
	// Expected: all 5 issues remain
	if len(allAfter) != 5 {
		t.Errorf("Expected 5 issues in DB (local work preserved), got %d", len(allAfter))
	}
	// Should have no purges
	if result.Purged != 0 {
		t.Errorf("Expected 0 purged issues (NoGitHistory prevents purge), got %d (IDs: %v)", result.Purged, result.PurgedIDs)
	}
}
// TestAutoImportWithNoGitHistoryFlag tests the fix for bd-4pv: an auto-import
// run with NoGitHistory=true must import the full JSONL contents without
// purging anything via git-history backfill.
//
// Fix in this revision: the two `git config` steps previously used cmd.Run()
// and discarded git's output, unlike every other git invocation in this test;
// a failure there lost its diagnostics. They now use CombinedOutput and
// include the output in the failure message, consistently with the rest.
func TestAutoImportWithNoGitHistoryFlag(t *testing.T) {
	// Create a temp directory for a test git repo
	tmpDir := t.TempDir()
	repoDir := filepath.Join(tmpDir, "test-repo")
	beadsDir := filepath.Join(repoDir, ".beads")
	// Initialize git repo
	if err := os.MkdirAll(repoDir, 0755); err != nil {
		t.Fatalf("failed to create repo dir: %v", err)
	}
	cmd := exec.Command("git", "init")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to init git repo: %v\n%s", err, out)
	}
	// Configure git user for commits; capture output so failures are diagnosable.
	cmd = exec.Command("git", "config", "user.email", "test@test.com")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to config git email: %v\n%s", err, out)
	}
	cmd = exec.Command("git", "config", "user.name", "Test User")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to config git name: %v\n%s", err, out)
	}
	// Create .beads directory
	if err := os.MkdirAll(beadsDir, 0755); err != nil {
		t.Fatalf("failed to create .beads dir: %v", err)
	}
	// Create initial issues.jsonl with 5 issues
	jsonlPath := filepath.Join(beadsDir, "issues.jsonl")
	initialContent := `{"id":"bd-xyz1","title":"Issue 1","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-xyz2","title":"Issue 2","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-xyz3","title":"Issue 3","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-xyz4","title":"Issue 4","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-xyz5","title":"Issue 5","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
`
	if err := os.WriteFile(jsonlPath, []byte(initialContent), 0644); err != nil {
		t.Fatalf("failed to write initial JSONL: %v", err)
	}
	// Also create a deletions.jsonl (empty)
	deletionsPath := deletions.DefaultPath(beadsDir)
	if err := os.WriteFile(deletionsPath, []byte(""), 0644); err != nil {
		t.Fatalf("failed to write deletions: %v", err)
	}
	// Commit the initial state
	cmd = exec.Command("git", "add", ".beads/")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to git add: %v\n%s", err, out)
	}
	cmd = exec.Command("git", "commit", "-m", "Initial issues")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to git commit: %v\n%s", err, out)
	}
	ctx := context.Background()
	dbPath := filepath.Join(beadsDir, "beads.db")
	store, err := sqlite.New(ctx, dbPath)
	if err != nil {
		t.Fatalf("failed to create store: %v", err)
	}
	defer store.Close()
	// Set up prefix
	if err := store.SetConfig(ctx, "issue_prefix", "bd"); err != nil {
		t.Fatalf("failed to set prefix: %v", err)
	}
	// Parse the JSONL issues
	now := time.Now()
	issues := []*types.Issue{
		{ID: "bd-xyz1", Title: "Issue 1", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-xyz2", Title: "Issue 2", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-xyz3", Title: "Issue 3", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-xyz4", Title: "Issue 4", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-xyz5", Title: "Issue 5", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
	}
	// Do the import WITH NoGitHistory (the fix)
	opts := Options{
		DryRun:               false,
		SkipUpdate:           false,
		SkipPrefixValidation: true,
		NoGitHistory:         true, // Fix: skip git history backfill during auto-import
	}
	result, err := ImportIssues(ctx, dbPath, store, issues, opts)
	if err != nil {
		t.Fatalf("import failed: %v", err)
	}
	// Check how many issues are in the database
	allIssues, err := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("failed to search issues: %v", err)
	}
	t.Logf("Import result: created=%d, updated=%d, purged=%d",
		result.Created, result.Updated, result.Purged)
	t.Logf("Issues in DB after import: %d", len(allIssues))
	// With the fix, all 5 issues should be in DB
	if len(allIssues) != 5 {
		t.Errorf("Expected 5 issues in DB, got %d", len(allIssues))
	}
	// Should have no purges
	if result.Purged != 0 {
		t.Errorf("Expected 0 purged issues, got %d", result.Purged)
	}
}
// TestMassDeletionSafetyGuard tests the fix for bd-21a where git-history-backfill
// would incorrectly purge the entire database when a JSONL was reset.
// The safety guard should abort if >50% of issues would be deleted.
//
// Flow: seed 10 issues via JSONL + DB, reset the committed JSONL down to 2
// issues (an 80% drop), re-import with git history enabled, and verify the
// guard keeps all 10 issues intact with zero purges.
func TestMassDeletionSafetyGuard(t *testing.T) {
	// Create a temp directory for a test git repo
	tmpDir := t.TempDir()
	repoDir := filepath.Join(tmpDir, "test-repo")
	beadsDir := filepath.Join(repoDir, ".beads")
	// Initialize git repo
	if err := os.MkdirAll(repoDir, 0755); err != nil {
		t.Fatalf("failed to create repo dir: %v", err)
	}
	cmd := exec.Command("git", "init")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to init git repo: %v\n%s", err, out)
	}
	// Configure git user for commits
	cmd = exec.Command("git", "config", "user.email", "test@test.com")
	cmd.Dir = repoDir
	if err := cmd.Run(); err != nil {
		t.Fatalf("failed to config git email: %v", err)
	}
	cmd = exec.Command("git", "config", "user.name", "Test User")
	cmd.Dir = repoDir
	if err := cmd.Run(); err != nil {
		t.Fatalf("failed to config git name: %v", err)
	}
	// Create .beads directory
	if err := os.MkdirAll(beadsDir, 0755); err != nil {
		t.Fatalf("failed to create .beads dir: %v", err)
	}
	// Create initial issues.jsonl with 10 issues
	jsonlPath := filepath.Join(beadsDir, "issues.jsonl")
	initialContent := `{"id":"bd-mass01","title":"Issue 1","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-mass02","title":"Issue 2","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-mass03","title":"Issue 3","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-mass04","title":"Issue 4","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-mass05","title":"Issue 5","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-mass06","title":"Issue 6","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-mass07","title":"Issue 7","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-mass08","title":"Issue 8","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-mass09","title":"Issue 9","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-mass10","title":"Issue 10","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
`
	if err := os.WriteFile(jsonlPath, []byte(initialContent), 0644); err != nil {
		t.Fatalf("failed to write initial JSONL: %v", err)
	}
	// Also create a deletions.jsonl (empty)
	deletionsPath := deletions.DefaultPath(beadsDir)
	if err := os.WriteFile(deletionsPath, []byte(""), 0644); err != nil {
		t.Fatalf("failed to write deletions: %v", err)
	}
	// Commit the initial state
	cmd = exec.Command("git", "add", ".beads/")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to git add: %v\n%s", err, out)
	}
	cmd = exec.Command("git", "commit", "-m", "Initial issues with 10 entries")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to git commit: %v\n%s", err, out)
	}
	ctx := context.Background()
	dbPath := filepath.Join(beadsDir, "beads.db")
	store, err := sqlite.New(ctx, dbPath)
	if err != nil {
		t.Fatalf("failed to create store: %v", err)
	}
	defer store.Close()
	// Set up prefix
	if err := store.SetConfig(ctx, "issue_prefix", "bd"); err != nil {
		t.Fatalf("failed to set prefix: %v", err)
	}
	// First, import all 10 issues to the database
	now := time.Now()
	allIssues := []*types.Issue{
		{ID: "bd-mass01", Title: "Issue 1", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-mass02", Title: "Issue 2", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-mass03", Title: "Issue 3", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-mass04", Title: "Issue 4", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-mass05", Title: "Issue 5", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-mass06", Title: "Issue 6", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-mass07", Title: "Issue 7", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-mass08", Title: "Issue 8", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-mass09", Title: "Issue 9", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-mass10", Title: "Issue 10", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
	}
	// Initial import - NoGitHistory to just populate the DB
	opts := Options{
		DryRun:               false,
		SkipUpdate:           false,
		SkipPrefixValidation: true,
		NoGitHistory:         true,
	}
	_, err = ImportIssues(ctx, dbPath, store, allIssues, opts)
	if err != nil {
		t.Fatalf("initial import failed: %v", err)
	}
	// Verify all 10 issues are in DB
	dbIssues, err := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("failed to search issues: %v", err)
	}
	if len(dbIssues) != 10 {
		t.Fatalf("Expected 10 issues after initial import, got %d", len(dbIssues))
	}
	// Now simulate a "reset" scenario:
	// JSONL is reset to only have 2 issues (80% would be deleted)
	resetContent := `{"id":"bd-mass01","title":"Issue 1","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
{"id":"bd-mass02","title":"Issue 2","status":"open","priority":1,"issue_type":"task","created_at":"2025-01-01T00:00:00Z","updated_at":"2025-01-01T00:00:00Z"}
`
	if err := os.WriteFile(jsonlPath, []byte(resetContent), 0644); err != nil {
		t.Fatalf("failed to write reset JSONL: %v", err)
	}
	// Commit the reset state
	cmd = exec.Command("git", "add", ".beads/issues.jsonl")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to git add reset: %v\n%s", err, out)
	}
	cmd = exec.Command("git", "commit", "-m", "Reset JSONL to 2 issues")
	cmd.Dir = repoDir
	if out, err := cmd.CombinedOutput(); err != nil {
		t.Fatalf("failed to git commit reset: %v\n%s", err, out)
	}
	// Now try to import the reset JSONL WITH git history enabled
	// This should trigger the safety guard since 8/10 = 80% > 50%
	resetIssues := []*types.Issue{
		{ID: "bd-mass01", Title: "Issue 1", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
		{ID: "bd-mass02", Title: "Issue 2", Status: types.StatusOpen, Priority: 1, IssueType: types.TypeTask, CreatedAt: now, UpdatedAt: now},
	}
	opts = Options{
		DryRun:               false,
		SkipUpdate:           false,
		SkipPrefixValidation: true,
		NoGitHistory:         false, // Enable git history - this is the test!
	}
	result, err := ImportIssues(ctx, dbPath, store, resetIssues, opts)
	if err != nil {
		t.Fatalf("import failed: %v", err)
	}
	// The safety guard should have prevented any purges
	// because 8/10 = 80% > 50% threshold
	t.Logf("Import result: created=%d, updated=%d, unchanged=%d, purged=%d",
		result.Created, result.Updated, result.Unchanged, result.Purged)
	// Verify all 10 issues are STILL in DB (safety guard prevented deletion)
	dbIssues, err = store.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("failed to search issues after reset import: %v", err)
	}
	t.Logf("Issues in DB after reset import: %d", len(dbIssues))
	if len(dbIssues) != 10 {
		t.Errorf("Expected 10 issues in DB (safety guard should prevent purge), got %d", len(dbIssues))
	}
	if result.Purged != 0 {
		t.Errorf("Expected 0 purged issues (safety guard), got %d (IDs: %v)", result.Purged, result.PurgedIDs)
	}
}
-259
View File
@@ -9,7 +9,6 @@ import (
"testing"
"time"
"github.com/steveyegge/beads/internal/deletions"
"github.com/steveyegge/beads/internal/storage/sqlite"
"github.com/steveyegge/beads/internal/types"
)
@@ -1075,88 +1074,6 @@ func TestConcurrentExternalRefImports(t *testing.T) {
})
}
// TestCheckGitHistoryForDeletions_EmptyList verifies that a nil or
// zero-length ID list yields a nil result.
func TestCheckGitHistoryForDeletions_EmptyList(t *testing.T) {
	if got := checkGitHistoryForDeletions("/tmp/test", nil); got != nil {
		t.Errorf("Expected nil for empty list, got %v", got)
	}
	if got := checkGitHistoryForDeletions("/tmp/test", []string{}); got != nil {
		t.Errorf("Expected nil for empty slice, got %v", got)
	}
}
// TestCheckGitHistoryForDeletions_NonGitDir verifies the conservative
// behavior outside a git repository: nothing is reported as deleted.
func TestCheckGitHistoryForDeletions_NonGitDir(t *testing.T) {
	dir := t.TempDir()
	if got := checkGitHistoryForDeletions(dir, []string{"bd-test"}); len(got) != 0 {
		t.Errorf("Expected empty result for non-git dir, got %v", got)
	}
}
// TestWasEverInJSONL_NonGitDir verifies the conservative behavior outside a
// git repository: the ID is reported as never having been in the JSONL.
func TestWasEverInJSONL_NonGitDir(t *testing.T) {
	dir := t.TempDir()
	if wasEverInJSONL(dir, ".beads/beads.jsonl", "bd-test") {
		t.Error("Expected false for non-git dir")
	}
}
// TestBatchCheckGitHistory_NonGitDir verifies that the batch check returns
// nothing outside a git repository (callers fall back to individual checks).
func TestBatchCheckGitHistory_NonGitDir(t *testing.T) {
	dir := t.TempDir()
	got := batchCheckGitHistory(dir, ".beads/beads.jsonl", []string{"bd-test1", "bd-test2"})
	if len(got) != 0 {
		t.Errorf("Expected empty result for non-git dir, got %v", got)
	}
}
// TestConvertDeletionToTombstone verifies the field-by-field mapping from a
// legacy deletions.jsonl record to a tombstone issue: identity, status,
// placeholder title, deletion metadata, and the defaults used for fields the
// deletion record cannot supply (priority, type).
func TestConvertDeletionToTombstone(t *testing.T) {
	ts := time.Date(2025, 12, 5, 14, 30, 0, 0, time.UTC)
	del := deletions.DeletionRecord{
		ID:        "bd-test",
		Timestamp: ts,
		Actor:     "alice",
		Reason:    "no longer needed",
	}
	tombstone := convertDeletionToTombstone("bd-test", del)
	if tombstone.ID != "bd-test" {
		t.Errorf("Expected ID 'bd-test', got %q", tombstone.ID)
	}
	if tombstone.Status != types.StatusTombstone {
		t.Errorf("Expected status 'tombstone', got %q", tombstone.Status)
	}
	if tombstone.Title != "(deleted)" {
		t.Errorf("Expected title '(deleted)', got %q", tombstone.Title)
	}
	// Deletion metadata must carry over from the record verbatim.
	if tombstone.DeletedAt == nil || !tombstone.DeletedAt.Equal(ts) {
		t.Errorf("Expected DeletedAt to be %v, got %v", ts, tombstone.DeletedAt)
	}
	if tombstone.DeletedBy != "alice" {
		t.Errorf("Expected DeletedBy 'alice', got %q", tombstone.DeletedBy)
	}
	if tombstone.DeleteReason != "no longer needed" {
		t.Errorf("Expected DeleteReason 'no longer needed', got %q", tombstone.DeleteReason)
	}
	if tombstone.OriginalType != "" {
		t.Errorf("Expected empty OriginalType, got %q", tombstone.OriginalType)
	}
	// Verify priority uses zero to indicate unknown (bd-9auw)
	if tombstone.Priority != 0 {
		t.Errorf("Expected Priority 0 (unknown), got %d", tombstone.Priority)
	}
	// IssueType must be valid for validation, so it defaults to task
	if tombstone.IssueType != types.TypeTask {
		t.Errorf("Expected IssueType 'task', got %q", tombstone.IssueType)
	}
}
func TestImportIssues_TombstoneFromJSONL(t *testing.T) {
ctx := context.Background()
@@ -1225,182 +1142,6 @@ func TestImportIssues_TombstoneFromJSONL(t *testing.T) {
}
}
// TestImportIssues_TombstoneNotFilteredByDeletionsManifest verifies that a
// JSONL tombstone for an issue that also appears in deletions.jsonl is still
// imported: tombstones bypass the deletions-manifest skip filter.
func TestImportIssues_TombstoneNotFilteredByDeletionsManifest(t *testing.T) {
	ctx := context.Background()
	tmpDir := t.TempDir()
	tmpDB := tmpDir + "/test.db"
	store, err := sqlite.New(context.Background(), tmpDB)
	if err != nil {
		t.Fatalf("Failed to create store: %v", err)
	}
	defer store.Close()
	if err := store.SetConfig(ctx, "issue_prefix", "test"); err != nil {
		t.Fatalf("Failed to set prefix: %v", err)
	}
	// Create a deletions manifest entry for the same ID the tombstone uses.
	deletionsPath := deletions.DefaultPath(tmpDir)
	delRecord := deletions.DeletionRecord{
		ID:        "test-abc123",
		Timestamp: time.Now().Add(-time.Hour),
		Actor:     "alice",
		Reason:    "old deletion",
	}
	if err := deletions.AppendDeletion(deletionsPath, delRecord); err != nil {
		t.Fatalf("Failed to write deletion record: %v", err)
	}
	// Create a tombstone in JSONL for the same issue
	deletedAt := time.Now()
	tombstone := &types.Issue{
		ID:           "test-abc123",
		Title:        "(deleted)",
		Status:       types.StatusTombstone,
		Priority:     2,
		IssueType:    types.TypeTask,
		CreatedAt:    time.Now().Add(-24 * time.Hour),
		UpdatedAt:    deletedAt,
		DeletedAt:    &deletedAt,
		DeletedBy:    "bob",
		DeleteReason: "JSONL tombstone",
	}
	result, err := ImportIssues(ctx, tmpDB, store, []*types.Issue{tombstone}, Options{})
	if err != nil {
		t.Fatalf("Import failed: %v", err)
	}
	// The tombstone should be imported (not filtered by deletions manifest)
	if result.Created != 1 {
		t.Errorf("Expected 1 created (tombstone), got %d", result.Created)
	}
	if result.SkippedDeleted != 0 {
		t.Errorf("Expected 0 skipped deleted (tombstone should not be filtered), got %d", result.SkippedDeleted)
	}
}
// TestImportIssues_LegacyDeletionsConvertedToTombstones tests that entries in
// deletions.jsonl are converted to tombstones during import (bd-hp0m).
//
// Setup: one regular issue plus one issue whose ID is in deletions.jsonl.
// Expected: the regular issue is created; the deleted issue is skipped and a
// tombstone carrying the deletion record's actor/reason is created instead.
func TestImportIssues_LegacyDeletionsConvertedToTombstones(t *testing.T) {
	ctx := context.Background()
	tmpDir := t.TempDir()
	tmpDB := tmpDir + "/test.db"
	store, err := sqlite.New(context.Background(), tmpDB)
	if err != nil {
		t.Fatalf("Failed to create store: %v", err)
	}
	defer store.Close()
	if err := store.SetConfig(ctx, "issue_prefix", "test"); err != nil {
		t.Fatalf("Failed to set prefix: %v", err)
	}
	// Create a deletions manifest with one entry
	deletionsPath := deletions.DefaultPath(tmpDir)
	deleteTime := time.Now().Add(-time.Hour)
	del := deletions.DeletionRecord{
		ID:        "test-abc",
		Timestamp: deleteTime,
		Actor:     "alice",
		Reason:    "duplicate of test-xyz",
	}
	if err := deletions.AppendDeletion(deletionsPath, del); err != nil {
		t.Fatalf("Failed to write deletion record: %v", err)
	}
	// Create a regular issue (not in deletions)
	regularIssue := &types.Issue{
		ID:        "test-def",
		Title:     "Regular issue",
		Status:    types.StatusOpen,
		Priority:  2,
		IssueType: types.TypeTask,
		CreatedAt: time.Now().Add(-24 * time.Hour),
		UpdatedAt: time.Now(),
	}
	// Create an issue that's in the deletions manifest (non-tombstone)
	deletedIssue := &types.Issue{
		ID:        "test-abc",
		Title:     "This will be skipped and converted",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeBug,
		CreatedAt: time.Now().Add(-48 * time.Hour),
		UpdatedAt: time.Now().Add(-2 * time.Hour),
	}
	// Import both issues
	result, err := ImportIssues(ctx, tmpDB, store, []*types.Issue{regularIssue, deletedIssue}, Options{})
	if err != nil {
		t.Fatalf("Import failed: %v", err)
	}
	// Regular issue should be created
	// The deleted issue is skipped (in deletions manifest), but a tombstone is created from deletions.jsonl
	// So we expect: 1 regular + 1 tombstone = 2 created
	if result.Created != 2 {
		t.Errorf("Expected 2 created (1 regular + 1 tombstone from deletions.jsonl), got %d", result.Created)
	}
	if result.SkippedDeleted != 1 {
		t.Errorf("Expected 1 skipped deleted (issue in deletions.jsonl), got %d", result.SkippedDeleted)
	}
	// Verify ConvertedToTombstone counter (bd-wucl)
	if result.ConvertedToTombstone != 1 {
		t.Errorf("Expected 1 converted to tombstone, got %d", result.ConvertedToTombstone)
	}
	if len(result.ConvertedTombstoneIDs) != 1 || result.ConvertedTombstoneIDs[0] != "test-abc" {
		t.Errorf("Expected ConvertedTombstoneIDs [test-abc], got %v", result.ConvertedTombstoneIDs)
	}
	// Verify regular issue was imported
	issues, err := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("Failed to search issues: %v", err)
	}
	foundRegular := false
	for _, i := range issues {
		if i.ID == "test-def" {
			foundRegular = true
		}
	}
	if !foundRegular {
		t.Error("Regular issue not found after import")
	}
	// Verify tombstone was created from deletions.jsonl
	// (default search hides tombstones, so opt in explicitly).
	allIssues, err := store.SearchIssues(ctx, "", types.IssueFilter{IncludeTombstones: true})
	if err != nil {
		t.Fatalf("Failed to search all issues: %v", err)
	}
	var tombstone *types.Issue
	for _, i := range allIssues {
		if i.ID == "test-abc" {
			tombstone = i
			break
		}
	}
	// test-abc should be a tombstone (was in JSONL and deletions)
	if tombstone == nil {
		t.Fatal("Expected tombstone for test-abc not found")
	}
	if tombstone.Status != types.StatusTombstone {
		t.Errorf("Expected test-abc to be tombstone, got status %q", tombstone.Status)
	}
	if tombstone.DeletedBy != "alice" {
		t.Errorf("Expected DeletedBy 'alice', got %q", tombstone.DeletedBy)
	}
	if tombstone.DeleteReason != "duplicate of test-xyz" {
		t.Errorf("Expected DeleteReason 'duplicate of test-xyz', got %q", tombstone.DeleteReason)
	}
}
// TestImportOrphanSkip_CountMismatch verifies that orphaned issues are properly
// skipped during import and tracked in the result count (bd-ckej).
//
-354
View File
@@ -1,354 +0,0 @@
package importer
import (
"context"
"os"
"path/filepath"
"testing"
"time"
"github.com/steveyegge/beads/internal/deletions"
"github.com/steveyegge/beads/internal/storage/sqlite"
"github.com/steveyegge/beads/internal/types"
)
// TestPurgeDeletedIssues tests that issues in the deletions manifest are converted to tombstones during import.
//
// Setup: three issues in the DB — one present in JSONL, one closed issue
// listed in deletions.jsonl, and one local-only open issue. Only the listed
// closed issue may be tombstoned; the other two must survive.
func TestPurgeDeletedIssues(t *testing.T) {
	ctx := context.Background()
	tmpDir := t.TempDir()
	// Create database
	dbPath := filepath.Join(tmpDir, "beads.db")
	store, err := sqlite.New(ctx, dbPath)
	if err != nil {
		t.Fatalf("failed to create database: %v", err)
	}
	defer store.Close()
	// Initialize prefix
	if err := store.SetConfig(ctx, "issue_prefix", "test"); err != nil {
		t.Fatalf("failed to set prefix: %v", err)
	}
	// Create some issues in the database
	issue1 := &types.Issue{
		ID:        "test-abc",
		Title:     "Issue 1",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
	}
	// issue2 is CLOSED so it can be safely deleted (bd-k92d: safety guard prevents deleting open/in_progress)
	closedTime := time.Now().UTC()
	issue2 := &types.Issue{
		ID:        "test-def",
		Title:     "Issue 2",
		Status:    types.StatusClosed,
		Priority:  1,
		IssueType: types.TypeTask,
		ClosedAt:  &closedTime,
	}
	issue3 := &types.Issue{
		ID:        "test-ghi",
		Title:     "Issue 3 (local work)",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
	}
	for _, iss := range []*types.Issue{issue1, issue2, issue3} {
		if err := store.CreateIssue(ctx, iss, "test"); err != nil {
			t.Fatalf("failed to create issue %s: %v", iss.ID, err)
		}
	}
	// Create a deletions manifest with issue2 deleted
	deletionsPath := deletions.DefaultPath(tmpDir)
	delRecord := deletions.DeletionRecord{
		ID:        "test-def",
		Timestamp: time.Now().UTC(),
		Actor:     "test-user",
		Reason:    "test deletion",
	}
	if err := deletions.AppendDeletion(deletionsPath, delRecord); err != nil {
		t.Fatalf("failed to create deletions manifest: %v", err)
	}
	// Simulate import with only issue1 in the JSONL (issue2 was deleted, issue3 is local work)
	jsonlIssues := []*types.Issue{issue1}
	result := &Result{
		IDMapping:        make(map[string]string),
		MismatchPrefixes: make(map[string]int),
	}
	// Call purgeDeletedIssues
	if err := purgeDeletedIssues(ctx, store, dbPath, jsonlIssues, Options{}, result); err != nil {
		t.Fatalf("purgeDeletedIssues failed: %v", err)
	}
	// Verify issue2 was tombstoned (bd-dve: now converts to tombstone instead of hard-delete)
	if result.Purged != 1 {
		t.Errorf("expected 1 purged issue, got %d", result.Purged)
	}
	if len(result.PurgedIDs) != 1 || result.PurgedIDs[0] != "test-def" {
		t.Errorf("expected PurgedIDs to contain 'test-def', got %v", result.PurgedIDs)
	}
	// Verify issue2 is now a tombstone (not hard-deleted)
	// GetIssue returns nil for tombstones by default, so use IncludeTombstones filter
	issues, err := store.SearchIssues(ctx, "", types.IssueFilter{IncludeTombstones: true})
	if err != nil {
		t.Fatalf("SearchIssues failed: %v", err)
	}
	var iss2 *types.Issue
	for _, iss := range issues {
		if iss.ID == "test-def" {
			iss2 = iss
			break
		}
	}
	if iss2 == nil {
		t.Errorf("expected issue2 to exist as tombstone, but it was hard-deleted")
	} else if iss2.Status != types.StatusTombstone {
		t.Errorf("expected issue2 to be a tombstone, got status %q", iss2.Status)
	}
	// Verify issue1 still exists (in JSONL)
	iss1, err := store.GetIssue(ctx, "test-abc")
	if err != nil {
		t.Fatalf("GetIssue failed: %v", err)
	}
	if iss1 == nil {
		t.Errorf("expected issue1 to still exist")
	}
	// Verify issue3 still exists (local work, not in deletions manifest)
	iss3, err := store.GetIssue(ctx, "test-ghi")
	if err != nil {
		t.Fatalf("GetIssue failed: %v", err)
	}
	if iss3 == nil {
		t.Errorf("expected issue3 (local work) to still exist")
	}
}
// TestPurgeDeletedIssues_NoDeletionsManifest confirms that purgeDeletedIssues
// is a harmless no-op when no deletions manifest file exists on disk: nothing
// is purged and the existing issue is left intact.
func TestPurgeDeletedIssues_NoDeletionsManifest(t *testing.T) {
	ctx := context.Background()
	dir := t.TempDir()
	// Open a fresh database in the temp directory.
	path := filepath.Join(dir, "beads.db")
	st, err := sqlite.New(ctx, path)
	if err != nil {
		t.Fatalf("failed to create database: %v", err)
	}
	defer st.Close()
	// Configure the issue prefix.
	if err := st.SetConfig(ctx, "issue_prefix", "test"); err != nil {
		t.Fatalf("failed to set prefix: %v", err)
	}
	// Seed a single issue; deliberately do NOT create a deletions manifest.
	seed := &types.Issue{
		ID:        "test-abc",
		Title:     "Issue 1",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
	}
	if err := st.CreateIssue(ctx, seed, "test"); err != nil {
		t.Fatalf("failed to create issue: %v", err)
	}
	res := &Result{
		IDMapping:        make(map[string]string),
		MismatchPrefixes: make(map[string]int),
	}
	// With no manifest present the call must succeed without error.
	if err := purgeDeletedIssues(ctx, st, path, []*types.Issue{seed}, Options{}, res); err != nil {
		t.Fatalf("purgeDeletedIssues failed: %v", err)
	}
	// Nothing should have been purged.
	if res.Purged != 0 {
		t.Errorf("expected 0 purged issues, got %d", res.Purged)
	}
	// The seeded issue must still be retrievable.
	got, err := st.GetIssue(ctx, "test-abc")
	if err != nil {
		t.Fatalf("GetIssue failed: %v", err)
	}
	if got == nil {
		t.Errorf("expected issue to still exist")
	}
}
// TestPurgeDeletedIssues_ProtectLocalExportIDs tests that issues in ProtectLocalExportIDs
// are not tombstoned even if they're not in the JSONL (bd-sync-deletion fix)
func TestPurgeDeletedIssues_ProtectLocalExportIDs(t *testing.T) {
	ctx := context.Background()
	dir := t.TempDir()

	// Open a fresh database in the temp dir.
	dbPath := filepath.Join(dir, "beads.db")
	store, err := sqlite.New(ctx, dbPath)
	if err != nil {
		t.Fatalf("failed to create database: %v", err)
	}
	defer store.Close()

	// Initialize the issue prefix for the store.
	if err := store.SetConfig(ctx, "issue_prefix", "test"); err != nil {
		t.Fatalf("failed to set prefix: %v", err)
	}

	// Seed three issues:
	//   test-abc: present in the JSONL (should survive)
	//   test-def: absent from the JSONL but listed in ProtectLocalExportIDs
	//             (should survive - this is the fix under test)
	//   test-ghi: absent and unprotected (git-history check is skipped below)
	seed := []struct {
		id    string
		title string
	}{
		{"test-abc", "Issue 1 (in JSONL)"},
		{"test-def", "Issue 2 (protected local export)"},
		{"test-ghi", "Issue 3 (unprotected)"},
	}
	created := make([]*types.Issue, 0, len(seed))
	for _, s := range seed {
		iss := &types.Issue{
			ID:        s.id,
			Title:     s.title,
			Status:    types.StatusOpen,
			Priority:  1,
			IssueType: types.TypeTask,
		}
		if err := store.CreateIssue(ctx, iss, "test"); err != nil {
			t.Fatalf("failed to create issue %s: %v", iss.ID, err)
		}
		created = append(created, iss)
	}

	// Simulate an import where the JSONL only contains the first issue
	// (the second was in our local export but was lost during the merge).
	jsonlIssues := []*types.Issue{created[0]}
	result := &Result{
		IDMapping:        make(map[string]string),
		MismatchPrefixes: make(map[string]int),
	}

	// Protect test-def (simulates left-snapshot protection) and skip the
	// git-history check for this test.
	opts := Options{
		ProtectLocalExportIDs: map[string]bool{"test-def": true},
		NoGitHistory:          true,
	}
	if err := purgeDeletedIssues(ctx, store, dbPath, jsonlIssues, opts, result); err != nil {
		t.Fatalf("purgeDeletedIssues failed: %v", err)
	}

	// The protected issue must be reported as preserved (the fix!).
	if result.PreservedLocalExport != 1 {
		t.Errorf("expected 1 preserved issue, got %d", result.PreservedLocalExport)
	}
	if len(result.PreservedLocalIDs) != 1 || result.PreservedLocalIDs[0] != "test-def" {
		t.Errorf("expected PreservedLocalIDs to contain 'test-def', got %v", result.PreservedLocalIDs)
	}

	// All three issues must still be present in the store.
	checks := []struct {
		id   string
		desc string
	}{
		{"test-abc", "expected issue1 to still exist"},
		{"test-def", "expected issue2 (protected local export) to still exist - THIS IS THE FIX"},
		{"test-ghi", "expected issue3 to still exist (git history check skipped)"},
	}
	for _, c := range checks {
		iss, err := store.GetIssue(ctx, c.id)
		if err != nil {
			t.Fatalf("GetIssue failed: %v", err)
		}
		if iss == nil {
			t.Error(c.desc)
		}
	}
}
// TestPurgeDeletedIssues_EmptyDeletionsManifest verifies that import succeeds
// when an empty legacy deletions.jsonl file is present on disk. The deletions
// manifest system has been removed (tombstones are the sole deletion
// mechanism), so a stale manifest left over from an older version must be
// ignored rather than affect the import. The path is built by hand because
// the internal/deletions package no longer exists.
func TestPurgeDeletedIssues_EmptyDeletionsManifest(t *testing.T) {
	ctx := context.Background()
	tmpDir := t.TempDir()
	// Create database
	dbPath := filepath.Join(tmpDir, "beads.db")
	store, err := sqlite.New(ctx, dbPath)
	if err != nil {
		t.Fatalf("failed to create database: %v", err)
	}
	defer store.Close()
	// Initialize prefix
	if err := store.SetConfig(ctx, "issue_prefix", "test"); err != nil {
		t.Fatalf("failed to set prefix: %v", err)
	}
	// Create an issue in the database
	issue := &types.Issue{
		ID:        "test-abc",
		Title:     "Issue 1",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
	}
	if err := store.CreateIssue(ctx, issue, "test"); err != nil {
		t.Fatalf("failed to create issue: %v", err)
	}
	// Create an empty legacy deletions manifest at its old well-known
	// location (.beads/deletions.jsonl under the workspace root).
	beadsDir := filepath.Join(tmpDir, ".beads")
	if err := os.MkdirAll(beadsDir, 0o755); err != nil {
		t.Fatalf("failed to create .beads dir: %v", err)
	}
	deletionsPath := filepath.Join(beadsDir, "deletions.jsonl")
	if err := os.WriteFile(deletionsPath, []byte{}, 0o644); err != nil {
		t.Fatalf("failed to create empty deletions manifest: %v", err)
	}
	jsonlIssues := []*types.Issue{issue}
	result := &Result{
		IDMapping:        make(map[string]string),
		MismatchPrefixes: make(map[string]int),
	}
	// Call purgeDeletedIssues - should succeed with no errors
	if err := purgeDeletedIssues(ctx, store, dbPath, jsonlIssues, Options{}, result); err != nil {
		t.Fatalf("purgeDeletedIssues failed: %v", err)
	}
	// Verify nothing was purged
	if result.Purged != 0 {
		t.Errorf("expected 0 purged issues, got %d", result.Purged)
	}
}