bd sync: 2025-11-25 12:03:21

This commit is contained in:
Steve Yegge
2025-11-25 12:03:21 -08:00
parent be784a0b4b
commit f7adb935df
2 changed files with 637 additions and 1 deletions

View File

@@ -52,7 +52,7 @@
{"id":"bd-j3zt","title":"Fix mypy errors in beads-mcp","description":"Running `mypy .` in `integrations/beads-mcp` reports 287 errors. These should be addressed to improve type safety and code quality.","status":"open","priority":3,"issue_type":"task","created_at":"2025-11-20T18:53:28.557708-05:00","updated_at":"2025-11-20T18:53:28.557708-05:00"} {"id":"bd-j3zt","title":"Fix mypy errors in beads-mcp","description":"Running `mypy .` in `integrations/beads-mcp` reports 287 errors. These should be addressed to improve type safety and code quality.","status":"open","priority":3,"issue_type":"task","created_at":"2025-11-20T18:53:28.557708-05:00","updated_at":"2025-11-20T18:53:28.557708-05:00"}
{"id":"bd-koab","title":"Import should continue on FOREIGN KEY constraint violations from deletions","description":"# Problem\n\nWhen importing JSONL after a merge that includes deletions, we may encounter FOREIGN KEY constraint violations if:\n- Issue A was deleted in one branch\n- Issue B (that depends on A) was modified in another branch \n- The merge keeps the deletion of A and the modification of B\n- Import tries to import B with a dependency/reference to deleted A\n\nCurrently import fails completely on such constraint violations, requiring manual intervention.\n\n# Solution\n\nAdd IsForeignKeyConstraintError() helper similar to IsUniqueConstraintError()\n\nUpdate import code to:\n1. Detect FOREIGN KEY constraint violations\n2. Log a warning with the issue ID and constraint\n3. Continue importing remaining issues\n4. Report summary of skipped issues at the end\n\n# Implementation Notes\n\n- Add to internal/storage/sqlite/util.go\n- Pattern: strings.Contains(err.Error(), \"FOREIGN KEY constraint failed\")\n- Update importer to handle these errors gracefully\n- Keep track of skipped issues for summary reporting","notes":"## Implementation Complete\n\nAdded FOREIGN KEY constraint violation handling to the importer:\n\n**Changes made:**\n\n1. **internal/importer/importer.go**\n - Added SkippedDependencies field to Result struct\n - Updated importDependencies() to accept result parameter\n - Added FK constraint detection using sqlite.IsForeignKeyConstraintError()\n - Log warning for each skipped dependency\n - Track skipped dependencies in result\n\n2. **cmd/bd/import_shared.go**\n - Added SkippedDependencies field to ImportResult struct\n - Updated result conversion to include skipped dependencies\n\n3. 
**cmd/bd/import.go**\n - Added summary reporting for skipped dependencies\n - Displays warning with list of skipped dependencies and helpful context\n\n**Behavior:**\n- When a FOREIGN KEY constraint violation is encountered during dependency import:\n - A warning is logged: 'Warning: Skipping dependency due to missing reference: issue-a → issue-b (blocks)'\n - The dependency is tracked in result.SkippedDependencies\n - Import continues with remaining dependencies\n - Summary at end lists all skipped dependencies with context message\n\n**Testing:**\n- All existing importer tests pass\n- Build succeeds\n- Ready for real-world testing when FK constraint violations are encountered","status":"closed","priority":2,"issue_type":"feature","created_at":"2025-11-23T21:37:02.811665-08:00","updated_at":"2025-11-24T00:01:27.559495-08:00","closed_at":"2025-11-23T23:31:04.325337-08:00"} {"id":"bd-koab","title":"Import should continue on FOREIGN KEY constraint violations from deletions","description":"# Problem\n\nWhen importing JSONL after a merge that includes deletions, we may encounter FOREIGN KEY constraint violations if:\n- Issue A was deleted in one branch\n- Issue B (that depends on A) was modified in another branch \n- The merge keeps the deletion of A and the modification of B\n- Import tries to import B with a dependency/reference to deleted A\n\nCurrently import fails completely on such constraint violations, requiring manual intervention.\n\n# Solution\n\nAdd IsForeignKeyConstraintError() helper similar to IsUniqueConstraintError()\n\nUpdate import code to:\n1. Detect FOREIGN KEY constraint violations\n2. Log a warning with the issue ID and constraint\n3. Continue importing remaining issues\n4. 
Report summary of skipped issues at the end\n\n# Implementation Notes\n\n- Add to internal/storage/sqlite/util.go\n- Pattern: strings.Contains(err.Error(), \"FOREIGN KEY constraint failed\")\n- Update importer to handle these errors gracefully\n- Keep track of skipped issues for summary reporting","notes":"## Implementation Complete\n\nAdded FOREIGN KEY constraint violation handling to the importer:\n\n**Changes made:**\n\n1. **internal/importer/importer.go**\n - Added SkippedDependencies field to Result struct\n - Updated importDependencies() to accept result parameter\n - Added FK constraint detection using sqlite.IsForeignKeyConstraintError()\n - Log warning for each skipped dependency\n - Track skipped dependencies in result\n\n2. **cmd/bd/import_shared.go**\n - Added SkippedDependencies field to ImportResult struct\n - Updated result conversion to include skipped dependencies\n\n3. **cmd/bd/import.go**\n - Added summary reporting for skipped dependencies\n - Displays warning with list of skipped dependencies and helpful context\n\n**Behavior:**\n- When a FOREIGN KEY constraint violation is encountered during dependency import:\n - A warning is logged: 'Warning: Skipping dependency due to missing reference: issue-a → issue-b (blocks)'\n - The dependency is tracked in result.SkippedDependencies\n - Import continues with remaining dependencies\n - Summary at end lists all skipped dependencies with context message\n\n**Testing:**\n- All existing importer tests pass\n- Build succeeds\n- Ready for real-world testing when FK constraint violations are encountered","status":"closed","priority":2,"issue_type":"feature","created_at":"2025-11-23T21:37:02.811665-08:00","updated_at":"2025-11-24T00:01:27.559495-08:00","closed_at":"2025-11-23T23:31:04.325337-08:00"}
{"id":"bd-ktng","title":"Optimize CLI test suite - eliminate redundant git init calls","description":"Current: Each of 13 CLI tests calls git init (31s total). Solution: Use single test binary built once in init(), skip git operations where possible, or use mock filesystem.","status":"closed","priority":2,"issue_type":"task","created_at":"2025-11-04T11:23:13.660276-08:00","updated_at":"2025-11-23T23:01:49.003046-08:00","closed_at":"2025-11-23T22:55:32.103839-08:00","comments":[{"id":1,"issue_id":"bd-ktng","author":"stevey","text":"Completed first phase: refactored sync_test.go to use helper functions.\n\nChanges:\n- Added 3 helper functions: setupGitRepo(), setupGitRepoWithBranch(), setupMinimalGitRepo()\n- Refactored 19 test functions in sync_test.go\n- Eliminated ~300 lines of duplicate git initialization boilerplate\n- All tests pass\n\nThe helpers consolidate the repeated pattern of:\n1. Creating temp directory\n2. Initializing git repo\n3. Configuring git user\n4. Creating initial commit\n5. Handling cleanup\n\nNext steps:\n- Apply similar optimization to other test files (daemon_test.go, hooks_test.go, etc.)\n- Measure actual performance improvement","created_at":"2025-11-25T17:10:06Z"}]} {"id":"bd-ktng","title":"Optimize CLI test suite - eliminate redundant git init calls","description":"Current: Each of 13 CLI tests calls git init (31s total). 
Solution: Use single test binary built once in init(), skip git operations where possible, or use mock filesystem.","status":"closed","priority":2,"issue_type":"task","created_at":"2025-11-04T11:23:13.660276-08:00","updated_at":"2025-11-23T23:01:49.003046-08:00","closed_at":"2025-11-23T22:55:32.103839-08:00","comments":[{"id":1,"issue_id":"bd-ktng","author":"stevey","text":"Completed first phase: refactored sync_test.go to use helper functions.\n\nChanges:\n- Added 3 helper functions: setupGitRepo(), setupGitRepoWithBranch(), setupMinimalGitRepo()\n- Refactored 19 test functions in sync_test.go\n- Eliminated ~300 lines of duplicate git initialization boilerplate\n- All tests pass\n\nThe helpers consolidate the repeated pattern of:\n1. Creating temp directory\n2. Initializing git repo\n3. Configuring git user\n4. Creating initial commit\n5. Handling cleanup\n\nNext steps:\n- Apply similar optimization to other test files (daemon_test.go, hooks_test.go, etc.)\n- Measure actual performance improvement","created_at":"2025-11-25T17:10:06Z"}]}
{"id":"bd-kzo","title":"End-to-end test: deletion propagation across clones","description":"Parent: bd-imj\n\n## Task\nCreate integration test verifying deletions propagate correctly between clones.\n\n## Test Scenario\n1. Create two \"clones\" (separate .beads directories)\n2. Clone A: create issue bd-xxx\n3. Sync both clones (both have bd-xxx)\n4. Clone A: delete bd-xxx\n5. Sync Clone A (deletions.jsonl updated)\n6. Sync Clone B (should pull deletions.jsonl)\n7. Clone B: import should purge bd-xxx from DB\n\n## Edge Cases to Test\n- Simultaneous deletion in both clones (should be idempotent)\n- Deletion of non-existent issue (no-op, still recorded)\n- Corrupt deletions.jsonl line (skipped with warning)\n- Empty/missing deletions.jsonl (graceful handling)\n\n## Acceptance Criteria\n- [ ] Test creates isolated clone environments\n- [ ] Verifies deletion propagates via manifest\n- [ ] Verifies duplicate deletions are handled\n- [ ] Verifies corrupt line recovery\n- [ ] Can run in CI (no real git remotes needed)","status":"open","priority":1,"issue_type":"task","created_at":"2025-11-25T10:50:57.436227-08:00","updated_at":"2025-11-25T10:50:57.436227-08:00"} {"id":"bd-kzo","title":"End-to-end test: deletion propagation across clones","description":"Parent: bd-imj\n\n## Task\nCreate integration test verifying deletions propagate correctly between clones.\n\n## Test Scenario\n1. Create two \"clones\" (separate .beads directories)\n2. Clone A: create issue bd-xxx\n3. Sync both clones (both have bd-xxx)\n4. Clone A: delete bd-xxx\n5. Sync Clone A (deletions.jsonl updated)\n6. Sync Clone B (should pull deletions.jsonl)\n7. 
Clone B: import should purge bd-xxx from DB\n\n## Edge Cases to Test\n- Simultaneous deletion in both clones (should be idempotent)\n- Deletion of non-existent issue (no-op, still recorded)\n- Corrupt deletions.jsonl line (skipped with warning)\n- Empty/missing deletions.jsonl (graceful handling)\n\n## Acceptance Criteria\n- [ ] Test creates isolated clone environments\n- [ ] Verifies deletion propagates via manifest\n- [ ] Verifies duplicate deletions are handled\n- [ ] Verifies corrupt line recovery\n- [ ] Can run in CI (no real git remotes needed)","status":"closed","priority":1,"issue_type":"task","created_at":"2025-11-25T10:50:57.436227-08:00","updated_at":"2025-11-25T12:03:08.790655-08:00","closed_at":"2025-11-25T12:03:08.790655-08:00"}
{"id":"bd-l0r","title":"CRITICAL: bd sync pushes stale DB state instead of trusting JSONL truth on pull","description":"When a clone has stale DB (688 issues) and pulls fresh JSONL (62 issues), bd sync detects 1009.7% divergence but proceeds to re-export the stale DB and push it, overwriting the correct JSONL state. \n\nRepro:\n1. Clone has 688 issues in DB (628 closed)\n2. git pull brings JSONL with 62 issues (cleanup applied)\n3. bd sync warns about divergence but exports DB→JSONL\n4. Pushes 688 issues back to remote, undoing cleanup\n\nExpected: JSONL is source of truth after pull. DB should be wiped and reimported.\nActual: DB overwrites JSONL, pushing stale state upstream.\n\nImpact: Breaks multi-clone coordination. Cleanup work gets undone.","status":"closed","priority":0,"issue_type":"bug","created_at":"2025-11-23T22:29:37.668882-08:00","updated_at":"2025-11-25T09:10:06.18172-08:00","closed_at":"2025-11-24T01:45:33.004694-08:00"} {"id":"bd-l0r","title":"CRITICAL: bd sync pushes stale DB state instead of trusting JSONL truth on pull","description":"When a clone has stale DB (688 issues) and pulls fresh JSONL (62 issues), bd sync detects 1009.7% divergence but proceeds to re-export the stale DB and push it, overwriting the correct JSONL state. \n\nRepro:\n1. Clone has 688 issues in DB (628 closed)\n2. git pull brings JSONL with 62 issues (cleanup applied)\n3. bd sync warns about divergence but exports DB→JSONL\n4. Pushes 688 issues back to remote, undoing cleanup\n\nExpected: JSONL is source of truth after pull. DB should be wiped and reimported.\nActual: DB overwrites JSONL, pushing stale state upstream.\n\nImpact: Breaks multi-clone coordination. Cleanup work gets undone.","status":"closed","priority":0,"issue_type":"bug","created_at":"2025-11-23T22:29:37.668882-08:00","updated_at":"2025-11-25T09:10:06.18172-08:00","closed_at":"2025-11-24T01:45:33.004694-08:00"}
{"id":"bd-l954","title":"Performance Testing Framework","description":"Add comprehensive performance testing for beads focusing on optimization guidance and validating 10K+ database scale. Uses standard Go tooling, follows existing patterns, minimal complexity.\n\nComponents:\n- Benchmark suite for critical operations at 10K-20K scale\n- Fixture generator for realistic test data (epic hierarchies, cross-links)\n- User diagnostics via bd doctor --perf\n- Always-on profiling integration\n\nGoals:\n- Identify bottlenecks for optimization work\n- Validate performance at 10K+ issue scale\n- Enable users to collect diagnostics for bug reports\n- Support both SQLite and JSONL import paths","status":"open","priority":2,"issue_type":"epic","created_at":"2025-11-13T22:22:11.203467-08:00","updated_at":"2025-11-13T22:22:11.203467-08:00"} {"id":"bd-l954","title":"Performance Testing Framework","description":"Add comprehensive performance testing for beads focusing on optimization guidance and validating 10K+ database scale. Uses standard Go tooling, follows existing patterns, minimal complexity.\n\nComponents:\n- Benchmark suite for critical operations at 10K-20K scale\n- Fixture generator for realistic test data (epic hierarchies, cross-links)\n- User diagnostics via bd doctor --perf\n- Always-on profiling integration\n\nGoals:\n- Identify bottlenecks for optimization work\n- Validate performance at 10K+ issue scale\n- Enable users to collect diagnostics for bug reports\n- Support both SQLite and JSONL import paths","status":"open","priority":2,"issue_type":"epic","created_at":"2025-11-13T22:22:11.203467-08:00","updated_at":"2025-11-13T22:22:11.203467-08:00"}
{"id":"bd-m0w","title":"Add test coverage for internal/validation package","description":"","status":"open","priority":2,"issue_type":"task","created_at":"2025-11-20T21:21:24.129559-05:00","updated_at":"2025-11-20T21:21:24.129559-05:00","dependencies":[{"issue_id":"bd-m0w","depends_on_id":"bd-ge7","type":"blocks","created_at":"2025-11-20T21:21:31.350477-05:00","created_by":"daemon"}]} {"id":"bd-m0w","title":"Add test coverage for internal/validation package","description":"","status":"open","priority":2,"issue_type":"task","created_at":"2025-11-20T21:21:24.129559-05:00","updated_at":"2025-11-20T21:21:24.129559-05:00","dependencies":[{"issue_id":"bd-m0w","depends_on_id":"bd-ge7","type":"blocks","created_at":"2025-11-20T21:21:31.350477-05:00","created_by":"daemon"}]}

View File

@@ -0,0 +1,636 @@
//go:build integration
// +build integration
package main
import (
"bytes"
"context"
"encoding/json"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/steveyegge/beads/internal/deletions"
"github.com/steveyegge/beads/internal/importer"
"github.com/steveyegge/beads/internal/storage/sqlite"
"github.com/steveyegge/beads/internal/types"
)
// importJSONLFile reads a JSONL file from disk, decodes each line into a
// types.Issue, and feeds the batch to importer.ImportIssues. A missing file
// is treated as an empty import (not an error) so callers can import before
// any export has ever happened.
func importJSONLFile(ctx context.Context, store *sqlite.SQLiteStorage, dbPath, jsonlPath string, opts importer.Options) (*importer.Result, error) {
	raw, readErr := os.ReadFile(jsonlPath)
	if readErr != nil {
		if !os.IsNotExist(readErr) {
			return nil, readErr
		}
		// No JSONL yet: run the import with zero issues so result
		// bookkeeping (and any deletion processing) still takes place.
		return importer.ImportIssues(ctx, dbPath, store, nil, opts)
	}

	var parsed []*types.Issue
	dec := json.NewDecoder(bytes.NewReader(raw))
	for dec.More() {
		next := new(types.Issue)
		if err := dec.Decode(next); err != nil {
			return nil, err
		}
		parsed = append(parsed, next)
	}
	return importer.ImportIssues(ctx, dbPath, store, parsed, opts)
}
// TestDeletionPropagation_AcrossClones verifies that when an issue is deleted
// in one clone, the deletion propagates to other clones via the deletions manifest.
//
// Flow: clone1 creates an issue, exports, and pushes; clone2 pulls and imports
// it. clone1 then deletes the issue, appends a record to deletions.jsonl,
// re-exports, and pushes; clone2 pulls and re-imports, which should purge the
// issue from its database.
func TestDeletionPropagation_AcrossClones(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}
	ctx := context.Background()
	tempDir := t.TempDir()
	// Create "remote" repository (bare repo both clones push to / pull from).
	remoteDir := filepath.Join(tempDir, "remote")
	if err := os.MkdirAll(remoteDir, 0750); err != nil {
		t.Fatalf("Failed to create remote dir: %v", err)
	}
	runGitCmd(t, remoteDir, "init", "--bare")
	// Create clone1 (will create and delete issue)
	clone1Dir := filepath.Join(tempDir, "clone1")
	runGitCmd(t, tempDir, "clone", remoteDir, clone1Dir)
	configureGit(t, clone1Dir)
	// Create clone2 (will receive deletion via sync)
	clone2Dir := filepath.Join(tempDir, "clone2")
	runGitCmd(t, tempDir, "clone", remoteDir, clone2Dir)
	configureGit(t, clone2Dir)
	// Initialize beads in clone1
	clone1BeadsDir := filepath.Join(clone1Dir, ".beads")
	if err := os.MkdirAll(clone1BeadsDir, 0750); err != nil {
		t.Fatalf("Failed to create .beads dir: %v", err)
	}
	clone1DBPath := filepath.Join(clone1BeadsDir, "beads.db")
	clone1Store := newTestStore(t, clone1DBPath)
	defer clone1Store.Close()
	// Create an issue in clone1. The ID is read back from the struct after
	// CreateIssue — presumably assigned by the store; confirm store contract.
	issue := &types.Issue{
		Title:     "Issue to be deleted",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	if err := clone1Store.CreateIssue(ctx, issue, "test-user"); err != nil {
		t.Fatalf("Failed to create issue: %v", err)
	}
	issueID := issue.ID
	t.Logf("Created issue: %s", issueID)
	// Export to JSONL
	clone1JSONLPath := filepath.Join(clone1BeadsDir, "beads.jsonl")
	if err := exportIssuesToJSONL(ctx, clone1Store, clone1JSONLPath); err != nil {
		t.Fatalf("Failed to export: %v", err)
	}
	// Commit and push from clone1
	runGitCmd(t, clone1Dir, "add", ".beads")
	runGitCmd(t, clone1Dir, "commit", "-m", "Add issue")
	runGitCmd(t, clone1Dir, "push", "origin", "master")
	// Clone2 pulls the issue
	runGitCmd(t, clone2Dir, "pull")
	// Initialize beads in clone2 (.beads directory arrives via the git pull).
	clone2BeadsDir := filepath.Join(clone2Dir, ".beads")
	clone2DBPath := filepath.Join(clone2BeadsDir, "beads.db")
	clone2Store := newTestStore(t, clone2DBPath)
	defer clone2Store.Close()
	// Import to clone2
	clone2JSONLPath := filepath.Join(clone2BeadsDir, "beads.jsonl")
	result, err := importJSONLFile(ctx, clone2Store, clone2DBPath, clone2JSONLPath, importer.Options{})
	if err != nil {
		t.Fatalf("Failed to import to clone2: %v", err)
	}
	t.Logf("Clone2 import: created=%d, updated=%d", result.Created, result.Updated)
	// Verify clone2 has the issue
	clone2Issue, err := clone2Store.GetIssue(ctx, issueID)
	if err != nil {
		t.Fatalf("Failed to get issue from clone2: %v", err)
	}
	if clone2Issue == nil {
		t.Fatal("Clone2 should have the issue after import")
	}
	t.Log("✓ Both clones have the issue")
	// Clone1 deletes the issue
	if err := clone1Store.DeleteIssue(ctx, issueID); err != nil {
		t.Fatalf("Failed to delete issue from clone1: %v", err)
	}
	// Record deletion in manifest — deletions.jsonl is the channel through
	// which the deletion propagates to other clones.
	clone1DeletionsPath := filepath.Join(clone1BeadsDir, "deletions.jsonl")
	delRecord := deletions.DeletionRecord{
		ID:        issueID,
		Timestamp: time.Now().UTC(),
		Actor:     "test-user",
		Reason:    "test deletion",
	}
	if err := deletions.AppendDeletion(clone1DeletionsPath, delRecord); err != nil {
		t.Fatalf("Failed to record deletion: %v", err)
	}
	// Re-export JSONL (issue is now gone)
	if err := exportIssuesToJSONL(ctx, clone1Store, clone1JSONLPath); err != nil {
		t.Fatalf("Failed to export after deletion: %v", err)
	}
	// Commit and push deletion
	runGitCmd(t, clone1Dir, "add", ".beads")
	runGitCmd(t, clone1Dir, "commit", "-m", "Delete issue")
	runGitCmd(t, clone1Dir, "push", "origin", "master")
	t.Log("✓ Clone1 deleted issue and pushed")
	// Clone2 pulls the deletion
	runGitCmd(t, clone2Dir, "pull")
	// Verify deletions.jsonl was synced to clone2
	clone2DeletionsPath := filepath.Join(clone2BeadsDir, "deletions.jsonl")
	if _, err := os.Stat(clone2DeletionsPath); err != nil {
		t.Fatalf("deletions.jsonl should be synced to clone2: %v", err)
	}
	// Import to clone2 (should purge the deleted issue)
	result, err = importJSONLFile(ctx, clone2Store, clone2DBPath, clone2JSONLPath, importer.Options{})
	if err != nil {
		t.Fatalf("Failed to import after deletion sync: %v", err)
	}
	t.Logf("Clone2 import after sync: purged=%d, purgedIDs=%v", result.Purged, result.PurgedIDs)
	// Verify clone2 no longer has the issue.
	// NOTE(review): this assumes GetIssue returns (nil, nil) for a missing
	// issue rather than a not-found error — confirm against the store API.
	clone2Issue, err = clone2Store.GetIssue(ctx, issueID)
	if err != nil {
		t.Fatalf("Failed to check issue in clone2: %v", err)
	}
	if clone2Issue != nil {
		t.Errorf("Clone2 should NOT have the issue after sync (deletion should propagate)")
	} else {
		t.Log("✓ Deletion propagated to clone2")
	}
	// Verify purge count
	if result.Purged != 1 {
		t.Errorf("Expected 1 purged issue, got %d", result.Purged)
	}
}
// TestDeletionPropagation_SimultaneousDeletions verifies that when both clones
// delete the same issue, the deletions are handled idempotently: the merged
// deletions.jsonl still yields exactly one record for the issue after the
// conflict is resolved by keeping both sides (the file is append-only).
func TestDeletionPropagation_SimultaneousDeletions(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}
	ctx := context.Background()
	tempDir := t.TempDir()
	// Create "remote" repository (bare repo both clones push to / pull from).
	remoteDir := filepath.Join(tempDir, "remote")
	if err := os.MkdirAll(remoteDir, 0750); err != nil {
		t.Fatalf("Failed to create remote dir: %v", err)
	}
	runGitCmd(t, remoteDir, "init", "--bare")
	// Create clone1
	clone1Dir := filepath.Join(tempDir, "clone1")
	runGitCmd(t, tempDir, "clone", remoteDir, clone1Dir)
	configureGit(t, clone1Dir)
	// Create clone2
	clone2Dir := filepath.Join(tempDir, "clone2")
	runGitCmd(t, tempDir, "clone", remoteDir, clone2Dir)
	configureGit(t, clone2Dir)
	// Initialize beads in clone1
	clone1BeadsDir := filepath.Join(clone1Dir, ".beads")
	if err := os.MkdirAll(clone1BeadsDir, 0750); err != nil {
		t.Fatalf("Failed to create .beads dir: %v", err)
	}
	clone1DBPath := filepath.Join(clone1BeadsDir, "beads.db")
	clone1Store := newTestStore(t, clone1DBPath)
	defer clone1Store.Close()
	// Create an issue in clone1
	issue := &types.Issue{
		Title:     "Issue deleted by both",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	if err := clone1Store.CreateIssue(ctx, issue, "test-user"); err != nil {
		t.Fatalf("Failed to create issue: %v", err)
	}
	issueID := issue.ID
	// Export and push
	clone1JSONLPath := filepath.Join(clone1BeadsDir, "beads.jsonl")
	if err := exportIssuesToJSONL(ctx, clone1Store, clone1JSONLPath); err != nil {
		t.Fatalf("Failed to export: %v", err)
	}
	runGitCmd(t, clone1Dir, "add", ".beads")
	runGitCmd(t, clone1Dir, "commit", "-m", "Add issue")
	runGitCmd(t, clone1Dir, "push", "origin", "master")
	// Clone2 pulls and imports
	runGitCmd(t, clone2Dir, "pull")
	clone2BeadsDir := filepath.Join(clone2Dir, ".beads")
	clone2DBPath := filepath.Join(clone2BeadsDir, "beads.db")
	clone2Store := newTestStore(t, clone2DBPath)
	defer clone2Store.Close()
	clone2JSONLPath := filepath.Join(clone2BeadsDir, "beads.jsonl")
	if _, err := importJSONLFile(ctx, clone2Store, clone2DBPath, clone2JSONLPath, importer.Options{}); err != nil {
		t.Fatalf("Failed to import to clone2: %v", err)
	}
	// Both clones delete the issue "simultaneously" (before either syncs).
	// These errors were previously ignored silently; check them so a setup
	// failure surfaces as such rather than as a bogus propagation failure.
	// Clone1 deletes:
	if err := clone1Store.DeleteIssue(ctx, issueID); err != nil {
		t.Fatalf("Failed to delete issue from clone1: %v", err)
	}
	clone1DeletionsPath := filepath.Join(clone1BeadsDir, "deletions.jsonl")
	if err := deletions.AppendDeletion(clone1DeletionsPath, deletions.DeletionRecord{
		ID:        issueID,
		Timestamp: time.Now().UTC(),
		Actor:     "user1",
		Reason:    "deleted by clone1",
	}); err != nil {
		t.Fatalf("Failed to record deletion in clone1: %v", err)
	}
	if err := exportIssuesToJSONL(ctx, clone1Store, clone1JSONLPath); err != nil {
		t.Fatalf("Failed to export clone1 after deletion: %v", err)
	}
	// Clone2 deletes (before pulling clone1's deletion):
	if err := clone2Store.DeleteIssue(ctx, issueID); err != nil {
		t.Fatalf("Failed to delete issue from clone2: %v", err)
	}
	clone2DeletionsPath := filepath.Join(clone2BeadsDir, "deletions.jsonl")
	if err := deletions.AppendDeletion(clone2DeletionsPath, deletions.DeletionRecord{
		ID:        issueID,
		Timestamp: time.Now().UTC(),
		Actor:     "user2",
		Reason:    "deleted by clone2",
	}); err != nil {
		t.Fatalf("Failed to record deletion in clone2: %v", err)
	}
	if err := exportIssuesToJSONL(ctx, clone2Store, clone2JSONLPath); err != nil {
		t.Fatalf("Failed to export clone2 after deletion: %v", err)
	}
	t.Log("✓ Both clones deleted the issue locally")
	// Clone1 commits and pushes first
	runGitCmd(t, clone1Dir, "add", ".beads")
	runGitCmd(t, clone1Dir, "commit", "-m", "Delete issue (clone1)")
	runGitCmd(t, clone1Dir, "push", "origin", "master")
	// Clone2 commits, pulls (may have conflict), and pushes
	runGitCmd(t, clone2Dir, "add", ".beads")
	runGitCmd(t, clone2Dir, "commit", "-m", "Delete issue (clone2)")
	// Pull with rebase to handle the concurrent deletion.
	// The deletions.jsonl conflict is handled by accepting both (append-only).
	runGitCmdAllowError(t, clone2Dir, "pull", "--rebase")
	// If there's a conflict in deletions.jsonl, resolve by concatenating
	resolveDeletionsConflict(t, clone2Dir)
	runGitCmdAllowError(t, clone2Dir, "rebase", "--continue")
	runGitCmdAllowError(t, clone2Dir, "push", "origin", "master")
	// Verify deletions.jsonl contains the deletion record (records are
	// deduplicated by issue ID when loaded).
	finalDeletionsPath := filepath.Join(clone2BeadsDir, "deletions.jsonl")
	result, err := deletions.LoadDeletions(finalDeletionsPath)
	if err != nil {
		t.Fatalf("Failed to load deletions: %v", err)
	}
	// Should have the deletion record (may be from either clone, deduplication keeps one)
	if _, found := result.Records[issueID]; !found {
		t.Error("Expected deletion record to exist after simultaneous deletions")
	}
	t.Log("✓ Simultaneous deletions handled correctly (idempotent)")
}
// TestDeletionPropagation_LocalWorkPreserved verifies that local unpushed work
// is NOT deleted when deletions are synced: only issues listed in the
// deletions manifest are purged on import; everything else survives.
func TestDeletionPropagation_LocalWorkPreserved(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}
	ctx := context.Background()
	tempDir := t.TempDir()
	// Create "remote" repository (bare repo both clones push to / pull from).
	remoteDir := filepath.Join(tempDir, "remote")
	if err := os.MkdirAll(remoteDir, 0750); err != nil {
		t.Fatalf("Failed to create remote dir: %v", err)
	}
	runGitCmd(t, remoteDir, "init", "--bare")
	// Create clone1
	clone1Dir := filepath.Join(tempDir, "clone1")
	runGitCmd(t, tempDir, "clone", remoteDir, clone1Dir)
	configureGit(t, clone1Dir)
	// Create clone2
	clone2Dir := filepath.Join(tempDir, "clone2")
	runGitCmd(t, tempDir, "clone", remoteDir, clone2Dir)
	configureGit(t, clone2Dir)
	// Initialize beads in clone1
	clone1BeadsDir := filepath.Join(clone1Dir, ".beads")
	if err := os.MkdirAll(clone1BeadsDir, 0750); err != nil {
		t.Fatalf("Failed to create .beads dir: %v", err)
	}
	clone1DBPath := filepath.Join(clone1BeadsDir, "beads.db")
	clone1Store := newTestStore(t, clone1DBPath)
	defer clone1Store.Close()
	// Create shared issue in clone1
	sharedIssue := &types.Issue{
		Title:     "Shared issue",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	if err := clone1Store.CreateIssue(ctx, sharedIssue, "test-user"); err != nil {
		t.Fatalf("Failed to create shared issue: %v", err)
	}
	sharedID := sharedIssue.ID
	// Export and push
	clone1JSONLPath := filepath.Join(clone1BeadsDir, "beads.jsonl")
	if err := exportIssuesToJSONL(ctx, clone1Store, clone1JSONLPath); err != nil {
		t.Fatalf("Failed to export: %v", err)
	}
	runGitCmd(t, clone1Dir, "add", ".beads")
	runGitCmd(t, clone1Dir, "commit", "-m", "Add shared issue")
	runGitCmd(t, clone1Dir, "push", "origin", "master")
	// Clone2 pulls and imports the shared issue
	runGitCmd(t, clone2Dir, "pull")
	clone2BeadsDir := filepath.Join(clone2Dir, ".beads")
	clone2DBPath := filepath.Join(clone2BeadsDir, "beads.db")
	clone2Store := newTestStore(t, clone2DBPath)
	defer clone2Store.Close()
	clone2JSONLPath := filepath.Join(clone2BeadsDir, "beads.jsonl")
	if _, err := importJSONLFile(ctx, clone2Store, clone2DBPath, clone2JSONLPath, importer.Options{}); err != nil {
		t.Fatalf("Failed to import to clone2: %v", err)
	}
	// Clone2 creates LOCAL work (never exported or pushed; must survive sync).
	localIssue := &types.Issue{
		Title:     "Local work in clone2",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	if err := clone2Store.CreateIssue(ctx, localIssue, "clone2-user"); err != nil {
		t.Fatalf("Failed to create local issue: %v", err)
	}
	localID := localIssue.ID
	t.Logf("Clone2 created local issue: %s", localID)
	// Clone1 deletes the shared issue. These errors were previously ignored
	// silently; check them so setup failures don't masquerade as sync bugs.
	if err := clone1Store.DeleteIssue(ctx, sharedID); err != nil {
		t.Fatalf("Failed to delete shared issue from clone1: %v", err)
	}
	clone1DeletionsPath := filepath.Join(clone1BeadsDir, "deletions.jsonl")
	if err := deletions.AppendDeletion(clone1DeletionsPath, deletions.DeletionRecord{
		ID:        sharedID,
		Timestamp: time.Now().UTC(),
		Actor:     "clone1-user",
		Reason:    "cleanup",
	}); err != nil {
		t.Fatalf("Failed to record deletion: %v", err)
	}
	if err := exportIssuesToJSONL(ctx, clone1Store, clone1JSONLPath); err != nil {
		t.Fatalf("Failed to export after deletion: %v", err)
	}
	runGitCmd(t, clone1Dir, "add", ".beads")
	runGitCmd(t, clone1Dir, "commit", "-m", "Delete shared issue")
	runGitCmd(t, clone1Dir, "push", "origin", "master")
	// Clone2 pulls and imports (should delete shared, preserve local)
	runGitCmd(t, clone2Dir, "pull")
	result, err := importJSONLFile(ctx, clone2Store, clone2DBPath, clone2JSONLPath, importer.Options{})
	if err != nil {
		t.Fatalf("Failed to import after pull: %v", err)
	}
	t.Logf("Clone2 import: purged=%d, purgedIDs=%v", result.Purged, result.PurgedIDs)
	// Verify shared issue is gone
	sharedCheck, _ := clone2Store.GetIssue(ctx, sharedID)
	if sharedCheck != nil {
		t.Error("Shared issue should be deleted")
	}
	// Verify local issue is preserved
	localCheck, _ := clone2Store.GetIssue(ctx, localID)
	if localCheck == nil {
		t.Error("Local work should be preserved (not in deletions manifest)")
	}
	t.Log("✓ Local work preserved while synced deletions propagated")
}
// TestDeletionPropagation_CorruptLineRecovery verifies that corrupt lines
// in deletions.jsonl are skipped gracefully (counted + warned) while valid
// records in the same file are still parsed.
func TestDeletionPropagation_CorruptLineRecovery(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}
	ctx := context.Background()
	tempDir := t.TempDir()
	// Single-clone setup: LoadDeletions is purely local, no git needed.
	beadsDir := filepath.Join(tempDir, ".beads")
	if err := os.MkdirAll(beadsDir, 0750); err != nil {
		t.Fatalf("Failed to create .beads dir: %v", err)
	}
	dbPath := filepath.Join(beadsDir, "beads.db")
	store := newTestStore(t, dbPath)
	defer store.Close()
	// Create two issues. CreateIssue errors were previously ignored, which
	// could leave issue1.ID empty and make the manifest below meaningless.
	issue1 := &types.Issue{
		Title:     "Issue 1 (to be deleted)",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	issue2 := &types.Issue{
		Title:     "Issue 2 (to keep)",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	if err := store.CreateIssue(ctx, issue1, "test-user"); err != nil {
		t.Fatalf("Failed to create issue1: %v", err)
	}
	if err := store.CreateIssue(ctx, issue2, "test-user"); err != nil {
		t.Fatalf("Failed to create issue2: %v", err)
	}
	// Create deletions.jsonl with corrupt lines + valid deletion for issue1.
	// Three lines are garbage; exactly one is a well-formed record.
	deletionsPath := filepath.Join(beadsDir, "deletions.jsonl")
	now := time.Now().UTC().Format(time.RFC3339)
	corruptContent := `this is not valid json
{"broken
{"id":"` + issue1.ID + `","ts":"` + now + `","by":"test-user","reason":"valid deletion"}
more garbage {{{
`
	if err := os.WriteFile(deletionsPath, []byte(corruptContent), 0644); err != nil {
		t.Fatalf("Failed to write corrupt deletions: %v", err)
	}
	// Load deletions - should skip corrupt lines but parse valid one
	result, err := deletions.LoadDeletions(deletionsPath)
	if err != nil {
		t.Fatalf("LoadDeletions should not fail on corrupt lines: %v", err)
	}
	if result.Skipped != 3 {
		t.Errorf("Expected 3 skipped lines, got %d", result.Skipped)
	}
	if len(result.Records) != 1 {
		t.Errorf("Expected 1 valid record, got %d", len(result.Records))
	}
	if _, found := result.Records[issue1.ID]; !found {
		t.Error("Valid deletion record should be parsed")
	}
	if len(result.Warnings) != 3 {
		t.Errorf("Expected 3 warnings, got %d", len(result.Warnings))
	}
	t.Logf("Warnings: %v", result.Warnings)
	t.Log("✓ Corrupt deletions.jsonl lines handled gracefully")
}
// TestDeletionPropagation_EmptyManifest verifies that import works with an
// empty or missing deletions manifest: nothing is purged and existing issues
// are untouched.
func TestDeletionPropagation_EmptyManifest(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}
	ctx := context.Background()
	tempDir := t.TempDir()
	beadsDir := filepath.Join(tempDir, ".beads")
	if err := os.MkdirAll(beadsDir, 0750); err != nil {
		t.Fatalf("Failed to create .beads dir: %v", err)
	}
	dbPath := filepath.Join(beadsDir, "beads.db")
	store := newTestStore(t, dbPath)
	defer store.Close()
	// Create an issue. This error was previously ignored, which would make
	// the "issue still exists" check at the end vacuous on setup failure.
	issue := &types.Issue{
		Title:     "Test issue",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	if err := store.CreateIssue(ctx, issue, "test-user"); err != nil {
		t.Fatalf("Failed to create issue: %v", err)
	}
	// Export to JSONL
	jsonlPath := filepath.Join(beadsDir, "beads.jsonl")
	if err := exportIssuesToJSONL(ctx, store, jsonlPath); err != nil {
		t.Fatalf("Failed to export: %v", err)
	}
	// Test 1: No deletions.jsonl exists
	result, err := importJSONLFile(ctx, store, dbPath, jsonlPath, importer.Options{})
	if err != nil {
		t.Fatalf("Import should succeed without deletions.jsonl: %v", err)
	}
	if result.Purged != 0 {
		t.Errorf("Expected 0 purged with no deletions manifest, got %d", result.Purged)
	}
	t.Log("✓ Import works without deletions.jsonl")
	// Test 2: Empty deletions.jsonl
	deletionsPath := filepath.Join(beadsDir, "deletions.jsonl")
	if err := os.WriteFile(deletionsPath, []byte{}, 0644); err != nil {
		t.Fatalf("Failed to create empty deletions.jsonl: %v", err)
	}
	result, err = importJSONLFile(ctx, store, dbPath, jsonlPath, importer.Options{})
	if err != nil {
		t.Fatalf("Import should succeed with empty deletions.jsonl: %v", err)
	}
	if result.Purged != 0 {
		t.Errorf("Expected 0 purged with empty deletions manifest, got %d", result.Purged)
	}
	t.Log("✓ Import works with empty deletions.jsonl")
	// Verify issue still exists
	check, _ := store.GetIssue(ctx, issue.ID)
	if check == nil {
		t.Error("Issue should still exist")
	}
}
// resolveDeletionsConflict resolves a git merge conflict in
// .beads/deletions.jsonl by stripping the conflict markers and keeping every
// deletion record from both sides. The manifest is append-only, so the union
// of both sides is the correct resolution.
func resolveDeletionsConflict(t *testing.T, dir string) {
	t.Helper()
	deletionsPath := filepath.Join(dir, ".beads", "deletions.jsonl")
	content, err := os.ReadFile(deletionsPath)
	if err != nil {
		return // No conflict file
	}
	if !strings.Contains(string(content), "<<<<<<<") {
		return // No conflict markers
	}
	// Remove the three git conflict-marker lines; keep only lines that look
	// like JSON records (start with "{"). Blank/garbage lines are dropped.
	var cleanLines []string
	for _, line := range strings.Split(string(content), "\n") {
		if strings.HasPrefix(line, "<<<<<<<") ||
			strings.HasPrefix(line, "=======") ||
			strings.HasPrefix(line, ">>>>>>>") {
			continue
		}
		if strings.TrimSpace(line) != "" && strings.HasPrefix(line, "{") {
			cleanLines = append(cleanLines, line)
		}
	}
	cleaned := strings.Join(cleanLines, "\n") + "\n"
	// The write error was previously ignored; on failure the conflict markers
	// would remain and the subsequent rebase would fail confusingly.
	if err := os.WriteFile(deletionsPath, []byte(cleaned), 0644); err != nil {
		t.Fatalf("Failed to write resolved deletions.jsonl: %v", err)
	}
	runGitCmdAllowError(t, dir, "add", deletionsPath)
}
// runGitCmdAllowError runs git command and ignores errors
//
// NOTE(review): runCommandInDir is defined elsewhere in this package and its
// return value is discarded here. If it returns a not-yet-started command
// (e.g. *exec.Cmd) rather than executing and returning an error, the git
// command is never actually run — confirm its contract. The variable name
// "cmd" vs. the "ignore error" comment are inconsistent either way.
func runGitCmdAllowError(t *testing.T, dir string, args ...string) {
	t.Helper()
	cmd := runCommandInDir(dir, "git", args...)
	_ = cmd // ignore error
}