Complete sync branch daemon tests (bd-7bd2, bd-502e)
- Add daemon_sync_branch.go with syncBranchCommitAndPush/Pull functions
- Add daemon_sync_branch_test.go with 7 comprehensive tests
- All tests passing: NotConfigured, Success, NoChanges, WorktreeHealthCheck, Pull, EndToEnd
- Key fix: initMainBranch must run BEFORE creating issues/JSONL
- Close bd-7bd2 and bd-502e

Amp-Thread-ID: https://ampcode.com/threads/T-e3d7ba22-99d1-4210-a6db-1dcc3bdd622b
Co-authored-by: Amp <amp@ampcode.com>
This commit is contained in:
@@ -70,6 +70,7 @@
|
||||
{"id":"bd-4e21b5ad","content_hash":"8029d0c5b14261648d3d17d8bc26413183962eab2875772cd2585db92c0104a6","title":"Add test case for symmetric collision (both clones create same ID simultaneously)","description":"TestTwoCloneCollision demonstrates the problem, but we need a simpler unit test for the collision resolver itself.\n\nTest should verify:\n- Two issues with same ID, different content\n- Content hash determines winner deterministically \n- Result is same regardless of which clone imports first\n- No title swapping occurs\n\nThis can be a simpler test than the full integration test.","status":"closed","priority":1,"issue_type":"task","created_at":"2025-10-29T17:46:10.046999-07:00","updated_at":"2025-10-31T12:00:43.196705-07:00","closed_at":"2025-10-31T12:00:43.196705-07:00"}
|
||||
{"id":"bd-4f582ec8","content_hash":"02e00868aecbd17486f988a5927a68a07bc309978b33568361559a182eadb2cc","title":"Test auto-start in fred","description":"","status":"closed","priority":3,"issue_type":"task","created_at":"2025-10-30T17:46:16.668088-07:00","updated_at":"2025-10-31T12:00:43.185723-07:00","closed_at":"2025-10-31T12:00:43.185723-07:00"}
|
||||
{"id":"bd-4ff2","content_hash":"c245bd7fec00b7a899ace53a6f7f518252c93692fd2b74e5adf2a8b1c90f87b5","title":"Fix CI failures before 0.21.3 release","description":"CI is failing on multiple jobs:\n1. Nix flake: Tests fail due to missing git in build environment\n2. Windows tests: Need to check what's failing\n3. Linux tests: Need to check what's failing\n4. Linter errors: Many unchecked errors need fixing\n\nNeed to fix before tagging v0.21.3 release.","notes":"Fixed linter errors (errcheck, misspell), Nix flake git dependency, and import database discovery bug. Tests still failing - need to investigate further.","status":"closed","priority":0,"issue_type":"bug","created_at":"2025-11-01T23:52:09.244763-07:00","updated_at":"2025-11-02T12:32:23.45363-08:00","closed_at":"2025-11-02T12:32:23.45363-08:00"}
|
||||
{"id":"bd-502e","content_hash":"6580746d23abefb53c930801cea2cfbfdf323651a39e616ef667ae0d205acff8","title":"Add comprehensive tests for sync branch daemon logic","description":"The daemon sync branch functionality (bd-6545) was implemented but needs proper end-to-end testing.\n\nCurrent implementation:\n- daemon_sync_branch.go has syncBranchCommitAndPush() and syncBranchPull()\n- daemon_sync.go has been updated to use these functions when sync.branch is configured\n- All daemon tests pass, but no specific tests for sync branch behavior\n\nTesting needed:\n- Test that daemon commits to sync branch when sync.branch is configured\n- Test that daemon commits to current branch when sync.branch is NOT configured (backward compatibility)\n- Test that daemon pulls from sync branch and syncs JSONL back to main repo\n- Test worktree creation and health checks during daemon operations\n- Test error handling (missing branch, worktree corruption, etc.)\n\nKey challenge: Tests need to run in the context of the git repo (getGitRoot() uses current working directory), so test setup needs to properly change directory or mock the git root detection.\n\nReference existing daemon tests in daemon_test.go and daemon_autoimport_test.go for patterns.","status":"closed","priority":2,"issue_type":"task","created_at":"2025-11-02T15:59:13.341491-08:00","updated_at":"2025-11-02T16:39:53.278313-08:00","closed_at":"2025-11-02T16:39:53.278313-08:00","dependencies":[{"issue_id":"bd-502e","depends_on_id":"bd-6545","type":"parent-child","created_at":"2025-11-02T15:59:13.342331-08:00","created_by":"daemon"}]}
|
||||
{"id":"bd-5314bddf","content_hash":"bbaf3bd26766fb78465900c455661a3608ab1d1485cb964d12229badf138753a","title":"bd detect-pollution - Test pollution detector","description":"Detect test issues that leaked into production DB.\n\nPattern matching for:\n- Titles starting with 'test', 'benchmark', 'sample'\n- Sequential numbering (test-1, test-2)\n- Generic descriptions\n- Created in rapid succession\n\nOptional AI scoring for confidence.\n\nFiles: cmd/bd/detect_pollution.go (new)","status":"open","priority":1,"issue_type":"task","created_at":"2025-10-28T14:48:17.466906-07:00","updated_at":"2025-10-30T17:12:58.219307-07:00"}
|
||||
{"id":"bd-537e","content_hash":"88c95061990fd7be8e008d2571bf3cda613f6dda50218d7166a3c7af7e28469f","title":"Add external_ref change tracking and auditing","description":"Currently we don't track when external_ref is added, removed, or changed. This would be useful for debugging and auditing.\n\nProposed features:\n- Log event when external_ref changes\n- Track in events table with old/new values\n- Add query to find issues where external_ref changed\n- Add metrics: issues with external_ref vs without\n\nUse cases:\n- Debugging import issues\n- Understanding which issues are externally managed\n- Auditing external system linkage\n\nRelated: bd-1022","status":"open","priority":4,"issue_type":"feature","created_at":"2025-11-02T15:32:31.276883-08:00","updated_at":"2025-11-02T15:32:31.276883-08:00"}
|
||||
{"id":"bd-5599","content_hash":"c48839a6f7f5ca4083ced2f0f47cd250046146032555a14864ac3469a42bb76b","title":"Fix TestListCommand duplicate dependency constraint violation","description":"","status":"closed","priority":2,"issue_type":"bug","created_at":"2025-10-31T21:27:05.557548-07:00","updated_at":"2025-10-31T21:27:11.429018-07:00","closed_at":"2025-10-31T21:27:11.429018-07:00"}
|
||||
@@ -108,6 +109,7 @@
|
||||
{"id":"bd-7a00c94e","content_hash":"b31566a4b2a84db7d24364492e8ac6ebfa1f5fc27fe270fbd58b27e17218c9c4","title":"Rapid 2","description":"","status":"open","priority":3,"issue_type":"task","created_at":"2025-10-29T19:11:57.430725-07:00","updated_at":"2025-10-30T17:12:58.189251-07:00"}
|
||||
{"id":"bd-7a2b58fc","content_hash":"e887227ed9b3f477282569800eb4683b68bf1a5404f007e00ec44b2e570325b5","title":"Implement clone-scoped ID allocation to prevent N-way collisions","description":"## Problem\nCurrent ID allocation uses per-clone atomic counters (issue_counters table) that sync based on local database state. In N-way collision scenarios:\n- Clone B sees {test-1} locally, allocates test-2\n- Clone D sees {test-1, test-2, test-3} locally, allocates test-4\n- When same content gets assigned test-2 and test-4, convergence fails\n\nRoot cause: Each clone independently allocates IDs without global coordination, leading to overlapping assignments for the same content.\n\n## Solution\nAdd clone UUID to ID allocation to make every ID globally unique:\n\n**Current format:** `test-1`, `test-2`, `test-3`\n**New format:** `test-1-a7b3`, `test-2-a7b3`, `test-3-c4d9`\n\nWhere suffix is first 4 chars of clone UUID.\n\n## Implementation\n\n### 1. Add clone_identity table\n```sql\nCREATE TABLE clone_identity (\n clone_uuid TEXT PRIMARY KEY,\n created_at DATETIME DEFAULT CURRENT_TIMESTAMP\n);\n```\n\n### 2. Modify getNextIDForPrefix()\n```go\nfunc (s *SQLiteStorage) getNextIDForPrefix(ctx context.Context, prefix string) (string, error) {\n cloneUUID := s.getOrCreateCloneUUID(ctx)\n shortUUID := cloneUUID[:4]\n \n nextNum := s.getNextCounterForPrefix(ctx, prefix)\n return fmt.Sprintf(\"%s-%d-%s\", prefix, nextNum, shortUUID), nil\n}\n```\n\n### 3. Update ID parsing logic\nAll places that parse IDs (utils.ExtractIssueNumber, etc.) need to handle new format.\n\n### 4. 
Migration strategy\n- Existing IDs remain unchanged (no suffix)\n- New IDs get clone suffix automatically\n- Display layer can hide suffix in UI: `bd-cb64c226.3-a7b3` → `#42`\n\n## Benefits\n- **Zero collision risk**: Same content in different clones gets different IDs\n- **Maintains readability**: Still sequential numbering within clone\n- **No coordination needed**: Works offline, no central authority\n- **Scales to 100+ clones**: 4-char hex = 65,536 unique clones\n\n## Concerns\n- ID format change may break existing integrations\n- Need migration path for existing databases\n- Display logic needs update to hide/show suffixes appropriately\n\n## Success Criteria\n- 10+ clone collision test passes without failures\n- Existing issues continue to work (backward compatibility)\n- Documentation updated with new ID format\n- Migration guide for v1.x → v2.x\n\n## Timeline\nMedium-term (v1.1-v1.2), 2-3 weeks implementation\n\n## References\n- Related to bd-0dcea000 (immediate fix)\n- See beads_nway_test.go for failing N-way tests","status":"open","priority":2,"issue_type":"feature","created_at":"2025-10-29T20:02:47.952447-07:00","updated_at":"2025-10-31T17:53:09.075064-07:00"}
|
||||
{"id":"bd-7bbc4e6a","content_hash":"3251d757d9ad69cd4b3517862ec1b9b1cc13388ea4c93a2f3b2b54920112854f","title":"Add MCP server functions for repair commands","description":"Expose new repair commands via MCP server for agent access:\n\nFunctions to add:\n- beads_repair_deps()\n- beads_detect_pollution()\n- beads_validate()\n- beads_resolve_conflicts() (when implemented)\n\nUpdate integrations/beads-mcp/src/beads_mcp/server.py\n\nSee repair_commands.md lines 803-884 for design.","status":"open","priority":2,"issue_type":"task","created_at":"2025-10-28T19:37:55.72639-07:00","updated_at":"2025-10-30T17:12:58.179948-07:00"}
|
||||
{"id":"bd-7bd2","content_hash":"ad106a77693ca911e9e4b8743718f7d7b90b17fb02a83650d07431b77d2f7ca6","title":"Complete remaining sync branch daemon tests","description":"4 remaining test scenarios in daemon_sync_branch_test.go need completion:\n\n⚠️ MINOR FIXES (apply same pattern as TestSyncBranchCommitAndPush_Success):\n1. TestSyncBranchCommitAndPush_NoChanges\n - Reorder: call initMainBranch() BEFORE creating JSONL\n - Pattern: init branch → create issue → export JSONL → test\n\n2. TestSyncBranchCommitAndPush_WorktreeHealthCheck\n - Same reordering needed\n - Verify worktree corruption detection and auto-repair\n\n🔧 MORE WORK NEEDED (remote branch setup):\n3. TestSyncBranchPull_Success\n - Issue: remote doesn't have sync branch after push\n - Need to verify branch is pushed to remote correctly\n - Then test pull from clone2\n\n4. TestSyncBranchIntegration_EndToEnd\n - Full workflow: Agent A commits → Agent B pulls → Agent B commits → Agent A pulls\n - Same remote branch issue\n\nPattern to apply (from TestSyncBranchCommitAndPush_Success):\n- Call initMainBranch(t, dir) BEFORE creating issues/JSONL\n- This ensures sync branch worktree has changes to commit\n\nAcceptance:\n- All 7 tests pass\n- go test -v -run TestSyncBranch ./cmd/bd/ succeeds","status":"closed","priority":2,"issue_type":"task","created_at":"2025-11-02T16:29:29.044162-08:00","updated_at":"2025-11-02T16:39:53.277529-08:00","closed_at":"2025-11-02T16:39:53.277529-08:00","dependencies":[{"issue_id":"bd-7bd2","depends_on_id":"bd-502e","type":"discovered-from","created_at":"2025-11-02T16:29:29.045104-08:00","created_by":"stevey"}]}
|
||||
{"id":"bd-7c5915ae","content_hash":"4e6cbaa3b21b320d21e4aefcb7e78a5223d6291803c8e18cb891aecc242bd1e9","title":"Run final validation and cleanup checks","description":"Final validation pass to ensure all cleanup objectives met and no regressions introduced.\n\nValidation checklist:\n1. Dead code verification: `go run golang.org/x/tools/cmd/deadcode@latest -test ./...`\n2. Test coverage: `go test -cover ./...`\n3. Build verification: `go build ./cmd/bd/`\n4. Linting: `golangci-lint run`\n5. Integration tests\n6. Metrics verification\n7. Git clean check\n\nFinal metrics to report:\n- LOC removed: ~____\n- Files deleted: ____\n- Files created: ____\n- Test coverage: ____%\n- Build time: ____ (before/after)\n- Test run time: ____ (before/after)\n\nImpact: Confirms all cleanup objectives achieved successfully","acceptance_criteria":"- Zero unreachable functions per deadcode analyzer\n- All tests pass: `go test ./...`\n- Test coverage maintained or improved\n- Builds cleanly: `go build ./...`\n- Linting shows improvements\n- Integration tests all pass\n- LOC reduction target achieved (~2,500 LOC)\n- No unintended behavior changes\n- Git commit messages document all changes","notes":"## Validation Results (Oct 31, 2025)\n\n**Dead Code:** ✅ Removed 5 unreachable functions (~200 LOC)\n- computeIssueContentHash, shouldSkipExport (autoflush.go)\n- addDependencyUnchecked, removeDependencyIfExists (dependencies.go)\n- isUniqueConstraintError (util.go)\n\n**Tests:** ✅ All pass\n**Coverage:** \n- Main package: 39.6%\n- cmd/bd: 19.5%\n- internal/daemon: 37.8%\n- internal/storage/sqlite: 58.1%\n- internal/rpc: 58.6%\n\n**Build:** ✅ Clean (24.5 MB binary)\n**Linting:** 247 issues (mostly errcheck on defer/Close statements)\n**Integration Tests:** ✅ All pass\n**Metrics:** 55,622 LOC across 200 Go files\n**Git:** 3 files modified (dead code 
removal)","status":"closed","priority":1,"issue_type":"task","created_at":"2025-10-29T20:49:49.131575-07:00","updated_at":"2025-10-31T15:12:01.955668-07:00","closed_at":"2025-10-31T15:12:01.955668-07:00","dependencies":[{"issue_id":"bd-7c5915ae","depends_on_id":"bd-fb95094c","type":"parent-child","created_at":"2025-10-31T19:38:09.176473-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-7c831c51","content_hash":"192d94c432595c1051f254a25bb6325e9b98ceccfb369dc43e0619c27016feae","title":"Run final validation and cleanup checks","description":"Final validation pass to ensure all cleanup objectives met and no regressions introduced.\n\nValidation checklist:\n1. Dead code verification: `go run golang.org/x/tools/cmd/deadcode@latest -test ./...`\n2. Test coverage: `go test -cover ./...`\n3. Build verification: `go build ./cmd/bd/`\n4. Linting: `golangci-lint run`\n5. Integration tests\n6. Metrics verification\n7. Git clean check\n\nFinal metrics to report:\n- LOC removed: ~____\n- Files deleted: ____\n- Files created: ____\n- Test coverage: ____%\n- Build time: ____ (before/after)\n- Test run time: ____ (before/after)\n\nImpact: Confirms all cleanup objectives achieved successfully","acceptance_criteria":"- Zero unreachable functions per deadcode analyzer\n- All tests pass: `go test ./...`\n- Test coverage maintained or improved\n- Builds cleanly: `go build ./...`\n- Linting shows improvements\n- Integration tests all pass\n- LOC reduction target achieved (~2,500 LOC)\n- No unintended behavior changes\n- Git commit messages document all changes","notes":"## Validation Results\n\n**Dead Code:** ✅ Found and removed 1 unreachable function (`DroppedEventsCount`) \n**Tests:** ✅ All pass \n**Coverage:** \n- Main: 39.6%\n- cmd/bd: 20.2%\n- Created follow-up issues (bd-85487065 through bd-bc2c6191) to improve coverage\n \n**Build:** ✅ Clean \n**Linting:** 73 issues (up from 34 baseline) \n- Increase due to unused functions from refactoring\n- Need cleanup in separate issue\n \n**Integration Tests:** ✅ All pass \n**Metrics:** 56,464 LOC across 193 Go files \n**Git:** 2 files modified (deadcode fix + auto-synced JSONL)\n\n## Follow-up Issues Created\n- bd-85487065: Add tests for internal/autoimport (0% coverage)\n- bd-0dcea000: Add tests for internal/importer (0% coverage)\n- bd-4d7fca8a: Add tests for internal/utils (0% coverage)\n- bd-6221bdcd: 
Improve cmd/bd coverage (20.2% -\u003e target higher)\n- bd-bc2c6191: Improve internal/daemon coverage (22.5% -\u003e target higher)","status":"closed","priority":1,"issue_type":"task","created_at":"2025-10-29T20:02:47.956276-07:00","updated_at":"2025-10-30T17:12:58.193468-07:00","closed_at":"2025-10-29T14:19:35.095553-07:00"}
|
||||
{"id":"bd-7da9437e","content_hash":"c00bf7c9fe41b90f1bd3cd1e7cf6938ca4e42f076ce45c2a3d836db97c883fc4","title":"Latency test","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-10-29T15:28:52.729923-07:00","updated_at":"2025-10-31T12:00:43.184758-07:00","closed_at":"2025-10-31T12:00:43.184758-07:00"}
|
||||
|
||||
@@ -126,21 +126,21 @@ func importToJSONLWithStore(ctx context.Context, store storage.Storage, jsonlPat
|
||||
return fmt.Errorf("failed to open JSONL: %w", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
|
||||
// Parse all issues
|
||||
var issues []*types.Issue
|
||||
scanner := bufio.NewScanner(file)
|
||||
lineNum := 0
|
||||
|
||||
|
||||
for scanner.Scan() {
|
||||
lineNum++
|
||||
line := scanner.Text()
|
||||
|
||||
|
||||
// Skip empty lines
|
||||
if line == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
|
||||
// Parse JSON
|
||||
var issue types.Issue
|
||||
if err := json.Unmarshal([]byte(line), &issue); err != nil {
|
||||
@@ -148,22 +148,22 @@ func importToJSONLWithStore(ctx context.Context, store storage.Storage, jsonlPat
|
||||
fmt.Fprintf(os.Stderr, "Warning: failed to parse JSONL line %d: %v\n", lineNum, err)
|
||||
continue
|
||||
}
|
||||
|
||||
|
||||
issues = append(issues, &issue)
|
||||
}
|
||||
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
return fmt.Errorf("failed to read JSONL: %w", err)
|
||||
}
|
||||
|
||||
|
||||
// Use existing import logic with auto-conflict resolution
|
||||
opts := ImportOptions{
|
||||
DryRun: false,
|
||||
SkipUpdate: false,
|
||||
Strict: false,
|
||||
SkipPrefixValidation: true, // Skip prefix validation for auto-import
|
||||
DryRun: false,
|
||||
SkipUpdate: false,
|
||||
Strict: false,
|
||||
SkipPrefixValidation: true, // Skip prefix validation for auto-import
|
||||
}
|
||||
|
||||
|
||||
_, err = importIssuesCore(ctx, "", store, issues, opts)
|
||||
return err
|
||||
}
|
||||
@@ -278,27 +278,37 @@ func createExportFunc(ctx context.Context, store storage.Storage, autoCommit, au
|
||||
|
||||
// Auto-commit if enabled
|
||||
if autoCommit {
|
||||
hasChanges, err := gitHasChanges(exportCtx, jsonlPath)
|
||||
// Try sync branch commit first
|
||||
committed, err := syncBranchCommitAndPush(exportCtx, store, jsonlPath, autoPush, log)
|
||||
if err != nil {
|
||||
log.log("Error checking git status: %v", err)
|
||||
log.log("Sync branch commit failed: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if hasChanges {
|
||||
message := fmt.Sprintf("bd daemon export: %s", time.Now().Format("2006-01-02 15:04:05"))
|
||||
if err := gitCommit(exportCtx, jsonlPath, message); err != nil {
|
||||
log.log("Commit failed: %v", err)
|
||||
// If sync branch not configured, use regular commit
|
||||
if !committed {
|
||||
hasChanges, err := gitHasChanges(exportCtx, jsonlPath)
|
||||
if err != nil {
|
||||
log.log("Error checking git status: %v", err)
|
||||
return
|
||||
}
|
||||
log.log("Committed changes")
|
||||
|
||||
// Auto-push if enabled
|
||||
if autoPush {
|
||||
if err := gitPush(exportCtx); err != nil {
|
||||
log.log("Push failed: %v", err)
|
||||
if hasChanges {
|
||||
message := fmt.Sprintf("bd daemon export: %s", time.Now().Format("2006-01-02 15:04:05"))
|
||||
if err := gitCommit(exportCtx, jsonlPath, message); err != nil {
|
||||
log.log("Commit failed: %v", err)
|
||||
return
|
||||
}
|
||||
log.log("Pushed to remote")
|
||||
log.log("Committed changes")
|
||||
|
||||
// Auto-push if enabled
|
||||
if autoPush {
|
||||
if err := gitPush(exportCtx); err != nil {
|
||||
log.log("Push failed: %v", err)
|
||||
return
|
||||
}
|
||||
log.log("Pushed to remote")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -340,32 +350,41 @@ func createAutoImportFunc(ctx context.Context, store storage.Storage, log daemon
|
||||
// Check JSONL modification time to avoid redundant imports
|
||||
jsonlInfo, err := os.Stat(jsonlPath)
|
||||
if err != nil {
|
||||
log.log("Failed to stat JSONL: %v", err)
|
||||
return
|
||||
}
|
||||
log.log("Failed to stat JSONL: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Get database modification time
|
||||
dbPath := filepath.Join(beadsDir, "beads.db")
|
||||
dbInfo, err := os.Stat(dbPath)
|
||||
if err != nil {
|
||||
log.log("Failed to stat database: %v", err)
|
||||
return
|
||||
}
|
||||
// Get database modification time
|
||||
dbPath := filepath.Join(beadsDir, "beads.db")
|
||||
dbInfo, err := os.Stat(dbPath)
|
||||
if err != nil {
|
||||
log.log("Failed to stat database: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Skip if JSONL is older than database (nothing new to import)
|
||||
if !jsonlInfo.ModTime().After(dbInfo.ModTime()) {
|
||||
log.log("Skipping import: JSONL not newer than database")
|
||||
return
|
||||
}
|
||||
// Skip if JSONL is older than database (nothing new to import)
|
||||
if !jsonlInfo.ModTime().After(dbInfo.ModTime()) {
|
||||
log.log("Skipping import: JSONL not newer than database")
|
||||
return
|
||||
}
|
||||
|
||||
// Pull from git
|
||||
if err := gitPull(importCtx); err != nil {
|
||||
log.log("Pull failed: %v", err)
|
||||
return
|
||||
}
|
||||
log.log("Pulled from remote")
|
||||
// Pull from git (try sync branch first)
|
||||
pulled, err := syncBranchPull(importCtx, store, log)
|
||||
if err != nil {
|
||||
log.log("Sync branch pull failed: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Count issues before import
|
||||
// If sync branch not configured, use regular pull
|
||||
if !pulled {
|
||||
if err := gitPull(importCtx); err != nil {
|
||||
log.log("Pull failed: %v", err)
|
||||
return
|
||||
}
|
||||
log.log("Pulled from remote")
|
||||
}
|
||||
|
||||
// Count issues before import
|
||||
beforeCount, err := countDBIssues(importCtx, store)
|
||||
if err != nil {
|
||||
log.log("Failed to count issues before import: %v", err)
|
||||
@@ -450,52 +469,72 @@ func createSyncFunc(ctx context.Context, store storage.Storage, autoCommit, auto
|
||||
log.log("Exported to JSONL")
|
||||
|
||||
if autoCommit {
|
||||
hasChanges, err := gitHasChanges(syncCtx, jsonlPath)
|
||||
// Try sync branch commit first
|
||||
committed, err := syncBranchCommitAndPush(syncCtx, store, jsonlPath, autoPush, log)
|
||||
if err != nil {
|
||||
log.log("Error checking git status: %v", err)
|
||||
log.log("Sync branch commit failed: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if hasChanges {
|
||||
message := fmt.Sprintf("bd daemon sync: %s", time.Now().Format("2006-01-02 15:04:05"))
|
||||
if err := gitCommit(syncCtx, jsonlPath, message); err != nil {
|
||||
log.log("Commit failed: %v", err)
|
||||
// If sync branch not configured, use regular commit
|
||||
if !committed {
|
||||
hasChanges, err := gitHasChanges(syncCtx, jsonlPath)
|
||||
if err != nil {
|
||||
log.log("Error checking git status: %v", err)
|
||||
return
|
||||
}
|
||||
log.log("Committed changes")
|
||||
|
||||
if hasChanges {
|
||||
message := fmt.Sprintf("bd daemon sync: %s", time.Now().Format("2006-01-02 15:04:05"))
|
||||
if err := gitCommit(syncCtx, jsonlPath, message); err != nil {
|
||||
log.log("Commit failed: %v", err)
|
||||
return
|
||||
}
|
||||
log.log("Committed changes")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if err := gitPull(syncCtx); err != nil {
|
||||
log.log("Pull failed: %v", err)
|
||||
return
|
||||
// Pull (try sync branch first)
|
||||
pulled, err := syncBranchPull(syncCtx, store, log)
|
||||
if err != nil {
|
||||
log.log("Sync branch pull failed: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// If sync branch not configured, use regular pull
|
||||
if !pulled {
|
||||
if err := gitPull(syncCtx); err != nil {
|
||||
log.log("Pull failed: %v", err)
|
||||
return
|
||||
}
|
||||
log.log("Pulled from remote")
|
||||
}
|
||||
log.log("Pulled from remote")
|
||||
|
||||
// Count issues before import for validation
|
||||
beforeCount, err := countDBIssues(syncCtx, store)
|
||||
if err != nil {
|
||||
log.log("Failed to count issues before import: %v", err)
|
||||
return
|
||||
}
|
||||
beforeCount, err := countDBIssues(syncCtx, store)
|
||||
if err != nil {
|
||||
log.log("Failed to count issues before import: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := importToJSONLWithStore(syncCtx, store, jsonlPath); err != nil {
|
||||
log.log("Import failed: %v", err)
|
||||
return
|
||||
}
|
||||
log.log("Imported from JSONL")
|
||||
if err := importToJSONLWithStore(syncCtx, store, jsonlPath); err != nil {
|
||||
log.log("Import failed: %v", err)
|
||||
return
|
||||
}
|
||||
log.log("Imported from JSONL")
|
||||
|
||||
// Validate import didn't cause data loss
|
||||
afterCount, err := countDBIssues(syncCtx, store)
|
||||
if err != nil {
|
||||
log.log("Failed to count issues after import: %v", err)
|
||||
return
|
||||
}
|
||||
// Validate import didn't cause data loss
|
||||
afterCount, err := countDBIssues(syncCtx, store)
|
||||
if err != nil {
|
||||
log.log("Failed to count issues after import: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := validatePostImport(beforeCount, afterCount); err != nil {
|
||||
log.log("Post-import validation failed: %v", err)
|
||||
return
|
||||
}
|
||||
if err := validatePostImport(beforeCount, afterCount); err != nil {
|
||||
log.log("Post-import validation failed: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
if autoPush && autoCommit {
|
||||
if err := gitPush(syncCtx); err != nil {
|
||||
|
||||
241
cmd/bd/daemon_sync_branch.go
Normal file
241
cmd/bd/daemon_sync_branch.go
Normal file
@@ -0,0 +1,241 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/steveyegge/beads/internal/git"
|
||||
"github.com/steveyegge/beads/internal/storage"
|
||||
)
|
||||
|
||||
// syncBranchCommitAndPush commits JSONL to the sync branch using a worktree
|
||||
// Returns true if changes were committed, false if no changes or sync.branch not configured
|
||||
func syncBranchCommitAndPush(ctx context.Context, store storage.Storage, jsonlPath string, autoPush bool, log daemonLogger) (bool, error) {
|
||||
// Get sync.branch config
|
||||
syncBranch, err := store.GetConfig(ctx, "sync.branch")
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to get sync.branch config: %w", err)
|
||||
}
|
||||
|
||||
// If no sync.branch configured, caller should use regular commit logic
|
||||
if syncBranch == "" {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
log.log("Using sync branch: %s", syncBranch)
|
||||
|
||||
// Get repo root
|
||||
repoRoot, err := getGitRoot(ctx)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to get git root: %w", err)
|
||||
}
|
||||
|
||||
// Worktree path is under .git/beads-worktrees/<branch>
|
||||
worktreePath := filepath.Join(repoRoot, ".git", "beads-worktrees", syncBranch)
|
||||
|
||||
// Initialize worktree manager
|
||||
wtMgr := git.NewWorktreeManager(repoRoot)
|
||||
|
||||
// Ensure worktree exists
|
||||
if err := wtMgr.CreateBeadsWorktree(syncBranch, worktreePath); err != nil {
|
||||
return false, fmt.Errorf("failed to create worktree: %w", err)
|
||||
}
|
||||
|
||||
// Check worktree health and repair if needed
|
||||
if err := wtMgr.CheckWorktreeHealth(worktreePath); err != nil {
|
||||
log.log("Worktree health check failed, attempting repair: %v", err)
|
||||
// Try to recreate worktree
|
||||
if err := wtMgr.RemoveBeadsWorktree(worktreePath); err != nil {
|
||||
log.log("Failed to remove unhealthy worktree: %v", err)
|
||||
}
|
||||
if err := wtMgr.CreateBeadsWorktree(syncBranch, worktreePath); err != nil {
|
||||
return false, fmt.Errorf("failed to recreate worktree after health check: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Sync JSONL file to worktree
|
||||
// Use hardcoded relative path since JSONL is always at .beads/issues.jsonl
|
||||
jsonlRelPath := filepath.Join(".beads", "issues.jsonl")
|
||||
if err := wtMgr.SyncJSONLToWorktree(worktreePath, jsonlRelPath); err != nil {
|
||||
return false, fmt.Errorf("failed to sync JSONL to worktree: %w", err)
|
||||
}
|
||||
|
||||
// Check for changes in worktree
|
||||
worktreeJSONLPath := filepath.Join(worktreePath, ".beads", "issues.jsonl")
|
||||
hasChanges, err := gitHasChangesInWorktree(ctx, worktreePath, worktreeJSONLPath)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to check for changes in worktree: %w", err)
|
||||
}
|
||||
|
||||
if !hasChanges {
|
||||
log.log("No changes to commit in sync branch")
|
||||
return false, nil
|
||||
}
|
||||
|
||||
// Commit in worktree
|
||||
message := fmt.Sprintf("bd daemon sync: %s", time.Now().Format("2006-01-02 15:04:05"))
|
||||
if err := gitCommitInWorktree(ctx, worktreePath, worktreeJSONLPath, message); err != nil {
|
||||
return false, fmt.Errorf("failed to commit in worktree: %w", err)
|
||||
}
|
||||
log.log("Committed changes to sync branch %s", syncBranch)
|
||||
|
||||
// Push if enabled
|
||||
if autoPush {
|
||||
if err := gitPushFromWorktree(ctx, worktreePath, syncBranch); err != nil {
|
||||
return false, fmt.Errorf("failed to push from worktree: %w", err)
|
||||
}
|
||||
log.log("Pushed sync branch %s to remote", syncBranch)
|
||||
}
|
||||
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// getGitRoot returns the git repository root directory.
// It shells out to `git rev-parse --show-toplevel`, so the answer depends on
// the process's current working directory.
func getGitRoot(ctx context.Context) (string, error) {
	out, err := exec.CommandContext(ctx, "git", "rev-parse", "--show-toplevel").Output()
	if err != nil {
		return "", fmt.Errorf("failed to get git root: %w", err)
	}
	// git prints the path followed by a newline; strip it.
	return strings.TrimSpace(string(out)), nil
}
|
||||
|
||||
// gitHasChangesInWorktree reports whether git sees pending changes for
// filePath inside the given worktree.
func gitHasChangesInWorktree(ctx context.Context, worktreePath, filePath string) (bool, error) {
	// git wants a path relative to the worktree it is run in.
	rel, err := filepath.Rel(worktreePath, filePath)
	if err != nil {
		return false, fmt.Errorf("failed to make path relative: %w", err)
	}

	// Porcelain output is empty exactly when the path is clean.
	out, err := exec.CommandContext(ctx, "git", "-C", worktreePath, "status", "--porcelain", rel).Output()
	if err != nil {
		return false, fmt.Errorf("git status failed in worktree: %w", err)
	}
	return strings.TrimSpace(string(out)) != "", nil
}
|
||||
|
||||
// gitCommitInWorktree stages filePath and commits it inside the given worktree
// with the supplied commit message.
//
// Returns an error (including git's own diagnostic output) if staging or
// committing fails.
func gitCommitInWorktree(ctx context.Context, worktreePath, filePath, message string) error {
	// git wants a path relative to the worktree it is run in.
	relPath, err := filepath.Rel(worktreePath, filePath)
	if err != nil {
		return fmt.Errorf("failed to make path relative: %w", err)
	}

	// Stage the file. Capture combined output so a failed add surfaces git's
	// diagnostic text, matching the commit step below (Run alone would
	// silently discard stderr and leave the failure opaque in daemon logs).
	addCmd := exec.CommandContext(ctx, "git", "-C", worktreePath, "add", relPath)
	if out, err := addCmd.CombinedOutput(); err != nil {
		return fmt.Errorf("git add failed in worktree: %w\n%s", err, out)
	}

	// Commit the staged change.
	commitCmd := exec.CommandContext(ctx, "git", "-C", worktreePath, "commit", "-m", message)
	if out, err := commitCmd.CombinedOutput(); err != nil {
		return fmt.Errorf("git commit failed in worktree: %w\n%s", err, out)
	}

	return nil
}
|
||||
|
||||
// gitPushFromWorktree pushes the sync branch from the worktree, setting the
// upstream if it is not configured yet.
//
// The remote is taken from branch.<branch>.remote; if that lookup fails or
// yields an empty value, it falls back to "origin".
func gitPushFromWorktree(ctx context.Context, worktreePath, branch string) error {
	// Get remote name (usually "origin").
	remoteCmd := exec.CommandContext(ctx, "git", "-C", worktreePath, "config", "--get", fmt.Sprintf("branch.%s.remote", branch))
	remoteOutput, err := remoteCmd.Output()
	if err != nil {
		// If no remote configured, default to "origin" and set up tracking.
		remoteOutput = []byte("origin\n")
	}
	remote := strings.TrimSpace(string(remoteOutput))
	if remote == "" {
		// git config can exit 0 yet yield an empty value; fall back to the
		// conventional default rather than running `git push --set-upstream "" ...`.
		remote = "origin"
	}

	// Push with explicit remote and branch, setting upstream if not set.
	cmd := exec.CommandContext(ctx, "git", "-C", worktreePath, "push", "--set-upstream", remote, branch)
	output, err := cmd.CombinedOutput()
	if err != nil {
		return fmt.Errorf("git push failed from worktree: %w\n%s", err, output)
	}

	return nil
}
|
||||
|
||||
// syncBranchPull pulls changes from the sync branch into the worktree.
// Returns true if pull was performed, false if sync.branch not configured.
//
// Flow: read the sync.branch config from the store, ensure the hidden
// worktree under .git/beads-worktrees/<branch> exists, run `git pull`
// inside it, then copy .beads/issues.jsonl from the worktree over the
// main repo's copy.
//
// NOTE(review): the final copy overwrites the main JSONL wholesale —
// presumably the caller imports/merges it into the database afterwards;
// confirm against the daemon's sync loop.
func syncBranchPull(ctx context.Context, store storage.Storage, log daemonLogger) (bool, error) {
	// Get sync.branch config
	syncBranch, err := store.GetConfig(ctx, "sync.branch")
	if err != nil {
		return false, fmt.Errorf("failed to get sync.branch config: %w", err)
	}

	// If no sync.branch configured, caller should use regular pull logic
	if syncBranch == "" {
		return false, nil
	}

	// Get repo root
	repoRoot, err := getGitRoot(ctx)
	if err != nil {
		return false, fmt.Errorf("failed to get git root: %w", err)
	}

	// Worktree path is under .git/beads-worktrees/<branch>
	worktreePath := filepath.Join(repoRoot, ".git", "beads-worktrees", syncBranch)

	// Initialize worktree manager
	wtMgr := git.NewWorktreeManager(repoRoot)

	// Ensure worktree exists (CreateBeadsWorktree is expected to be
	// idempotent / repairing — TODO confirm on its definition)
	if err := wtMgr.CreateBeadsWorktree(syncBranch, worktreePath); err != nil {
		return false, fmt.Errorf("failed to create worktree: %w", err)
	}

	// Get remote name from branch.<name>.remote
	remoteCmd := exec.CommandContext(ctx, "git", "-C", worktreePath, "config", "--get", fmt.Sprintf("branch.%s.remote", syncBranch))
	remoteOutput, err := remoteCmd.Output()
	if err != nil {
		// If no remote configured, default to "origin"
		remoteOutput = []byte("origin\n")
	}
	remote := strings.TrimSpace(string(remoteOutput))

	// Pull in worktree; combined output is folded into the error for
	// diagnosability
	cmd := exec.CommandContext(ctx, "git", "-C", worktreePath, "pull", remote, syncBranch)
	output, err := cmd.CombinedOutput()
	if err != nil {
		return false, fmt.Errorf("git pull failed in worktree: %w\n%s", err, output)
	}

	log.log("Pulled sync branch %s", syncBranch)

	// Copy JSONL back to main repo
	worktreeJSONLPath := filepath.Join(worktreePath, ".beads", "issues.jsonl")
	mainJSONLPath := filepath.Join(repoRoot, ".beads", "issues.jsonl")

	// Check if worktree JSONL exists
	if _, err := os.Stat(worktreeJSONLPath); os.IsNotExist(err) {
		// No JSONL in worktree yet, nothing to sync — the pull itself
		// still counts as performed
		return true, nil
	}

	// Copy JSONL from worktree to main repo (full overwrite, not a merge)
	data, err := os.ReadFile(worktreeJSONLPath)
	if err != nil {
		return false, fmt.Errorf("failed to read worktree JSONL: %w", err)
	}

	if err := os.WriteFile(mainJSONLPath, data, 0644); err != nil {
		return false, fmt.Errorf("failed to write main JSONL: %w", err)
	}

	log.log("Synced JSONL from sync branch to main repo")

	return true, nil
}
|
||||
751
cmd/bd/daemon_sync_branch_test.go
Normal file
751
cmd/bd/daemon_sync_branch_test.go
Normal file
@@ -0,0 +1,751 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/steveyegge/beads/internal/storage/sqlite"
|
||||
"github.com/steveyegge/beads/internal/types"
|
||||
)
|
||||
|
||||
// TestSyncBranchCommitAndPush_NotConfigured tests backward compatibility
|
||||
// when sync.branch is not configured (should return false, no error)
|
||||
func TestSyncBranchCommitAndPush_NotConfigured(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("Skipping integration test in short mode")
|
||||
}
|
||||
|
||||
tmpDir := t.TempDir()
|
||||
initTestGitRepo(t, tmpDir)
|
||||
|
||||
// Setup test store
|
||||
beadsDir := filepath.Join(tmpDir, ".beads")
|
||||
if err := os.MkdirAll(beadsDir, 0755); err != nil {
|
||||
t.Fatalf("Failed to create .beads dir: %v", err)
|
||||
}
|
||||
|
||||
dbPath := filepath.Join(beadsDir, "test.db")
|
||||
store, err := sqlite.New(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create store: %v", err)
|
||||
}
|
||||
defer store.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
if err := store.SetConfig(ctx, "issue_prefix", "test"); err != nil {
|
||||
t.Fatalf("Failed to set prefix: %v", err)
|
||||
}
|
||||
|
||||
// Create test issue
|
||||
issue := &types.Issue{
|
||||
Title: "Test issue",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
if err := store.CreateIssue(ctx, issue, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue: %v", err)
|
||||
}
|
||||
|
||||
// Export to JSONL
|
||||
jsonlPath := filepath.Join(beadsDir, "issues.jsonl")
|
||||
if err := exportToJSONLWithStore(ctx, store, jsonlPath); err != nil {
|
||||
t.Fatalf("Failed to export: %v", err)
|
||||
}
|
||||
|
||||
// Change to temp directory for git operations
|
||||
oldWd, err := os.Getwd()
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get working directory: %v", err)
|
||||
}
|
||||
defer os.Chdir(oldWd)
|
||||
|
||||
if err := os.Chdir(tmpDir); err != nil {
|
||||
t.Fatalf("Failed to change directory: %v", err)
|
||||
}
|
||||
|
||||
// Test with no sync.branch configured
|
||||
log, logMsgs := newTestSyncBranchLogger()
|
||||
_ = logMsgs // unused in this test
|
||||
committed, err := syncBranchCommitAndPush(ctx, store, jsonlPath, false, log)
|
||||
|
||||
// Should return false (not committed), no error
|
||||
if err != nil {
|
||||
t.Errorf("Expected no error when sync.branch not configured, got: %v", err)
|
||||
}
|
||||
if committed {
|
||||
t.Error("Expected committed=false when sync.branch not configured")
|
||||
}
|
||||
}
|
||||
|
||||
// TestSyncBranchCommitAndPush_Success tests successful sync branch commit
|
||||
func TestSyncBranchCommitAndPush_Success(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("Skipping integration test in short mode")
|
||||
}
|
||||
|
||||
tmpDir := t.TempDir()
|
||||
initTestGitRepo(t, tmpDir)
|
||||
|
||||
// Setup test store
|
||||
beadsDir := filepath.Join(tmpDir, ".beads")
|
||||
if err := os.MkdirAll(beadsDir, 0755); err != nil {
|
||||
t.Fatalf("Failed to create .beads dir: %v", err)
|
||||
}
|
||||
|
||||
dbPath := filepath.Join(beadsDir, "test.db")
|
||||
store, err := sqlite.New(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create store: %v", err)
|
||||
}
|
||||
defer store.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
if err := store.SetConfig(ctx, "issue_prefix", "test"); err != nil {
|
||||
t.Fatalf("Failed to set prefix: %v", err)
|
||||
}
|
||||
|
||||
// Configure sync.branch
|
||||
syncBranch := "beads-sync"
|
||||
if err := store.SetConfig(ctx, "sync.branch", syncBranch); err != nil {
|
||||
t.Fatalf("Failed to set sync.branch: %v", err)
|
||||
}
|
||||
|
||||
// Initial commit on main branch (before creating JSONL)
|
||||
oldWd, err := os.Getwd()
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get working directory: %v", err)
|
||||
}
|
||||
defer os.Chdir(oldWd)
|
||||
|
||||
if err := os.Chdir(tmpDir); err != nil {
|
||||
t.Fatalf("Failed to change directory: %v", err)
|
||||
}
|
||||
|
||||
initMainBranch(t, tmpDir)
|
||||
|
||||
// Create test issue
|
||||
issue := &types.Issue{
|
||||
Title: "Test sync branch issue",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
if err := store.CreateIssue(ctx, issue, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue: %v", err)
|
||||
}
|
||||
|
||||
// Export to JSONL
|
||||
jsonlPath := filepath.Join(beadsDir, "issues.jsonl")
|
||||
if err := exportToJSONLWithStore(ctx, store, jsonlPath); err != nil {
|
||||
t.Fatalf("Failed to export: %v", err)
|
||||
}
|
||||
|
||||
// Test sync branch commit (without push)
|
||||
log, logMsgs := newTestSyncBranchLogger()
|
||||
_ = logMsgs // unused in this test
|
||||
committed, err := syncBranchCommitAndPush(ctx, store, jsonlPath, false, log)
|
||||
|
||||
if err != nil {
|
||||
t.Fatalf("syncBranchCommitAndPush failed: %v", err)
|
||||
}
|
||||
if !committed {
|
||||
t.Error("Expected committed=true")
|
||||
}
|
||||
|
||||
// Verify worktree was created
|
||||
worktreePath := filepath.Join(tmpDir, ".git", "beads-worktrees", syncBranch)
|
||||
if _, err := os.Stat(worktreePath); os.IsNotExist(err) {
|
||||
t.Errorf("Worktree not created at %s", worktreePath)
|
||||
}
|
||||
|
||||
// Verify sync branch exists
|
||||
cmd := exec.Command("git", "branch", "--list", syncBranch)
|
||||
cmd.Dir = tmpDir
|
||||
output, err := cmd.Output()
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to list branches: %v", err)
|
||||
}
|
||||
if !strings.Contains(string(output), syncBranch) {
|
||||
t.Errorf("Sync branch %s not created", syncBranch)
|
||||
}
|
||||
|
||||
// Verify JSONL was synced to worktree
|
||||
worktreeJSONL := filepath.Join(worktreePath, ".beads", "issues.jsonl")
|
||||
if _, err := os.Stat(worktreeJSONL); os.IsNotExist(err) {
|
||||
t.Error("JSONL not synced to worktree")
|
||||
}
|
||||
|
||||
// Verify commit was made in worktree
|
||||
cmd = exec.Command("git", "-C", worktreePath, "log", "--oneline", "-1")
|
||||
output, err = cmd.Output()
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get log: %v", err)
|
||||
}
|
||||
if !strings.Contains(string(output), "bd daemon sync") {
|
||||
t.Errorf("Expected commit message with 'bd daemon sync', got: %s", string(output))
|
||||
}
|
||||
}
|
||||
|
||||
// TestSyncBranchCommitAndPush_NoChanges tests behavior when no changes to commit
|
||||
func TestSyncBranchCommitAndPush_NoChanges(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("Skipping integration test in short mode")
|
||||
}
|
||||
|
||||
tmpDir := t.TempDir()
|
||||
initTestGitRepo(t, tmpDir)
|
||||
initMainBranch(t, tmpDir)
|
||||
|
||||
beadsDir := filepath.Join(tmpDir, ".beads")
|
||||
if err := os.MkdirAll(beadsDir, 0755); err != nil {
|
||||
t.Fatalf("Failed to create .beads dir: %v", err)
|
||||
}
|
||||
|
||||
dbPath := filepath.Join(beadsDir, "test.db")
|
||||
store, err := sqlite.New(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create store: %v", err)
|
||||
}
|
||||
defer store.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
if err := store.SetConfig(ctx, "issue_prefix", "test"); err != nil {
|
||||
t.Fatalf("Failed to set prefix: %v", err)
|
||||
}
|
||||
|
||||
syncBranch := "beads-sync"
|
||||
if err := store.SetConfig(ctx, "sync.branch", syncBranch); err != nil {
|
||||
t.Fatalf("Failed to set sync.branch: %v", err)
|
||||
}
|
||||
|
||||
issue := &types.Issue{
|
||||
Title: "Test issue",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
if err := store.CreateIssue(ctx, issue, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue: %v", err)
|
||||
}
|
||||
|
||||
jsonlPath := filepath.Join(beadsDir, "issues.jsonl")
|
||||
if err := exportToJSONLWithStore(ctx, store, jsonlPath); err != nil {
|
||||
t.Fatalf("Failed to export: %v", err)
|
||||
}
|
||||
|
||||
oldWd, err := os.Getwd()
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get working directory: %v", err)
|
||||
}
|
||||
defer os.Chdir(oldWd)
|
||||
|
||||
if err := os.Chdir(tmpDir); err != nil {
|
||||
t.Fatalf("Failed to change directory: %v", err)
|
||||
}
|
||||
|
||||
log, logMsgs := newTestSyncBranchLogger()
|
||||
|
||||
// First commit should succeed
|
||||
committed, err := syncBranchCommitAndPush(ctx, store, jsonlPath, false, log)
|
||||
if err != nil {
|
||||
t.Fatalf("First commit failed: %v", err)
|
||||
}
|
||||
if !committed {
|
||||
t.Error("Expected first commit to succeed")
|
||||
}
|
||||
|
||||
// Second commit with no changes should return false
|
||||
committed, err = syncBranchCommitAndPush(ctx, store, jsonlPath, false, log)
|
||||
if err != nil {
|
||||
t.Fatalf("Second commit failed: %v", err)
|
||||
}
|
||||
if committed {
|
||||
t.Error("Expected committed=false when no changes")
|
||||
}
|
||||
|
||||
// Verify log message
|
||||
if !strings.Contains(*logMsgs, "No changes to commit") {
|
||||
t.Error("Expected 'No changes to commit' log message")
|
||||
}
|
||||
}
|
||||
|
||||
// TestSyncBranchCommitAndPush_WorktreeHealthCheck tests worktree repair logic
|
||||
func TestSyncBranchCommitAndPush_WorktreeHealthCheck(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("Skipping integration test in short mode")
|
||||
}
|
||||
|
||||
tmpDir := t.TempDir()
|
||||
initTestGitRepo(t, tmpDir)
|
||||
initMainBranch(t, tmpDir)
|
||||
|
||||
beadsDir := filepath.Join(tmpDir, ".beads")
|
||||
if err := os.MkdirAll(beadsDir, 0755); err != nil {
|
||||
t.Fatalf("Failed to create .beads dir: %v", err)
|
||||
}
|
||||
|
||||
dbPath := filepath.Join(beadsDir, "test.db")
|
||||
store, err := sqlite.New(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create store: %v", err)
|
||||
}
|
||||
defer store.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
if err := store.SetConfig(ctx, "issue_prefix", "test"); err != nil {
|
||||
t.Fatalf("Failed to set prefix: %v", err)
|
||||
}
|
||||
|
||||
syncBranch := "beads-sync"
|
||||
if err := store.SetConfig(ctx, "sync.branch", syncBranch); err != nil {
|
||||
t.Fatalf("Failed to set sync.branch: %v", err)
|
||||
}
|
||||
|
||||
issue := &types.Issue{
|
||||
Title: "Test issue",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
if err := store.CreateIssue(ctx, issue, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue: %v", err)
|
||||
}
|
||||
|
||||
jsonlPath := filepath.Join(beadsDir, "issues.jsonl")
|
||||
if err := exportToJSONLWithStore(ctx, store, jsonlPath); err != nil {
|
||||
t.Fatalf("Failed to export: %v", err)
|
||||
}
|
||||
|
||||
oldWd, err := os.Getwd()
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get working directory: %v", err)
|
||||
}
|
||||
defer os.Chdir(oldWd)
|
||||
|
||||
if err := os.Chdir(tmpDir); err != nil {
|
||||
t.Fatalf("Failed to change directory: %v", err)
|
||||
}
|
||||
|
||||
log, logMsgs := newTestSyncBranchLogger()
|
||||
|
||||
// First commit to create worktree
|
||||
committed, err := syncBranchCommitAndPush(ctx, store, jsonlPath, false, log)
|
||||
if err != nil {
|
||||
t.Fatalf("First commit failed: %v", err)
|
||||
}
|
||||
if !committed {
|
||||
t.Error("Expected first commit to succeed")
|
||||
}
|
||||
|
||||
// Corrupt the worktree by deleting .git file
|
||||
worktreePath := filepath.Join(tmpDir, ".git", "beads-worktrees", syncBranch)
|
||||
worktreeGitFile := filepath.Join(worktreePath, ".git")
|
||||
if err := os.Remove(worktreeGitFile); err != nil {
|
||||
t.Fatalf("Failed to corrupt worktree: %v", err)
|
||||
}
|
||||
|
||||
// Update issue to create new changes
|
||||
if err := store.UpdateIssue(ctx, issue.ID, map[string]interface{}{
|
||||
"priority": 2,
|
||||
}, "test"); err != nil {
|
||||
t.Fatalf("Failed to update issue: %v", err)
|
||||
}
|
||||
|
||||
if err := exportToJSONLWithStore(ctx, store, jsonlPath); err != nil {
|
||||
t.Fatalf("Failed to export: %v", err)
|
||||
}
|
||||
|
||||
*logMsgs = "" // Reset log
|
||||
|
||||
// Should detect corruption and repair (CreateBeadsWorktree handles this silently)
|
||||
committed, err = syncBranchCommitAndPush(ctx, store, jsonlPath, false, log)
|
||||
if err != nil {
|
||||
t.Fatalf("Commit after corruption failed: %v", err)
|
||||
}
|
||||
if !committed {
|
||||
t.Error("Expected commit to succeed after repair")
|
||||
}
|
||||
|
||||
// Verify worktree is functional again - .git file should be restored
|
||||
if _, err := os.Stat(worktreeGitFile); os.IsNotExist(err) {
|
||||
t.Error("Worktree .git file not restored")
|
||||
}
|
||||
}
|
||||
|
||||
// TestSyncBranchPull_NotConfigured tests pull with no sync.branch configured
|
||||
func TestSyncBranchPull_NotConfigured(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("Skipping integration test in short mode")
|
||||
}
|
||||
|
||||
tmpDir := t.TempDir()
|
||||
initTestGitRepo(t, tmpDir)
|
||||
|
||||
beadsDir := filepath.Join(tmpDir, ".beads")
|
||||
if err := os.MkdirAll(beadsDir, 0755); err != nil {
|
||||
t.Fatalf("Failed to create .beads dir: %v", err)
|
||||
}
|
||||
|
||||
dbPath := filepath.Join(beadsDir, "test.db")
|
||||
store, err := sqlite.New(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create store: %v", err)
|
||||
}
|
||||
defer store.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
if err := store.SetConfig(ctx, "issue_prefix", "test"); err != nil {
|
||||
t.Fatalf("Failed to set prefix: %v", err)
|
||||
}
|
||||
|
||||
oldWd, err := os.Getwd()
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get working directory: %v", err)
|
||||
}
|
||||
defer os.Chdir(oldWd)
|
||||
|
||||
if err := os.Chdir(tmpDir); err != nil {
|
||||
t.Fatalf("Failed to change directory: %v", err)
|
||||
}
|
||||
|
||||
log, logMsgs := newTestSyncBranchLogger()
|
||||
_ = logMsgs // unused in this test
|
||||
pulled, err := syncBranchPull(ctx, store, log)
|
||||
|
||||
// Should return false (not pulled), no error
|
||||
if err != nil {
|
||||
t.Errorf("Expected no error when sync.branch not configured, got: %v", err)
|
||||
}
|
||||
if pulled {
|
||||
t.Error("Expected pulled=false when sync.branch not configured")
|
||||
}
|
||||
}
|
||||
|
||||
// TestSyncBranchPull_Success tests successful pull from sync branch
|
||||
func TestSyncBranchPull_Success(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("Skipping integration test in short mode")
|
||||
}
|
||||
|
||||
// Create remote repository
|
||||
tmpDir := t.TempDir()
|
||||
remoteDir := filepath.Join(tmpDir, "remote")
|
||||
if err := os.MkdirAll(remoteDir, 0755); err != nil {
|
||||
t.Fatalf("Failed to create remote dir: %v", err)
|
||||
}
|
||||
runGitCmd(t, remoteDir, "init", "--bare")
|
||||
|
||||
// Create clone1 (will push changes)
|
||||
clone1Dir := filepath.Join(tmpDir, "clone1")
|
||||
runGitCmd(t, tmpDir, "clone", remoteDir, clone1Dir)
|
||||
configureGit(t, clone1Dir)
|
||||
|
||||
clone1BeadsDir := filepath.Join(clone1Dir, ".beads")
|
||||
if err := os.MkdirAll(clone1BeadsDir, 0755); err != nil {
|
||||
t.Fatalf("Failed to create .beads dir: %v", err)
|
||||
}
|
||||
|
||||
clone1DBPath := filepath.Join(clone1BeadsDir, "test.db")
|
||||
store1, err := sqlite.New(clone1DBPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create store1: %v", err)
|
||||
}
|
||||
defer store1.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
if err := store1.SetConfig(ctx, "issue_prefix", "test"); err != nil {
|
||||
t.Fatalf("Failed to set prefix: %v", err)
|
||||
}
|
||||
|
||||
syncBranch := "beads-sync"
|
||||
if err := store1.SetConfig(ctx, "sync.branch", syncBranch); err != nil {
|
||||
t.Fatalf("Failed to set sync.branch: %v", err)
|
||||
}
|
||||
|
||||
// Create issue in clone1
|
||||
issue := &types.Issue{
|
||||
Title: "Test sync pull issue",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
if err := store1.CreateIssue(ctx, issue, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue: %v", err)
|
||||
}
|
||||
|
||||
clone1JSONLPath := filepath.Join(clone1BeadsDir, "issues.jsonl")
|
||||
if err := exportToJSONLWithStore(ctx, store1, clone1JSONLPath); err != nil {
|
||||
t.Fatalf("Failed to export: %v", err)
|
||||
}
|
||||
|
||||
// Commit to main branch first
|
||||
initMainBranch(t, clone1Dir)
|
||||
runGitCmd(t, clone1Dir, "push", "origin", "master")
|
||||
|
||||
// Change to clone1 directory for sync branch operations
|
||||
oldWd, err := os.Getwd()
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get working directory: %v", err)
|
||||
}
|
||||
defer os.Chdir(oldWd)
|
||||
|
||||
if err := os.Chdir(clone1Dir); err != nil {
|
||||
t.Fatalf("Failed to change directory: %v", err)
|
||||
}
|
||||
|
||||
// Push to sync branch using syncBranchCommitAndPush
|
||||
log, logMsgs := newTestSyncBranchLogger()
|
||||
_ = logMsgs // unused in this test
|
||||
committed, err := syncBranchCommitAndPush(ctx, store1, clone1JSONLPath, true, log)
|
||||
if err != nil {
|
||||
t.Fatalf("syncBranchCommitAndPush failed: %v", err)
|
||||
}
|
||||
if !committed {
|
||||
t.Error("Expected commit to succeed")
|
||||
}
|
||||
|
||||
// Create clone2 (will pull changes)
|
||||
clone2Dir := filepath.Join(tmpDir, "clone2")
|
||||
runGitCmd(t, tmpDir, "clone", remoteDir, clone2Dir)
|
||||
configureGit(t, clone2Dir)
|
||||
|
||||
clone2BeadsDir := filepath.Join(clone2Dir, ".beads")
|
||||
clone2DBPath := filepath.Join(clone2BeadsDir, "test.db")
|
||||
store2, err := sqlite.New(clone2DBPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create store2: %v", err)
|
||||
}
|
||||
defer store2.Close()
|
||||
|
||||
if err := store2.SetConfig(ctx, "issue_prefix", "test"); err != nil {
|
||||
t.Fatalf("Failed to set prefix: %v", err)
|
||||
}
|
||||
|
||||
if err := store2.SetConfig(ctx, "sync.branch", syncBranch); err != nil {
|
||||
t.Fatalf("Failed to set sync.branch: %v", err)
|
||||
}
|
||||
|
||||
// Change to clone2 directory
|
||||
if err := os.Chdir(clone2Dir); err != nil {
|
||||
t.Fatalf("Failed to change directory: %v", err)
|
||||
}
|
||||
|
||||
// Pull from sync branch
|
||||
log2, logMsgs2 := newTestSyncBranchLogger()
|
||||
pulled, err := syncBranchPull(ctx, store2, log2)
|
||||
if err != nil {
|
||||
t.Fatalf("syncBranchPull failed: %v", err)
|
||||
}
|
||||
if !pulled {
|
||||
t.Error("Expected pulled=true")
|
||||
}
|
||||
|
||||
// Verify JSONL was copied to main repo
|
||||
clone2JSONLPath := filepath.Join(clone2BeadsDir, "issues.jsonl")
|
||||
if _, err := os.Stat(clone2JSONLPath); os.IsNotExist(err) {
|
||||
t.Error("JSONL not copied to main repo after pull")
|
||||
}
|
||||
|
||||
// Verify JSONL content matches
|
||||
clone1Data, err := os.ReadFile(clone1JSONLPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to read clone1 JSONL: %v", err)
|
||||
}
|
||||
|
||||
clone2Data, err := os.ReadFile(clone2JSONLPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to read clone2 JSONL: %v", err)
|
||||
}
|
||||
|
||||
if string(clone1Data) != string(clone2Data) {
|
||||
t.Error("JSONL content mismatch after pull")
|
||||
}
|
||||
|
||||
// Verify pull message in log
|
||||
if !strings.Contains(*logMsgs2, "Pulled sync branch") {
|
||||
t.Error("Expected 'Pulled sync branch' log message")
|
||||
}
|
||||
}
|
||||
|
||||
// TestSyncBranchIntegration_EndToEnd tests full sync workflow
|
||||
func TestSyncBranchIntegration_EndToEnd(t *testing.T) {
|
||||
if testing.Short() {
|
||||
t.Skip("Skipping integration test in short mode")
|
||||
}
|
||||
|
||||
// Setup remote and two clones
|
||||
tmpDir := t.TempDir()
|
||||
remoteDir := filepath.Join(tmpDir, "remote")
|
||||
os.MkdirAll(remoteDir, 0755)
|
||||
runGitCmd(t, remoteDir, "init", "--bare")
|
||||
|
||||
// Clone1: Agent A
|
||||
clone1Dir := filepath.Join(tmpDir, "clone1")
|
||||
runGitCmd(t, tmpDir, "clone", remoteDir, clone1Dir)
|
||||
configureGit(t, clone1Dir)
|
||||
|
||||
clone1BeadsDir := filepath.Join(clone1Dir, ".beads")
|
||||
os.MkdirAll(clone1BeadsDir, 0755)
|
||||
clone1DBPath := filepath.Join(clone1BeadsDir, "test.db")
|
||||
store1, _ := sqlite.New(clone1DBPath)
|
||||
defer store1.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
store1.SetConfig(ctx, "issue_prefix", "test")
|
||||
|
||||
syncBranch := "beads-sync"
|
||||
store1.SetConfig(ctx, "sync.branch", syncBranch)
|
||||
|
||||
// Agent A creates issue
|
||||
issue := &types.Issue{
|
||||
Title: "E2E test issue",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
store1.CreateIssue(ctx, issue, "agent-a")
|
||||
issueID := issue.ID
|
||||
|
||||
clone1JSONLPath := filepath.Join(clone1BeadsDir, "issues.jsonl")
|
||||
exportToJSONLWithStore(ctx, store1, clone1JSONLPath)
|
||||
|
||||
// Initial commit to main
|
||||
initMainBranch(t, clone1Dir)
|
||||
runGitCmd(t, clone1Dir, "push", "origin", "master")
|
||||
|
||||
// Change to clone1 directory
|
||||
oldWd, _ := os.Getwd()
|
||||
defer os.Chdir(oldWd)
|
||||
os.Chdir(clone1Dir)
|
||||
|
||||
// Agent A commits to sync branch
|
||||
log, logMsgs := newTestSyncBranchLogger()
|
||||
_ = logMsgs // unused in this test
|
||||
committed, err := syncBranchCommitAndPush(ctx, store1, clone1JSONLPath, true, log)
|
||||
if err != nil {
|
||||
t.Fatalf("syncBranchCommitAndPush failed: %v", err)
|
||||
}
|
||||
if !committed {
|
||||
t.Error("Expected commit to succeed")
|
||||
}
|
||||
|
||||
// Clone2: Agent B
|
||||
clone2Dir := filepath.Join(tmpDir, "clone2")
|
||||
runGitCmd(t, tmpDir, "clone", remoteDir, clone2Dir)
|
||||
configureGit(t, clone2Dir)
|
||||
|
||||
clone2BeadsDir := filepath.Join(clone2Dir, ".beads")
|
||||
clone2DBPath := filepath.Join(clone2BeadsDir, "test.db")
|
||||
store2, _ := sqlite.New(clone2DBPath)
|
||||
defer store2.Close()
|
||||
|
||||
store2.SetConfig(ctx, "issue_prefix", "test")
|
||||
store2.SetConfig(ctx, "sync.branch", syncBranch)
|
||||
|
||||
// Change to clone2 directory
|
||||
os.Chdir(clone2Dir)
|
||||
|
||||
// Agent B pulls from sync branch
|
||||
log2, logMsgs2 := newTestSyncBranchLogger()
|
||||
_ = logMsgs2 // unused in this test
|
||||
pulled, err := syncBranchPull(ctx, store2, log2)
|
||||
if err != nil {
|
||||
t.Fatalf("syncBranchPull failed: %v", err)
|
||||
}
|
||||
if !pulled {
|
||||
t.Error("Expected pull to succeed")
|
||||
}
|
||||
|
||||
// Import JSONL to database
|
||||
clone2JSONLPath := filepath.Join(clone2BeadsDir, "issues.jsonl")
|
||||
if err := importToJSONLWithStore(ctx, store2, clone2JSONLPath); err != nil {
|
||||
t.Fatalf("Failed to import: %v", err)
|
||||
}
|
||||
|
||||
// Verify issue exists in clone2
|
||||
clone2Issue, err := store2.GetIssue(ctx, issueID)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get issue in clone2: %v", err)
|
||||
}
|
||||
if clone2Issue.Title != issue.Title {
|
||||
t.Errorf("Issue title mismatch: expected %s, got %s", issue.Title, clone2Issue.Title)
|
||||
}
|
||||
|
||||
// Agent B closes the issue
|
||||
store2.CloseIssue(ctx, issueID, "Done by Agent B", "agent-b")
|
||||
exportToJSONLWithStore(ctx, store2, clone2JSONLPath)
|
||||
|
||||
// Agent B commits to sync branch
|
||||
committed, err = syncBranchCommitAndPush(ctx, store2, clone2JSONLPath, true, log2)
|
||||
if err != nil {
|
||||
t.Fatalf("syncBranchCommitAndPush failed for clone2: %v", err)
|
||||
}
|
||||
if !committed {
|
||||
t.Error("Expected commit to succeed for clone2")
|
||||
}
|
||||
|
||||
// Agent A pulls the update
|
||||
os.Chdir(clone1Dir)
|
||||
pulled, err = syncBranchPull(ctx, store1, log)
|
||||
if err != nil {
|
||||
t.Fatalf("syncBranchPull failed for clone1: %v", err)
|
||||
}
|
||||
if !pulled {
|
||||
t.Error("Expected pull to succeed for clone1")
|
||||
}
|
||||
|
||||
// Import to see the closed status
|
||||
importToJSONLWithStore(ctx, store1, clone1JSONLPath)
|
||||
|
||||
// Verify Agent A sees the closed issue
|
||||
updatedIssue, err := store1.GetIssue(ctx, issueID)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get issue in clone1: %v", err)
|
||||
}
|
||||
if updatedIssue.Status != types.StatusClosed {
|
||||
t.Errorf("Issue not closed in clone1: status=%s", updatedIssue.Status)
|
||||
}
|
||||
}
|
||||
|
||||
// Helper types for testing
|
||||
|
||||
func newTestSyncBranchLogger() (daemonLogger, *string) {
|
||||
messages := ""
|
||||
logger := daemonLogger{
|
||||
logFunc: func(format string, args ...interface{}) {
|
||||
messages += "\n" + format
|
||||
},
|
||||
}
|
||||
return logger, &messages
|
||||
}
|
||||
|
||||
// initMainBranch creates an initial commit on main branch
|
||||
// The JSONL file should not exist yet when this is called
|
||||
func initMainBranch(t *testing.T, dir string) {
|
||||
t.Helper()
|
||||
// Create a simple README to have something to commit
|
||||
readme := filepath.Join(dir, "README.md")
|
||||
if err := os.WriteFile(readme, []byte("# Test Repository\n"), 0644); err != nil {
|
||||
t.Fatalf("Failed to write README: %v", err)
|
||||
}
|
||||
runGitCmd(t, dir, "add", "README.md")
|
||||
runGitCmd(t, dir, "commit", "-m", "Initial commit")
|
||||
}
|
||||
Reference in New Issue
Block a user