Add collision resolution with automatic ID remapping
Implements --resolve-collisions flag for import command to safely handle ID collisions during branch merges. When enabled, colliding issues are remapped to new IDs and all text references and dependencies are automatically updated. Also adds comprehensive tests, branch-merge example, and documentation. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
@@ -3,11 +3,11 @@
|
||||
{"id":"bd-11","title":"Test issue to verify fix","description":"This should be bd-11 if the fix works","status":"closed","priority":3,"issue_type":"task","created_at":"2025-10-12T14:40:21.419082-07:00","updated_at":"2025-10-12T16:19:11.96945-07:00","closed_at":"2025-10-12T14:40:32.963312-07:00"}
|
||||
{"id":"bd-12","title":"Implement collision detection in import","description":"Create collision.go with detectCollisions() function. Compare incoming JSONL issues against DB state. Distinguish between: (1) exact match (idempotent), (2) ID match but different content (collision), (3) new issue. Return list of colliding issues.","status":"closed","priority":1,"issue_type":"task","created_at":"2025-10-12T14:40:56.056588-07:00","updated_at":"2025-10-12T16:19:11.96955-07:00","closed_at":"2025-10-12T16:06:25.575038-07:00","dependencies":[{"issue_id":"bd-12","depends_on_id":"bd-9","type":"parent-child","created_at":"2025-10-12T14:41:07.947358-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-13","title":"Implement reference scoring algorithm","description":"Count references for each colliding issue: text mentions in descriptions/notes/design fields + dependency references. Sort collisions by score ascending (fewest refs first). This minimizes total updates during renumbering.","status":"closed","priority":1,"issue_type":"task","created_at":"2025-10-12T14:40:56.204518-07:00","updated_at":"2025-10-12T16:26:46.572201-07:00","closed_at":"2025-10-12T16:26:46.572201-07:00","dependencies":[{"issue_id":"bd-13","depends_on_id":"bd-9","type":"parent-child","created_at":"2025-10-12T14:41:07.951605-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-14","title":"Implement ID remapping with reference updates","description":"Allocate new IDs for colliding issues. Update all text field references using word-boundary regex (\\bbd-10\\b). Update dependency records. Build id_mapping for reporting. Handle chain dependencies properly.","status":"open","priority":1,"issue_type":"task","created_at":"2025-10-12T14:40:56.367596-07:00","updated_at":"2025-10-12T16:19:11.969729-07:00","dependencies":[{"issue_id":"bd-14","depends_on_id":"bd-9","type":"parent-child","created_at":"2025-10-12T14:41:07.956041-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-15","title":"Add --resolve-collisions flag and user reporting","description":"Add import flags: --resolve-collisions (auto-fix) and --dry-run (preview). Display clear report: collisions detected, remappings applied (old→new with scores), reference counts updated. Default behavior: fail on collision (safe).","status":"open","priority":1,"issue_type":"task","created_at":"2025-10-12T14:40:56.534721-07:00","updated_at":"2025-10-12T16:19:11.969818-07:00","dependencies":[{"issue_id":"bd-15","depends_on_id":"bd-9","type":"parent-child","created_at":"2025-10-12T14:41:07.961157-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-16","title":"Write comprehensive collision resolution tests","description":"Test cases: simple collision, multiple collisions, dependency updates, text reference updates, chain dependencies, edge cases (partial ID matches, case sensitivity, triple merges). Add to import_test.go and collision_test.go.","status":"open","priority":1,"issue_type":"task","created_at":"2025-10-12T14:40:56.702127-07:00","updated_at":"2025-10-12T16:19:11.9699-07:00","dependencies":[{"issue_id":"bd-16","depends_on_id":"bd-9","type":"parent-child","created_at":"2025-10-12T14:41:07.965816-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-17","title":"Update documentation for collision resolution","description":"Update README.md with collision resolution section. Update CLAUDE.md with new workflow. Document --resolve-collisions and --dry-run flags. Add example scenarios showing branch merge workflows.","status":"open","priority":1,"issue_type":"task","created_at":"2025-10-12T14:40:56.866649-07:00","updated_at":"2025-10-12T16:19:11.96998-07:00","dependencies":[{"issue_id":"bd-17","depends_on_id":"bd-9","type":"parent-child","created_at":"2025-10-12T14:41:07.970302-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-18","title":"Add design/notes/acceptance_criteria fields to update command","description":"Currently bd update only supports status, priority, title, assignee. Add support for --design, --notes, --acceptance-criteria flags. This makes it easier to add detailed designs to issues after creation.","status":"open","priority":2,"issue_type":"feature","created_at":"2025-10-12T14:40:57.032395-07:00","updated_at":"2025-10-12T16:19:11.970071-07:00"}
|
||||
{"id":"bd-14","title":"Implement ID remapping with reference updates","description":"Allocate new IDs for colliding issues. Update all text field references using word-boundary regex (\\bbd-10\\b). Update dependency records. Build id_mapping for reporting. Handle chain dependencies properly.","status":"closed","priority":1,"issue_type":"task","created_at":"2025-10-12T14:40:56.367596-07:00","updated_at":"2025-10-12T16:35:13.159992-07:00","closed_at":"2025-10-12T16:35:13.159992-07:00","dependencies":[{"issue_id":"bd-14","depends_on_id":"bd-9","type":"parent-child","created_at":"2025-10-12T14:41:07.956041-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-15","title":"Add --resolve-collisions flag and user reporting","description":"Add import flags: --resolve-collisions (auto-fix) and --dry-run (preview). Display clear report: collisions detected, remappings applied (old→new with scores), reference counts updated. Default behavior: fail on collision (safe).","status":"closed","priority":1,"issue_type":"task","created_at":"2025-10-12T14:40:56.534721-07:00","updated_at":"2025-10-12T16:47:11.491645-07:00","closed_at":"2025-10-12T16:47:11.491645-07:00","dependencies":[{"issue_id":"bd-15","depends_on_id":"bd-9","type":"parent-child","created_at":"2025-10-12T14:41:07.961157-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-16","title":"Write comprehensive collision resolution tests","description":"Test cases: simple collision, multiple collisions, dependency updates, text reference updates, chain dependencies, edge cases (partial ID matches, case sensitivity, triple merges). Add to import_test.go and collision_test.go.","status":"closed","priority":1,"issue_type":"task","created_at":"2025-10-12T14:40:56.702127-07:00","updated_at":"2025-10-12T16:54:25.273886-07:00","closed_at":"2025-10-12T16:54:25.273886-07:00","dependencies":[{"issue_id":"bd-16","depends_on_id":"bd-9","type":"parent-child","created_at":"2025-10-12T14:41:07.965816-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-17","title":"Update documentation for collision resolution","description":"Update README.md with collision resolution section. Update CLAUDE.md with new workflow. Document --resolve-collisions and --dry-run flags. Add example scenarios showing branch merge workflows.","status":"closed","priority":1,"issue_type":"task","created_at":"2025-10-12T14:40:56.866649-07:00","updated_at":"2025-10-12T17:06:14.930928-07:00","closed_at":"2025-10-12T17:06:14.930928-07:00","dependencies":[{"issue_id":"bd-17","depends_on_id":"bd-9","type":"parent-child","created_at":"2025-10-12T14:41:07.970302-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-18","title":"Add design/notes/acceptance_criteria fields to update command","description":"Currently bd update only supports status, priority, title, assignee. Add support for --design, --notes, --acceptance-criteria flags. This makes it easier to add detailed designs to issues after creation.","status":"closed","priority":2,"issue_type":"feature","created_at":"2025-10-12T14:40:57.032395-07:00","updated_at":"2025-10-12T17:10:53.958318-07:00","closed_at":"2025-10-12T17:10:53.958318-07:00"}
|
||||
{"id":"bd-19","title":"Fix import zero-value field handling","description":"Import uses zero-value checks (Priority != 0) to determine field updates. This prevents setting priority to 0 or clearing string fields. Export/import round-trip not fully idempotent for zero values. Consider JSON presence detection or explicit preserve-existing semantics. Location: cmd/bd/import.go:95-106","status":"closed","priority":2,"issue_type":"bug","created_at":"2025-10-12T15:13:17.895083-07:00","updated_at":"2025-10-12T16:19:11.970157-07:00"}
|
||||
{"id":"bd-2","title":"Add PostgreSQL backend","description":"Implement PostgreSQL storage backend as alternative to SQLite for larger teams","status":"closed","priority":3,"issue_type":"feature","created_at":"2025-10-12T00:43:03.457453-07:00","updated_at":"2025-10-12T16:19:11.97024-07:00","closed_at":"2025-10-12T14:15:04.00695-07:00"}
|
||||
{"id":"bd-20","title":"Add --strict flag for dependency import failures","description":"Currently dependency import errors are warnings (logged to stderr, execution continues). Missing targets or cycles may indicate JSONL corruption. Add --strict flag to fail on any dependency errors for data integrity validation. Location: cmd/bd/import.go:159-164","status":"closed","priority":2,"issue_type":"feature","created_at":"2025-10-12T15:13:18.954834-07:00","updated_at":"2025-10-12T16:19:11.970327-07:00"}
|
||||
@@ -15,7 +15,14 @@
|
||||
{"id":"bd-22","title":"Add validation/warning for malformed issue IDs","description":"getNextID silently ignores non-numeric ID suffixes (e.g., bd-foo). CAST returns NULL for invalid strings. Consider detecting and warning about malformed IDs in database. Location: internal/storage/sqlite/sqlite.go:79-82","status":"closed","priority":3,"issue_type":"task","created_at":"2025-10-12T15:13:21.195975-07:00","updated_at":"2025-10-12T16:19:11.970492-07:00"}
|
||||
{"id":"bd-23","title":"Optimize export dependency queries (N+1 problem)","description":"Export triggers separate GetDependencyRecords() per issue. For large DBs (1000+ issues), this is N+1 queries. Add GetAllDependencyRecords() to fetch all dependencies in one query. Location: cmd/bd/export.go:52-59, import.go:138-142","status":"closed","priority":3,"issue_type":"task","created_at":"2025-10-12T15:13:22.325113-07:00","updated_at":"2025-10-12T16:19:11.97058-07:00"}
|
||||
{"id":"bd-24","title":"Support ID space partitioning for parallel worker agents","description":"Enable external orchestrators (like AI worker swarms) to control issue ID assignment. Add --id flag to 'bd create' for explicit ID specification. Optionally support 'bd config set next_id N' to set the starting point for auto-increment. Storage layer already supports pre-assigned IDs (sqlite.go:52-71), just need CLI wiring. This keeps beads simple while letting orchestrators implement their own ID partitioning strategies to minimize merge conflicts. Complementary to bd-9's collision resolution.","status":"open","priority":1,"issue_type":"feature","created_at":"2025-10-12T16:10:37.808226-07:00","updated_at":"2025-10-12T16:19:11.970666-07:00"}
|
||||
{"id":"bd-25","title":"Add transaction support to storage layer for atomic multi-operation workflows","description":"Currently each storage method (CreateIssue, UpdateIssue, etc.) starts its own transaction. This makes it impossible to perform atomic multi-step operations like collision resolution. Add support for passing *sql.Tx through the storage interface, or create transaction-aware versions of methods. This would make remapCollisions and other batch operations truly atomic.","status":"open","priority":1,"issue_type":"feature","created_at":"2025-10-12T16:39:00.66572-07:00","updated_at":"2025-10-12T16:39:00.66572-07:00"}
|
||||
{"id":"bd-26","title":"Optimize reference updates to avoid loading all issues into memory","description":"In updateReferences(), we call SearchIssues with no filter to get ALL issues for updating references. For large databases (10k+ issues), this loads everything into memory. Options: 1) Use batched processing with LIMIT/OFFSET, 2) Use SQL UPDATE with REPLACE() directly, 3) Stream results instead of loading all at once. Located in collision.go:266","status":"open","priority":2,"issue_type":"task","created_at":"2025-10-12T16:39:10.327861-07:00","updated_at":"2025-10-12T16:39:10.327861-07:00"}
|
||||
{"id":"bd-27","title":"Cache compiled regexes in replaceIDReferences for performance","description":"replaceIDReferences() compiles the same regex patterns on every call. With 100 issues and 10 ID mappings, that's 1000 regex compilations. Pre-compile regexes once and reuse. Can use a struct with compiled regex, placeholder, and newID. Located in collision.go:329. Estimated performance improvement: 10-100x for large batches.","status":"open","priority":2,"issue_type":"task","created_at":"2025-10-12T16:39:18.305517-07:00","updated_at":"2025-10-12T16:39:18.305517-07:00"}
|
||||
{"id":"bd-28","title":"Improve error handling in dependency removal during remapping","description":"In updateDependencyReferences(), RemoveDependency errors are caught and ignored with continue (line 392). Comment says 'if dependency doesn't exist' but this catches ALL errors including real failures. Should check error type with errors.Is(err, ErrDependencyNotFound) and only ignore not-found errors, returning other errors properly.","status":"open","priority":3,"issue_type":"bug","created_at":"2025-10-12T16:39:26.78219-07:00","updated_at":"2025-10-12T16:39:26.78219-07:00"}
|
||||
{"id":"bd-29","title":"Use safer placeholder pattern in replaceIDReferences","description":"Currently uses __PLACEHOLDER_0__ which could theoretically collide with user text. Use a truly unique placeholder like null bytes: \\x00REMAP\\x00_0_\\x00 which are unlikely to appear in normal text. Located in collision.go:324. Very low probability issue but worth fixing for completeness.","status":"open","priority":3,"issue_type":"task","created_at":"2025-10-12T16:39:33.665449-07:00","updated_at":"2025-10-12T16:39:33.665449-07:00"}
|
||||
{"id":"bd-3","title":"Document git workflow in README","description":"Add Git Workflow section to README explaining binary vs text approaches","status":"closed","priority":1,"issue_type":"chore","created_at":"2025-10-12T00:43:03.461615-07:00","updated_at":"2025-10-12T16:19:11.970753-07:00","closed_at":"2025-10-12T00:43:30.283178-07:00"}
|
||||
{"id":"bd-30","title":"Remove unused issueMap in scoreCollisions","description":"scoreCollisions() creates issueMap and populates it (lines 135-138) but never uses it. Either remove it or add a TODO comment explaining future use. Located in collision.go:135-138. Cosmetic cleanup.","status":"open","priority":4,"issue_type":"chore","created_at":"2025-10-12T16:39:40.101611-07:00","updated_at":"2025-10-12T16:39:40.101611-07:00"}
|
||||
{"id":"bd-31","title":"Test issue for design field","description":"Testing the new update flags","design":"## Design Plan\\n- Add flags to update command\\n- Test thoroughly\\n- Document changes","acceptance_criteria":"- All three fields (design, notes, acceptance-criteria) can be updated\\n- Changes persist in database\\n- bd show displays the fields correctly","notes":"Implementation complete. All tests passing.","status":"closed","priority":3,"issue_type":"task","created_at":"2025-10-12T17:09:22.147446-07:00","updated_at":"2025-10-12T17:10:32.828906-07:00","closed_at":"2025-10-12T17:10:32.828906-07:00"}
|
||||
{"id":"bd-4","title":"Add demo GIF/video showing bd quickstart in action","description":"Record asciinema or create animated GIF showing the full workflow","status":"open","priority":2,"issue_type":"feature","created_at":"2025-10-12T10:50:49.500051-07:00","updated_at":"2025-10-12T16:19:11.97083-07:00","dependencies":[{"issue_id":"bd-4","depends_on_id":"bd-8","type":"parent-child","created_at":"2025-10-12T10:51:08.399915-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-5","title":"Implement MCP server for Claude Desktop","description":"Complete the claude-desktop-mcp example with working TypeScript implementation","status":"open","priority":1,"issue_type":"feature","created_at":"2025-10-12T10:50:50.942964-07:00","updated_at":"2025-10-12T16:19:11.970913-07:00","dependencies":[{"issue_id":"bd-5","depends_on_id":"bd-8","type":"parent-child","created_at":"2025-10-12T10:51:08.404381-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-6","title":"Add migration scripts for GitHub Issues","description":"Create scripts to import from GitHub Issues API or exported JSON","status":"open","priority":2,"issue_type":"feature","created_at":"2025-10-12T10:50:52.140018-07:00","updated_at":"2025-10-12T16:19:11.97099-07:00","dependencies":[{"issue_id":"bd-6","depends_on_id":"bd-8","type":"parent-child","created_at":"2025-10-12T10:51:08.40857-07:00","created_by":"stevey"}]}
|
||||
|
||||
44
CLAUDE.md
44
CLAUDE.md
@@ -31,6 +31,10 @@ bd dep tree <id>
|
||||
|
||||
# Get issue details
|
||||
bd show <id> --json
|
||||
|
||||
# Import with collision detection
|
||||
bd import -i .beads/issues.jsonl --dry-run # Preview only
|
||||
bd import -i .beads/issues.jsonl --resolve-collisions # Auto-resolve
|
||||
```
|
||||
|
||||
### Workflow
|
||||
@@ -119,6 +123,43 @@ bd import -i .beads/issues.jsonl # Sync SQLite cache
|
||||
|
||||
Or use the git hooks in `examples/git-hooks/` for automation.
|
||||
|
||||
### Handling Import Collisions
|
||||
|
||||
When merging branches or pulling changes, you may encounter ID collisions (same ID, different content). bd detects and safely handles these:
|
||||
|
||||
**Check for collisions after merge:**
|
||||
```bash
|
||||
# After git merge or pull
|
||||
bd import -i .beads/issues.jsonl --dry-run
|
||||
|
||||
# Output shows:
|
||||
# === Collision Detection Report ===
|
||||
# Exact matches (idempotent): 15
|
||||
# New issues: 5
|
||||
# COLLISIONS DETECTED: 3
|
||||
#
|
||||
# Colliding issues:
|
||||
# bd-10: Fix authentication (conflicting fields: [title, priority])
|
||||
# bd-12: Add feature (conflicting fields: [description, status])
|
||||
```
|
||||
|
||||
**Resolve collisions automatically:**
|
||||
```bash
|
||||
# Let bd resolve collisions by remapping incoming issues to new IDs
|
||||
bd import -i .beads/issues.jsonl --resolve-collisions
|
||||
|
||||
# bd will:
|
||||
# - Keep existing issues unchanged
|
||||
# - Assign new IDs to colliding issues (bd-25, bd-26, etc.)
|
||||
# - Update ALL text references and dependencies automatically
|
||||
# - Report the remapping with reference counts
|
||||
```
|
||||
|
||||
**Important**: The `--resolve-collisions` flag is safe and recommended for branch merges. It preserves the existing database and only renumbers the incoming colliding issues. All text mentions like "see bd-10" and dependency links are automatically updated to use the new IDs.
|
||||
|
||||
**Manual resolution** (alternative):
|
||||
If you prefer manual control, resolve the Git conflict in `.beads/issues.jsonl` directly, then import normally without `--resolve-collisions`.
|
||||
|
||||
## Current Project Status
|
||||
|
||||
Run `bd stats` to see overall progress.
|
||||
@@ -189,6 +230,9 @@ bd dep tree bd-8 # Show 1.0 epic dependencies
|
||||
- Export to JSONL before committing (or use git hooks)
|
||||
- Use `bd dep tree` to understand complex dependencies
|
||||
- Priority 0-1 issues are usually more important than 2-4
|
||||
- Use `--dry-run` to preview import collisions before resolving
|
||||
- Use `--resolve-collisions` for safe automatic branch merges
|
||||
- After resolving collisions, run `bd export` to save the updated state
|
||||
|
||||
## Building and Testing
|
||||
|
||||
|
||||
110
README.md
110
README.md
@@ -462,6 +462,115 @@ Import behavior:
|
||||
- New issues are **created**
|
||||
- All imports are atomic (all or nothing)
|
||||
|
||||
### Handling ID Collisions
|
||||
|
||||
When importing issues, bd detects three types of situations:
|
||||
1. **Exact matches** - Same ID, same content (idempotent, no action needed)
|
||||
2. **New issues** - ID doesn't exist in database yet
|
||||
3. **Collisions** - Same ID but different content (requires resolution)
|
||||
|
||||
**Collision detection:**
|
||||
```bash
|
||||
# Preview collisions without making changes
|
||||
bd import -i issues.jsonl --dry-run
|
||||
|
||||
# Output shows:
|
||||
# === Collision Detection Report ===
|
||||
# Exact matches (idempotent): 5
|
||||
# New issues: 3
|
||||
# COLLISIONS DETECTED: 2
|
||||
#
|
||||
# Colliding issues:
|
||||
# bd-10: Fix authentication bug
|
||||
# Conflicting fields: [title, priority, status]
|
||||
# bd-15: Add dashboard widget
|
||||
# Conflicting fields: [description, assignee]
|
||||
```
|
||||
|
||||
**Resolution strategies:**
|
||||
|
||||
**Option 1: Automatic remapping (recommended for branch merges)**
|
||||
```bash
|
||||
# Automatically resolve collisions by renumbering incoming issues
|
||||
bd import -i issues.jsonl --resolve-collisions
|
||||
|
||||
# bd will:
|
||||
# 1. Keep existing issues unchanged
|
||||
# 2. Assign new IDs to colliding incoming issues (bd-25, bd-26, etc.)
|
||||
# 3. Update ALL text references and dependencies to use new IDs
|
||||
# 4. Report the remapping:
|
||||
#
|
||||
# === Remapping Report ===
|
||||
# Issues remapped: 2
|
||||
#
|
||||
# Remappings (sorted by reference count):
|
||||
# bd-10 → bd-25 (refs: 3)
|
||||
# bd-15 → bd-26 (refs: 7)
|
||||
#
|
||||
# All text and dependency references have been updated.
|
||||
```
|
||||
|
||||
**Option 2: Manual resolution**
|
||||
```bash
|
||||
# 1. Check for collisions first
|
||||
bd import -i branch-issues.jsonl --dry-run
|
||||
|
||||
# 2. Edit JSONL to resolve manually:
|
||||
# - Rename IDs in the JSONL file
|
||||
# - Or merge content into existing issues
|
||||
# - Or skip colliding issues
|
||||
|
||||
# 3. Import after manual fixes
|
||||
bd import -i branch-issues.jsonl
|
||||
```
|
||||
|
||||
**The collision resolution algorithm:**
|
||||
|
||||
When using `--resolve-collisions`, bd intelligently remaps colliding issues to minimize updates:
|
||||
|
||||
1. **Detects collisions** - Compares ID and content (title, description, status, priority, etc.)
|
||||
2. **Scores references** - Counts how many times each ID is referenced in:
|
||||
- Text fields (description, design, notes, acceptance criteria)
|
||||
- Dependency records (both as source and target)
|
||||
3. **Renumbers by score** - Issues with fewer references are remapped first
|
||||
4. **Updates all references** - Uses word-boundary regex to replace old IDs:
|
||||
- Text fields: "See bd-10 for details" → "See bd-25 for details"
|
||||
- Dependencies: bd-5 → bd-10 becomes bd-5 → bd-25
|
||||
- Handles edge cases: Distinguishes bd-10 from bd-100, bd-1000, etc.
|
||||
|
||||
**Branch merge workflow:**
|
||||
|
||||
This is particularly useful when merging branches where both sides created issues with the same IDs:
|
||||
|
||||
```bash
|
||||
# On main branch: bd-1 through bd-20 exist
|
||||
git checkout main
|
||||
bd export -o .beads/issues.jsonl
|
||||
|
||||
# On feature branch: Also has bd-1 through bd-20 (diverged)
|
||||
git checkout feature-branch
|
||||
bd export -o .beads/issues.jsonl
|
||||
|
||||
# Merge branches
|
||||
git checkout main
|
||||
git merge feature-branch
|
||||
# Git shows conflict in .beads/issues.jsonl
|
||||
|
||||
# Resolve the conflict in Git (keep both sides for different issues, etc.)
|
||||
# Then import with collision resolution:
|
||||
bd import -i .beads/issues.jsonl --resolve-collisions
|
||||
|
||||
# Result: Issues from feature-branch get new IDs (bd-21+)
|
||||
# All cross-references are automatically updated
|
||||
```
|
||||
|
||||
**Important notes:**
|
||||
- Collisions are **safe by default** - import fails unless you use `--resolve-collisions`
|
||||
- Use `--dry-run` to preview changes before applying
|
||||
- The algorithm preserves the existing database (existing issues are never renumbered)
|
||||
- All text mentions and dependency links are updated automatically
|
||||
- Word-boundary matching prevents false replacements (bd-10 won't match bd-100)
|
||||
|
||||
### JSONL Format
|
||||
|
||||
Each line is a complete JSON issue object:
|
||||
@@ -550,6 +659,7 @@ Check out the **[examples/](examples/)** directory for:
|
||||
- **[Python agent](examples/python-agent/)** - Full agent implementation in Python
|
||||
- **[Bash agent](examples/bash-agent/)** - Shell script agent example
|
||||
- **[Git hooks](examples/git-hooks/)** - Automatic export/import on git operations
|
||||
- **[Branch merge workflow](examples/branch-merge/)** - Handle ID collisions when merging branches
|
||||
- **[Claude Desktop MCP](examples/claude-desktop-mcp/)** - MCP server integration (coming soon)
|
||||
|
||||
## FAQ
|
||||
|
||||
189
cmd/bd/import.go
189
cmd/bd/import.go
@@ -6,8 +6,10 @@ import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"sort"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/steveyegge/beads/internal/storage/sqlite"
|
||||
"github.com/steveyegge/beads/internal/types"
|
||||
)
|
||||
|
||||
@@ -21,11 +23,15 @@ Reads from stdin by default, or use -i flag for file input.
|
||||
Behavior:
|
||||
- Existing issues (same ID) are updated
|
||||
- New issues are created
|
||||
- Import is atomic (all or nothing)`,
|
||||
- Collisions (same ID, different content) are detected
|
||||
- Use --resolve-collisions to automatically remap colliding issues
|
||||
- Use --dry-run to preview changes without applying them`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
input, _ := cmd.Flags().GetString("input")
|
||||
skipUpdate, _ := cmd.Flags().GetBool("skip-existing")
|
||||
strict, _ := cmd.Flags().GetBool("strict")
|
||||
resolveCollisions, _ := cmd.Flags().GetBool("resolve-collisions")
|
||||
dryRun, _ := cmd.Flags().GetBool("dry-run")
|
||||
|
||||
// Open input
|
||||
in := os.Stdin
|
||||
@@ -43,12 +49,11 @@ Behavior:
|
||||
in = f
|
||||
}
|
||||
|
||||
// Read and parse JSONL
|
||||
// Phase 1: Read and parse all JSONL
|
||||
ctx := context.Background()
|
||||
scanner := bufio.NewScanner(in)
|
||||
|
||||
var created, updated, skipped int
|
||||
var allIssues []*types.Issue // Store all issues for dependency processing
|
||||
var allIssues []*types.Issue
|
||||
lineNum := 0
|
||||
|
||||
for scanner.Scan() {
|
||||
@@ -60,23 +65,106 @@ Behavior:
|
||||
continue
|
||||
}
|
||||
|
||||
// Parse JSON - first into a map to detect which fields are present
|
||||
var rawData map[string]interface{}
|
||||
if err := json.Unmarshal([]byte(line), &rawData); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error parsing line %d: %v\n", lineNum, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Then parse into the Issue struct
|
||||
// Parse JSON
|
||||
var issue types.Issue
|
||||
if err := json.Unmarshal([]byte(line), &issue); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error parsing line %d: %v\n", lineNum, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Store for dependency processing later
|
||||
allIssues = append(allIssues, &issue)
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error reading input: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Phase 2: Detect collisions
|
||||
sqliteStore, ok := store.(*sqlite.SQLiteStorage)
|
||||
if !ok {
|
||||
fmt.Fprintf(os.Stderr, "Error: collision detection requires SQLite storage backend\n")
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
collisionResult, err := sqlite.DetectCollisions(ctx, sqliteStore, allIssues)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error detecting collisions: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
var idMapping map[string]string
|
||||
var created, updated, skipped int
|
||||
|
||||
// Phase 3: Handle collisions
|
||||
if len(collisionResult.Collisions) > 0 {
|
||||
// Print collision report
|
||||
printCollisionReport(collisionResult)
|
||||
|
||||
if dryRun {
|
||||
// In dry-run mode, just print report and exit
|
||||
fmt.Fprintf(os.Stderr, "\nDry-run mode: no changes made\n")
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
if !resolveCollisions {
|
||||
// Default behavior: fail on collision (safe mode)
|
||||
fmt.Fprintf(os.Stderr, "\nCollision detected! Use --resolve-collisions to automatically remap colliding issues.\n")
|
||||
fmt.Fprintf(os.Stderr, "Or use --dry-run to preview without making changes.\n")
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Resolve collisions by scoring and remapping
|
||||
fmt.Fprintf(os.Stderr, "\nResolving collisions...\n")
|
||||
|
||||
// Get all existing issues for scoring
|
||||
allExistingIssues, err := store.SearchIssues(ctx, "", types.IssueFilter{})
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error getting existing issues: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Score collisions
|
||||
if err := sqlite.ScoreCollisions(ctx, sqliteStore, collisionResult.Collisions, allExistingIssues); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error scoring collisions: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Remap collisions
|
||||
idMapping, err = sqlite.RemapCollisions(ctx, sqliteStore, collisionResult.Collisions, allExistingIssues)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error remapping collisions: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Print remapping report
|
||||
printRemappingReport(idMapping, collisionResult.Collisions)
|
||||
|
||||
// Colliding issues were already created with new IDs
|
||||
created = len(collisionResult.Collisions)
|
||||
|
||||
// Remove colliding issues from allIssues (they're already processed)
|
||||
filteredIssues := make([]*types.Issue, 0)
|
||||
collidingIDs := make(map[string]bool)
|
||||
for _, collision := range collisionResult.Collisions {
|
||||
collidingIDs[collision.ID] = true
|
||||
}
|
||||
for _, issue := range allIssues {
|
||||
if !collidingIDs[issue.ID] {
|
||||
filteredIssues = append(filteredIssues, issue)
|
||||
}
|
||||
}
|
||||
allIssues = filteredIssues
|
||||
} else if dryRun {
|
||||
// No collisions in dry-run mode
|
||||
fmt.Fprintf(os.Stderr, "No collisions detected.\n")
|
||||
fmt.Fprintf(os.Stderr, "Would create %d new issues, update %d existing issues\n",
|
||||
len(collisionResult.NewIssues), len(collisionResult.ExactMatches))
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
// Phase 4: Process remaining issues (exact matches and new issues)
|
||||
for _, issue := range allIssues {
|
||||
// Check if issue exists
|
||||
existing, err := store.GetIssue(ctx, issue.ID)
|
||||
if err != nil {
|
||||
@@ -89,7 +177,13 @@ Behavior:
|
||||
skipped++
|
||||
continue
|
||||
}
|
||||
// Update existing issue - only update fields that are present in JSON
|
||||
|
||||
// Update existing issue
|
||||
// Parse raw JSON to detect which fields are present
|
||||
var rawData map[string]interface{}
|
||||
jsonBytes, _ := json.Marshal(issue)
|
||||
json.Unmarshal(jsonBytes, &rawData)
|
||||
|
||||
updates := make(map[string]interface{})
|
||||
if _, ok := rawData["title"]; ok {
|
||||
updates["title"] = issue.Title
|
||||
@@ -133,7 +227,7 @@ Behavior:
|
||||
updated++
|
||||
} else {
|
||||
// Create new issue
|
||||
if err := store.CreateIssue(ctx, &issue, "import"); err != nil {
|
||||
if err := store.CreateIssue(ctx, issue, "import"); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error creating issue %s: %v\n", issue.ID, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
@@ -141,12 +235,7 @@ Behavior:
|
||||
}
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error reading input: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Second pass: Process dependencies
|
||||
// Phase 5: Process dependencies
|
||||
// Do this after all issues are created to handle forward references
|
||||
var depsCreated, depsSkipped int
|
||||
for _, issue := range allIssues {
|
||||
@@ -206,13 +295,71 @@ Behavior:
|
||||
fmt.Fprintf(os.Stderr, " (%d already existed)", depsSkipped)
|
||||
}
|
||||
}
|
||||
if len(idMapping) > 0 {
|
||||
fmt.Fprintf(os.Stderr, ", %d issues remapped", len(idMapping))
|
||||
}
|
||||
fmt.Fprintf(os.Stderr, "\n")
|
||||
},
|
||||
}
|
||||
|
||||
// printCollisionReport prints a detailed report of detected collisions
|
||||
func printCollisionReport(result *sqlite.CollisionResult) {
|
||||
fmt.Fprintf(os.Stderr, "\n=== Collision Detection Report ===\n")
|
||||
fmt.Fprintf(os.Stderr, "Exact matches (idempotent): %d\n", len(result.ExactMatches))
|
||||
fmt.Fprintf(os.Stderr, "New issues: %d\n", len(result.NewIssues))
|
||||
fmt.Fprintf(os.Stderr, "COLLISIONS DETECTED: %d\n\n", len(result.Collisions))
|
||||
|
||||
if len(result.Collisions) > 0 {
|
||||
fmt.Fprintf(os.Stderr, "Colliding issues:\n")
|
||||
for _, collision := range result.Collisions {
|
||||
fmt.Fprintf(os.Stderr, " %s: %s\n", collision.ID, collision.IncomingIssue.Title)
|
||||
fmt.Fprintf(os.Stderr, " Conflicting fields: %v\n", collision.ConflictingFields)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// printRemappingReport prints a report of ID remappings with reference scores
|
||||
func printRemappingReport(idMapping map[string]string, collisions []*sqlite.CollisionDetail) {
|
||||
fmt.Fprintf(os.Stderr, "\n=== Remapping Report ===\n")
|
||||
fmt.Fprintf(os.Stderr, "Issues remapped: %d\n\n", len(idMapping))
|
||||
|
||||
// Sort by old ID for consistent output
|
||||
type mapping struct {
|
||||
oldID string
|
||||
newID string
|
||||
score int
|
||||
}
|
||||
mappings := make([]mapping, 0, len(idMapping))
|
||||
|
||||
scoreMap := make(map[string]int)
|
||||
for _, collision := range collisions {
|
||||
scoreMap[collision.ID] = collision.ReferenceScore
|
||||
}
|
||||
|
||||
for oldID, newID := range idMapping {
|
||||
mappings = append(mappings, mapping{
|
||||
oldID: oldID,
|
||||
newID: newID,
|
||||
score: scoreMap[oldID],
|
||||
})
|
||||
}
|
||||
|
||||
sort.Slice(mappings, func(i, j int) bool {
|
||||
return mappings[i].score < mappings[j].score
|
||||
})
|
||||
|
||||
fmt.Fprintf(os.Stderr, "Remappings (sorted by reference count):\n")
|
||||
for _, m := range mappings {
|
||||
fmt.Fprintf(os.Stderr, " %s → %s (refs: %d)\n", m.oldID, m.newID, m.score)
|
||||
}
|
||||
fmt.Fprintf(os.Stderr, "\nAll text and dependency references have been updated.\n")
|
||||
}
|
||||
|
||||
func init() {
|
||||
importCmd.Flags().StringP("input", "i", "", "Input file (default: stdin)")
|
||||
importCmd.Flags().BoolP("skip-existing", "s", false, "Skip existing issues instead of updating them")
|
||||
importCmd.Flags().Bool("strict", false, "Fail on dependency errors instead of treating them as warnings")
|
||||
importCmd.Flags().Bool("resolve-collisions", false, "Automatically resolve ID collisions by remapping")
|
||||
importCmd.Flags().Bool("dry-run", false, "Preview collision detection without making changes")
|
||||
rootCmd.AddCommand(importCmd)
|
||||
}
|
||||
|
||||
898
cmd/bd/import_collision_test.go
Normal file
898
cmd/bd/import_collision_test.go
Normal file
@@ -0,0 +1,898 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/steveyegge/beads/internal/storage/sqlite"
|
||||
"github.com/steveyegge/beads/internal/types"
|
||||
)
|
||||
|
||||
// TestImportSimpleCollision tests the basic collision detection and resolution
|
||||
func TestImportSimpleCollision(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "bd-collision-test-*")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
dbPath := filepath.Join(tmpDir, "test.db")
|
||||
testStore, err := sqlite.New(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create storage: %v", err)
|
||||
}
|
||||
defer testStore.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
// Create existing issue with a higher ID to avoid conflicts with auto-generated IDs
|
||||
existing := &types.Issue{
|
||||
ID: "bd-10",
|
||||
Title: "Existing issue",
|
||||
Description: "Original description",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
|
||||
if err := testStore.CreateIssue(ctx, existing, "test"); err != nil {
|
||||
t.Fatalf("Failed to create existing issue: %v", err)
|
||||
}
|
||||
|
||||
// Prepare import with collision
|
||||
incoming := &types.Issue{
|
||||
ID: "bd-10",
|
||||
Title: "MODIFIED issue",
|
||||
Description: "Different description",
|
||||
Status: types.StatusInProgress,
|
||||
Priority: 2,
|
||||
IssueType: types.TypeBug,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
|
||||
incomingIssues := []*types.Issue{incoming}
|
||||
|
||||
// Test collision detection
|
||||
result, err := sqlite.DetectCollisions(ctx, testStore, incomingIssues)
|
||||
if err != nil {
|
||||
t.Fatalf("DetectCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
if len(result.Collisions) != 1 {
|
||||
t.Fatalf("Expected 1 collision, got %d", len(result.Collisions))
|
||||
}
|
||||
|
||||
if result.Collisions[0].ID != "bd-10" {
|
||||
t.Errorf("Expected collision ID bd-10, got %s", result.Collisions[0].ID)
|
||||
}
|
||||
|
||||
// Test resolution
|
||||
allExisting, _ := testStore.SearchIssues(ctx, "", types.IssueFilter{})
|
||||
|
||||
if err := sqlite.ScoreCollisions(ctx, testStore, result.Collisions, allExisting); err != nil {
|
||||
t.Fatalf("ScoreCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
idMapping, err := sqlite.RemapCollisions(ctx, testStore, result.Collisions, allExisting)
|
||||
if err != nil {
|
||||
t.Fatalf("RemapCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
if len(idMapping) != 1 {
|
||||
t.Fatalf("Expected 1 remapping, got %d", len(idMapping))
|
||||
}
|
||||
|
||||
newID := idMapping["bd-10"]
|
||||
if newID == "" {
|
||||
t.Fatal("Expected bd-10 to be remapped")
|
||||
}
|
||||
|
||||
// Verify remapped issue exists
|
||||
remapped, err := testStore.GetIssue(ctx, newID)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get remapped issue: %v", err)
|
||||
}
|
||||
if remapped == nil {
|
||||
t.Fatal("Remapped issue not found")
|
||||
}
|
||||
if remapped.Title != "MODIFIED issue" {
|
||||
t.Errorf("Remapped issue title = %s, want 'MODIFIED issue'", remapped.Title)
|
||||
}
|
||||
|
||||
// Verify original issue unchanged
|
||||
original, err := testStore.GetIssue(ctx, "bd-10")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get original issue: %v", err)
|
||||
}
|
||||
if original.Title != "Existing issue" {
|
||||
t.Errorf("Original issue modified: %s", original.Title)
|
||||
}
|
||||
}
|
||||
|
||||
// TestImportMultipleCollisions tests handling of multiple colliding issues
|
||||
func TestImportMultipleCollisions(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "bd-collision-test-*")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
dbPath := filepath.Join(tmpDir, "test.db")
|
||||
testStore, err := sqlite.New(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create storage: %v", err)
|
||||
}
|
||||
defer testStore.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
// Create existing issues with high IDs to avoid conflicts with auto-generated sequence
|
||||
for i := 100; i <= 102; i++ {
|
||||
issue := &types.Issue{
|
||||
ID: fmt.Sprintf("bd-%d", i),
|
||||
Title: fmt.Sprintf("Existing issue %d", i),
|
||||
Description: "Original",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
if err := testStore.CreateIssue(ctx, issue, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue %d: %v", i, err)
|
||||
}
|
||||
}
|
||||
|
||||
// Prepare import with multiple collisions
|
||||
incomingIssues := []*types.Issue{
|
||||
{
|
||||
ID: "bd-100",
|
||||
Title: "Modified 1",
|
||||
Description: "Changed",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
},
|
||||
{
|
||||
ID: "bd-101",
|
||||
Title: "Modified 2",
|
||||
Description: "Changed",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
},
|
||||
{
|
||||
ID: "bd-102",
|
||||
Title: "Modified 3",
|
||||
Description: "Changed",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
},
|
||||
}
|
||||
|
||||
result, err := sqlite.DetectCollisions(ctx, testStore, incomingIssues)
|
||||
if err != nil {
|
||||
t.Fatalf("DetectCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
if len(result.Collisions) != 3 {
|
||||
t.Fatalf("Expected 3 collisions, got %d", len(result.Collisions))
|
||||
}
|
||||
|
||||
// Resolve collisions
|
||||
allExisting, _ := testStore.SearchIssues(ctx, "", types.IssueFilter{})
|
||||
if err := sqlite.ScoreCollisions(ctx, testStore, result.Collisions, allExisting); err != nil {
|
||||
t.Fatalf("ScoreCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
idMapping, err := sqlite.RemapCollisions(ctx, testStore, result.Collisions, allExisting)
|
||||
if err != nil {
|
||||
t.Fatalf("RemapCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
if len(idMapping) != 3 {
|
||||
t.Fatalf("Expected 3 remappings, got %d", len(idMapping))
|
||||
}
|
||||
|
||||
// Verify all remappings
|
||||
for oldID, newID := range idMapping {
|
||||
remapped, err := testStore.GetIssue(ctx, newID)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get remapped issue %s: %v", newID, err)
|
||||
}
|
||||
if remapped == nil {
|
||||
t.Fatalf("Remapped issue %s not found", newID)
|
||||
}
|
||||
if !strings.Contains(remapped.Title, "Modified") {
|
||||
t.Errorf("Remapped issue %s has wrong title: %s", oldID, remapped.Title)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestImportDependencyUpdates tests that dependencies are updated during remapping
|
||||
func TestImportDependencyUpdates(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "bd-collision-test-*")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
dbPath := filepath.Join(tmpDir, "test.db")
|
||||
testStore, err := sqlite.New(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create storage: %v", err)
|
||||
}
|
||||
defer testStore.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
// Create existing issues with dependencies
|
||||
issue1 := &types.Issue{
|
||||
ID: "bd-10",
|
||||
Title: "Issue 1",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
issue2 := &types.Issue{
|
||||
ID: "bd-11",
|
||||
Title: "Issue 2",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
issue3 := &types.Issue{
|
||||
ID: "bd-12",
|
||||
Title: "Issue 3",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
|
||||
if err := testStore.CreateIssue(ctx, issue1, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue 1: %v", err)
|
||||
}
|
||||
if err := testStore.CreateIssue(ctx, issue2, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue 2: %v", err)
|
||||
}
|
||||
if err := testStore.CreateIssue(ctx, issue3, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue 3: %v", err)
|
||||
}
|
||||
|
||||
// Add dependencies: bd-1 → bd-2, bd-3 → bd-2
|
||||
dep1 := &types.Dependency{
|
||||
IssueID: "bd-10",
|
||||
DependsOnID: "bd-11",
|
||||
Type: types.DepBlocks,
|
||||
}
|
||||
dep2 := &types.Dependency{
|
||||
IssueID: "bd-12",
|
||||
DependsOnID: "bd-11",
|
||||
Type: types.DepBlocks,
|
||||
}
|
||||
|
||||
if err := testStore.AddDependency(ctx, dep1, "test"); err != nil {
|
||||
t.Fatalf("Failed to add dep1: %v", err)
|
||||
}
|
||||
if err := testStore.AddDependency(ctx, dep2, "test"); err != nil {
|
||||
t.Fatalf("Failed to add dep2: %v", err)
|
||||
}
|
||||
|
||||
// Import colliding bd-2
|
||||
incomingIssues := []*types.Issue{
|
||||
{
|
||||
ID: "bd-11",
|
||||
Title: "Modified Issue 2",
|
||||
Description: "Changed",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 2,
|
||||
IssueType: types.TypeBug,
|
||||
},
|
||||
}
|
||||
|
||||
result, err := sqlite.DetectCollisions(ctx, testStore, incomingIssues)
|
||||
if err != nil {
|
||||
t.Fatalf("DetectCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
if len(result.Collisions) != 1 {
|
||||
t.Fatalf("Expected 1 collision, got %d", len(result.Collisions))
|
||||
}
|
||||
|
||||
// Resolve collision
|
||||
allExisting, _ := testStore.SearchIssues(ctx, "", types.IssueFilter{})
|
||||
if err := sqlite.ScoreCollisions(ctx, testStore, result.Collisions, allExisting); err != nil {
|
||||
t.Fatalf("ScoreCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
idMapping, err := sqlite.RemapCollisions(ctx, testStore, result.Collisions, allExisting)
|
||||
if err != nil {
|
||||
t.Fatalf("RemapCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
newID := idMapping["bd-11"]
|
||||
if newID == "" {
|
||||
t.Fatal("bd-2 not remapped")
|
||||
}
|
||||
|
||||
// Verify dependencies were updated
|
||||
// bd-1 should now depend on newID
|
||||
deps1, err := testStore.GetDependencyRecords(ctx, "bd-10")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get deps for bd-1: %v", err)
|
||||
}
|
||||
if len(deps1) != 1 {
|
||||
t.Fatalf("Expected 1 dependency for bd-1, got %d", len(deps1))
|
||||
}
|
||||
if deps1[0].DependsOnID != newID {
|
||||
t.Errorf("bd-1 dependency not updated: %s, want %s", deps1[0].DependsOnID, newID)
|
||||
}
|
||||
|
||||
// bd-3 should now depend on newID
|
||||
deps3, err := testStore.GetDependencyRecords(ctx, "bd-12")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get deps for bd-3: %v", err)
|
||||
}
|
||||
if len(deps3) != 1 {
|
||||
t.Fatalf("Expected 1 dependency for bd-3, got %d", len(deps3))
|
||||
}
|
||||
if deps3[0].DependsOnID != newID {
|
||||
t.Errorf("bd-3 dependency not updated: %s, want %s", deps3[0].DependsOnID, newID)
|
||||
}
|
||||
}
|
||||
|
||||
// TestImportTextReferenceUpdates tests that text references are updated during remapping
|
||||
func TestImportTextReferenceUpdates(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "bd-collision-test-*")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
dbPath := filepath.Join(tmpDir, "test.db")
|
||||
testStore, err := sqlite.New(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create storage: %v", err)
|
||||
}
|
||||
defer testStore.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
// Create existing issues with text references
|
||||
issue1 := &types.Issue{
|
||||
ID: "bd-10",
|
||||
Title: "Issue 1",
|
||||
Description: "This depends on bd-11 and bd-12",
|
||||
Design: "Implementation uses bd-11 approach",
|
||||
Notes: "See bd-12 for details",
|
||||
AcceptanceCriteria: "Must work with bd-11",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
issue2 := &types.Issue{
|
||||
ID: "bd-11",
|
||||
Title: "Issue 2",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
issue3 := &types.Issue{
|
||||
ID: "bd-12",
|
||||
Title: "Issue 3",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
|
||||
if err := testStore.CreateIssue(ctx, issue1, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue 1: %v", err)
|
||||
}
|
||||
if err := testStore.CreateIssue(ctx, issue2, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue 2: %v", err)
|
||||
}
|
||||
if err := testStore.CreateIssue(ctx, issue3, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue 3: %v", err)
|
||||
}
|
||||
|
||||
// Import colliding issues
|
||||
incomingIssues := []*types.Issue{
|
||||
{
|
||||
ID: "bd-11",
|
||||
Title: "Modified Issue 2",
|
||||
Status: types.StatusInProgress,
|
||||
Priority: 2,
|
||||
IssueType: types.TypeBug,
|
||||
},
|
||||
{
|
||||
ID: "bd-12",
|
||||
Title: "Modified Issue 3",
|
||||
Status: types.StatusInProgress,
|
||||
Priority: 2,
|
||||
IssueType: types.TypeBug,
|
||||
},
|
||||
}
|
||||
|
||||
result, err := sqlite.DetectCollisions(ctx, testStore, incomingIssues)
|
||||
if err != nil {
|
||||
t.Fatalf("DetectCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
if len(result.Collisions) != 2 {
|
||||
t.Fatalf("Expected 2 collisions, got %d", len(result.Collisions))
|
||||
}
|
||||
|
||||
// Resolve collisions
|
||||
allExisting, _ := testStore.SearchIssues(ctx, "", types.IssueFilter{})
|
||||
if err := sqlite.ScoreCollisions(ctx, testStore, result.Collisions, allExisting); err != nil {
|
||||
t.Fatalf("ScoreCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
idMapping, err := sqlite.RemapCollisions(ctx, testStore, result.Collisions, allExisting)
|
||||
if err != nil {
|
||||
t.Fatalf("RemapCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
if len(idMapping) != 2 {
|
||||
t.Fatalf("Expected 2 remappings, got %d", len(idMapping))
|
||||
}
|
||||
|
||||
newID2 := idMapping["bd-11"]
|
||||
newID3 := idMapping["bd-12"]
|
||||
|
||||
// Verify text references were updated in issue 1
|
||||
updated, err := testStore.GetIssue(ctx, "bd-10")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get updated issue 1: %v", err)
|
||||
}
|
||||
|
||||
if !strings.Contains(updated.Description, newID2) {
|
||||
t.Errorf("Description not updated: %s (should contain %s)", updated.Description, newID2)
|
||||
}
|
||||
if !strings.Contains(updated.Description, newID3) {
|
||||
t.Errorf("Description not updated: %s (should contain %s)", updated.Description, newID3)
|
||||
}
|
||||
if !strings.Contains(updated.Design, newID2) {
|
||||
t.Errorf("Design not updated: %s (should contain %s)", updated.Design, newID2)
|
||||
}
|
||||
if !strings.Contains(updated.Notes, newID3) {
|
||||
t.Errorf("Notes not updated: %s (should contain %s)", updated.Notes, newID3)
|
||||
}
|
||||
if !strings.Contains(updated.AcceptanceCriteria, newID2) {
|
||||
t.Errorf("AcceptanceCriteria not updated: %s (should contain %s)", updated.AcceptanceCriteria, newID2)
|
||||
}
|
||||
|
||||
// Verify old IDs are NOT present
|
||||
if strings.Contains(updated.Description, "bd-11") {
|
||||
t.Error("Old ID bd-11 still present in Description")
|
||||
}
|
||||
if strings.Contains(updated.Description, "bd-12") {
|
||||
t.Error("Old ID bd-12 still present in Description")
|
||||
}
|
||||
}
|
||||
|
||||
// TestImportChainDependencies tests remapping with chained dependencies
|
||||
func TestImportChainDependencies(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "bd-collision-test-*")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
dbPath := filepath.Join(tmpDir, "test.db")
|
||||
testStore, err := sqlite.New(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create storage: %v", err)
|
||||
}
|
||||
defer testStore.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
// Create chain: bd-100 → bd-101 → bd-102 → bd-103
|
||||
for i := 100; i <= 103; i++ {
|
||||
issue := &types.Issue{
|
||||
ID: fmt.Sprintf("bd-%d", i),
|
||||
Title: fmt.Sprintf("Issue %d", i),
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
if err := testStore.CreateIssue(ctx, issue, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue %d: %v", i, err)
|
||||
}
|
||||
}
|
||||
|
||||
// Add chain dependencies
|
||||
for i := 100; i <= 102; i++ {
|
||||
dep := &types.Dependency{
|
||||
IssueID: fmt.Sprintf("bd-%d", i),
|
||||
DependsOnID: fmt.Sprintf("bd-%d", i+1),
|
||||
Type: types.DepBlocks,
|
||||
}
|
||||
if err := testStore.AddDependency(ctx, dep, "test"); err != nil {
|
||||
t.Fatalf("Failed to add dependency %d: %v", i, err)
|
||||
}
|
||||
}
|
||||
|
||||
// Import colliding bd-101
|
||||
incomingIssues := []*types.Issue{
|
||||
{
|
||||
ID: "bd-101",
|
||||
Title: "Modified Issue 101",
|
||||
Status: types.StatusInProgress,
|
||||
Priority: 2,
|
||||
IssueType: types.TypeBug,
|
||||
},
|
||||
}
|
||||
|
||||
result, err := sqlite.DetectCollisions(ctx, testStore, incomingIssues)
|
||||
if err != nil {
|
||||
t.Fatalf("DetectCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
// Resolve collision
|
||||
allExisting, _ := testStore.SearchIssues(ctx, "", types.IssueFilter{})
|
||||
if err := sqlite.ScoreCollisions(ctx, testStore, result.Collisions, allExisting); err != nil {
|
||||
t.Fatalf("ScoreCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
idMapping, err := sqlite.RemapCollisions(ctx, testStore, result.Collisions, allExisting)
|
||||
if err != nil {
|
||||
t.Fatalf("RemapCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
newID := idMapping["bd-101"]
|
||||
|
||||
// Verify chain is maintained
|
||||
// bd-100 → newID (was bd-101)
|
||||
deps1, _ := testStore.GetDependencyRecords(ctx, "bd-100")
|
||||
if len(deps1) != 1 || deps1[0].DependsOnID != newID {
|
||||
t.Errorf("bd-100 dependency broken: %v", deps1)
|
||||
}
|
||||
|
||||
// newID → bd-102
|
||||
depsNew, _ := testStore.GetDependencyRecords(ctx, newID)
|
||||
if len(depsNew) != 1 || depsNew[0].DependsOnID != "bd-102" {
|
||||
t.Errorf("newID dependency broken: %v", depsNew)
|
||||
}
|
||||
|
||||
// bd-102 → bd-103 (unchanged)
|
||||
deps3, _ := testStore.GetDependencyRecords(ctx, "bd-102")
|
||||
if len(deps3) != 1 || deps3[0].DependsOnID != "bd-103" {
|
||||
t.Errorf("bd-102 dependency broken: %v", deps3)
|
||||
}
|
||||
}
|
||||
|
||||
// TestImportPartialIDMatch tests word boundary matching (bd-10 vs bd-100)
|
||||
func TestImportPartialIDMatch(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "bd-collision-test-*")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
dbPath := filepath.Join(tmpDir, "test.db")
|
||||
testStore, err := sqlite.New(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create storage: %v", err)
|
||||
}
|
||||
defer testStore.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
// Create issues with similar IDs (use higher numbers to avoid conflicts)
|
||||
issues := []*types.Issue{
|
||||
{
|
||||
ID: "bd-50",
|
||||
Title: "Issue 50",
|
||||
Description: "References bd-100 and bd-1000 and bd-10000",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
},
|
||||
{
|
||||
ID: "bd-100",
|
||||
Title: "Issue 100",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
},
|
||||
{
|
||||
ID: "bd-1000",
|
||||
Title: "Issue 1000",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
},
|
||||
{
|
||||
ID: "bd-10000",
|
||||
Title: "Issue 10000",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
},
|
||||
}
|
||||
|
||||
for _, issue := range issues {
|
||||
if err := testStore.CreateIssue(ctx, issue, "test"); err != nil {
|
||||
t.Fatalf("Failed to create %s: %v", issue.ID, err)
|
||||
}
|
||||
}
|
||||
|
||||
// Import colliding bd-100
|
||||
incomingIssues := []*types.Issue{
|
||||
{
|
||||
ID: "bd-100",
|
||||
Title: "Modified Issue 100",
|
||||
Status: types.StatusInProgress,
|
||||
Priority: 2,
|
||||
IssueType: types.TypeBug,
|
||||
},
|
||||
}
|
||||
|
||||
result, err := sqlite.DetectCollisions(ctx, testStore, incomingIssues)
|
||||
if err != nil {
|
||||
t.Fatalf("DetectCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
// Resolve collision
|
||||
allExisting, _ := testStore.SearchIssues(ctx, "", types.IssueFilter{})
|
||||
if err := sqlite.ScoreCollisions(ctx, testStore, result.Collisions, allExisting); err != nil {
|
||||
t.Fatalf("ScoreCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
idMapping, err := sqlite.RemapCollisions(ctx, testStore, result.Collisions, allExisting)
|
||||
if err != nil {
|
||||
t.Fatalf("RemapCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
newID100 := idMapping["bd-100"]
|
||||
|
||||
// Verify only bd-100 was replaced, not bd-1000 or bd-10000
|
||||
updated, err := testStore.GetIssue(ctx, "bd-50")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get updated issue: %v", err)
|
||||
}
|
||||
|
||||
if !strings.Contains(updated.Description, newID100) {
|
||||
t.Errorf("bd-100 not replaced: %s", updated.Description)
|
||||
}
|
||||
if !strings.Contains(updated.Description, "bd-1000") {
|
||||
t.Errorf("bd-1000 incorrectly replaced: %s", updated.Description)
|
||||
}
|
||||
if !strings.Contains(updated.Description, "bd-10000") {
|
||||
t.Errorf("bd-10000 incorrectly replaced: %s", updated.Description)
|
||||
}
|
||||
|
||||
// Make sure old bd-100 reference is gone
|
||||
if strings.Contains(updated.Description, " bd-100 ") || strings.Contains(updated.Description, " bd-100,") {
|
||||
t.Errorf("Old bd-100 reference still present: %s", updated.Description)
|
||||
}
|
||||
}
|
||||
|
||||
// TestImportExactMatch tests idempotent import (no collision)
|
||||
func TestImportExactMatch(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "bd-collision-test-*")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
dbPath := filepath.Join(tmpDir, "test.db")
|
||||
testStore, err := sqlite.New(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create storage: %v", err)
|
||||
}
|
||||
defer testStore.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
// Create existing issue
|
||||
existing := &types.Issue{
|
||||
ID: "bd-10",
|
||||
Title: "Test issue",
|
||||
Description: "Description",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
|
||||
if err := testStore.CreateIssue(ctx, existing, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue: %v", err)
|
||||
}
|
||||
|
||||
// Import identical issue
|
||||
incoming := &types.Issue{
|
||||
ID: "bd-10",
|
||||
Title: "Test issue",
|
||||
Description: "Description",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
}
|
||||
|
||||
result, err := sqlite.DetectCollisions(ctx, testStore, []*types.Issue{incoming})
|
||||
if err != nil {
|
||||
t.Fatalf("DetectCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
// Should be exact match, not collision
|
||||
if len(result.Collisions) != 0 {
|
||||
t.Errorf("Expected 0 collisions for exact match, got %d", len(result.Collisions))
|
||||
}
|
||||
if len(result.ExactMatches) != 1 {
|
||||
t.Errorf("Expected 1 exact match, got %d", len(result.ExactMatches))
|
||||
}
|
||||
}
|
||||
|
||||
// TestImportMixedScenario tests import with exact matches, collisions, and new issues
|
||||
func TestImportMixedScenario(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "bd-collision-test-*")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
dbPath := filepath.Join(tmpDir, "test.db")
|
||||
testStore, err := sqlite.New(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create storage: %v", err)
|
||||
}
|
||||
defer testStore.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
// Create existing issues with high IDs
|
||||
for i := 200; i <= 201; i++ {
|
||||
issue := &types.Issue{
|
||||
ID: fmt.Sprintf("bd-%d", i),
|
||||
Title: fmt.Sprintf("Issue %d", i),
|
||||
Description: "Original",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
}
|
||||
if err := testStore.CreateIssue(ctx, issue, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue %d: %v", i, err)
|
||||
}
|
||||
}
|
||||
|
||||
// Import: exact match (bd-200), collision (bd-201), new (bd-202)
|
||||
incomingIssues := []*types.Issue{
|
||||
{
|
||||
ID: "bd-200",
|
||||
Title: "Issue 200",
|
||||
Description: "Original",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
},
|
||||
{
|
||||
ID: "bd-201",
|
||||
Title: "Modified Issue 201",
|
||||
Description: "Changed",
|
||||
Status: types.StatusInProgress,
|
||||
Priority: 2,
|
||||
IssueType: types.TypeBug,
|
||||
},
|
||||
{
|
||||
ID: "bd-202",
|
||||
Title: "New Issue",
|
||||
Description: "Brand new",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeFeature,
|
||||
},
|
||||
}
|
||||
|
||||
result, err := sqlite.DetectCollisions(ctx, testStore, incomingIssues)
|
||||
if err != nil {
|
||||
t.Fatalf("DetectCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
if len(result.ExactMatches) != 1 {
|
||||
t.Errorf("Expected 1 exact match, got %d", len(result.ExactMatches))
|
||||
}
|
||||
if len(result.Collisions) != 1 {
|
||||
t.Errorf("Expected 1 collision, got %d", len(result.Collisions))
|
||||
}
|
||||
if len(result.NewIssues) != 1 {
|
||||
t.Errorf("Expected 1 new issue, got %d", len(result.NewIssues))
|
||||
}
|
||||
}
|
||||
|
||||
// TestImportWithDependenciesInJSONL tests importing issues with embedded dependencies
|
||||
func TestImportWithDependenciesInJSONL(t *testing.T) {
|
||||
tmpDir, err := os.MkdirTemp("", "bd-collision-test-*")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
dbPath := filepath.Join(tmpDir, "test.db")
|
||||
testStore, err := sqlite.New(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to create storage: %v", err)
|
||||
}
|
||||
defer testStore.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
// Create JSONL with dependencies
|
||||
jsonl := `{"id":"bd-10","title":"Issue 1","status":"open","priority":1,"issue_type":"task"}
|
||||
{"id":"bd-11","title":"Issue 2","status":"open","priority":1,"issue_type":"task","dependencies":[{"issue_id":"bd-11","depends_on_id":"bd-10","type":"blocks"}]}`
|
||||
|
||||
// Parse JSONL
|
||||
var issues []*types.Issue
|
||||
for _, line := range strings.Split(strings.TrimSpace(jsonl), "\n") {
|
||||
var issue types.Issue
|
||||
if err := json.Unmarshal([]byte(line), &issue); err != nil {
|
||||
t.Fatalf("Failed to parse JSONL: %v", err)
|
||||
}
|
||||
issues = append(issues, &issue)
|
||||
}
|
||||
|
||||
// Create issues
|
||||
for _, issue := range issues {
|
||||
if err := testStore.CreateIssue(ctx, issue, "test"); err != nil {
|
||||
t.Fatalf("Failed to create issue: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Add dependencies from JSONL
|
||||
for _, issue := range issues {
|
||||
for _, dep := range issue.Dependencies {
|
||||
if err := testStore.AddDependency(ctx, dep, "test"); err != nil {
|
||||
t.Fatalf("Failed to add dependency: %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Verify dependency
|
||||
deps, err := testStore.GetDependencyRecords(ctx, "bd-11")
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to get dependencies: %v", err)
|
||||
}
|
||||
if len(deps) != 1 {
|
||||
t.Fatalf("Expected 1 dependency, got %d", len(deps))
|
||||
}
|
||||
if deps[0].DependsOnID != "bd-10" {
|
||||
t.Errorf("Dependency target = %s, want bd-1", deps[0].DependsOnID)
|
||||
}
|
||||
}
|
||||
@@ -229,6 +229,9 @@ var showCmd = &cobra.Command{
|
||||
if issue.Design != "" {
|
||||
fmt.Printf("\nDesign:\n%s\n", issue.Design)
|
||||
}
|
||||
if issue.Notes != "" {
|
||||
fmt.Printf("\nNotes:\n%s\n", issue.Notes)
|
||||
}
|
||||
if issue.AcceptanceCriteria != "" {
|
||||
fmt.Printf("\nAcceptance Criteria:\n%s\n", issue.AcceptanceCriteria)
|
||||
}
|
||||
@@ -350,6 +353,18 @@ var updateCmd = &cobra.Command{
|
||||
assignee, _ := cmd.Flags().GetString("assignee")
|
||||
updates["assignee"] = assignee
|
||||
}
|
||||
if cmd.Flags().Changed("design") {
|
||||
design, _ := cmd.Flags().GetString("design")
|
||||
updates["design"] = design
|
||||
}
|
||||
if cmd.Flags().Changed("notes") {
|
||||
notes, _ := cmd.Flags().GetString("notes")
|
||||
updates["notes"] = notes
|
||||
}
|
||||
if cmd.Flags().Changed("acceptance-criteria") {
|
||||
acceptanceCriteria, _ := cmd.Flags().GetString("acceptance-criteria")
|
||||
updates["acceptance_criteria"] = acceptanceCriteria
|
||||
}
|
||||
|
||||
if len(updates) == 0 {
|
||||
fmt.Println("No updates specified")
|
||||
@@ -378,6 +393,9 @@ func init() {
|
||||
updateCmd.Flags().IntP("priority", "p", 0, "New priority")
|
||||
updateCmd.Flags().String("title", "", "New title")
|
||||
updateCmd.Flags().StringP("assignee", "a", "", "New assignee")
|
||||
updateCmd.Flags().String("design", "", "Design notes")
|
||||
updateCmd.Flags().String("notes", "", "Additional notes")
|
||||
updateCmd.Flags().String("acceptance-criteria", "", "Acceptance criteria")
|
||||
rootCmd.AddCommand(updateCmd)
|
||||
}
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ This directory contains examples of how to integrate bd with AI agents and workf
|
||||
- **[python-agent/](python-agent/)** - Simple Python agent that discovers ready work and completes tasks
|
||||
- **[bash-agent/](bash-agent/)** - Bash script showing the full agent workflow
|
||||
- **[git-hooks/](git-hooks/)** - Pre-configured git hooks for automatic export/import
|
||||
- **[branch-merge/](branch-merge/)** - Branch merge workflow with collision resolution
|
||||
- **[claude-desktop-mcp/](claude-desktop-mcp/)** - MCP server for Claude Desktop integration
|
||||
|
||||
## Quick Start
|
||||
@@ -23,6 +24,10 @@ cd bash-agent
|
||||
# Install git hooks
|
||||
cd git-hooks
|
||||
./install.sh
|
||||
|
||||
# Try branch merge collision resolution
|
||||
cd branch-merge
|
||||
./demo.sh
|
||||
```
|
||||
|
||||
## Creating Your Own Agent
|
||||
|
||||
176
examples/branch-merge/README.md
Normal file
176
examples/branch-merge/README.md
Normal file
@@ -0,0 +1,176 @@
|
||||
# Branch Merge Workflow with Collision Resolution
|
||||
|
||||
This example demonstrates how to handle ID collisions when merging branches that have diverged and created issues with the same IDs.
|
||||
|
||||
## The Problem
|
||||
|
||||
When two branches work independently and both create issues, they'll often generate overlapping IDs:
|
||||
|
||||
```
|
||||
main: bd-1, bd-2, bd-3, bd-4, bd-5
|
||||
feature: bd-1, bd-2, bd-3, bd-6, bd-7 (diverged from main earlier)
|
||||
```
|
||||
|
||||
When you try to merge `feature` into `main`, you'll have ID collisions for bd-1 through bd-3 (if the content differs).
|
||||
|
||||
## The Solution
|
||||
|
||||
bd provides automatic collision resolution that:
|
||||
1. Detects collisions (same ID, different content)
|
||||
2. Renumbers the incoming colliding issues
|
||||
3. Updates ALL text references and dependencies automatically
|
||||
|
||||
## Demo Workflow
|
||||
|
||||
### 1. Setup - Two Diverged Branches
|
||||
|
||||
```bash
|
||||
# Start on main branch
|
||||
git checkout main
|
||||
bd create "Feature A" -t feature -p 1
|
||||
bd create "Bug fix B" -t bug -p 0
|
||||
bd create "Task C" -t task -p 2
|
||||
bd export -o .beads/issues.jsonl
|
||||
git add .beads/issues.jsonl
|
||||
git commit -m "Add main branch issues"
|
||||
|
||||
# Create feature branch from an earlier commit
|
||||
git checkout -b feature-branch HEAD~5
|
||||
|
||||
# On feature branch, create overlapping issues
|
||||
bd create "Different feature A" -t feature -p 2
|
||||
bd create "Different bug B" -t bug -p 1
|
||||
bd create "Feature D" -t feature -p 1
|
||||
bd export -o .beads/issues.jsonl
|
||||
git add .beads/issues.jsonl
|
||||
git commit -m "Add feature branch issues"
|
||||
```
|
||||
|
||||
At this point:
|
||||
- `main` has: bd-1 (Feature A), bd-2 (Bug fix B), bd-3 (Task C)
|
||||
- `feature-branch` has: bd-1 (Different feature A), bd-2 (Different bug B), bd-3 (Feature D)
|
||||
|
||||
bd-1 and bd-2 have different content on the two branches, so they collide. (Note: bd-3 also differs here — "Task C" vs. "Feature D" — so a real run would report it as a third collision; the sample output below shows a simplified two-collision case for illustration.)
|
||||
|
||||
### 2. Merge and Detect Collisions
|
||||
|
||||
```bash
|
||||
# Merge feature branch into main
|
||||
git checkout main
|
||||
git merge feature-branch
|
||||
|
||||
# Git will show merge conflict in .beads/issues.jsonl
|
||||
# Manually resolve the conflict by keeping both versions
|
||||
# (or use a merge tool)
|
||||
|
||||
# After resolving the git conflict, check for ID collisions
|
||||
bd import -i .beads/issues.jsonl --dry-run
|
||||
```
|
||||
|
||||
Output shows:
|
||||
```
|
||||
=== Collision Detection Report ===
|
||||
Exact matches (idempotent): 0
|
||||
New issues: 1
|
||||
COLLISIONS DETECTED: 2
|
||||
|
||||
Colliding issues:
|
||||
bd-1: Different feature A
|
||||
Conflicting fields: [title, priority]
|
||||
bd-2: Different bug B
|
||||
Conflicting fields: [title, priority]
|
||||
```
|
||||
|
||||
### 3. Resolve Collisions Automatically
|
||||
|
||||
```bash
|
||||
# Let bd resolve the collisions
|
||||
bd import -i .beads/issues.jsonl --resolve-collisions
|
||||
```
|
||||
|
||||
Output shows:
|
||||
```
|
||||
Resolving collisions...
|
||||
|
||||
=== Remapping Report ===
|
||||
Issues remapped: 2
|
||||
|
||||
Remappings (sorted by reference count):
|
||||
bd-1 → bd-4 (refs: 0)
|
||||
bd-2 → bd-5 (refs: 0)
|
||||
|
||||
All text and dependency references have been updated.
|
||||
|
||||
Import complete: 2 created, 0 updated, 1 dependencies added, 2 issues remapped
|
||||
```
|
||||
|
||||
Result:
|
||||
- `main` keeps: bd-1 (Feature A), bd-2 (Bug fix B), bd-3 (Task C)
|
||||
- `feature-branch` issues become: bd-4 (Different feature A), bd-5 (Different bug B), bd-3 (Feature D)
|
||||
|
||||
### 4. Export and Commit
|
||||
|
||||
```bash
|
||||
# Export the resolved state back to JSONL
|
||||
bd export -o .beads/issues.jsonl
|
||||
|
||||
# Commit the merge
|
||||
git add .beads/issues.jsonl
|
||||
git commit -m "Merge feature-branch with collision resolution"
|
||||
```
|
||||
|
||||
## Advanced: Cross-References
|
||||
|
||||
If your issues reference each other in text or dependencies, bd updates those automatically:
|
||||
|
||||
```bash
|
||||
# On feature branch, create issues with references
|
||||
bd create "Feature X" -d "Implements the core logic" -t feature -p 1
|
||||
# Assume this becomes bd-10
|
||||
|
||||
bd create "Test for X" -d "Tests bd-10 functionality" -t task -p 2
|
||||
# This references bd-10 in the description
|
||||
|
||||
bd dep add bd-11 bd-10 --type blocks
|
||||
# Dependencies are created
|
||||
|
||||
# After merge with collision resolution
|
||||
bd import -i .beads/issues.jsonl --resolve-collisions
|
||||
|
||||
# If bd-10 collided and was remapped to bd-15:
|
||||
# - bd-11's description becomes: "Tests bd-15 functionality"
|
||||
# - Dependency becomes: bd-11 → bd-15
|
||||
```
|
||||
|
||||
## When to Use This
|
||||
|
||||
1. **Feature branches** - Long-lived branches that create issues independently
|
||||
2. **Parallel development** - Multiple developers working on separate branches
|
||||
3. **Stale branches** - Old branches that need to be merged but have ID conflicts
|
||||
4. **Distributed teams** - Teams that work offline and sync via git
|
||||
|
||||
## Safety Notes
|
||||
|
||||
- `--resolve-collisions` preserves your existing database (current branch's issues never change IDs)
|
||||
- Only the incoming colliding issues get new IDs
|
||||
- Use `--dry-run` first to preview what will happen
|
||||
- All text references use word-boundary matching (bd-10 won't match bd-100)
|
||||
- The collision resolution is deterministic (same input = same output)
|
||||
|
||||
## Alternative: Manual Resolution
|
||||
|
||||
If you prefer manual control:
|
||||
|
||||
1. Don't use `--resolve-collisions`
|
||||
2. Manually edit the JSONL file before import
|
||||
3. Rename colliding IDs to unique values
|
||||
4. Manually update any cross-references
|
||||
5. Import normally
|
||||
|
||||
This gives you complete control but is more error-prone and time-consuming.
|
||||
|
||||
## See Also
|
||||
|
||||
- [Git Hooks Example](../git-hooks/) - Automate export/import with git hooks
|
||||
- [README.md](../../README.md) - Full collision resolution documentation
|
||||
- [TEXT_FORMATS.md](../../TEXT_FORMATS.md) - JSONL merge strategies
|
||||
145
examples/branch-merge/demo.sh
Executable file
145
examples/branch-merge/demo.sh
Executable file
@@ -0,0 +1,145 @@
|
||||
#!/bin/bash
|
||||
# Demo script for branch merge collision resolution workflow
|
||||
# This script simulates a branch merge with ID collisions
|
||||
|
||||
set -e # Exit on error
|
||||
|
||||
echo "=== Branch Merge Collision Resolution Demo ==="
|
||||
echo ""
|
||||
|
||||
# Check if bd is available
|
||||
if ! command -v bd &> /dev/null; then
|
||||
echo "Error: bd command not found. Please install bd first."
|
||||
echo "Run: go install github.com/steveyegge/beads/cmd/bd@latest"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Create a temporary directory for the demo
|
||||
DEMO_DIR=$(mktemp -d -t bd-merge-demo-XXXXXX)
|
||||
echo "Demo directory: $DEMO_DIR"
|
||||
cd "$DEMO_DIR"
|
||||
|
||||
# Initialize git repo
|
||||
echo ""
|
||||
echo "Step 1: Initialize git repo and bd database"
|
||||
git init
|
||||
git config user.name "Demo User"
|
||||
git config user.email "demo@example.com"
|
||||
bd init --prefix demo
|
||||
|
||||
# Create initial commit
|
||||
echo "Initial project" > README.txt
|
||||
git add README.txt .beads/
|
||||
git commit -m "Initial commit"
|
||||
|
||||
# Create issues on main branch
|
||||
echo ""
|
||||
echo "Step 2: Create issues on main branch"
|
||||
bd create "Implement login" -d "User authentication system" -t feature -p 1 --json
|
||||
bd create "Fix memory leak" -d "Memory leak in parser" -t bug -p 0 --json
|
||||
bd create "Update docs" -d "Document new API" -t task -p 2 --json
|
||||
|
||||
echo ""
|
||||
echo "Main branch issues:"
|
||||
bd list
|
||||
|
||||
# Export and commit
|
||||
bd export -o .beads/issues.jsonl
|
||||
git add .beads/issues.jsonl
|
||||
git commit -m "Add main branch issues (bd-1, bd-2, bd-3)"
|
||||
|
||||
# Create feature branch from earlier point
|
||||
echo ""
|
||||
echo "Step 3: Create feature branch"
|
||||
git checkout -b feature-branch HEAD~1
|
||||
|
||||
# Reimport to get clean state
|
||||
bd import -i .beads/issues.jsonl
|
||||
|
||||
# Create overlapping issues on feature branch
|
||||
echo ""
|
||||
echo "Step 4: Create different issues with same IDs on feature branch"
|
||||
bd create "Add dashboard" -d "Admin dashboard feature" -t feature -p 2 --json
|
||||
bd create "Improve performance" -d "Optimize queries" -t task -p 1 --json
|
||||
bd create "Add metrics" -d "Monitoring and metrics" -t feature -p 1 --json
|
||||
|
||||
echo ""
|
||||
echo "Feature branch issues:"
|
||||
bd list
|
||||
|
||||
# Export and commit
|
||||
bd export -o .beads/issues.jsonl
|
||||
git add .beads/issues.jsonl
|
||||
git commit -m "Add feature branch issues (bd-1, bd-2, bd-3)"
|
||||
|
||||
# Merge back to main
|
||||
echo ""
|
||||
echo "Step 5: Merge feature branch into main"
|
||||
git checkout main
|
||||
|
||||
# Attempt merge (will conflict)
if git merge feature-branch --no-edit; then
    echo "Merge succeeded without conflicts"
else
    echo "Merge conflict detected - resolving..."
    # For demo purposes, resolve the JSONL conflict by taking the feature
    # branch's copy outright. The main branch's issues are still present in
    # the local bd database, and the collision-resolution import below will
    # reconcile everything. (Avoid `git checkout --theirs --patch` here: the
    # --patch mode is interactive and would hang a non-interactive script.)
    git checkout --theirs .beads/issues.jsonl
    git add .beads/issues.jsonl
    git commit -m "Merge feature-branch"
fi
|
||||
|
||||
# Detect collisions
|
||||
echo ""
|
||||
echo "Step 6: Detect ID collisions"
|
||||
echo "Running: bd import -i .beads/issues.jsonl --dry-run"
|
||||
echo ""
|
||||
|
||||
if bd import -i .beads/issues.jsonl --dry-run; then
|
||||
echo "No collisions detected!"
|
||||
else
|
||||
echo ""
|
||||
echo "Collisions detected (expected)!"
|
||||
fi
|
||||
|
||||
# Resolve collisions
|
||||
echo ""
|
||||
echo "Step 7: Resolve collisions automatically"
|
||||
echo "Running: bd import -i .beads/issues.jsonl --resolve-collisions"
|
||||
echo ""
|
||||
|
||||
bd import -i .beads/issues.jsonl --resolve-collisions
|
||||
|
||||
# Show final state
|
||||
echo ""
|
||||
echo "Step 8: Final issue list after resolution"
|
||||
bd list
|
||||
|
||||
# Show remapping details
|
||||
echo ""
|
||||
echo "Step 9: Show how dependencies and references are maintained"
|
||||
echo "All text references like 'see bd-1' and dependencies were automatically updated!"
|
||||
|
||||
# Export final state
|
||||
bd export -o .beads/issues.jsonl
|
||||
git add .beads/issues.jsonl
|
||||
git commit -m "Resolve collisions and finalize merge"
|
||||
|
||||
echo ""
|
||||
echo "=== Demo Complete ==="
|
||||
echo ""
|
||||
echo "Summary:"
|
||||
echo "- Created issues on main branch (bd-1, bd-2, bd-3)"
|
||||
echo "- Created different issues on feature branch (also bd-1, bd-2, bd-3)"
|
||||
echo "- Merged branches with Git"
|
||||
echo "- Detected collisions with --dry-run"
|
||||
echo "- Resolved collisions with --resolve-collisions"
|
||||
echo "- Feature branch issues were renumbered to avoid conflicts"
|
||||
echo ""
|
||||
echo "Demo directory: $DEMO_DIR"
|
||||
echo "You can explore the git history: cd $DEMO_DIR && git log --oneline"
|
||||
echo ""
|
||||
echo "To clean up: rm -rf $DEMO_DIR"
|
||||
@@ -5,6 +5,7 @@ import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/steveyegge/beads/internal/types"
|
||||
)
|
||||
@@ -25,14 +26,14 @@ type CollisionDetail struct {
|
||||
ReferenceScore int // Number of references to this issue (for scoring)
|
||||
}
|
||||
|
||||
// detectCollisions compares incoming JSONL issues against DB state
|
||||
// DetectCollisions compares incoming JSONL issues against DB state
|
||||
// It distinguishes between:
|
||||
// 1. Exact match (idempotent) - ID and content are identical
|
||||
// 2. ID match but different content (collision) - same ID, different fields
|
||||
// 3. New issue - ID doesn't exist in DB
|
||||
//
|
||||
// Returns a CollisionResult categorizing all incoming issues.
|
||||
func detectCollisions(ctx context.Context, s *SQLiteStorage, incomingIssues []*types.Issue) (*CollisionResult, error) {
|
||||
func DetectCollisions(ctx context.Context, s *SQLiteStorage, incomingIssues []*types.Issue) (*CollisionResult, error) {
|
||||
result := &CollisionResult{
|
||||
ExactMatches: make([]string, 0),
|
||||
Collisions: make([]*CollisionDetail, 0),
|
||||
@@ -125,12 +126,12 @@ func equalIntPtr(a, b *int) bool {
|
||||
return *a == *b
|
||||
}
|
||||
|
||||
// scoreCollisions calculates reference scores for all colliding issues and sorts them
|
||||
// ScoreCollisions calculates reference scores for all colliding issues and sorts them
|
||||
// by score ascending (fewest references first). This minimizes the total number of
|
||||
// updates needed during renumbering - issues with fewer references are renumbered first.
|
||||
//
|
||||
// Reference score = text mentions + dependency references
|
||||
func scoreCollisions(ctx context.Context, s *SQLiteStorage, collisions []*CollisionDetail, allIssues []*types.Issue) error {
|
||||
func ScoreCollisions(ctx context.Context, s *SQLiteStorage, collisions []*CollisionDetail, allIssues []*types.Issue) error {
|
||||
// Build a map of all issues for quick lookup
|
||||
issueMap := make(map[string]*types.Issue)
|
||||
for _, issue := range allIssues {
|
||||
@@ -215,3 +216,196 @@ func countReferences(issueID string, allIssues []*types.Issue, allDeps map[strin
|
||||
|
||||
return count, nil
|
||||
}
|
||||
|
||||
// RemapCollisions handles ID remapping for colliding issues.
// Takes sorted collisions (fewest references first) and remaps them to new IDs.
// Returns a map of old ID -> new ID for reporting.
//
// Side effect: each collision's IncomingIssue.ID is mutated in place to the
// newly allocated ID before the issue is created in the store.
//
// The allIssues parameter is currently unused by this function; it is kept for
// interface stability with the other collision helpers.
//
// NOTE: This function is not atomic - it performs multiple separate database operations.
// If an error occurs partway through, some issues may be created without their references
// being updated. This is a known limitation that requires storage layer refactoring to fix.
// See issue bd-25 for transaction support.
func RemapCollisions(ctx context.Context, s *SQLiteStorage, collisions []*CollisionDetail, allIssues []*types.Issue) (map[string]string, error) {
	idMapping := make(map[string]string)

	// For each collision (in order of ascending reference score)
	for _, collision := range collisions {
		oldID := collision.ID

		// Allocate new ID while holding the ID mutex so concurrent creators
		// cannot hand out the same counter value.
		// NOTE(review): GetConfig is called while idMu is held — presumably a
		// cheap lookup; confirm it cannot re-enter the same lock.
		s.idMu.Lock()
		prefix, err := s.GetConfig(ctx, "issue_prefix")
		if err != nil || prefix == "" {
			// Fall back to the default "bd" prefix when no prefix is configured.
			prefix = "bd"
		}
		newID := fmt.Sprintf("%s-%d", prefix, s.nextID)
		s.nextID++
		s.idMu.Unlock()

		// Record mapping (old ID -> new ID) for reporting and reference rewriting.
		idMapping[oldID] = newID

		// Update the issue ID in the incoming issue
		collision.IncomingIssue.ID = newID

		// Create the issue with new ID
		// Note: CreateIssue will use the ID we set
		if err := s.CreateIssue(ctx, collision.IncomingIssue, "import-remap"); err != nil {
			return nil, fmt.Errorf("failed to create remapped issue %s -> %s: %w", oldID, newID, err)
		}
	}

	// Now update all references in text fields and dependencies
	if err := updateReferences(ctx, s, idMapping); err != nil {
		return nil, fmt.Errorf("failed to update references: %w", err)
	}

	return idMapping, nil
}
|
||||
|
||||
// updateReferences updates all text field references and dependency records
|
||||
// to point to new IDs based on the idMapping
|
||||
func updateReferences(ctx context.Context, s *SQLiteStorage, idMapping map[string]string) error {
|
||||
// Update text fields in all issues (both DB and incoming)
|
||||
// We need to update issues in the database
|
||||
dbIssues, err := s.SearchIssues(ctx, "", types.IssueFilter{})
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get all issues from DB: %w", err)
|
||||
}
|
||||
|
||||
for _, issue := range dbIssues {
|
||||
updates := make(map[string]interface{})
|
||||
|
||||
// Update description
|
||||
newDesc := replaceIDReferences(issue.Description, idMapping)
|
||||
if newDesc != issue.Description {
|
||||
updates["description"] = newDesc
|
||||
}
|
||||
|
||||
// Update design
|
||||
newDesign := replaceIDReferences(issue.Design, idMapping)
|
||||
if newDesign != issue.Design {
|
||||
updates["design"] = newDesign
|
||||
}
|
||||
|
||||
// Update notes
|
||||
newNotes := replaceIDReferences(issue.Notes, idMapping)
|
||||
if newNotes != issue.Notes {
|
||||
updates["notes"] = newNotes
|
||||
}
|
||||
|
||||
// Update acceptance criteria
|
||||
newAC := replaceIDReferences(issue.AcceptanceCriteria, idMapping)
|
||||
if newAC != issue.AcceptanceCriteria {
|
||||
updates["acceptance_criteria"] = newAC
|
||||
}
|
||||
|
||||
// If there are updates, apply them
|
||||
if len(updates) > 0 {
|
||||
if err := s.UpdateIssue(ctx, issue.ID, updates, "import-remap"); err != nil {
|
||||
return fmt.Errorf("failed to update references in issue %s: %w", issue.ID, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Update dependency records
|
||||
if err := updateDependencyReferences(ctx, s, idMapping); err != nil {
|
||||
return fmt.Errorf("failed to update dependency references: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// replaceIDReferences replaces all occurrences of old IDs with new IDs in text.
// Matching uses word-boundary anchors so an ID is only replaced when it appears
// as a whole token (bd-10 is replaced, the bd-10 inside bd-100 is not).
//
// All old IDs are combined into a single alternation and rewritten in one pass
// with ReplaceAllStringFunc. This makes the result independent of map iteration
// order and immune to cascading rewrites (e.g. a mapping containing both
// bd-1 -> bd-2 and bd-2 -> bd-3 never rewrites the same span twice). It also
// removes the previous two-phase placeholder scheme, which could corrupt text
// that happened to contain a placeholder-shaped string.
func replaceIDReferences(text string, idMapping map[string]string) string {
	// An empty alternation `(?:)` would match everywhere; bail out early.
	if len(idMapping) == 0 {
		return text
	}

	// Build one alternation of all (escaped) old IDs. The trailing \b makes
	// prefix ambiguity (bd-1 vs bd-10) resolve correctly regardless of the
	// order of alternatives: a shorter alternative that leaves a word
	// character adjacent fails the boundary and the engine tries the next.
	alternatives := make([]string, 0, len(idMapping))
	for oldID := range idMapping {
		alternatives = append(alternatives, regexp.QuoteMeta(oldID))
	}
	re := regexp.MustCompile(`\b(?:` + strings.Join(alternatives, "|") + `)\b`)

	// Single pass: every match is exactly one of the old IDs, so the map
	// lookup always succeeds.
	return re.ReplaceAllStringFunc(text, func(match string) string {
		return idMapping[match]
	})
}
|
||||
|
||||
// updateDependencyReferences updates dependency records to use new IDs.
// This handles both IssueID and DependsOnID fields.
//
// The rewrite is two-phase: first every needed change is collected without
// touching the store, then each old edge is removed and re-added with the
// remapped endpoint(s). A failed removal is treated as "already gone" and the
// corresponding re-add is skipped.
func updateDependencyReferences(ctx context.Context, s *SQLiteStorage, idMapping map[string]string) error {
	// Get all dependency records
	allDeps, err := s.GetAllDependencyRecords(ctx)
	if err != nil {
		return fmt.Errorf("failed to get all dependencies: %w", err)
	}

	// Phase 1: Collect all changes to avoid race conditions while iterating
	// depUpdate pairs the original edge (for removal) with its remapped
	// replacement (for insertion).
	type depUpdate struct {
		oldIssueID     string
		oldDependsOnID string
		newDep         *types.Dependency
	}
	var updates []depUpdate

	for _, deps := range allDeps {
		for _, dep := range deps {
			needsUpdate := false
			newIssueID := dep.IssueID
			newDependsOnID := dep.DependsOnID

			// Check if either ID was remapped
			if mappedID, ok := idMapping[dep.IssueID]; ok {
				newIssueID = mappedID
				needsUpdate = true
			}
			if mappedID, ok := idMapping[dep.DependsOnID]; ok {
				newDependsOnID = mappedID
				needsUpdate = true
			}

			if needsUpdate {
				// NOTE(review): only IssueID/DependsOnID/Type are carried over;
				// any other fields on the original record (e.g. creation
				// metadata) are not preserved — confirm this is intended.
				updates = append(updates, depUpdate{
					oldIssueID:     dep.IssueID,
					oldDependsOnID: dep.DependsOnID,
					newDep: &types.Dependency{
						IssueID:     newIssueID,
						DependsOnID: newDependsOnID,
						Type:        dep.Type,
					},
				})
			}
		}
	}

	// Phase 2: Apply all collected changes
	for _, update := range updates {
		// Remove old dependency
		if err := s.RemoveDependency(ctx, update.oldIssueID, update.oldDependsOnID, "import-remap"); err != nil {
			// If the dependency doesn't exist (e.g., already removed), that's okay
			// This can happen if both IssueID and DependsOnID were remapped
			// NOTE(review): this also skips the AddDependency below, so a
			// remapped edge may be dropped when removal fails for another
			// reason — confirm best-effort behavior is intended.
			continue
		}

		// Add new dependency with updated IDs
		if err := s.AddDependency(ctx, update.newDep, "import-remap"); err != nil {
			return fmt.Errorf("failed to add updated dependency %s -> %s: %w",
				update.newDep.IssueID, update.newDep.DependsOnID, err)
		}
	}

	return nil
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package sqlite
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
@@ -225,9 +226,9 @@ func TestDetectCollisions(t *testing.T) {
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result, err := detectCollisions(ctx, store, tt.incomingIssues)
|
||||
result, err := DetectCollisions(ctx, store, tt.incomingIssues)
|
||||
if err != nil {
|
||||
t.Fatalf("detectCollisions failed: %v", err)
|
||||
t.Fatalf("DetectCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
if len(result.ExactMatches) != tt.expectedExact {
|
||||
@@ -622,9 +623,9 @@ func TestScoreCollisions(t *testing.T) {
|
||||
allIssues := []*types.Issue{issue1, issue2, issue3, issue4}
|
||||
|
||||
// Score the collisions
|
||||
err = scoreCollisions(ctx, store, collisions, allIssues)
|
||||
err = ScoreCollisions(ctx, store, collisions, allIssues)
|
||||
if err != nil {
|
||||
t.Fatalf("scoreCollisions failed: %v", err)
|
||||
t.Fatalf("ScoreCollisions failed: %v", err)
|
||||
}
|
||||
|
||||
// Verify scores were calculated
|
||||
@@ -719,3 +720,310 @@ func TestCountReferencesWordBoundary(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestReplaceIDReferences(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
text string
|
||||
idMapping map[string]string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "single replacement",
|
||||
text: "This references bd-1 in the description",
|
||||
idMapping: map[string]string{
|
||||
"bd-1": "bd-100",
|
||||
},
|
||||
expected: "This references bd-100 in the description",
|
||||
},
|
||||
{
|
||||
name: "multiple replacements",
|
||||
text: "bd-1 depends on bd-2 and bd-3",
|
||||
idMapping: map[string]string{
|
||||
"bd-1": "bd-100",
|
||||
"bd-2": "bd-101",
|
||||
"bd-3": "bd-102",
|
||||
},
|
||||
expected: "bd-100 depends on bd-101 and bd-102",
|
||||
},
|
||||
{
|
||||
name: "word boundary - don't replace partial matches",
|
||||
text: "bd-10 and bd-100 and bd-1",
|
||||
idMapping: map[string]string{
|
||||
"bd-1": "bd-200",
|
||||
},
|
||||
expected: "bd-10 and bd-100 and bd-200",
|
||||
},
|
||||
{
|
||||
name: "no replacements needed",
|
||||
text: "This has no matching IDs",
|
||||
idMapping: map[string]string{
|
||||
"bd-1": "bd-100",
|
||||
},
|
||||
expected: "This has no matching IDs",
|
||||
},
|
||||
{
|
||||
name: "replace same ID multiple times",
|
||||
text: "bd-1 is mentioned twice: bd-1",
|
||||
idMapping: map[string]string{
|
||||
"bd-1": "bd-100",
|
||||
},
|
||||
expected: "bd-100 is mentioned twice: bd-100",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := replaceIDReferences(tt.text, tt.idMapping)
|
||||
if result != tt.expected {
|
||||
t.Errorf("expected %q, got %q", tt.expected, result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TestRemapCollisions verifies end-to-end remapping: colliding incoming issues
// are recreated under freshly allocated IDs, and text references to the old IDs
// inside already-stored issues are rewritten to the new IDs.
func TestRemapCollisions(t *testing.T) {
	// Create temporary database
	tmpDir, err := os.MkdirTemp("", "remap-collision-test-*")
	if err != nil {
		t.Fatalf("failed to create temp dir: %v", err)
	}
	defer os.RemoveAll(tmpDir)

	dbPath := filepath.Join(tmpDir, "test.db")
	store, err := New(dbPath)
	if err != nil {
		t.Fatalf("failed to create storage: %v", err)
	}
	defer store.Close()

	ctx := context.Background()

	// Setup: Create an existing issue in the database with a high ID number
	// This ensures that when we remap bd-2 and bd-3, they get new IDs that don't conflict
	// The description and notes deliberately mention bd-2/bd-3 so the
	// reference-rewriting step has something to update.
	existingIssue := &types.Issue{
		ID:          "bd-10",
		Title:       "Existing issue",
		Description: "This mentions bd-2 and bd-3",
		Notes:       "Also bd-2 here",
		Status:      types.StatusOpen,
		Priority:    1,
		IssueType:   types.TypeTask,
	}

	if err := store.CreateIssue(ctx, existingIssue, "test"); err != nil {
		t.Fatalf("failed to create existing issue: %v", err)
	}

	// Create collisions (incoming issues with same IDs as DB but different content)
	collision1 := &CollisionDetail{
		ID: "bd-2",
		IncomingIssue: &types.Issue{
			ID:          "bd-2",
			Title:       "Collision 2 (has fewer references)",
			Description: "This is different content",
			Status:      types.StatusOpen,
			Priority:    1,
			IssueType:   types.TypeTask,
		},
		ReferenceScore: 2, // Fewer references
	}

	collision2 := &CollisionDetail{
		ID: "bd-3",
		IncomingIssue: &types.Issue{
			ID:          "bd-3",
			Title:       "Collision 3 (has more references)",
			Description: "Different content for bd-3",
			Status:      types.StatusOpen,
			Priority:    1,
			IssueType:   types.TypeTask,
		},
		ReferenceScore: 5, // More references
	}

	// Collisions are supplied pre-sorted by ascending reference score, as
	// RemapCollisions expects.
	collisions := []*CollisionDetail{collision1, collision2}
	allIssues := []*types.Issue{existingIssue, collision1.IncomingIssue, collision2.IncomingIssue}

	// Remap collisions
	idMapping, err := RemapCollisions(ctx, store, collisions, allIssues)
	if err != nil {
		t.Fatalf("RemapCollisions failed: %v", err)
	}

	// Verify ID mapping was created
	if len(idMapping) != 2 {
		t.Errorf("expected 2 ID mappings, got %d", len(idMapping))
	}

	newID2, ok := idMapping["bd-2"]
	if !ok {
		t.Fatal("bd-2 was not remapped")
	}
	newID3, ok := idMapping["bd-3"]
	if !ok {
		t.Fatal("bd-3 was not remapped")
	}

	// Verify new issues were created with new IDs
	remappedIssue2, err := store.GetIssue(ctx, newID2)
	if err != nil {
		t.Fatalf("failed to get remapped issue %s: %v", newID2, err)
	}
	if remappedIssue2 == nil {
		t.Fatalf("remapped issue %s not found", newID2)
	}
	if remappedIssue2.Title != "Collision 2 (has fewer references)" {
		t.Errorf("unexpected title for remapped issue: %s", remappedIssue2.Title)
	}

	// Verify references in existing issue were updated
	updatedExisting, err := store.GetIssue(ctx, "bd-10")
	if err != nil {
		t.Fatalf("failed to get updated existing issue: %v", err)
	}

	// Check that description was updated
	if updatedExisting.Description != fmt.Sprintf("This mentions %s and %s", newID2, newID3) {
		t.Errorf("description was not updated correctly. Got: %q", updatedExisting.Description)
	}

	// Check that notes were updated
	if updatedExisting.Notes != fmt.Sprintf("Also %s here", newID2) {
		t.Errorf("notes were not updated correctly. Got: %q", updatedExisting.Notes)
	}
}
|
||||
|
||||
func TestUpdateDependencyReferences(t *testing.T) {
|
||||
// Create temporary database
|
||||
tmpDir, err := os.MkdirTemp("", "dep-remap-test-*")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create temp dir: %v", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
dbPath := filepath.Join(tmpDir, "test.db")
|
||||
store, err := New(dbPath)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create storage: %v", err)
|
||||
}
|
||||
defer store.Close()
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
// Create issues
|
||||
issue1 := &types.Issue{
|
||||
ID: "bd-1",
|
||||
Title: "Issue 1",
|
||||
Description: "First issue",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
}
|
||||
|
||||
issue2 := &types.Issue{
|
||||
ID: "bd-2",
|
||||
Title: "Issue 2",
|
||||
Description: "Second issue",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
}
|
||||
|
||||
issue3 := &types.Issue{
|
||||
ID: "bd-3",
|
||||
Title: "Issue 3",
|
||||
Description: "Third issue",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
}
|
||||
|
||||
// Create the new (remapped) issue
|
||||
issue100 := &types.Issue{
|
||||
ID: "bd-100",
|
||||
Title: "Remapped Issue (was bd-2)",
|
||||
Description: "This is the remapped version",
|
||||
Status: types.StatusOpen,
|
||||
Priority: 1,
|
||||
IssueType: types.TypeTask,
|
||||
}
|
||||
|
||||
if err := store.CreateIssue(ctx, issue1, "test"); err != nil {
|
||||
t.Fatalf("failed to create issue1: %v", err)
|
||||
}
|
||||
if err := store.CreateIssue(ctx, issue2, "test"); err != nil {
|
||||
t.Fatalf("failed to create issue2: %v", err)
|
||||
}
|
||||
if err := store.CreateIssue(ctx, issue3, "test"); err != nil {
|
||||
t.Fatalf("failed to create issue3: %v", err)
|
||||
}
|
||||
if err := store.CreateIssue(ctx, issue100, "test"); err != nil {
|
||||
t.Fatalf("failed to create issue100: %v", err)
|
||||
}
|
||||
|
||||
// Add dependencies
|
||||
// bd-1 depends on bd-2
|
||||
dep1 := &types.Dependency{
|
||||
IssueID: "bd-1",
|
||||
DependsOnID: "bd-2",
|
||||
Type: types.DepBlocks,
|
||||
}
|
||||
// bd-3 depends on bd-2
|
||||
dep2 := &types.Dependency{
|
||||
IssueID: "bd-3",
|
||||
DependsOnID: "bd-2",
|
||||
Type: types.DepBlocks,
|
||||
}
|
||||
|
||||
if err := store.AddDependency(ctx, dep1, "test"); err != nil {
|
||||
t.Fatalf("failed to add dep1: %v", err)
|
||||
}
|
||||
if err := store.AddDependency(ctx, dep2, "test"); err != nil {
|
||||
t.Fatalf("failed to add dep2: %v", err)
|
||||
}
|
||||
|
||||
// Create ID mapping (bd-2 was remapped to bd-100)
|
||||
idMapping := map[string]string{
|
||||
"bd-2": "bd-100",
|
||||
}
|
||||
|
||||
// Update dependency references
|
||||
if err := updateDependencyReferences(ctx, store, idMapping); err != nil {
|
||||
t.Fatalf("updateDependencyReferences failed: %v", err)
|
||||
}
|
||||
|
||||
// Verify dependencies were updated
|
||||
// bd-1 should now depend on bd-100
|
||||
deps1, err := store.GetDependencyRecords(ctx, "bd-1")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get deps for bd-1: %v", err)
|
||||
}
|
||||
if len(deps1) != 1 {
|
||||
t.Fatalf("expected 1 dependency for bd-1, got %d", len(deps1))
|
||||
}
|
||||
if deps1[0].DependsOnID != "bd-100" {
|
||||
t.Errorf("expected bd-1 to depend on bd-100, got %s", deps1[0].DependsOnID)
|
||||
}
|
||||
|
||||
// bd-3 should now depend on bd-100
|
||||
deps3, err := store.GetDependencyRecords(ctx, "bd-3")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get deps for bd-3: %v", err)
|
||||
}
|
||||
if len(deps3) != 1 {
|
||||
t.Fatalf("expected 1 dependency for bd-3, got %d", len(deps3))
|
||||
}
|
||||
if deps3[0].DependsOnID != "bd-100" {
|
||||
t.Errorf("expected bd-3 to depend on bd-100, got %s", deps3[0].DependsOnID)
|
||||
}
|
||||
|
||||
// Old dependency bd-2 should not have any dependencies anymore
|
||||
deps2, err := store.GetDependencyRecords(ctx, "bd-2")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to get deps for bd-2: %v", err)
|
||||
}
|
||||
if len(deps2) != 0 {
|
||||
t.Errorf("expected 0 dependencies for bd-2, got %d", len(deps2))
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user