Remove obsolete collision remapping code and tests

- Deleted collision remapping tests (obsolete with hash IDs bd-8e05)
- Simplified collision.go from 704 to 138 lines
- Removed RemapCollisions, ScoreCollisions, and reference update code
- Removed issue_counters table dependencies (bd-807b)
- Added COLLISION_MATH.md documentation
- Made RenameCounterPrefix and ResetCounter no-ops
- Closed bd-a58f, bd-3d65, bd-807b

Hash-based IDs make collision remapping unnecessary since collisions
are extremely rare (same ID = same content).

Amp-Thread-ID: https://ampcode.com/threads/T-cbb0f111-6a95-4598-b03e-c137112f9875
Co-authored-by: Amp <amp@ampcode.com>
This commit is contained in:
Steve Yegge
2025-10-31 00:19:42 -07:00
parent 4e9f6e131c
commit 64fe51d6bb
19 changed files with 307 additions and 3888 deletions

View File

@@ -1,784 +0,0 @@
package main
import (
"bytes"
"context"
"encoding/json"
"io"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/steveyegge/beads/internal/storage/sqlite"
"github.com/steveyegge/beads/internal/types"
)
// Helper function to create test database with issues
// createTestDBWithIssues spins up a temporary SQLite-backed store, seeds it
// with the given issues, and registers cleanup of both the temp directory and
// the store via t.Cleanup. It returns the temp directory and the open store.
func createTestDBWithIssues(t *testing.T, issues []*types.Issue) (string, *sqlite.SQLiteStorage) {
	t.Helper()

	dir, err := os.MkdirTemp("", "bd-collision-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	t.Cleanup(func() { os.RemoveAll(dir) })

	st, err := sqlite.New(filepath.Join(dir, "test.db"))
	if err != nil {
		t.Fatalf("Failed to create storage: %v", err)
	}
	t.Cleanup(func() { st.Close() })

	ctx := context.Background()
	// Set issue_prefix to prevent "database not initialized" errors
	if err := st.SetConfig(ctx, "issue_prefix", "test"); err != nil {
		t.Fatalf("Failed to set issue_prefix: %v", err)
	}

	for _, iss := range issues {
		if err := st.CreateIssue(ctx, iss, "test"); err != nil {
			t.Fatalf("Failed to create issue %s: %v", iss.ID, err)
		}
	}
	return dir, st
}
// Helper function to write JSONL file
// writeJSONLFile writes the given issues, one JSON object per line, to
// issues.jsonl inside dir. It fails the test on any create, encode, or
// close error.
//
// Fix: the original deferred f.Close() and ignored its error; on a write
// path a failed close can silently drop buffered data, so the close error
// is now checked explicitly.
func writeJSONLFile(t *testing.T, dir string, issues []*types.Issue) {
	t.Helper()
	jsonlPath := filepath.Join(dir, "issues.jsonl")
	f, err := os.Create(jsonlPath)
	if err != nil {
		t.Fatalf("Failed to create JSONL file: %v", err)
	}
	encoder := json.NewEncoder(f)
	for _, issue := range issues {
		if err := encoder.Encode(issue); err != nil {
			f.Close() // best-effort close before aborting the test
			t.Fatalf("Failed to encode issue %s: %v", issue.ID, err)
		}
	}
	// A close error here can mean a lost buffered write — fail loudly.
	if err := f.Close(); err != nil {
		t.Fatalf("Failed to close JSONL file: %v", err)
	}
}
// Helper function to capture stderr output
func captureStderr(t *testing.T, fn func()) string {
t.Helper()
oldStderr := os.Stderr
r, w, _ := os.Pipe()
os.Stderr = w
fn()
w.Close()
os.Stderr = oldStderr
var buf bytes.Buffer
io.Copy(&buf, r)
return buf.String()
}
// Helper function to setup auto-import test environment
// setupAutoImportTest points the package-level store and dbPath globals at
// the given test store and marks the store active; the active flag is
// cleared automatically when the test finishes.
func setupAutoImportTest(t *testing.T, testStore *sqlite.SQLiteStorage, tmpDir string) {
	t.Helper()
	store = testStore
	dbPath = filepath.Join(tmpDir, "test.db")

	// Toggle the shared active flag under the store mutex.
	setActive := func(active bool) {
		storeMutex.Lock()
		storeActive = active
		storeMutex.Unlock()
	}
	setActive(true)
	t.Cleanup(func() { setActive(false) })
}
// TestAutoImportMultipleCollisionsRemapped tests that multiple collisions are auto-resolved
func TestAutoImportMultipleCollisionsRemapped(t *testing.T) {
// Create 5 issues in DB with local modifications
now := time.Now().UTC()
closedTime := now.Add(-1 * time.Hour)
dbIssues := []*types.Issue{
{
ID: "test-mc-1",
Title: "Local version 1",
Status: types.StatusClosed,
Priority: 1,
IssueType: types.TypeTask,
CreatedAt: now,
UpdatedAt: now,
ClosedAt: &closedTime,
},
{
ID: "test-mc-2",
Title: "Local version 2",
Status: types.StatusInProgress,
Priority: 2,
IssueType: types.TypeBug,
CreatedAt: now,
UpdatedAt: now,
},
{
ID: "test-mc-3",
Title: "Local version 3",
Status: types.StatusOpen,
Priority: 1,
IssueType: types.TypeFeature,
CreatedAt: now,
UpdatedAt: now,
},
{
ID: "test-mc-4",
Title: "Exact match",
Status: types.StatusOpen,
Priority: 2,
IssueType: types.TypeTask,
CreatedAt: now,
UpdatedAt: now,
},
{
ID: "test-mc-5",
Title: "Another exact match",
Status: types.StatusOpen,
Priority: 1,
IssueType: types.TypeTask,
CreatedAt: now,
UpdatedAt: now,
},
}
tmpDir, testStore := createTestDBWithIssues(t, dbIssues)
setupAutoImportTest(t, testStore, tmpDir)
// Create JSONL with 3 colliding issues, 2 exact matches, and 1 new issue
jsonlIssues := []*types.Issue{
{
ID: "test-mc-1",
Title: "Remote version 1 (conflict)",
Status: types.StatusOpen,
Priority: 3,
IssueType: types.TypeTask,
CreatedAt: now,
UpdatedAt: now,
},
{
ID: "test-mc-2",
Title: "Remote version 2 (conflict)",
Status: types.StatusClosed,
Priority: 1,
IssueType: types.TypeBug,
CreatedAt: now,
UpdatedAt: now.Add(-30 * time.Minute),
ClosedAt: &closedTime,
},
{
ID: "test-mc-3",
Title: "Remote version 3 (conflict)",
Status: types.StatusBlocked,
Priority: 3,
IssueType: types.TypeFeature,
CreatedAt: now,
UpdatedAt: now,
},
{
ID: "test-mc-4",
Title: "Exact match",
Status: types.StatusOpen,
Priority: 2,
IssueType: types.TypeTask,
CreatedAt: now,
UpdatedAt: now,
},
{
ID: "test-mc-5",
Title: "Another exact match",
Status: types.StatusOpen,
Priority: 1,
IssueType: types.TypeTask,
CreatedAt: now,
UpdatedAt: now,
},
{
ID: "test-mc-6",
Title: "Brand new issue",
Status: types.StatusOpen,
Priority: 2,
IssueType: types.TypeTask,
CreatedAt: now,
UpdatedAt: now,
},
}
writeJSONLFile(t, tmpDir, jsonlIssues)
// Capture stderr and run auto-import
stderrOutput := captureStderr(t, autoImportIfNewer)
ctx := context.Background()
// Verify content-hash based collision resolution
// The winner is the version with the lexicographically lower content hash
// For deterministic testing, we check that the remapped version exists as new issue
// Check test-mc-1: Should have the winning version at original ID
issue1, _ := testStore.GetIssue(ctx, "test-mc-1")
if issue1 == nil {
t.Fatal("Expected test-mc-1 to exist")
}
// The winner should be either "Local version 1" or "Remote version 1 (conflict)"
// We don't assert which one, just that one exists at the original ID
// Check test-mc-2: Should have the winning version at original ID
issue2, _ := testStore.GetIssue(ctx, "test-mc-2")
if issue2 == nil {
t.Fatal("Expected test-mc-2 to exist")
}
// Check test-mc-3: Should have the winning version at original ID
issue3, _ := testStore.GetIssue(ctx, "test-mc-3")
if issue3 == nil {
t.Fatal("Expected test-mc-3 to exist")
}
// Verify new issue was imported
newIssue, _ := testStore.GetIssue(ctx, "test-mc-6")
if newIssue == nil {
t.Fatal("Expected new issue test-mc-6 to be imported")
}
if newIssue.Title != "Brand new issue" {
t.Errorf("Expected new issue title 'Brand new issue', got: %s", newIssue.Title)
}
// Verify remapping message was printed
if !strings.Contains(stderrOutput, "remapped") {
t.Errorf("Expected remapping message in stderr, got: %s", stderrOutput)
}
if !strings.Contains(stderrOutput, "test-mc-1") {
t.Errorf("Expected test-mc-1 in remapping message, got: %s", stderrOutput)
}
// Verify colliding issues were created with new IDs
// They should appear in the database with different IDs
allIssues, err := testStore.SearchIssues(ctx, "", types.IssueFilter{})
if err != nil {
t.Fatalf("Failed to get all issues: %v", err)
}
// Should have: 5 original + 1 new + 3 remapped = 9 total
if len(allIssues) < 8 {
t.Errorf("Expected at least 8 issues (5 original + 1 new + 3 remapped), got %d", len(allIssues))
}
}
// TestAutoImportAllCollisionsRemapped tests when every issue has a collision
// TestAutoImportAllCollisionsRemapped tests when every issue has a collision.
// Both DB issues have JSONL counterparts with the same IDs but different
// content, so the importer must resolve both and report "remapped 2".
func TestAutoImportAllCollisionsRemapped(t *testing.T) {
	now := time.Now().UTC()
	closedTime := now.Add(-1 * time.Hour)
	dbIssues := []*types.Issue{
		{
			ID:        "test-ac-1",
			Title:     "Local 1",
			Status:    types.StatusClosed,
			Priority:  1,
			IssueType: types.TypeTask,
			CreatedAt: now,
			UpdatedAt: now,
			ClosedAt:  &closedTime,
		},
		{
			ID:        "test-ac-2",
			Title:     "Local 2",
			Status:    types.StatusOpen,
			Priority:  2,
			IssueType: types.TypeBug,
			CreatedAt: now,
			UpdatedAt: now,
		},
	}
	tmpDir, testStore := createTestDBWithIssues(t, dbIssues)
	setupAutoImportTest(t, testStore, tmpDir)
	// JSONL with all conflicts (different content for same IDs)
	jsonlIssues := []*types.Issue{
		{
			ID:        "test-ac-1",
			Title:     "Remote 1 (conflict)",
			Status:    types.StatusOpen,
			Priority:  3,
			IssueType: types.TypeTask,
			CreatedAt: now,
			UpdatedAt: now,
		},
		{
			ID:        "test-ac-2",
			Title:     "Remote 2 (conflict)",
			Status:    types.StatusClosed,
			Priority:  1,
			IssueType: types.TypeBug,
			CreatedAt: now,
			UpdatedAt: now,
			ClosedAt:  &closedTime,
		},
	}
	writeJSONLFile(t, tmpDir, jsonlIssues)
	// Capture stderr and run auto-import
	stderrOutput := captureStderr(t, autoImportIfNewer)
	ctx := context.Background()
	// Verify content-hash based collision resolution
	// The winner is the version with the lexicographically lower content hash
	// Check that original IDs exist with winning version
	issue1, _ := testStore.GetIssue(ctx, "test-ac-1")
	if issue1 == nil {
		t.Fatal("Expected test-ac-1 to exist")
	}
	// Winner could be either "Local 1" or "Remote 1 (conflict)" - don't assert which
	issue2, _ := testStore.GetIssue(ctx, "test-ac-2")
	if issue2 == nil {
		t.Fatal("Expected test-ac-2 to exist")
	}
	// Winner could be either "Local 2" or "Remote 2 (conflict)" - don't assert which
	// Verify remapping message mentions both collisions
	// NOTE(review): the substring checked is "remapped 2" but the failure
	// message only mentions "'2'" — confirm the expected diagnostic format.
	if !strings.Contains(stderrOutput, "remapped 2") {
		t.Errorf("Expected '2' in remapping count, got: %s", stderrOutput)
	}
}
// TestAutoImportExactMatchesOnly tests happy path with no conflicts
func TestAutoImportExactMatchesOnly(t *testing.T) {
now := time.Now().UTC()
dbIssues := []*types.Issue{
{
ID: "test-em-1",
Title: "Exact match issue",
Status: types.StatusOpen,
Priority: 1,
IssueType: types.TypeTask,
CreatedAt: now,
UpdatedAt: now,
},
}
tmpDir, testStore := createTestDBWithIssues(t, dbIssues)
setupAutoImportTest(t, testStore, tmpDir)
// JSONL with exact match + new issue
jsonlIssues := []*types.Issue{
{
ID: "test-em-1",
Title: "Exact match issue",
Status: types.StatusOpen,
Priority: 1,
IssueType: types.TypeTask,
CreatedAt: now,
UpdatedAt: now,
},
{
ID: "test-em-2",
Title: "New issue",
Status: types.StatusOpen,
Priority: 2,
IssueType: types.TypeBug,
CreatedAt: now,
UpdatedAt: now,
},
}
writeJSONLFile(t, tmpDir, jsonlIssues)
// Run auto-import (should not print collision warnings)
stderrOutput := captureStderr(t, autoImportIfNewer)
ctx := context.Background()
// Verify new issue imported
newIssue, _ := testStore.GetIssue(ctx, "test-em-2")
if newIssue == nil {
t.Fatal("Expected new issue to be imported")
}
if newIssue.Title != "New issue" {
t.Errorf("Expected title 'New issue', got: %s", newIssue.Title)
}
// Verify no collision warnings
if strings.Contains(stderrOutput, "remapped") {
t.Errorf("Expected no remapping message, got: %s", stderrOutput)
}
}
// TestAutoImportHashUnchanged tests fast path when JSONL hasn't changed
func TestAutoImportHashUnchanged(t *testing.T) {
now := time.Now().UTC()
dbIssues := []*types.Issue{
{
ID: "test-hu-1",
Title: "Test issue",
Status: types.StatusOpen,
Priority: 1,
IssueType: types.TypeTask,
CreatedAt: now,
UpdatedAt: now,
},
}
tmpDir, testStore := createTestDBWithIssues(t, dbIssues)
setupAutoImportTest(t, testStore, tmpDir)
writeJSONLFile(t, tmpDir, dbIssues)
// Run auto-import first time
os.Setenv("BD_DEBUG", "1")
defer os.Unsetenv("BD_DEBUG")
stderrOutput1 := captureStderr(t, autoImportIfNewer)
// Should trigger import on first run
if !strings.Contains(stderrOutput1, "auto-import triggered") && !strings.Contains(stderrOutput1, "hash changed") {
t.Logf("First run: %s", stderrOutput1)
}
// Run auto-import second time (JSONL unchanged)
stderrOutput2 := captureStderr(t, autoImportIfNewer)
// Verify fast path was taken (hash match)
if !strings.Contains(stderrOutput2, "JSONL unchanged") {
t.Errorf("Expected 'JSONL unchanged' in debug output, got: %s", stderrOutput2)
}
}
// TestAutoImportParseError tests that parse errors are handled gracefully
func TestAutoImportParseError(t *testing.T) {
now := time.Now().UTC()
dbIssues := []*types.Issue{
{
ID: "test-pe-1",
Title: "Test issue",
Status: types.StatusOpen,
Priority: 1,
IssueType: types.TypeTask,
CreatedAt: now,
UpdatedAt: now,
},
}
tmpDir, testStore := createTestDBWithIssues(t, dbIssues)
setupAutoImportTest(t, testStore, tmpDir)
// Create malformed JSONL
jsonlPath := filepath.Join(tmpDir, "issues.jsonl")
os.WriteFile(jsonlPath, []byte(`{"id":"test-pe-1","title":"Good issue","status":"open","priority":1,"issue_type":"task","created_at":"2025-10-16T00:00:00Z","updated_at":"2025-10-16T00:00:00Z"}
{invalid json here}
`), 0644)
// Run auto-import (should skip due to parse error)
stderrOutput := captureStderr(t, autoImportIfNewer)
// Verify parse error was reported
if !strings.Contains(stderrOutput, "parse error") {
t.Errorf("Expected parse error message, got: %s", stderrOutput)
}
}
// TestAutoImportEmptyJSONL tests behavior with empty JSONL file
func TestAutoImportEmptyJSONL(t *testing.T) {
now := time.Now().UTC()
dbIssues := []*types.Issue{
{
ID: "test-ej-1",
Title: "Existing issue",
Status: types.StatusOpen,
Priority: 1,
IssueType: types.TypeTask,
CreatedAt: now,
UpdatedAt: now,
},
}
tmpDir, testStore := createTestDBWithIssues(t, dbIssues)
setupAutoImportTest(t, testStore, tmpDir)
// Create empty JSONL
jsonlPath := filepath.Join(tmpDir, "issues.jsonl")
os.WriteFile(jsonlPath, []byte(""), 0644)
// Run auto-import
autoImportIfNewer()
ctx := context.Background()
// Verify existing issue still exists (not deleted)
existing, _ := testStore.GetIssue(ctx, "test-ej-1")
if existing == nil {
t.Fatal("Expected existing issue to remain after empty JSONL import")
}
}
// TestAutoImportNewIssuesOnly tests importing only new issues
func TestAutoImportNewIssuesOnly(t *testing.T) {
now := time.Now().UTC()
dbIssues := []*types.Issue{
{
ID: "test-ni-1",
Title: "Existing issue",
Status: types.StatusOpen,
Priority: 1,
IssueType: types.TypeTask,
CreatedAt: now,
UpdatedAt: now,
},
}
tmpDir, testStore := createTestDBWithIssues(t, dbIssues)
setupAutoImportTest(t, testStore, tmpDir)
// JSONL with only new issues (no collisions, no exact matches)
jsonlIssues := []*types.Issue{
{
ID: "test-ni-2",
Title: "New issue 1",
Status: types.StatusOpen,
Priority: 1,
IssueType: types.TypeTask,
CreatedAt: now,
UpdatedAt: now,
},
{
ID: "test-ni-3",
Title: "New issue 2",
Status: types.StatusOpen,
Priority: 2,
IssueType: types.TypeBug,
CreatedAt: now,
UpdatedAt: now,
},
}
writeJSONLFile(t, tmpDir, jsonlIssues)
// Run auto-import
stderrOutput := captureStderr(t, autoImportIfNewer)
ctx := context.Background()
// Verify new issues imported
issue2, _ := testStore.GetIssue(ctx, "test-ni-2")
if issue2 == nil || issue2.Title != "New issue 1" {
t.Error("Expected new issue 1 to be imported")
}
issue3, _ := testStore.GetIssue(ctx, "test-ni-3")
if issue3 == nil || issue3.Title != "New issue 2" {
t.Error("Expected new issue 2 to be imported")
}
// Verify no collision warnings
if strings.Contains(stderrOutput, "remapped") {
t.Errorf("Expected no collision messages, got: %s", stderrOutput)
}
}
// TestAutoImportUpdatesExactMatches tests that exact matches update the DB
func TestAutoImportUpdatesExactMatches(t *testing.T) {
now := time.Now().UTC()
oldTime := now.Add(-24 * time.Hour)
dbIssues := []*types.Issue{
{
ID: "test-um-1",
Title: "Exact match",
Status: types.StatusOpen,
Priority: 1,
IssueType: types.TypeTask,
CreatedAt: oldTime,
UpdatedAt: oldTime,
},
}
tmpDir, testStore := createTestDBWithIssues(t, dbIssues)
setupAutoImportTest(t, testStore, tmpDir)
// JSONL with exact match (same content, newer timestamp)
jsonlIssues := []*types.Issue{
{
ID: "test-um-1",
Title: "Exact match",
Status: types.StatusOpen,
Priority: 1,
IssueType: types.TypeTask,
CreatedAt: oldTime,
UpdatedAt: now, // Newer timestamp
},
}
writeJSONLFile(t, tmpDir, jsonlIssues)
// Run auto-import
autoImportIfNewer()
ctx := context.Background()
// Verify issue was updated (UpdatedAt should be newer)
updated, _ := testStore.GetIssue(ctx, "test-um-1")
if updated.UpdatedAt.Before(now.Add(-1 * time.Second)) {
t.Errorf("Expected UpdatedAt to be updated to %v, got %v", now, updated.UpdatedAt)
}
}
// TestAutoImportJSONLNotFound tests behavior when JSONL doesn't exist
func TestAutoImportJSONLNotFound(t *testing.T) {
tmpDir, err := os.MkdirTemp("", "bd-test-notfound-*")
if err != nil {
t.Fatalf("Failed to create temp dir: %v", err)
}
defer os.RemoveAll(tmpDir)
dbPath = filepath.Join(tmpDir, "test.db")
// Don't create JSONL file
testStore, err := sqlite.New(dbPath)
if err != nil {
t.Fatalf("Failed to create storage: %v", err)
}
defer testStore.Close()
store = testStore
storeMutex.Lock()
storeActive = true
storeMutex.Unlock()
defer func() {
storeMutex.Lock()
storeActive = false
storeMutex.Unlock()
}()
// Enable debug mode to see skip message
os.Setenv("BD_DEBUG", "1")
defer os.Unsetenv("BD_DEBUG")
// Run auto-import (should skip silently)
stderrOutput := captureStderr(t, autoImportIfNewer)
// Verify it skipped due to missing JSONL
if !strings.Contains(stderrOutput, "JSONL not found") {
t.Logf("Expected 'JSONL not found' message, got: %s", stderrOutput)
}
}
// TestAutoImportCollisionRemapMultipleFields tests remapping with different field conflicts
func TestAutoImportCollisionRemapMultipleFields(t *testing.T) {
now := time.Now().UTC()
// Create issue with many fields set
dbIssues := []*types.Issue{
{
ID: "test-fields-1",
Title: "Local title",
Description: "Local description",
Status: types.StatusOpen,
Priority: 1,
IssueType: types.TypeTask,
CreatedAt: now,
UpdatedAt: now,
Notes: "Local notes",
Design: "Local design",
AcceptanceCriteria: "Local acceptance",
},
}
tmpDir, testStore := createTestDBWithIssues(t, dbIssues)
setupAutoImportTest(t, testStore, tmpDir)
ctx := context.Background()
// JSONL with conflicts in multiple fields
jsonlIssues := []*types.Issue{
{
ID: "test-fields-1",
Title: "Remote title (conflict)",
Description: "Remote description (conflict)",
Status: types.StatusClosed,
Priority: 3,
IssueType: types.TypeBug,
CreatedAt: now,
UpdatedAt: now,
ClosedAt: &now,
Notes: "Remote notes (conflict)",
Design: "Remote design (conflict)",
AcceptanceCriteria: "Remote acceptance (conflict)",
},
}
writeJSONLFile(t, tmpDir, jsonlIssues)
// Run auto-import
stderrOutput := captureStderr(t, autoImportIfNewer)
// Verify remapping occurred
if !strings.Contains(stderrOutput, "test-fields-1") {
t.Logf("Expected remapping message for test-fields-1: %s", stderrOutput)
}
// Verify content-hash based collision resolution
// The winning version (lower content hash) keeps the original ID
// The loser is remapped to a new ID
issue, _ := testStore.GetIssue(ctx, "test-fields-1")
if issue == nil {
t.Fatal("Expected test-fields-1 to exist")
}
// Verify the issue has consistent fields (all from the same version)
// Don't assert which version won, just that it's internally consistent
if issue.Title == "Local title" {
// If local won, verify all local fields
if issue.Description != "Local description" {
t.Errorf("Expected local description with local title, got: %s", issue.Description)
}
if issue.Status != types.StatusOpen {
t.Errorf("Expected local status with local title, got: %s", issue.Status)
}
if issue.Priority != 1 {
t.Errorf("Expected local priority with local title, got: %d", issue.Priority)
}
} else if issue.Title == "Remote title (conflict)" {
// If remote won, verify all remote fields
if issue.Description != "Remote description (conflict)" {
t.Errorf("Expected remote description with remote title, got: %s", issue.Description)
}
if issue.Status != types.StatusClosed {
t.Errorf("Expected remote status with remote title, got: %s", issue.Status)
}
if issue.Priority != 3 {
t.Errorf("Expected remote priority with remote title, got: %d", issue.Priority)
}
} else {
t.Errorf("Unexpected title: %s", issue.Title)
}
}
// TestAutoImportMetadataReadError tests error handling when metadata can't be read
func TestAutoImportMetadataReadError(t *testing.T) {
// This test is difficult to implement without mocking since metadata
// should always work in SQLite. We can document that this error path
// is defensive but hard to trigger in practice.
t.Skip("Metadata read error is defensive code path, hard to test without mocking")
}

View File

@@ -1,307 +0,0 @@
package main
import (
"context"
"os"
"path/filepath"
"testing"
"github.com/steveyegge/beads/internal/storage/sqlite"
"github.com/steveyegge/beads/internal/types"
)
// Issue IDs shared by the collision tests in this file.
const (
	testIssueBD1 = "bd-1"
	testIssueBD2 = "bd-2"
)
// TestRemapCollisionsRemapsImportedNotExisting verifies the bug fix where collision
// resolution incorrectly modified existing issue dependencies.
//
// Bug (fixed): updateDependencyReferences() was updating ALL dependencies in the database
// based on the idMapping, without distinguishing between dependencies belonging to
// IMPORTED issues (should be updated) vs EXISTING issues (should NOT be touched).
//
// This test ensures existing issue dependencies are preserved during collision resolution.
// TestRemapCollisionsRemapsImportedNotExisting verifies the bug fix where collision
// resolution incorrectly modified existing issue dependencies.
//
// Bug (fixed): updateDependencyReferences() was updating ALL dependencies in the database
// based on the idMapping, without distinguishing between dependencies belonging to
// IMPORTED issues (should be updated) vs EXISTING issues (should NOT be touched).
//
// This test ensures existing issue dependencies are preserved during collision resolution.
func TestRemapCollisionsRemapsImportedNotExisting(t *testing.T) {
	// Setup: Create temporary database
	tmpDir, err := os.MkdirTemp("", "collision-bug-test-*")
	if err != nil {
		t.Fatalf("failed to create temp dir: %v", err)
	}
	defer os.RemoveAll(tmpDir)
	dbPath := filepath.Join(tmpDir, "test.db")
	store := newTestStoreWithPrefix(t, dbPath, "bd")
	ctx := context.Background()
	// Step 1: Create existing issues with dependencies
	existingIssues := []*types.Issue{
		{
			ID:          testIssueBD1,
			Title:       "Existing BD-1",
			Description: "Original database issue 1, depends on bd-2",
			Status:      types.StatusOpen,
			Priority:    2,
			IssueType:   types.TypeTask,
		},
		{
			ID:          testIssueBD2,
			Title:       "Existing BD-2",
			Description: "Original database issue 2",
			Status:      types.StatusOpen,
			Priority:    2,
			IssueType:   types.TypeTask,
		},
		{
			ID:          "bd-3",
			Title:       "Existing BD-3",
			Description: "Original database issue 3, depends on " + testIssueBD1,
			Status:      types.StatusOpen,
			Priority:    2,
			IssueType:   types.TypeTask,
		},
	}
	for _, issue := range existingIssues {
		if err := store.CreateIssue(ctx, issue, "test"); err != nil {
			t.Fatalf("failed to create existing issue %s: %v", issue.ID, err)
		}
	}
	// Add dependencies between existing issues: bd-1 → bd-2 and bd-3 → bd-1.
	dep1 := &types.Dependency{
		IssueID:     testIssueBD1,
		DependsOnID: testIssueBD2,
		Type:        types.DepBlocks,
	}
	dep2 := &types.Dependency{
		IssueID:     "bd-3",
		DependsOnID: testIssueBD1,
		Type:        types.DepBlocks,
	}
	if err := store.AddDependency(ctx, dep1, "test"); err != nil {
		t.Fatalf("failed to add dependency bd-1 → bd-2: %v", err)
	}
	if err := store.AddDependency(ctx, dep2, "test"); err != nil {
		t.Fatalf("failed to add dependency bd-3 → bd-1: %v", err)
	}
	// Step 2: Simulate importing issues with same IDs but different content
	importedIssues := []*types.Issue{
		{
			ID:          testIssueBD1,
			Title:       "Imported BD-1",
			Description: "From import",
			Status:      types.StatusOpen,
			Priority:    1,
			IssueType:   types.TypeTask,
		},
		{
			ID:          testIssueBD2,
			Title:       "Imported BD-2",
			Description: "From import",
			Status:      types.StatusOpen,
			Priority:    1,
			IssueType:   types.TypeTask,
		},
		{
			ID:          "bd-3",
			Title:       "Imported BD-3",
			Description: "From import",
			Status:      types.StatusOpen,
			Priority:    1,
			IssueType:   types.TypeTask,
		},
	}
	// Step 3: Detect collisions — all three imported IDs already exist.
	collisionResult, err := sqlite.DetectCollisions(ctx, store, importedIssues)
	if err != nil {
		t.Fatalf("collision detection failed: %v", err)
	}
	if len(collisionResult.Collisions) != 3 {
		t.Fatalf("expected 3 collisions, got %d", len(collisionResult.Collisions))
	}
	// Step 4: Resolve collisions
	allExisting, err := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("failed to get existing issues: %v", err)
	}
	if err := sqlite.ScoreCollisions(ctx, store, collisionResult.Collisions, allExisting); err != nil {
		t.Fatalf("failed to score collisions: %v", err)
	}
	idMapping, err := sqlite.RemapCollisions(ctx, store, collisionResult.Collisions, allExisting)
	if err != nil {
		t.Fatalf("RemapCollisions failed: %v", err)
	}
	// Step 5: Verify dependencies are preserved on remapped issues
	// With content-hash scoring, all existing issues get remapped to new IDs
	t.Logf("\n=== Verifying Dependencies Preserved on Remapped Issues ===")
	t.Logf("ID Mappings: %v", idMapping)
	// The new bd-1, bd-2, bd-3 (incoming issues) should have NO dependencies
	newBD1Deps, _ := store.GetDependencyRecords(ctx, "bd-1")
	if len(newBD1Deps) != 0 {
		t.Errorf("Expected 0 dependencies for new bd-1 (incoming), got %d", len(newBD1Deps))
	}
	newBD3Deps, _ := store.GetDependencyRecords(ctx, "bd-3")
	if len(newBD3Deps) != 0 {
		t.Errorf("Expected 0 dependencies for new bd-3 (incoming), got %d", len(newBD3Deps))
	}
	// The remapped issues should have their dependencies preserved
	remappedBD1 := idMapping["bd-1"] // Old bd-1 → new ID
	remappedBD2 := idMapping["bd-2"] // Old bd-2 → new ID
	remappedBD3 := idMapping["bd-3"] // Old bd-3 → new ID
	// Check remapped bd-1's dependency (was bd-1 → bd-2, now should be remappedBD1 → remappedBD2)
	remappedBD1Deps, _ := store.GetDependencyRecords(ctx, remappedBD1)
	t.Logf("%s dependencies: %d (expected: 1)", remappedBD1, len(remappedBD1Deps))
	if len(remappedBD1Deps) != 1 {
		t.Errorf("Expected 1 dependency for remapped %s (preserved from old bd-1), got %d",
			remappedBD1, len(remappedBD1Deps))
	} else if remappedBD1Deps[0].DependsOnID != remappedBD2 {
		t.Errorf("Expected %s → %s, got %s → %s",
			remappedBD1, remappedBD2, remappedBD1, remappedBD1Deps[0].DependsOnID)
	}
	// Check remapped bd-3's dependency (was bd-3 → bd-1, now should be remappedBD3 → remappedBD1)
	remappedBD3Deps, _ := store.GetDependencyRecords(ctx, remappedBD3)
	t.Logf("%s dependencies: %d (expected: 1)", remappedBD3, len(remappedBD3Deps))
	if len(remappedBD3Deps) != 1 {
		t.Errorf("Expected 1 dependency for remapped %s (preserved from old bd-3), got %d",
			remappedBD3, len(remappedBD3Deps))
	} else if remappedBD3Deps[0].DependsOnID != remappedBD1 {
		t.Errorf("Expected %s → %s, got %s → %s",
			remappedBD3, remappedBD1, remappedBD3, remappedBD3Deps[0].DependsOnID)
	}
	t.Logf("Fix verified: Dependencies preserved correctly on remapped issues with content-hash scoring")
}
// TestRemapCollisionsDoesNotUpdateNonexistentDependencies verifies that
// updateDependencyReferences is effectively a no-op during normal import flow,
// since imported dependencies haven't been added to the database yet when
// RemapCollisions runs.
//
// This test demonstrates that even if we had dependencies with the old imported IDs
// in the database, they are NOT touched because they don't have the NEW remapped IDs.
// TestRemapCollisionsDoesNotUpdateNonexistentDependencies verifies that
// updateDependencyReferences is effectively a no-op during normal import flow,
// since imported dependencies haven't been added to the database yet when
// RemapCollisions runs.
//
// This test demonstrates that even if we had dependencies with the old imported IDs
// in the database, they are NOT touched because they don't have the NEW remapped IDs.
func TestRemapCollisionsDoesNotUpdateNonexistentDependencies(t *testing.T) {
	tmpDir, err := os.MkdirTemp("", "collision-noop-deps-*")
	if err != nil {
		t.Fatalf("failed to create temp dir: %v", err)
	}
	defer os.RemoveAll(tmpDir)
	dbPath := filepath.Join(tmpDir, "test.db")
	store := newTestStoreWithPrefix(t, dbPath, "bd")
	ctx := context.Background()
	// Step 1: Create existing issue with dependency
	existing1 := &types.Issue{
		ID:          testIssueBD1,
		Title:       "Existing BD-1",
		Description: "Original database issue",
		Status:      types.StatusOpen,
		Priority:    2,
		IssueType:   types.TypeTask,
	}
	existing2 := &types.Issue{
		ID:          testIssueBD2,
		Title:       "Existing BD-2",
		Description: "Original database issue",
		Status:      types.StatusOpen,
		Priority:    2,
		IssueType:   types.TypeTask,
	}
	if err := store.CreateIssue(ctx, existing1, "test"); err != nil {
		t.Fatalf("failed to create existing issue bd-1: %v", err)
	}
	if err := store.CreateIssue(ctx, existing2, "test"); err != nil {
		t.Fatalf("failed to create existing issue bd-2: %v", err)
	}
	// Add dependency between existing issues: bd-1 → bd-2.
	existingDep := &types.Dependency{
		IssueID:     testIssueBD1,
		DependsOnID: testIssueBD2,
		Type:        types.DepBlocks,
	}
	if err := store.AddDependency(ctx, existingDep, "test"); err != nil {
		t.Fatalf("failed to add existing dependency: %v", err)
	}
	// Step 2: Import colliding issues (without dependencies in DB)
	imported := []*types.Issue{
		{
			ID:          testIssueBD1,
			Title:       "Imported BD-1",
			Description: "From import, will be remapped",
			Status:      types.StatusOpen,
			Priority:    1,
			IssueType:   types.TypeTask,
		},
	}
	// Detect and resolve collisions
	collisionResult, err := sqlite.DetectCollisions(ctx, store, imported)
	if err != nil {
		t.Fatalf("collision detection failed: %v", err)
	}
	allExisting, _ := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err := sqlite.ScoreCollisions(ctx, store, collisionResult.Collisions, allExisting); err != nil {
		t.Fatalf("failed to score collisions: %v", err)
	}
	// Now remap collisions - this should NOT touch the existing bd-1 → bd-2 dependency
	idMapping, err := sqlite.RemapCollisions(ctx, store, collisionResult.Collisions, allExisting)
	if err != nil {
		t.Fatalf("RemapCollisions failed: %v", err)
	}
	// Step 3: Verify dependencies are preserved correctly
	// With content-hash scoring: existing hash > incoming hash, so RemapIncoming=false
	// This means: existing bd-1 → remapped to new ID, incoming bd-1 takes over bd-1
	// The remapped issue (old bd-1) should have its dependency preserved
	remappedID := idMapping["bd-1"]
	remappedDeps, err := store.GetDependencyRecords(ctx, remappedID)
	if err != nil {
		t.Fatalf("failed to get dependencies for %s: %v", remappedID, err)
	}
	if len(remappedDeps) != 1 {
		t.Errorf("Expected 1 dependency for remapped %s (preserved from old bd-1), got %d",
			remappedID, len(remappedDeps))
	} else {
		// The dependency should now be remappedID → bd-2 (updated from bd-1 → bd-2)
		if remappedDeps[0].DependsOnID != testIssueBD2 {
			t.Errorf("Expected %s → bd-2, got %s → %s", remappedID, remappedID, remappedDeps[0].DependsOnID)
		}
	}
	// The new bd-1 (incoming issue) should have no dependencies
	// (because dependencies are imported later in Phase 5)
	newBD1Deps, err := store.GetDependencyRecords(ctx, "bd-1")
	if err != nil {
		t.Fatalf("failed to get dependencies for bd-1: %v", err)
	}
	if len(newBD1Deps) != 0 {
		t.Errorf("Expected 0 dependencies for new bd-1 (dependencies added later), got %d", len(newBD1Deps))
	}
	t.Logf("Verified: Dependencies preserved correctly during collision resolution with content-hash scoring")
}

View File

@@ -1,731 +0,0 @@
package main
import (
"context"
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/steveyegge/beads/internal/storage/sqlite"
"github.com/steveyegge/beads/internal/types"
)
// TestImportSimpleCollision tests the basic collision detection and resolution
func TestImportSimpleCollision(t *testing.T) {
tmpDir, err := os.MkdirTemp("", "bd-collision-test-*")
if err != nil {
t.Fatalf("Failed to create temp dir: %v", err)
}
defer func() {
if err := os.RemoveAll(tmpDir); err != nil {
t.Logf("Warning: cleanup failed: %v", err)
}
}()
dbPath := filepath.Join(tmpDir, "test.db")
testStore := newTestStoreWithPrefix(t, dbPath, "bd")
ctx := context.Background()
// Create existing issue with a higher ID to avoid conflicts with auto-generated IDs
existing := &types.Issue{
ID: "bd-10",
Title: "Existing issue",
Description: "Original description",
Status: types.StatusOpen,
Priority: 1,
IssueType: types.TypeTask,
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
}
if err := testStore.CreateIssue(ctx, existing, "test"); err != nil {
t.Fatalf("Failed to create existing issue: %v", err)
}
// Prepare import with collision
incoming := &types.Issue{
ID: "bd-10",
Title: "MODIFIED issue",
Description: "Different description",
Status: types.StatusInProgress,
Priority: 2,
IssueType: types.TypeBug,
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
}
incomingIssues := []*types.Issue{incoming}
// Test collision detection
result, err := sqlite.DetectCollisions(ctx, testStore, incomingIssues)
if err != nil {
t.Fatalf("DetectCollisions failed: %v", err)
}
if len(result.Collisions) != 1 {
t.Fatalf("Expected 1 collision, got %d", len(result.Collisions))
}
if result.Collisions[0].ID != "bd-10" {
t.Errorf("Expected collision ID bd-10, got %s", result.Collisions[0].ID)
}
// Test resolution
allExisting, _ := testStore.SearchIssues(ctx, "", types.IssueFilter{})
if err := sqlite.ScoreCollisions(ctx, testStore, result.Collisions, allExisting); err != nil {
t.Fatalf("ScoreCollisions failed: %v", err)
}
idMapping, err := sqlite.RemapCollisions(ctx, testStore, result.Collisions, allExisting)
if err != nil {
t.Fatalf("RemapCollisions failed: %v", err)
}
if len(idMapping) != 1 {
t.Fatalf("Expected 1 remapping, got %d", len(idMapping))
}
newID := idMapping["bd-10"]
if newID == "" {
t.Fatal("Expected bd-10 to be remapped")
}
// Verify remapped issue exists
remapped, err := testStore.GetIssue(ctx, newID)
if err != nil {
t.Fatalf("Failed to get remapped issue: %v", err)
}
if remapped == nil {
t.Fatal("Remapped issue not found")
}
if remapped.Title != "MODIFIED issue" {
t.Errorf("Remapped issue title = %s, want 'MODIFIED issue'", remapped.Title)
}
// Verify original issue unchanged
original, err := testStore.GetIssue(ctx, "bd-10")
if err != nil {
t.Fatalf("Failed to get original issue: %v", err)
}
if original.Title != "Existing issue" {
t.Errorf("Original issue modified: %s", original.Title)
}
}
// TestImportMultipleCollisions tests handling of multiple colliding issues:
// three incoming issues all reuse existing IDs and must all be remapped.
func TestImportMultipleCollisions(t *testing.T) {
	dir, err := os.MkdirTemp("", "bd-collision-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	defer func() {
		if rmErr := os.RemoveAll(dir); rmErr != nil {
			t.Logf("Warning: cleanup failed: %v", rmErr)
		}
	}()
	store := newTestStoreWithPrefix(t, filepath.Join(dir, "test.db"), "bd")
	ctx := context.Background()
	// Seed bd-100..bd-102; high IDs avoid clashing with the auto-generated sequence.
	for i := 100; i <= 102; i++ {
		seed := &types.Issue{
			ID:          fmt.Sprintf("bd-%d", i),
			Title:       fmt.Sprintf("Existing issue %d", i),
			Description: "Original",
			Status:      types.StatusOpen,
			Priority:    1,
			IssueType:   types.TypeTask,
			CreatedAt:   time.Now(),
			UpdatedAt:   time.Now(),
		}
		if err := store.CreateIssue(ctx, seed, "test"); err != nil {
			t.Fatalf("Failed to create issue %d: %v", i, err)
		}
	}
	// Build the incoming batch: every issue collides with a seeded one.
	var incoming []*types.Issue
	for i, title := range []string{"Modified 1", "Modified 2", "Modified 3"} {
		incoming = append(incoming, &types.Issue{
			ID:          fmt.Sprintf("bd-%d", 100+i),
			Title:       title,
			Description: "Changed",
			Status:      types.StatusOpen,
			Priority:    1,
			IssueType:   types.TypeTask,
		})
	}
	result, err := sqlite.DetectCollisions(ctx, store, incoming)
	if err != nil {
		t.Fatalf("DetectCollisions failed: %v", err)
	}
	if len(result.Collisions) != 3 {
		t.Fatalf("Expected 3 collisions, got %d", len(result.Collisions))
	}
	// Score and remap all three collisions.
	allExisting, _ := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err := sqlite.ScoreCollisions(ctx, store, result.Collisions, allExisting); err != nil {
		t.Fatalf("ScoreCollisions failed: %v", err)
	}
	idMapping, err := sqlite.RemapCollisions(ctx, store, result.Collisions, allExisting)
	if err != nil {
		t.Fatalf("RemapCollisions failed: %v", err)
	}
	if len(idMapping) != 3 {
		t.Fatalf("Expected 3 remappings, got %d", len(idMapping))
	}
	// Every remapped ID must resolve to an issue carrying one of the imported titles.
	for oldID, newID := range idMapping {
		remapped, err := store.GetIssue(ctx, newID)
		if err != nil {
			t.Fatalf("Failed to get remapped issue %s: %v", newID, err)
		}
		if remapped == nil {
			t.Fatalf("Remapped issue %s not found", newID)
		}
		if !strings.Contains(remapped.Title, "Modified") {
			t.Errorf("Remapped issue %s has wrong title: %s", oldID, remapped.Title)
		}
	}
}
// TestImportTextReferenceUpdates tests that text references are updated during
// remapping: free-text mentions of remapped IDs in Description, Design, Notes,
// and AcceptanceCriteria must be rewritten to the new IDs.
func TestImportTextReferenceUpdates(t *testing.T) {
	dir, err := os.MkdirTemp("", "bd-collision-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	defer func() {
		if rmErr := os.RemoveAll(dir); rmErr != nil {
			t.Logf("Warning: cleanup failed: %v", rmErr)
		}
	}()
	store := newTestStoreWithPrefix(t, filepath.Join(dir, "test.db"), "bd")
	ctx := context.Background()
	now := time.Now()
	// bd-10 carries free-text references to bd-11 and bd-12 in every text field.
	referencing := &types.Issue{
		ID:                 "bd-10",
		Title:              "Issue 1",
		Description:        "This depends on bd-11 and bd-12",
		Design:             "Implementation uses bd-11 approach",
		Notes:              "See bd-12 for details",
		AcceptanceCriteria: "Must work with bd-11",
		Status:             types.StatusOpen,
		Priority:           1,
		IssueType:          types.TypeTask,
		CreatedAt:          now,
		UpdatedAt:          now,
	}
	target1 := &types.Issue{
		ID:        "bd-11",
		Title:     "Issue 2",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
		CreatedAt: now,
		UpdatedAt: now,
	}
	target2 := &types.Issue{
		ID:        "bd-12",
		Title:     "Issue 3",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
		CreatedAt: now,
		UpdatedAt: now,
	}
	if err := store.CreateIssue(ctx, referencing, "test"); err != nil {
		t.Fatalf("Failed to create issue 1: %v", err)
	}
	if err := store.CreateIssue(ctx, target1, "test"); err != nil {
		t.Fatalf("Failed to create issue 2: %v", err)
	}
	if err := store.CreateIssue(ctx, target2, "test"); err != nil {
		t.Fatalf("Failed to create issue 3: %v", err)
	}
	// The import collides on both referenced IDs.
	incoming := []*types.Issue{
		{
			ID:        "bd-11",
			Title:     "Modified Issue 2",
			Status:    types.StatusInProgress,
			Priority:  2,
			IssueType: types.TypeBug,
		},
		{
			ID:        "bd-12",
			Title:     "Modified Issue 3",
			Status:    types.StatusInProgress,
			Priority:  2,
			IssueType: types.TypeBug,
		},
	}
	result, err := sqlite.DetectCollisions(ctx, store, incoming)
	if err != nil {
		t.Fatalf("DetectCollisions failed: %v", err)
	}
	if len(result.Collisions) != 2 {
		t.Fatalf("Expected 2 collisions, got %d", len(result.Collisions))
	}
	// Score and remap both collisions.
	allExisting, _ := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err := sqlite.ScoreCollisions(ctx, store, result.Collisions, allExisting); err != nil {
		t.Fatalf("ScoreCollisions failed: %v", err)
	}
	idMapping, err := sqlite.RemapCollisions(ctx, store, result.Collisions, allExisting)
	if err != nil {
		t.Fatalf("RemapCollisions failed: %v", err)
	}
	if len(idMapping) != 2 {
		t.Fatalf("Expected 2 remappings, got %d", len(idMapping))
	}
	newID2 := idMapping["bd-11"]
	newID3 := idMapping["bd-12"]
	// After remapping, every text field of bd-10 must point at the new IDs.
	updated, err := store.GetIssue(ctx, "bd-10")
	if err != nil {
		t.Fatalf("Failed to get updated issue 1: %v", err)
	}
	if !strings.Contains(updated.Description, newID2) {
		t.Errorf("Description not updated: %s (should contain %s)", updated.Description, newID2)
	}
	if !strings.Contains(updated.Description, newID3) {
		t.Errorf("Description not updated: %s (should contain %s)", updated.Description, newID3)
	}
	if !strings.Contains(updated.Design, newID2) {
		t.Errorf("Design not updated: %s (should contain %s)", updated.Design, newID2)
	}
	if !strings.Contains(updated.Notes, newID3) {
		t.Errorf("Notes not updated: %s (should contain %s)", updated.Notes, newID3)
	}
	if !strings.Contains(updated.AcceptanceCriteria, newID2) {
		t.Errorf("AcceptanceCriteria not updated: %s (should contain %s)", updated.AcceptanceCriteria, newID2)
	}
	// The stale IDs must be gone from the description.
	if strings.Contains(updated.Description, "bd-11") {
		t.Error("Old ID bd-11 still present in Description")
	}
	if strings.Contains(updated.Description, "bd-12") {
		t.Error("Old ID bd-12 still present in Description")
	}
}
// TestImportPartialIDMatch tests word boundary matching (bd-10 vs bd-100):
// remapping bd-100 must not rewrite the longer IDs bd-1000 and bd-10000
// that merely share it as a prefix.
func TestImportPartialIDMatch(t *testing.T) {
	dir, err := os.MkdirTemp("", "bd-collision-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	defer func() {
		if rmErr := os.RemoveAll(dir); rmErr != nil {
			t.Logf("Warning: cleanup failed: %v", rmErr)
		}
	}()
	store := newTestStoreWithPrefix(t, filepath.Join(dir, "test.db"), "bd")
	ctx := context.Background()
	now := time.Now()
	// bd-50 references three IDs that share a common prefix; the others exist as issues.
	seed := []*types.Issue{
		{
			ID:          "bd-50",
			Title:       "Issue 50",
			Description: "References bd-100 and bd-1000 and bd-10000",
			Status:      types.StatusOpen,
			Priority:    1,
			IssueType:   types.TypeTask,
			CreatedAt:   now,
			UpdatedAt:   now,
		},
	}
	for _, n := range []int{100, 1000, 10000} {
		seed = append(seed, &types.Issue{
			ID:        fmt.Sprintf("bd-%d", n),
			Title:     fmt.Sprintf("Issue %d", n),
			Status:    types.StatusOpen,
			Priority:  1,
			IssueType: types.TypeTask,
			CreatedAt: now,
			UpdatedAt: now,
		})
	}
	for _, is := range seed {
		if err := store.CreateIssue(ctx, is, "test"); err != nil {
			t.Fatalf("Failed to create %s: %v", is.ID, err)
		}
	}
	// Only bd-100 collides with the import.
	incoming := []*types.Issue{
		{
			ID:        "bd-100",
			Title:     "Modified Issue 100",
			Status:    types.StatusInProgress,
			Priority:  2,
			IssueType: types.TypeBug,
		},
	}
	result, err := sqlite.DetectCollisions(ctx, store, incoming)
	if err != nil {
		t.Fatalf("DetectCollisions failed: %v", err)
	}
	// Score and remap the single collision.
	allExisting, _ := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err := sqlite.ScoreCollisions(ctx, store, result.Collisions, allExisting); err != nil {
		t.Fatalf("ScoreCollisions failed: %v", err)
	}
	idMapping, err := sqlite.RemapCollisions(ctx, store, result.Collisions, allExisting)
	if err != nil {
		t.Fatalf("RemapCollisions failed: %v", err)
	}
	newID100 := idMapping["bd-100"]
	// Word-boundary matching: bd-100 is replaced; bd-1000/bd-10000 stay intact.
	updated, err := store.GetIssue(ctx, "bd-50")
	if err != nil {
		t.Fatalf("Failed to get updated issue: %v", err)
	}
	if !strings.Contains(updated.Description, newID100) {
		t.Errorf("bd-100 not replaced: %s", updated.Description)
	}
	if !strings.Contains(updated.Description, "bd-1000") {
		t.Errorf("bd-1000 incorrectly replaced: %s", updated.Description)
	}
	if !strings.Contains(updated.Description, "bd-10000") {
		t.Errorf("bd-10000 incorrectly replaced: %s", updated.Description)
	}
	// The bare bd-100 token must no longer appear.
	if strings.Contains(updated.Description, " bd-100 ") || strings.Contains(updated.Description, " bd-100,") {
		t.Errorf("Old bd-100 reference still present: %s", updated.Description)
	}
}
// TestImportExactMatch tests idempotent import (no collision): importing an
// issue whose content is identical to the stored one is an exact match.
func TestImportExactMatch(t *testing.T) {
	dir, err := os.MkdirTemp("", "bd-collision-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	defer func() {
		if rmErr := os.RemoveAll(dir); rmErr != nil {
			t.Logf("Warning: cleanup failed: %v", rmErr)
		}
	}()
	store := newTestStoreWithPrefix(t, filepath.Join(dir, "test.db"), "bd")
	ctx := context.Background()
	now := time.Now()
	// Seed one issue, then present an identical copy as the import.
	existing := &types.Issue{
		ID:          "bd-10",
		Title:       "Test issue",
		Description: "Description",
		Status:      types.StatusOpen,
		Priority:    1,
		IssueType:   types.TypeTask,
		CreatedAt:   now,
		UpdatedAt:   now,
	}
	if err := store.CreateIssue(ctx, existing, "test"); err != nil {
		t.Fatalf("Failed to create issue: %v", err)
	}
	duplicate := &types.Issue{
		ID:          "bd-10",
		Title:       "Test issue",
		Description: "Description",
		Status:      types.StatusOpen,
		Priority:    1,
		IssueType:   types.TypeTask,
	}
	result, err := sqlite.DetectCollisions(ctx, store, []*types.Issue{duplicate})
	if err != nil {
		t.Fatalf("DetectCollisions failed: %v", err)
	}
	// Identical content must be classified as an exact match, not a collision.
	if len(result.Collisions) != 0 {
		t.Errorf("Expected 0 collisions for exact match, got %d", len(result.Collisions))
	}
	if len(result.ExactMatches) != 1 {
		t.Errorf("Expected 1 exact match, got %d", len(result.ExactMatches))
	}
}
// TestImportMixedScenario tests import with exact matches, collisions, and new
// issues arriving in a single batch; DetectCollisions must bucket one of each.
func TestImportMixedScenario(t *testing.T) {
	dir, err := os.MkdirTemp("", "bd-collision-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	defer func() {
		if rmErr := os.RemoveAll(dir); rmErr != nil {
			t.Logf("Warning: cleanup failed: %v", rmErr)
		}
	}()
	store := newTestStoreWithPrefix(t, filepath.Join(dir, "test.db"), "bd")
	ctx := context.Background()
	// Seed bd-200 and bd-201; high IDs avoid the auto-generated sequence.
	for i := 200; i <= 201; i++ {
		seed := &types.Issue{
			ID:          fmt.Sprintf("bd-%d", i),
			Title:       fmt.Sprintf("Issue %d", i),
			Description: "Original",
			Status:      types.StatusOpen,
			Priority:    1,
			IssueType:   types.TypeTask,
			CreatedAt:   time.Now(),
			UpdatedAt:   time.Now(),
		}
		if err := store.CreateIssue(ctx, seed, "test"); err != nil {
			t.Fatalf("Failed to create issue %d: %v", i, err)
		}
	}
	// bd-200 matches its stored copy, bd-201 differs, bd-202 is brand new.
	exact := &types.Issue{
		ID:          "bd-200",
		Title:       "Issue 200",
		Description: "Original",
		Status:      types.StatusOpen,
		Priority:    1,
		IssueType:   types.TypeTask,
	}
	collided := &types.Issue{
		ID:          "bd-201",
		Title:       "Modified Issue 201",
		Description: "Changed",
		Status:      types.StatusInProgress,
		Priority:    2,
		IssueType:   types.TypeBug,
	}
	fresh := &types.Issue{
		ID:          "bd-202",
		Title:       "New Issue",
		Description: "Brand new",
		Status:      types.StatusOpen,
		Priority:    1,
		IssueType:   types.TypeFeature,
	}
	result, err := sqlite.DetectCollisions(ctx, store, []*types.Issue{exact, collided, fresh})
	if err != nil {
		t.Fatalf("DetectCollisions failed: %v", err)
	}
	if len(result.ExactMatches) != 1 {
		t.Errorf("Expected 1 exact match, got %d", len(result.ExactMatches))
	}
	if len(result.Collisions) != 1 {
		t.Errorf("Expected 1 collision, got %d", len(result.Collisions))
	}
	if len(result.NewIssues) != 1 {
		t.Errorf("Expected 1 new issue, got %d", len(result.NewIssues))
	}
}
// TestImportWithDependenciesInJSONL tests importing issues with embedded dependencies.
//
// It parses a two-line JSONL document in which the second issue (bd-11) embeds
// a "blocks" dependency on the first (bd-10), creates both issues, replays the
// embedded dependency records, and verifies the dependency was persisted.
func TestImportWithDependenciesInJSONL(t *testing.T) {
	tmpDir, err := os.MkdirTemp("", "bd-collision-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	defer func() {
		if err := os.RemoveAll(tmpDir); err != nil {
			t.Logf("Warning: cleanup failed: %v", err)
		}
	}()
	dbPath := filepath.Join(tmpDir, "test.db")
	testStore := newTestStoreWithPrefix(t, dbPath, "bd")
	ctx := context.Background()
	// Two issues; bd-11 carries an inline dependency record pointing at bd-10.
	jsonl := `{"id":"bd-10","title":"Issue 1","status":"open","priority":1,"issue_type":"task"}
{"id":"bd-11","title":"Issue 2","status":"open","priority":1,"issue_type":"task","dependencies":[{"issue_id":"bd-11","depends_on_id":"bd-10","type":"blocks"}]}`
	// Parse each JSONL line into an Issue.
	var issues []*types.Issue
	for _, line := range strings.Split(strings.TrimSpace(jsonl), "\n") {
		var issue types.Issue
		if err := json.Unmarshal([]byte(line), &issue); err != nil {
			t.Fatalf("Failed to parse JSONL: %v", err)
		}
		issues = append(issues, &issue)
	}
	// Create the issues first so the dependency targets exist.
	for _, issue := range issues {
		if err := testStore.CreateIssue(ctx, issue, "test"); err != nil {
			t.Fatalf("Failed to create issue: %v", err)
		}
	}
	// Replay the dependency records embedded in the JSONL.
	for _, issue := range issues {
		for _, dep := range issue.Dependencies {
			if err := testStore.AddDependency(ctx, dep, "test"); err != nil {
				t.Fatalf("Failed to add dependency: %v", err)
			}
		}
	}
	// bd-11 must now have exactly one dependency, on bd-10.
	deps, err := testStore.GetDependencyRecords(ctx, "bd-11")
	if err != nil {
		t.Fatalf("Failed to get dependencies: %v", err)
	}
	if len(deps) != 1 {
		t.Fatalf("Expected 1 dependency, got %d", len(deps))
	}
	if deps[0].DependsOnID != "bd-10" {
		// Fixed: the message previously said "want bd-1" while the expected ID is bd-10.
		t.Errorf("Dependency target = %s, want bd-10", deps[0].DependsOnID)
	}
}
// TestImportCounterSyncAfterHighID verifies that after importing an issue with
// an explicit high numeric ID (bd-100), the next auto-generated issue continues
// above it (bd-101) rather than resuming the low auto sequence.
func TestImportCounterSyncAfterHighID(t *testing.T) {
	// Isolated scratch directory for the test database.
	tmpDir, err := os.MkdirTemp("", "bd-collision-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	defer func() {
		if err := os.RemoveAll(tmpDir); err != nil {
			t.Logf("Warning: cleanup failed: %v", err)
		}
	}()
	dbPath := filepath.Join(tmpDir, "test.db")
	testStore := newTestStoreWithPrefix(t, dbPath, "bd")
	ctx := context.Background()
	// NOTE(review): newTestStoreWithPrefix presumably already configures the
	// prefix — verify whether this explicit SetConfig is redundant.
	if err := testStore.SetConfig(ctx, "issue_prefix", "bd"); err != nil {
		t.Fatalf("Failed to set issue prefix: %v", err)
	}
	// Create three issues without explicit IDs so the store assigns them.
	for i := 0; i < 3; i++ {
		issue := &types.Issue{
			Title:     fmt.Sprintf("Auto issue %d", i+1),
			Status:    types.StatusOpen,
			Priority:  1,
			IssueType: types.TypeTask,
		}
		if err := testStore.CreateIssue(ctx, issue, "test"); err != nil {
			t.Fatalf("Failed to create auto issue %d: %v", i+1, err)
		}
	}
	// Simulate an import that carries a pre-assigned high ID.
	highIDIssue := &types.Issue{
		ID:        "bd-100",
		Title:     "High ID issue",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	if err := testStore.CreateIssue(ctx, highIDIssue, "import"); err != nil {
		t.Fatalf("Failed to import high ID issue: %v", err)
	}
	// REMOVED (bd-c7af): Counter sync - no longer needed with hash IDs
	// Step 5: Create another auto-generated issue
	// This should get bd-101 (counter should have synced to 100), not bd-4
	// NOTE(review): the removal note above says counter sync is obsolete under
	// hash-based IDs, yet the assertion below still expects the sequential ID
	// bd-101 — confirm this expectation is still valid after the hash-ID change.
	newIssue := &types.Issue{
		Title:     "New issue after import",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
	}
	if err := testStore.CreateIssue(ctx, newIssue, "test"); err != nil {
		t.Fatalf("Failed to create new issue: %v", err)
	}
	if newIssue.ID != "bd-101" {
		t.Errorf("Expected new issue to get ID bd-101, got %s", newIssue.ID)
	}
}

View File

@@ -1,7 +1,15 @@
# Test bd blocked command
bd init --prefix test
# Create first issue
bd create 'First issue'
bd create 'Second issue' --deps test-1
cp stdout first.txt
exec sh -c 'grep -oE "test-[a-f0-9]+" first.txt > first_id.txt'
# Create second issue that depends on first
exec sh -c 'bd create "Second issue" --deps $(cat first_id.txt)'
cp stdout second.txt
# Check for blocked issues
bd blocked
stdout 'Blocked issues'
stdout 'test-2'
stdout 'test-'

View File

@@ -1,8 +1,15 @@
# Test bd close command
bd init --prefix test
bd create 'Issue to close'
bd close test-1 --reason 'Fixed'
stdout 'Closed test-1'
bd show test-1
# Create issue and capture its hash ID
bd create 'Issue to close'
cp stdout issue.txt
exec sh -c 'grep -oE "test-[a-f0-9]+" issue.txt > issue_id.txt'
# Close the issue
exec sh -c 'bd close $(cat issue_id.txt) --reason Fixed'
stdout 'Closed test-'
# Verify it's closed
exec sh -c 'bd show $(cat issue_id.txt)'
stdout 'closed'

View File

@@ -1,10 +1,24 @@
# Test bd dep add command
bd init --prefix test
# Create issues and capture their hash IDs from output
bd create 'First issue'
bd create 'Second issue'
bd dep add test-2 test-1
cp stdout first.txt
grep 'Created issue: test-' first.txt
bd create 'Second issue'
cp stdout second.txt
grep 'Created issue: test-' second.txt
# Extract IDs using grep (hash IDs are test-XXXXXXXX format)
exec sh -c 'grep -oE "test-[a-f0-9]+" first.txt > first_id.txt'
exec sh -c 'grep -oE "test-[a-f0-9]+" second.txt > second_id.txt'
# Add dependency: second depends on first
exec sh -c 'bd dep add $(cat second_id.txt) $(cat first_id.txt)'
stdout 'Added dependency'
bd show test-2
# Verify the dependency was added
exec sh -c 'bd show $(cat second_id.txt)'
stdout 'Depends on'
stdout 'test-1'
stdout 'test-'

View File

@@ -1,10 +1,22 @@
# Test bd dep remove command
bd init --prefix test
# Create issues and capture their hash IDs
bd create 'First issue'
cp stdout first.txt
exec sh -c 'grep -oE "test-[a-f0-9]+" first.txt > first_id.txt'
bd create 'Second issue'
bd dep add test-2 test-1
bd dep remove test-2 test-1
cp stdout second.txt
exec sh -c 'grep -oE "test-[a-f0-9]+" second.txt > second_id.txt'
# Add dependency
exec sh -c 'bd dep add $(cat second_id.txt) $(cat first_id.txt)'
stdout 'Added dependency'
# Remove dependency
exec sh -c 'bd dep remove $(cat second_id.txt) $(cat first_id.txt)'
stdout 'Removed dependency'
bd show test-2
! stdout 'test-1'
# Verify dependency is gone
exec sh -c 'bd show $(cat second_id.txt) | grep -v "Depends on"'

View File

@@ -1,7 +1,18 @@
# Test bd dep tree command
bd init --prefix test
# Create issues and capture their hash IDs
bd create 'Root issue'
cp stdout root.txt
exec sh -c 'grep -oE "test-[a-f0-9]+" root.txt > root_id.txt'
bd create 'Child issue'
bd dep add test-2 test-1
bd dep tree test-1
stdout 'test-1'
cp stdout child.txt
exec sh -c 'grep -oE "test-[a-f0-9]+" child.txt > child_id.txt'
# Add dependency: child depends on root
exec sh -c 'bd dep add $(cat child_id.txt) $(cat root_id.txt)'
# Show dependency tree for root
exec sh -c 'bd dep tree $(cat root_id.txt)'
stdout 'test-'

View File

@@ -1,8 +1,16 @@
# Test bd show command
bd init --prefix test
bd create 'Test issue for show'
bd show test-1
# Create issue and capture its hash ID
bd create 'Test issue for show'
cp stdout issue.txt
grep 'Created issue: test-' issue.txt
# Extract ID using grep
exec sh -c 'grep -oE "test-[a-f0-9]+" issue.txt > issue_id.txt'
# Show the issue
exec sh -c 'bd show $(cat issue_id.txt)'
stdout 'Test issue for show'
stdout 'Status:'
stdout 'Priority:'

View File

@@ -1,8 +1,17 @@
# Test bd stats command
bd init --prefix test
# Create issues
bd create 'First issue'
cp stdout first.txt
exec sh -c 'grep -oE "test-[a-f0-9]+" first.txt > first_id.txt'
bd create 'Second issue'
bd close test-1
# Close one issue
exec sh -c 'bd close $(cat first_id.txt)'
# Check stats
bd stats
stdout 'Total Issues:'
stdout 'Open:'

View File

@@ -1,8 +1,18 @@
# Test bd update command
bd init --prefix test
# Create issue and capture its hash ID
bd create 'Issue to update'
bd update test-1 --status in_progress
cp stdout issue.txt
grep 'Created issue: test-' issue.txt
# Extract ID using grep
exec sh -c 'grep -oE "test-[a-f0-9]+" issue.txt > issue_id.txt'
# Update the issue status
exec sh -c 'bd update $(cat issue_id.txt) --status in_progress'
stdout 'Updated issue:'
bd show test-1
# Verify the update
exec sh -c 'bd show $(cat issue_id.txt)'
stdout 'in_progress'