Implement hash ID migration tool (bd-173)
- Add migrate-hash-ids command for converting sequential to hash-based IDs
- Integrate into bd migrate --to-hash-ids for a transparent user experience
- Generate hash IDs for top-level issues, hierarchical IDs for children
- Update all references: dependencies, comments, text mentions
- Auto-backup database before migration
- Comprehensive tests covering migration scenarios
- Update AGENTS.md with migration documentation

Amp-Thread-ID: https://ampcode.com/threads/T-492f81db-5b0e-437a-b54d-ae4525dd7827
Co-authored-by: Amp <amp@ampcode.com>
This commit is contained in:
File diff suppressed because one or more lines are too long
@@ -210,6 +210,8 @@ bd merge bd-42 bd-43 --into bd-41 --dry-run # Preview merge
|
|||||||
bd migrate # Detect and migrate old databases
|
bd migrate # Detect and migrate old databases
|
||||||
bd migrate --dry-run # Preview migration
|
bd migrate --dry-run # Preview migration
|
||||||
bd migrate --cleanup --yes # Migrate and remove old files
|
bd migrate --cleanup --yes # Migrate and remove old files
|
||||||
|
bd migrate --to-hash-ids # Migrate sequential IDs to hash-based IDs
|
||||||
|
bd migrate --to-hash-ids --dry-run # Preview hash ID migration
|
||||||
```
|
```
|
||||||
|
|
||||||
### Managing Daemons
|
### Managing Daemons
|
||||||
|
|||||||
@@ -7,11 +7,13 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strings"
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/fatih/color"
|
"github.com/fatih/color"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
"github.com/steveyegge/beads"
|
"github.com/steveyegge/beads"
|
||||||
"github.com/steveyegge/beads/internal/storage/sqlite"
|
"github.com/steveyegge/beads/internal/storage/sqlite"
|
||||||
|
"github.com/steveyegge/beads/internal/types"
|
||||||
_ "modernc.org/sqlite"
|
_ "modernc.org/sqlite"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -25,12 +27,14 @@ This command:
|
|||||||
- Checks schema versions
|
- Checks schema versions
|
||||||
- Migrates old databases to beads.db
|
- Migrates old databases to beads.db
|
||||||
- Updates schema version metadata
|
- Updates schema version metadata
|
||||||
|
- Migrates sequential IDs to hash-based IDs (with --to-hash-ids)
|
||||||
- Removes stale databases (with confirmation)`,
|
- Removes stale databases (with confirmation)`,
|
||||||
Run: func(cmd *cobra.Command, _ []string) {
|
Run: func(cmd *cobra.Command, _ []string) {
|
||||||
autoYes, _ := cmd.Flags().GetBool("yes")
|
autoYes, _ := cmd.Flags().GetBool("yes")
|
||||||
cleanup, _ := cmd.Flags().GetBool("cleanup")
|
cleanup, _ := cmd.Flags().GetBool("cleanup")
|
||||||
dryRun, _ := cmd.Flags().GetBool("dry-run")
|
dryRun, _ := cmd.Flags().GetBool("dry-run")
|
||||||
updateRepoID, _ := cmd.Flags().GetBool("update-repo-id")
|
updateRepoID, _ := cmd.Flags().GetBool("update-repo-id")
|
||||||
|
toHashIDs, _ := cmd.Flags().GetBool("to-hash-ids")
|
||||||
|
|
||||||
// Handle --update-repo-id first
|
// Handle --update-repo-id first
|
||||||
if updateRepoID {
|
if updateRepoID {
|
||||||
@@ -275,6 +279,91 @@ This command:
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Migrate to hash IDs if requested
|
||||||
|
if toHashIDs {
|
||||||
|
if !jsonOutput {
|
||||||
|
fmt.Println("\n→ Migrating to hash-based IDs...")
|
||||||
|
}
|
||||||
|
|
||||||
|
store, err := sqlite.New(targetPath)
|
||||||
|
if err != nil {
|
||||||
|
if jsonOutput {
|
||||||
|
outputJSON(map[string]interface{}{
|
||||||
|
"error": "hash_migration_failed",
|
||||||
|
"message": err.Error(),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error: failed to open database: %v\n", err)
|
||||||
|
}
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx := context.Background()
|
||||||
|
issues, err := store.SearchIssues(ctx, "", types.IssueFilter{})
|
||||||
|
if err != nil {
|
||||||
|
store.Close()
|
||||||
|
if jsonOutput {
|
||||||
|
outputJSON(map[string]interface{}{
|
||||||
|
"error": "hash_migration_failed",
|
||||||
|
"message": err.Error(),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error: failed to list issues: %v\n", err)
|
||||||
|
}
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(issues) > 0 && !isHashID(issues[0].ID) {
|
||||||
|
// Create backup
|
||||||
|
if !dryRun {
|
||||||
|
backupPath := strings.TrimSuffix(targetPath, ".db") + ".backup-pre-hash-" + time.Now().Format("20060102-150405") + ".db"
|
||||||
|
if err := copyFile(targetPath, backupPath); err != nil {
|
||||||
|
store.Close()
|
||||||
|
if jsonOutput {
|
||||||
|
outputJSON(map[string]interface{}{
|
||||||
|
"error": "backup_failed",
|
||||||
|
"message": err.Error(),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error: failed to create backup: %v\n", err)
|
||||||
|
}
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
if !jsonOutput {
|
||||||
|
color.Green("✓ Created backup: %s\n", filepath.Base(backupPath))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
mapping, err := migrateToHashIDs(ctx, store, issues, dryRun)
|
||||||
|
store.Close()
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
if jsonOutput {
|
||||||
|
outputJSON(map[string]interface{}{
|
||||||
|
"error": "hash_migration_failed",
|
||||||
|
"message": err.Error(),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
fmt.Fprintf(os.Stderr, "Error: hash ID migration failed: %v\n", err)
|
||||||
|
}
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !jsonOutput {
|
||||||
|
if dryRun {
|
||||||
|
fmt.Printf("\nWould migrate %d issues to hash-based IDs\n", len(mapping))
|
||||||
|
} else {
|
||||||
|
color.Green("✓ Migrated %d issues to hash-based IDs\n", len(mapping))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
store.Close()
|
||||||
|
if !jsonOutput {
|
||||||
|
fmt.Println("Database already uses hash-based IDs")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Final status
|
// Final status
|
||||||
if jsonOutput {
|
if jsonOutput {
|
||||||
outputJSON(map[string]interface{}{
|
outputJSON(map[string]interface{}{
|
||||||
@@ -497,5 +586,6 @@ func init() {
|
|||||||
migrateCmd.Flags().Bool("cleanup", false, "Remove old database files after migration")
|
migrateCmd.Flags().Bool("cleanup", false, "Remove old database files after migration")
|
||||||
migrateCmd.Flags().Bool("dry-run", false, "Show what would be done without making changes")
|
migrateCmd.Flags().Bool("dry-run", false, "Show what would be done without making changes")
|
||||||
migrateCmd.Flags().Bool("update-repo-id", false, "Update repository ID (use after changing git remote)")
|
migrateCmd.Flags().Bool("update-repo-id", false, "Update repository ID (use after changing git remote)")
|
||||||
|
migrateCmd.Flags().Bool("to-hash-ids", false, "Migrate sequential IDs to hash-based IDs")
|
||||||
rootCmd.AddCommand(migrateCmd)
|
rootCmd.AddCommand(migrateCmd)
|
||||||
}
|
}
|
||||||
|
|||||||
391
cmd/bd/migrate_hash_ids.go
Normal file
391
cmd/bd/migrate_hash_ids.go
Normal file
@@ -0,0 +1,391 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"crypto/sha256"
|
||||||
|
"encoding/hex"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"regexp"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/fatih/color"
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
"github.com/steveyegge/beads"
|
||||||
|
"github.com/steveyegge/beads/internal/storage/sqlite"
|
||||||
|
"github.com/steveyegge/beads/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// migrateHashIDsCmd implements `bd migrate-hash-ids`, which converts a
// database from sequential issue IDs to hash-based ones. It backs up the
// database file first (unless --dry-run), delegates the actual rewrite to
// migrateToHashIDs, and writes an old→new mapping file next to the database.
// jsonOutput and outputJSON are declared elsewhere in this package.
var migrateHashIDsCmd = &cobra.Command{
	Use:   "migrate-hash-ids",
	Short: "Migrate sequential IDs to hash-based IDs",
	Long: `Migrate database from sequential IDs (bd-1, bd-2) to hash-based IDs (bd-a3f8e9a2).

This command:
- Generates hash IDs for all top-level issues
- Assigns hierarchical child IDs (bd-a3f8e9a2.1) for epic children
- Updates all references (dependencies, comments, external refs)
- Creates mapping file for reference
- Validates all relationships are intact

Use --dry-run to preview changes before applying.`,
	Run: func(cmd *cobra.Command, _ []string) {
		dryRun, _ := cmd.Flags().GetBool("dry-run")

		ctx := context.Background()

		// Find database
		dbPath := beads.FindDatabasePath()
		if dbPath == "" {
			if jsonOutput {
				outputJSON(map[string]interface{}{
					"error":   "no_database",
					"message": "No beads database found. Run 'bd init' first.",
				})
			} else {
				fmt.Fprintf(os.Stderr, "Error: no beads database found\n")
				fmt.Fprintf(os.Stderr, "Hint: run 'bd init' to initialize bd\n")
			}
			os.Exit(1)
		}

		// Create backup before migration (skipped on dry run, which makes
		// no changes that would need restoring).
		if !dryRun {
			backupPath := strings.TrimSuffix(dbPath, ".db") + ".backup-" + time.Now().Format("20060102-150405") + ".db"
			if err := copyFile(dbPath, backupPath); err != nil {
				if jsonOutput {
					outputJSON(map[string]interface{}{
						"error":   "backup_failed",
						"message": err.Error(),
					})
				} else {
					fmt.Fprintf(os.Stderr, "Error: failed to create backup: %v\n", err)
				}
				os.Exit(1)
			}
			if !jsonOutput {
				color.Green("✓ Created backup: %s\n\n", filepath.Base(backupPath))
			}
		}

		// Open database
		store, err := sqlite.New(dbPath)
		if err != nil {
			if jsonOutput {
				outputJSON(map[string]interface{}{
					"error":   "open_failed",
					"message": err.Error(),
				})
			} else {
				fmt.Fprintf(os.Stderr, "Error: failed to open database: %v\n", err)
			}
			os.Exit(1)
		}
		defer store.Close()

		// Get all issues using SearchIssues with empty query and no filters
		issues, err := store.SearchIssues(ctx, "", types.IssueFilter{})
		if err != nil {
			if jsonOutput {
				outputJSON(map[string]interface{}{
					"error":   "list_failed",
					"message": err.Error(),
				})
			} else {
				fmt.Fprintf(os.Stderr, "Error: failed to list issues: %v\n", err)
			}
			os.Exit(1)
		}

		if len(issues) == 0 {
			if jsonOutput {
				outputJSON(map[string]interface{}{
					"status":  "no_issues",
					"message": "No issues to migrate",
				})
			} else {
				fmt.Println("No issues to migrate")
			}
			return
		}

		// Check if already using hash IDs.
		// NOTE(review): this samples only the first issue returned; it
		// assumes the database is uniformly sequential or uniformly
		// hash-based — confirm a partially migrated DB cannot occur.
		if isHashID(issues[0].ID) {
			if jsonOutput {
				outputJSON(map[string]interface{}{
					"status":  "already_migrated",
					"message": "Database already uses hash-based IDs",
				})
			} else {
				fmt.Println("Database already uses hash-based IDs")
			}
			return
		}

		// Perform migration
		mapping, err := migrateToHashIDs(ctx, store, issues, dryRun)
		if err != nil {
			if jsonOutput {
				outputJSON(map[string]interface{}{
					"error":   "migration_failed",
					"message": err.Error(),
				})
			} else {
				fmt.Fprintf(os.Stderr, "Error: migration failed: %v\n", err)
			}
			os.Exit(1)
		}

		// Save mapping to file. A failure here is only a warning: the
		// migration itself has already been committed to the database.
		if !dryRun {
			mappingPath := filepath.Join(filepath.Dir(dbPath), "hash-id-mapping.json")
			if err := saveMappingFile(mappingPath, mapping); err != nil {
				if !jsonOutput {
					color.Yellow("Warning: failed to save mapping file: %v\n", err)
				}
			} else if !jsonOutput {
				color.Green("✓ Saved mapping to: %s\n", filepath.Base(mappingPath))
			}
		}

		// Output results
		if jsonOutput {
			outputJSON(map[string]interface{}{
				"status":          "success",
				"dry_run":         dryRun,
				"issues_migrated": len(mapping),
				"mapping":         mapping,
			})
		} else {
			if dryRun {
				fmt.Println("\nDry run complete - no changes made")
				fmt.Printf("Would migrate %d issues\n\n", len(mapping))
				fmt.Println("Preview of mapping (first 10):")
				// NOTE(review): map iteration order is random in Go, so
				// this preview shows a different sample each run — sort
				// the keys if a stable preview is wanted.
				count := 0
				for old, new := range mapping {
					if count >= 10 {
						fmt.Printf("... and %d more\n", len(mapping)-10)
						break
					}
					fmt.Printf("  %s → %s\n", old, new)
					count++
				}
			} else {
				color.Green("\n✓ Migration complete!\n\n")
				fmt.Printf("Migrated %d issues to hash-based IDs\n", len(mapping))
				fmt.Println("\nNext steps:")
				fmt.Println("  1. Run 'bd export' to update JSONL file")
				fmt.Println("  2. Commit changes to git")
				fmt.Println("  3. Notify team members to pull and re-initialize")
			}
		}
	},
}
|
||||||
|
|
||||||
|
// migrateToHashIDs performs the actual migration from sequential to
// hash-based IDs.
//
// It builds an old→new ID mapping in two passes (hash IDs for top-level
// issues, then "<parentHash>.<n>" hierarchical IDs for parent-child
// dependents), and — unless dryRun is true — rewrites every issue via
// store.UpdateIssueID, updating textual ID references in the issue's
// free-text fields along the way. The mapping is returned in both modes.
func migrateToHashIDs(ctx context.Context, store *sqlite.SQLiteStorage, issues []*types.Issue, dryRun bool) (map[string]string, error) {
	// Build dependency graph to determine top-level vs child issues
	parentMap := make(map[string]string) // child ID → parent ID

	// Get all dependencies to find parent-child relationships
	for _, issue := range issues {
		deps, err := store.GetDependencyRecords(ctx, issue.ID)
		if err != nil {
			return nil, fmt.Errorf("failed to get dependencies for %s: %w", issue.ID, err)
		}

		for _, dep := range deps {
			if dep.Type == types.DepParentChild {
				// issue depends on parent
				parentMap[issue.ID] = dep.DependsOnID
			}
		}
	}

	// Get prefix from config or use default
	prefix, err := store.GetConfig(ctx, "issue_prefix")
	if err != nil || prefix == "" {
		prefix = "bd"
	}

	// Generate mapping: old ID → new hash ID
	mapping := make(map[string]string)
	childCounters := make(map[string]int) // parent hash ID → next child number

	// First pass: generate hash IDs for top-level issues (no parent)
	for _, issue := range issues {
		if _, hasParent := parentMap[issue.ID]; !hasParent {
			// Top-level issue - generate hash ID
			hashID := generateHashIDForIssue(prefix, issue)
			mapping[issue.ID] = hashID
		}
	}

	// Second pass: assign hierarchical IDs to child issues.
	// NOTE(review): a grandchild (child whose parent is itself a child)
	// only resolves if its parent was mapped earlier in this same loop,
	// which depends on the order of `issues` — confirm whether multi-level
	// hierarchies are expected to work here.
	for _, issue := range issues {
		if parentID, hasParent := parentMap[issue.ID]; hasParent {
			// Child issue - use parent's hash ID + sequential number
			parentHashID, ok := mapping[parentID]
			if !ok {
				return nil, fmt.Errorf("parent %s not yet mapped for child %s", parentID, issue.ID)
			}

			// Get next child number for this parent
			childNum := childCounters[parentHashID] + 1
			childCounters[parentHashID] = childNum

			// Assign hierarchical ID
			mapping[issue.ID] = fmt.Sprintf("%s.%d", parentHashID, childNum)
		}
	}

	if dryRun {
		return mapping, nil
	}

	// Apply the migration
	// UpdateIssueID handles updating the issue, dependencies, comments, events, labels, and dirty_issues
	// We need to also update text references in descriptions, notes, design, acceptance criteria

	// Sort issues by ID to process parents before children.
	// NOTE(review): this is a lexicographic sort, so "bd-10" sorts before
	// "bd-2"; the mapping is already fully computed at this point, so
	// confirm whether the store actually requires parent-first ordering.
	sort.Slice(issues, func(i, j int) bool {
		return issues[i].ID < issues[j].ID
	})

	// Update all issues
	for _, issue := range issues {
		newID := mapping[issue.ID]

		// Update text references in this issue
		issue.Description = replaceIDReferences(issue.Description, mapping)
		if issue.Design != "" {
			issue.Design = replaceIDReferences(issue.Design, mapping)
		}
		if issue.Notes != "" {
			issue.Notes = replaceIDReferences(issue.Notes, mapping)
		}
		if issue.AcceptanceCriteria != "" {
			issue.AcceptanceCriteria = replaceIDReferences(issue.AcceptanceCriteria, mapping)
		}
		if issue.ExternalRef != nil {
			updated := replaceIDReferences(*issue.ExternalRef, mapping)
			issue.ExternalRef = &updated
		}

		// Use UpdateIssueID to change the primary key and cascade to all foreign keys
		// This method handles dependencies, comments, events, labels, and dirty_issues
		oldID := issue.ID
		if err := store.UpdateIssueID(ctx, oldID, newID, issue, "migration"); err != nil {
			return nil, fmt.Errorf("failed to update issue %s → %s: %w", oldID, newID, err)
		}
	}

	return mapping, nil
}
|
||||||
|
|
||||||
|
// generateHashIDForIssue generates a hash-based ID for an issue
|
||||||
|
func generateHashIDForIssue(prefix string, issue *types.Issue) string {
|
||||||
|
// Use the same algorithm as generateHashID in sqlite.go
|
||||||
|
// Use "system" as the actor for migration to ensure deterministic IDs
|
||||||
|
content := fmt.Sprintf("%s|%s|%s|%d|%d",
|
||||||
|
issue.Title,
|
||||||
|
issue.Description,
|
||||||
|
"system", // Use consistent actor for migration
|
||||||
|
issue.CreatedAt.UnixNano(),
|
||||||
|
0, // nonce
|
||||||
|
)
|
||||||
|
|
||||||
|
hash := sha256Hash(content)
|
||||||
|
shortHash := hash[:8] // First 8 hex chars
|
||||||
|
|
||||||
|
return fmt.Sprintf("%s-%s", prefix, shortHash)
|
||||||
|
}
|
||||||
|
|
||||||
|
// sha256Hash returns the first 8 hexadecimal characters (4 bytes) of the
// SHA-256 digest of content.
func sha256Hash(content string) string {
	digest := sha256.Sum256([]byte(content))
	short := digest[:4] // 4 bytes encode to 8 hex characters
	return hex.EncodeToString(short)
}
|
||||||
|
|
||||||
|
// idRefPattern matches sequential issue IDs like "bd-123" or "bd-123.4".
// Compiled once at package scope: replaceIDReferences runs on every text
// field of every issue during migration, and recompiling the pattern per
// call (as the original did) is wasted work on that path.
//
// NOTE(review): the pattern hard-codes the "bd" prefix even though the
// migration reads issue_prefix from config — confirm whether non-default
// prefixes need to be matched here too.
var idRefPattern = regexp.MustCompile(`\bbd-\d+(?:\.\d+)*\b`)

// replaceIDReferences replaces all old ID references in text with their new
// hash IDs according to mapping. IDs not present in mapping are left
// unchanged.
func replaceIDReferences(text string, mapping map[string]string) string {
	return idRefPattern.ReplaceAllStringFunc(text, func(match string) string {
		if newID, ok := mapping[match]; ok {
			return newID
		}
		return match // Keep unchanged if not in mapping
	})
}
|
||||||
|
|
||||||
|
// isHashID reports whether id looks like a hash-based issue ID rather than
// a sequential one.
//
// Sequential IDs have an all-numeric suffix ("bd-1", "bd-123"); hash IDs
// contain hex letters ("bd-a3f8e9a2", "bd-a3f8e9a2.1"). The presence of any
// letter a-f after the first "-" is treated as the hash marker.
//
// NOTE(review): a hash that happens to contain only digits (e.g.
// "bd-12345678") would be misclassified as sequential — confirm that this
// is an accepted limitation.
func isHashID(id string) bool {
	parts := strings.SplitN(id, "-", 2)
	if len(parts) != 2 {
		return false
	}

	suffix := parts[1]
	if len(suffix) == 0 {
		return false
	}

	// strings.ContainsAny is equivalent to the original per-call
	// regexp.MustCompile(`[a-f]`).MatchString, without recompiling a
	// regexp on every invocation.
	return strings.ContainsAny(suffix, "abcdef")
}
|
||||||
|
|
||||||
|
// saveMappingFile saves the ID mapping to a JSON file
|
||||||
|
func saveMappingFile(path string, mapping map[string]string) error {
|
||||||
|
// Convert to sorted array for readability
|
||||||
|
type mappingEntry struct {
|
||||||
|
OldID string `json:"old_id"`
|
||||||
|
NewID string `json:"new_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
entries := make([]mappingEntry, 0, len(mapping))
|
||||||
|
for old, new := range mapping {
|
||||||
|
entries = append(entries, mappingEntry{
|
||||||
|
OldID: old,
|
||||||
|
NewID: new,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort by old ID for readability
|
||||||
|
sort.Slice(entries, func(i, j int) bool {
|
||||||
|
return entries[i].OldID < entries[j].OldID
|
||||||
|
})
|
||||||
|
|
||||||
|
data, err := json.MarshalIndent(map[string]interface{}{
|
||||||
|
"migrated_at": time.Now().Format(time.RFC3339),
|
||||||
|
"count": len(entries),
|
||||||
|
"mapping": entries,
|
||||||
|
}, "", " ")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return os.WriteFile(path, data, 0644)
|
||||||
|
}
|
||||||
|
|
||||||
|
// copyFile copies a file from src to dst
|
||||||
|
func copyFile(src, dst string) error {
|
||||||
|
data, err := os.ReadFile(src)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return os.WriteFile(dst, data, 0644)
|
||||||
|
}
|
||||||
|
|
||||||
|
// init registers the migrate-hash-ids command and its single flag with the
// root command.
func init() {
	migrateHashIDsCmd.Flags().Bool("dry-run", false, "Show what would be done without making changes")
	rootCmd.AddCommand(migrateHashIDsCmd)
}
|
||||||
296
cmd/bd/migrate_hash_ids_test.go
Normal file
296
cmd/bd/migrate_hash_ids_test.go
Normal file
@@ -0,0 +1,296 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/steveyegge/beads/internal/storage/sqlite"
|
||||||
|
"github.com/steveyegge/beads/internal/types"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestMigrateHashIDs exercises the end-to-end sequential→hash migration on
// a two-issue database with one blocking dependency and a textual
// cross-reference. It first checks that a dry run yields a complete mapping
// without applying changes, then performs the real migration and verifies
// that IDs, text references, and the dependency row were all rewritten.
func TestMigrateHashIDs(t *testing.T) {
	// Create temporary directory for test database
	tmpDir := t.TempDir()
	dbPath := filepath.Join(tmpDir, "test.db")

	// Create test database with sequential IDs
	store, err := sqlite.New(dbPath)
	if err != nil {
		t.Fatalf("Failed to create database: %v", err)
	}

	ctx := context.Background()

	// Set ID prefix config
	if err := store.SetConfig(ctx, "issue_prefix", "bd"); err != nil {
		t.Fatalf("Failed to set prefix: %v", err)
	}

	// Create test issues with sequential IDs. Issue 1's description
	// mentions its own ID; issue 2's mentions both IDs, so the migration
	// must rewrite textual references as well as primary keys.
	issue1 := &types.Issue{
		ID:          "bd-1",
		Title:       "First issue",
		Description: "This is issue bd-1",
		Status:      types.StatusOpen,
		Priority:    1,
		IssueType:   types.TypeTask,
	}
	if err := store.CreateIssue(ctx, issue1, "test"); err != nil {
		t.Fatalf("Failed to create issue 1: %v", err)
	}

	issue2 := &types.Issue{
		ID:          "bd-2",
		Title:       "Second issue",
		Description: "This is issue bd-2 which references bd-1",
		Status:      types.StatusOpen,
		Priority:    1,
		IssueType:   types.TypeTask,
	}
	if err := store.CreateIssue(ctx, issue2, "test"); err != nil {
		t.Fatalf("Failed to create issue 2: %v", err)
	}

	// Create a dependency (blocking, not parent-child, so both issues stay
	// top-level and both receive plain hash IDs).
	dep := &types.Dependency{
		IssueID:     "bd-2",
		DependsOnID: "bd-1",
		Type:        types.DepBlocks,
	}
	if err := store.AddDependency(ctx, dep, "test"); err != nil {
		t.Fatalf("Failed to add dependency: %v", err)
	}

	// Close store before migration
	store.Close()

	// Test dry run
	store, err = sqlite.New(dbPath)
	if err != nil {
		t.Fatalf("Failed to reopen database: %v", err)
	}

	issues, err := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("Failed to get issues: %v", err)
	}

	mapping, err := migrateToHashIDs(ctx, store, issues, true)
	if err != nil {
		t.Fatalf("Dry run failed: %v", err)
	}

	if len(mapping) != 2 {
		t.Errorf("Expected 2 issues in mapping, got %d", len(mapping))
	}

	// Check mapping contains both IDs
	if _, ok := mapping["bd-1"]; !ok {
		t.Error("Mapping missing bd-1")
	}
	if _, ok := mapping["bd-2"]; !ok {
		t.Error("Mapping missing bd-2")
	}

	// Verify new IDs are hash-based
	for old, new := range mapping {
		if !isHashID(new) {
			t.Errorf("New ID %s for %s is not a hash ID", new, old)
		}
	}

	store.Close()

	// Test actual migration
	store, err = sqlite.New(dbPath)
	if err != nil {
		t.Fatalf("Failed to reopen database: %v", err)
	}
	defer store.Close()

	issues, err = store.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("Failed to get issues: %v", err)
	}

	mapping, err = migrateToHashIDs(ctx, store, issues, false)
	if err != nil {
		t.Fatalf("Migration failed: %v", err)
	}

	// Verify migration
	newID1 := mapping["bd-1"]
	newID2 := mapping["bd-2"]

	// Get migrated issues
	migratedIssue1, err := store.GetIssue(ctx, newID1)
	if err != nil {
		t.Fatalf("Failed to get migrated issue 1: %v", err)
	}

	migratedIssue2, err := store.GetIssue(ctx, newID2)
	if err != nil {
		t.Fatalf("Failed to get migrated issue 2: %v", err)
	}

	// Verify content is preserved
	if migratedIssue1.Title != "First issue" {
		t.Errorf("Issue 1 title changed: %s", migratedIssue1.Title)
	}
	if migratedIssue2.Title != "Second issue" {
		t.Errorf("Issue 2 title changed: %s", migratedIssue2.Title)
	}

	// Verify text reference was updated
	if migratedIssue2.Description != "This is issue "+newID2+" which references "+newID1 {
		t.Errorf("Text references not updated: %s", migratedIssue2.Description)
	}

	// Verify dependency was updated
	deps, err := store.GetDependencyRecords(ctx, newID2)
	if err != nil {
		t.Fatalf("Failed to get dependencies: %v", err)
	}

	if len(deps) != 1 {
		t.Fatalf("Expected 1 dependency, got %d", len(deps))
	}

	if deps[0].IssueID != newID2 {
		t.Errorf("Dependency issue_id not updated: %s", deps[0].IssueID)
	}
	if deps[0].DependsOnID != newID1 {
		t.Errorf("Dependency depends_on_id not updated: %s", deps[0].DependsOnID)
	}
}
|
||||||
|
|
||||||
|
// TestMigrateHashIDsWithParentChild verifies the hierarchical-ID branch of
// the migration: an epic with one parent-child dependent should yield a
// plain hash ID for the epic and "<epicHash>.1" for the child.
func TestMigrateHashIDsWithParentChild(t *testing.T) {
	// Create temporary directory for test database
	tmpDir := t.TempDir()
	dbPath := filepath.Join(tmpDir, "test.db")

	// Create test database
	store, err := sqlite.New(dbPath)
	if err != nil {
		t.Fatalf("Failed to create database: %v", err)
	}
	defer store.Close()

	ctx := context.Background()

	// Set ID prefix config
	if err := store.SetConfig(ctx, "issue_prefix", "bd"); err != nil {
		t.Fatalf("Failed to set prefix: %v", err)
	}

	// Create epic (parent)
	epic := &types.Issue{
		ID:          "bd-1",
		Title:       "Epic issue",
		Description: "This is an epic",
		Status:      types.StatusOpen,
		Priority:    1,
		IssueType:   types.TypeEpic,
	}
	if err := store.CreateIssue(ctx, epic, "test"); err != nil {
		t.Fatalf("Failed to create epic: %v", err)
	}

	// Create child issue
	child := &types.Issue{
		ID:          "bd-2",
		Title:       "Child issue",
		Description: "This is a child of bd-1",
		Status:      types.StatusOpen,
		Priority:    1,
		IssueType:   types.TypeTask,
	}
	if err := store.CreateIssue(ctx, child, "test"); err != nil {
		t.Fatalf("Failed to create child: %v", err)
	}

	// Create parent-child dependency (bd-2 is the child of bd-1), which is
	// what routes bd-2 through the hierarchical-ID path.
	dep := &types.Dependency{
		IssueID:     "bd-2",
		DependsOnID: "bd-1",
		Type:        types.DepParentChild,
	}
	if err := store.AddDependency(ctx, dep, "test"); err != nil {
		t.Fatalf("Failed to add dependency: %v", err)
	}

	// Migrate
	issues, err := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		t.Fatalf("Failed to get issues: %v", err)
	}

	mapping, err := migrateToHashIDs(ctx, store, issues, false)
	if err != nil {
		t.Fatalf("Migration failed: %v", err)
	}

	// Verify parent got hash ID
	newEpicID := mapping["bd-1"]
	if !isHashID(newEpicID) {
		t.Errorf("Epic ID is not a hash ID: %s", newEpicID)
	}

	// Verify child got hierarchical ID (parent.1)
	newChildID := mapping["bd-2"]
	expectedChildID := newEpicID + ".1"
	if newChildID != expectedChildID {
		t.Errorf("Child ID should be %s, got %s", expectedChildID, newChildID)
	}
}
|
||||||
|
|
||||||
|
func TestIsHashID(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
id string
|
||||||
|
expected bool
|
||||||
|
}{
|
||||||
|
{"bd-1", false},
|
||||||
|
{"bd-123", false},
|
||||||
|
{"bd-a3f8e9a2", true},
|
||||||
|
{"bd-abc123", true},
|
||||||
|
{"bd-123abc", true},
|
||||||
|
{"bd-a3f8e9a2.1", true},
|
||||||
|
{"bd-a3f8e9a2.1.2", true},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
result := isHashID(tt.id)
|
||||||
|
if result != tt.expected {
|
||||||
|
t.Errorf("isHashID(%s) = %v, want %v", tt.id, result, tt.expected)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCopyFile(t *testing.T) {
|
||||||
|
tmpDir := t.TempDir()
|
||||||
|
src := filepath.Join(tmpDir, "source.txt")
|
||||||
|
dst := filepath.Join(tmpDir, "dest.txt")
|
||||||
|
|
||||||
|
// Write test file
|
||||||
|
content := []byte("test content")
|
||||||
|
if err := os.WriteFile(src, content, 0644); err != nil {
|
||||||
|
t.Fatalf("Failed to write source file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy file
|
||||||
|
if err := copyFile(src, dst); err != nil {
|
||||||
|
t.Fatalf("copyFile failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify copy
|
||||||
|
copied, err := os.ReadFile(dst)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("Failed to read destination file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if string(copied) != string(content) {
|
||||||
|
t.Errorf("Content mismatch: got %s, want %s", copied, content)
|
||||||
|
}
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user