Fix bd-ar2 code review issues: metadata tracking and multi-repo support
This commit addresses critical code review findings from bd-dvd and bd-ymj fixes:

## Completed Tasks

### bd-ar2.1: Extract duplicated metadata update code
- Created `updateExportMetadata()` helper function
- Eliminated 22-line duplication between createExportFunc and createSyncFunc
- Single source of truth for metadata updates

### bd-ar2.2: Add multi-repo support to export metadata updates
- Added per-repo metadata key tracking with keySuffix parameter
- Both export and sync functions now update metadata for all repos

### bd-ar2.3: Fix tests to use actual daemon functions
- TestExportUpdatesMetadata now calls updateExportMetadata() directly
- Added TestUpdateExportMetadataMultiRepo() for multi-repo testing
- Fixed export_mtime_test.go tests to call updateExportMetadata()

### bd-ar2.9: Fix variable shadowing in GetNextChildID
- Changed `err` to `resurrectErr` to avoid shadowing
- Improves code clarity and passes linter checks

### bd-ar2.10: Fix hasJSONLChanged to support per-repo keys
- Updated hasJSONLChanged() to accept keySuffix parameter
- Reads metadata with correct per-repo keys
- All callers updated (validatePreExport, daemon import, sync command)

### bd-ar2.11: Use stable repo identifiers instead of paths
- Added getRepoKeyForPath() helper function
- Uses stable identifiers like ".", "../frontend" instead of absolute paths
- Metadata keys now portable across machines and clones
- Prevents orphaned metadata when repos are moved

## Files Changed
- cmd/bd/daemon_sync.go: Helper functions, metadata updates
- cmd/bd/integrity.go: hasJSONLChanged() with keySuffix support
- cmd/bd/sync.go: Updated to use getRepoKeyForPath()
- cmd/bd/*_test.go: Tests updated for new signatures
- internal/storage/sqlite/hash_ids.go: Fixed variable shadowing

## Testing
All export, sync, and integrity tests pass.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
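The hash_ids.go hunk for bd-ar2.9 is not included in the diff below, so here is a minimal, self-contained sketch of the shadowing pattern that fix addresses. The helper name, IDs, and surrounding code are illustrative stand-ins, not the actual `GetNextChildID` implementation.

```go
package main

import (
	"errors"
	"fmt"
)

// findExisting stands in for the lookup GetNextChildID performs when checking
// for a resurrected child issue; purely illustrative.
func findExisting(id string) (string, error) {
	if id == "bd-1.1" {
		return "issue bd-1.1", nil
	}
	return "", errors.New("not found")
}

func main() {
	var err error // outer error from earlier work in the function

	// Before: `:=` inside the if declares a second `err`, shadowing the outer
	// one, which linters flag and readers easily misattribute.
	if existing, err := findExisting("bd-1.1"); err == nil {
		fmt.Println("resurrected:", existing)
	}

	// After (bd-ar2.9): a distinct name keeps the two errors separate.
	existing, resurrectErr := findExisting("bd-1.1")
	if resurrectErr == nil {
		fmt.Println("resurrected:", existing)
	}

	_ = err // the outer error remains visible and unambiguous
}
```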
@@ -8,9 +8,11 @@ import (
    "os"
    "path/filepath"
    "sort"
    "strings"
    "time"

    "github.com/steveyegge/beads/internal/beads"
    "github.com/steveyegge/beads/internal/config"
    "github.com/steveyegge/beads/internal/storage"
    "github.com/steveyegge/beads/internal/storage/sqlite"
    "github.com/steveyegge/beads/internal/types"
@@ -199,6 +201,81 @@ func importToJSONLWithStore(ctx context.Context, store storage.Storage, jsonlPat
    return err
}

+// getRepoKeyForPath extracts the stable repo identifier from a JSONL path.
+// For single-repo mode, returns empty string (no suffix needed).
+// For multi-repo mode, extracts the repo path (e.g., ".", "../frontend").
+// This creates portable metadata keys that work across different machine paths.
+func getRepoKeyForPath(jsonlPath string) string {
+    multiRepo := config.GetMultiRepoConfig()
+    if multiRepo == nil {
+        return "" // Single-repo mode
+    }
+
+    // Normalize the jsonlPath for comparison
+    // Remove trailing "/.beads/issues.jsonl" to get repo path
+    const suffix = "/.beads/issues.jsonl"
+    if strings.HasSuffix(jsonlPath, suffix) {
+        repoPath := strings.TrimSuffix(jsonlPath, suffix)
+
+        // Try to match against primary repo
+        primaryPath := multiRepo.Primary
+        if primaryPath == "" {
+            primaryPath = "."
+        }
+        if repoPath == primaryPath {
+            return primaryPath
+        }
+
+        // Try to match against additional repos
+        for _, additional := range multiRepo.Additional {
+            if repoPath == additional {
+                return additional
+            }
+        }
+    }
+
+    // Fallback: return empty string for single-repo mode behavior
+    return ""
+}
+
+// updateExportMetadata updates last_import_hash and related metadata after a successful export.
+// This prevents "JSONL content has changed since last import" errors on subsequent exports (bd-ymj fix).
+// In multi-repo mode, keySuffix should be the stable repo identifier (e.g., ".", "../frontend").
+func updateExportMetadata(ctx context.Context, store storage.Storage, jsonlPath string, log daemonLogger, keySuffix string) {
+    currentHash, err := computeJSONLHash(jsonlPath)
+    if err != nil {
+        log.log("Warning: failed to compute JSONL hash for metadata update: %v", err)
+        return
+    }
+
+    // Build metadata keys with optional suffix for per-repo tracking
+    hashKey := "last_import_hash"
+    timeKey := "last_import_time"
+    mtimeKey := "last_import_mtime"
+    if keySuffix != "" {
+        hashKey += ":" + keySuffix
+        timeKey += ":" + keySuffix
+        mtimeKey += ":" + keySuffix
+    }
+
+    if err := store.SetMetadata(ctx, hashKey, currentHash); err != nil {
+        log.log("Warning: failed to update %s: %v", hashKey, err)
+    }
+
+    exportTime := time.Now().Format(time.RFC3339)
+    if err := store.SetMetadata(ctx, timeKey, exportTime); err != nil {
+        log.log("Warning: failed to update %s: %v", timeKey, err)
+    }
+
+    // Store mtime for fast-path optimization
+    if jsonlInfo, statErr := os.Stat(jsonlPath); statErr == nil {
+        mtimeStr := fmt.Sprintf("%d", jsonlInfo.ModTime().Unix())
+        if err := store.SetMetadata(ctx, mtimeKey, mtimeStr); err != nil {
+            log.log("Warning: failed to update %s: %v", mtimeKey, err)
+        }
+    }
+}

// validateDatabaseFingerprint checks that the database belongs to this repository
func validateDatabaseFingerprint(ctx context.Context, store storage.Storage, log *daemonLogger) error {
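For context, here is a standalone sketch of the path-to-key mapping that getRepoKeyForPath performs. The repo layout (primary "." plus "../frontend") is an assumed example, and the helper below reimplements only the suffix-trimming and matching logic against a plain slice instead of reading the real multi-repo config.

```go
package main

import (
	"fmt"
	"strings"
)

// repoKeyFor mirrors the suffix-trimming and matching above, but takes the
// configured repo paths as a parameter rather than config.GetMultiRepoConfig().
func repoKeyFor(jsonlPath string, repos []string) string {
	const suffix = "/.beads/issues.jsonl"
	if !strings.HasSuffix(jsonlPath, suffix) {
		return "" // fall back to single-repo behavior
	}
	repoPath := strings.TrimSuffix(jsonlPath, suffix)
	for _, r := range repos {
		if repoPath == r {
			return r // stable identifier such as "." or "../frontend"
		}
	}
	return ""
}

func main() {
	repos := []string{".", "../frontend"} // assumed example layout
	fmt.Println(repoKeyFor("./.beads/issues.jsonl", repos))           // "."
	fmt.Println(repoKeyFor("../frontend/.beads/issues.jsonl", repos)) // "../frontend"
	fmt.Println(repoKeyFor("/home/user/other/issues.jsonl", repos))   // "" (no match)
}
```

Because the resulting keys are relative identifiers taken from the multi-repo config rather than absolute paths, the same metadata rows stay valid when a clone is moved or checked out on another machine (bd-ar2.11).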
@@ -306,25 +383,17 @@ func createExportFunc(ctx context.Context, store storage.Storage, autoCommit, au
    }
    log.log("Exported to JSONL")

-   // Update last_import_hash metadata to prevent "content has changed" errors (bd-ymj fix)
-   // This keeps metadata in sync after export so next export doesn't fail
-   if currentHash, err := computeJSONLHash(jsonlPath); err == nil {
-       if err := store.SetMetadata(exportCtx, "last_import_hash", currentHash); err != nil {
-           log.log("Warning: failed to update last_import_hash: %v", err)
-       }
-       exportTime := time.Now().Format(time.RFC3339)
-       if err := store.SetMetadata(exportCtx, "last_import_time", exportTime); err != nil {
-           log.log("Warning: failed to update last_import_time: %v", err)
-       }
-       // Store mtime for fast-path optimization
-       if jsonlInfo, statErr := os.Stat(jsonlPath); statErr == nil {
-           mtimeStr := fmt.Sprintf("%d", jsonlInfo.ModTime().Unix())
-           if err := store.SetMetadata(exportCtx, "last_import_mtime", mtimeStr); err != nil {
-               log.log("Warning: failed to update last_import_mtime: %v", err)
-           }
-       }
+   // Update export metadata (bd-ymj fix, bd-ar2.2 multi-repo support, bd-ar2.11 stable keys)
+   multiRepoPaths := getMultiRepoJSONLPaths()
+   if multiRepoPaths != nil {
+       // Multi-repo mode: update metadata for each JSONL with stable repo key
+       for _, path := range multiRepoPaths {
+           repoKey := getRepoKeyForPath(path)
+           updateExportMetadata(exportCtx, store, path, log, repoKey)
+       }
    } else {
-       log.log("Warning: failed to compute JSONL hash for metadata update: %v", err)
+       // Single-repo mode: update metadata for main JSONL
+       updateExportMetadata(exportCtx, store, jsonlPath, log, "")
    }

    // Update database mtime to be >= JSONL mtime (fixes #278, #301, #321)
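To make the per-repo key scheme concrete, here is a small runnable sketch of the key construction updateExportMetadata performs; the "../frontend" suffix is an assumed example value, not project configuration.

```go
package main

import "fmt"

// exportMetadataKeys mirrors the key-building step in updateExportMetadata:
// an empty keySuffix keeps the original single-repo keys, while a repo key
// yields namespaced keys so repos do not overwrite each other's tracking.
func exportMetadataKeys(keySuffix string) []string {
	keys := []string{"last_import_hash", "last_import_time", "last_import_mtime"}
	if keySuffix == "" {
		return keys
	}
	suffixed := make([]string, len(keys))
	for i, k := range keys {
		suffixed[i] = k + ":" + keySuffix
	}
	return suffixed
}

func main() {
	fmt.Println(exportMetadataKeys(""))
	// [last_import_hash last_import_time last_import_mtime]
	fmt.Println(exportMetadataKeys("../frontend"))
	// [last_import_hash:../frontend last_import_time:../frontend last_import_mtime:../frontend]
}
```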
@@ -408,7 +477,9 @@ func createAutoImportFunc(ctx context.Context, store storage.Storage, log daemon

    // Check JSONL content hash to avoid redundant imports
    // Use content-based check (not mtime) to avoid git resurrection bug (bd-khnb)
-   if !hasJSONLChanged(importCtx, store, jsonlPath) {
+   // Use getRepoKeyForPath for multi-repo support (bd-ar2.10, bd-ar2.11)
+   repoKey := getRepoKeyForPath(jsonlPath)
+   if !hasJSONLChanged(importCtx, store, jsonlPath, repoKey) {
        log.log("Skipping import: JSONL content unchanged")
        return
    }
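cmd/bd/integrity.go is listed under Files Changed but not shown in this diff, so the following is only an assumed sketch of the shape the bd-ar2.10 change implies (looking up the hash under the per-repo key); the metadata getter is modeled with a local interface and toy store rather than the project's storage API, and it is not the actual implementation.

```go
package main

import (
	"context"
	"fmt"
)

// metadataReader models just the lookup needed for this sketch; the real
// function in cmd/bd/integrity.go uses the project's storage interface.
type metadataReader interface {
	GetMetadata(ctx context.Context, key string) (string, error)
}

// hasJSONLChangedSketch compares the current JSONL hash against the value
// recorded under the (optionally per-repo) last_import_hash key.
func hasJSONLChangedSketch(ctx context.Context, store metadataReader, currentHash, keySuffix string) bool {
	hashKey := "last_import_hash"
	if keySuffix != "" {
		hashKey += ":" + keySuffix // same per-repo key shape as updateExportMetadata
	}
	lastHash, err := store.GetMetadata(ctx, hashKey)
	if err != nil || lastHash == "" {
		return true // nothing recorded yet: treat as changed so the import runs
	}
	return currentHash != lastHash
}

// mapStore is a toy in-memory metadata store for the example.
type mapStore map[string]string

func (m mapStore) GetMetadata(_ context.Context, key string) (string, error) {
	return m[key], nil
}

func main() {
	ctx := context.Background()
	store := mapStore{"last_import_hash:../frontend": "abc123"}
	fmt.Println(hasJSONLChangedSketch(ctx, store, "abc123", "../frontend")) // false: unchanged
	fmt.Println(hasJSONLChangedSketch(ctx, store, "def456", "../frontend")) // true: re-import
}
```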
@@ -517,25 +588,16 @@ func createSyncFunc(ctx context.Context, store storage.Storage, autoCommit, auto
    }
    log.log("Exported to JSONL")

-   // Update last_import_hash metadata to prevent "content has changed" errors (bd-ymj fix)
-   // This keeps metadata in sync after export so next export doesn't fail
-   if currentHash, err := computeJSONLHash(jsonlPath); err == nil {
-       if err := store.SetMetadata(syncCtx, "last_import_hash", currentHash); err != nil {
-           log.log("Warning: failed to update last_import_hash: %v", err)
-       }
-       exportTime := time.Now().Format(time.RFC3339)
-       if err := store.SetMetadata(syncCtx, "last_import_time", exportTime); err != nil {
-           log.log("Warning: failed to update last_import_time: %v", err)
-       }
-       // Store mtime for fast-path optimization
-       if jsonlInfo, statErr := os.Stat(jsonlPath); statErr == nil {
-           mtimeStr := fmt.Sprintf("%d", jsonlInfo.ModTime().Unix())
-           if err := store.SetMetadata(syncCtx, "last_import_mtime", mtimeStr); err != nil {
-               log.log("Warning: failed to update last_import_mtime: %v", err)
-           }
-       }
+   // Update export metadata (bd-ymj fix, bd-ar2.2 multi-repo support, bd-ar2.11 stable keys)
+   if multiRepoPaths != nil {
+       // Multi-repo mode: update metadata for each JSONL with stable repo key
+       for _, path := range multiRepoPaths {
+           repoKey := getRepoKeyForPath(path)
+           updateExportMetadata(syncCtx, store, path, log, repoKey)
+       }
    } else {
-       log.log("Warning: failed to compute JSONL hash for metadata update: %v", err)
+       // Single-repo mode: update metadata for main JSONL
+       updateExportMetadata(syncCtx, store, jsonlPath, log, "")
    }

    // Update database mtime to be >= JSONL mtime (fixes #278, #301, #321)