refactor: rename last_import_hash to jsonl_content_hash (bd-39o)

The metadata key 'last_import_hash' was misleading because it's updated on
both import AND export. Renamed to 'jsonl_content_hash', which more accurately
describes its purpose: tracking the content hash of the JSONL file.

Added migration support: read operations try the new key first, then fall back
to the old key for backwards compatibility with existing databases.

Files modified:
- cmd/bd/integrity.go: Update key name with migration support
- cmd/bd/import.go: Update key name
- cmd/bd/sync.go: Update key name
- cmd/bd/autoflush.go: Update key name with migration support
- cmd/bd/daemon_sync.go: Update key name
- cmd/bd/daemon_event_loop.go: Update key name with migration support
- internal/autoimport/autoimport.go: Update key name with migration support
- Updated all related tests

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
Steve Yegge
2025-11-28 23:14:06 -08:00
parent bca8022bee
commit f59f8c20d0
9 changed files with 90 additions and 64 deletions

View File

@@ -83,14 +83,17 @@ func autoImportIfNewer() {
hasher.Write(jsonlData)
currentHash := hex.EncodeToString(hasher.Sum(nil))
// Get last import hash from DB metadata
// Get content hash from DB metadata (try new key first, fall back to old for migration - bd-39o)
ctx := rootCtx
lastHash, err := store.GetMetadata(ctx, "last_import_hash")
if err != nil {
// Metadata error - treat as first import rather than skipping (bd-663)
// This allows auto-import to recover from corrupt/missing metadata
debug.Logf("metadata read failed (%v), treating as first import", err)
lastHash = ""
lastHash, err := store.GetMetadata(ctx, "jsonl_content_hash")
if err != nil || lastHash == "" {
lastHash, err = store.GetMetadata(ctx, "last_import_hash")
if err != nil {
// Metadata error - treat as first import rather than skipping (bd-663)
// This allows auto-import to recover from corrupt/missing metadata
debug.Logf("metadata read failed (%v), treating as first import", err)
lastHash = ""
}
}
// Compare hashes
@@ -232,9 +235,9 @@ func autoImportIfNewer() {
}
}
// Store new hash after successful import
if err := store.SetMetadata(ctx, "last_import_hash", currentHash); err != nil {
fmt.Fprintf(os.Stderr, "Warning: failed to update last_import_hash after import: %v\n", err)
// Store new hash after successful import (renamed from last_import_hash - bd-39o)
if err := store.SetMetadata(ctx, "jsonl_content_hash", currentHash); err != nil {
fmt.Fprintf(os.Stderr, "Warning: failed to update jsonl_content_hash after import: %v\n", err)
fmt.Fprintf(os.Stderr, "This may cause auto-import to retry the same import on next operation.\n")
}
@@ -699,13 +702,14 @@ func flushToJSONLWithState(state flushState) {
}
// Store hash of exported JSONL (fixes bd-84: enables hash-based auto-import)
// Renamed from last_import_hash to jsonl_content_hash (bd-39o)
jsonlData, err := os.ReadFile(jsonlPath)
if err == nil {
hasher := sha256.New()
hasher.Write(jsonlData)
exportedHash := hex.EncodeToString(hasher.Sum(nil))
if err := store.SetMetadata(ctx, "last_import_hash", exportedHash); err != nil {
fmt.Fprintf(os.Stderr, "Warning: failed to update last_import_hash after export: %v\n", err)
if err := store.SetMetadata(ctx, "jsonl_content_hash", exportedHash); err != nil {
fmt.Fprintf(os.Stderr, "Warning: failed to update jsonl_content_hash after export: %v\n", err)
}
// Store JSONL file hash for integrity validation (bd-160)

View File

@@ -174,10 +174,13 @@ func checkDaemonHealth(ctx context.Context, store storage.Storage, log daemonLog
// Health check 1: Verify metadata is accessible
// This helps detect if external operations (like bd import --force) have modified metadata
// Without this, daemon may continue operating with stale metadata cache
if _, err := store.GetMetadata(ctx, "last_import_hash"); err != nil {
log.log("Health check: metadata read failed: %v", err)
// Non-fatal: daemon continues but logs the issue
// This helps diagnose stuck states in sandboxed environments
// Try new key first, fall back to old for migration (bd-39o)
if _, err := store.GetMetadata(ctx, "jsonl_content_hash"); err != nil {
if _, err := store.GetMetadata(ctx, "last_import_hash"); err != nil {
log.log("Health check: metadata read failed: %v", err)
// Non-fatal: daemon continues but logs the issue
// This helps diagnose stuck states in sandboxed environments
}
}
// Health check 2: Database integrity check

View File

@@ -247,16 +247,17 @@ func sanitizeMetadataKey(key string) string {
return strings.ReplaceAll(key, ":", "_")
}
// updateExportMetadata updates last_import_hash and related metadata after a successful export.
// updateExportMetadata updates jsonl_content_hash and related metadata after a successful export.
// This prevents "JSONL content has changed since last import" errors on subsequent exports (bd-ymj fix).
// In multi-repo mode, keySuffix should be the stable repo identifier (e.g., ".", "../frontend").
//
// Metadata key format (bd-ar2.12):
// - Single-repo mode: "last_import_hash", "last_import_time"
// - Multi-repo mode: "last_import_hash:<repo_key>", "last_import_time:<repo_key>", etc.
// Metadata key format (bd-ar2.12, bd-39o):
// - Single-repo mode: "jsonl_content_hash", "last_import_time"
// - Multi-repo mode: "jsonl_content_hash:<repo_key>", "last_import_time:<repo_key>", etc.
// where <repo_key> is a stable repo identifier like "." or "../frontend"
// - Windows paths: Colons in absolute paths (e.g., C:\...) are replaced with underscores (bd-web8)
// - Note: "last_import_mtime" was removed in bd-v0y fix (git doesn't preserve mtime)
// - Note: "last_import_hash" renamed to "jsonl_content_hash" (bd-39o) - more accurate name
//
// Transaction boundaries (bd-ar2.6):
// This function does NOT provide atomicity between JSONL write, metadata updates, and DB mtime.
@@ -279,7 +280,8 @@ func updateExportMetadata(ctx context.Context, store storage.Storage, jsonlPath
}
// Build metadata keys with optional suffix for per-repo tracking
hashKey := "last_import_hash"
// Renamed from last_import_hash to jsonl_content_hash (bd-39o)
hashKey := "jsonl_content_hash"
timeKey := "last_import_time"
if keySuffix != "" {
hashKey += ":" + keySuffix

View File

@@ -342,13 +342,13 @@ func TestExportUpdatesMetadata(t *testing.T) {
}
updateExportMetadata(ctx, store, jsonlPath, mockLogger, "")
// Verify metadata was set
lastHash, err := store.GetMetadata(ctx, "last_import_hash")
// Verify metadata was set (renamed from last_import_hash to jsonl_content_hash - bd-39o)
lastHash, err := store.GetMetadata(ctx, "jsonl_content_hash")
if err != nil {
t.Fatalf("failed to get last_import_hash: %v", err)
t.Fatalf("failed to get jsonl_content_hash: %v", err)
}
if lastHash == "" {
t.Error("expected last_import_hash to be set after export")
t.Error("expected jsonl_content_hash to be set after export")
}
lastTime, err := store.GetMetadata(ctx, "last_import_time")
@@ -449,7 +449,8 @@ func TestUpdateExportMetadataMultiRepo(t *testing.T) {
updateExportMetadata(ctx, store, jsonlPath2, mockLogger, jsonlPath2)
// Verify per-repo metadata was set with correct keys (bd-web8: keys are sanitized)
hash1Key := "last_import_hash:" + sanitizeMetadataKey(jsonlPath1)
// Renamed from last_import_hash to jsonl_content_hash (bd-39o)
hash1Key := "jsonl_content_hash:" + sanitizeMetadataKey(jsonlPath1)
hash1, err := store.GetMetadata(ctx, hash1Key)
if err != nil {
t.Fatalf("failed to get %s: %v", hash1Key, err)
@@ -458,7 +459,7 @@ func TestUpdateExportMetadataMultiRepo(t *testing.T) {
t.Errorf("expected %s to be set", hash1Key)
}
hash2Key := "last_import_hash:" + sanitizeMetadataKey(jsonlPath2)
hash2Key := "jsonl_content_hash:" + sanitizeMetadataKey(jsonlPath2)
hash2, err := store.GetMetadata(ctx, hash2Key)
if err != nil {
t.Fatalf("failed to get %s: %v", hash2Key, err)
@@ -468,12 +469,12 @@ func TestUpdateExportMetadataMultiRepo(t *testing.T) {
}
// Verify that single-repo metadata key is NOT set (we're using per-repo keys)
globalHash, err := store.GetMetadata(ctx, "last_import_hash")
globalHash, err := store.GetMetadata(ctx, "jsonl_content_hash")
if err != nil {
t.Fatalf("failed to get last_import_hash: %v", err)
t.Fatalf("failed to get jsonl_content_hash: %v", err)
}
if globalHash != "" {
t.Error("expected global last_import_hash to not be set when using per-repo keys")
t.Error("expected global jsonl_content_hash to not be set when using per-repo keys")
}
// Note: last_import_mtime removed in bd-v0y fix (git doesn't preserve mtime)
@@ -568,8 +569,8 @@ func TestExportWithMultiRepoConfigUpdatesAllMetadata(t *testing.T) {
updateExportMetadata(ctx, store, path, mockLogger, repoKey)
}
// Verify metadata for primary repo (bd-web8: keys are sanitized)
primaryHashKey := "last_import_hash:" + sanitizeMetadataKey(primaryDir)
// Verify metadata for primary repo (bd-web8: keys are sanitized, bd-39o: renamed key)
primaryHashKey := "jsonl_content_hash:" + sanitizeMetadataKey(primaryDir)
primaryHash, err := store.GetMetadata(ctx, primaryHashKey)
if err != nil {
t.Fatalf("failed to get %s: %v", primaryHashKey, err)
@@ -589,8 +590,8 @@ func TestExportWithMultiRepoConfigUpdatesAllMetadata(t *testing.T) {
// Note: last_import_mtime removed in bd-v0y fix (git doesn't preserve mtime)
// Verify metadata for additional repo (bd-web8: keys are sanitized)
additionalHashKey := "last_import_hash:" + sanitizeMetadataKey(additionalDir)
// Verify metadata for additional repo (bd-web8: keys are sanitized, bd-39o: renamed key)
additionalHashKey := "jsonl_content_hash:" + sanitizeMetadataKey(additionalDir)
additionalHash, err := store.GetMetadata(ctx, additionalHashKey)
if err != nil {
t.Fatalf("failed to get %s: %v", additionalHashKey, err)
@@ -616,12 +617,12 @@ func TestExportWithMultiRepoConfigUpdatesAllMetadata(t *testing.T) {
// metadata is set with correct per-repo keys.
// Verify global metadata keys are NOT set (multi-repo mode uses suffixed keys)
globalHash, err := store.GetMetadata(ctx, "last_import_hash")
globalHash, err := store.GetMetadata(ctx, "jsonl_content_hash")
if err != nil {
t.Fatalf("failed to get last_import_hash: %v", err)
t.Fatalf("failed to get jsonl_content_hash: %v", err)
}
if globalHash != "" {
t.Error("expected global last_import_hash to not be set in multi-repo mode")
t.Error("expected global jsonl_content_hash to not be set in multi-repo mode")
}
// Test that subsequent exports don't fail with "content has changed" error
@@ -687,8 +688,9 @@ func TestUpdateExportMetadataInvalidKeySuffix(t *testing.T) {
updateExportMetadata(ctx, store, jsonlPath, mockLogger, keySuffixWithColon)
// Verify metadata WAS set with sanitized key (colons replaced with underscores)
// bd-39o: renamed from last_import_hash to jsonl_content_hash
sanitized := sanitizeMetadataKey(keySuffixWithColon)
sanitizedKey := "last_import_hash:" + sanitized
sanitizedKey := "jsonl_content_hash:" + sanitized
hash, err := store.GetMetadata(ctx, sanitizedKey)
if err != nil {
t.Fatalf("failed to get metadata: %v", err)
@@ -698,7 +700,7 @@ func TestUpdateExportMetadataInvalidKeySuffix(t *testing.T) {
}
// Verify that the original unsanitized key was NOT used
unsanitizedKey := "last_import_hash:" + keySuffixWithColon
unsanitizedKey := "jsonl_content_hash:" + keySuffixWithColon
unsanitizedHash, err := store.GetMetadata(ctx, unsanitizedKey)
if err != nil {
t.Fatalf("failed to check unsanitized key: %v", err)

View File

@@ -350,17 +350,18 @@ NOTE: Import requires direct database access and does not work with daemon mode.
flushToJSONL()
}
// Update last_import_hash metadata to enable content-based staleness detection (bd-khnb fix)
// Update jsonl_content_hash metadata to enable content-based staleness detection (bd-khnb fix)
// This prevents git operations from resurrecting deleted issues by comparing content instead of mtime
// ALWAYS update metadata after successful import, even if no changes were made (fixes staleness check)
// This ensures that running `bd import` marks the database as fresh for staleness detection
// Renamed from last_import_hash (bd-39o) - more accurate since updated on both import AND export
if input != "" {
if currentHash, err := computeJSONLHash(input); err == nil {
if err := store.SetMetadata(ctx, "last_import_hash", currentHash); err != nil {
if err := store.SetMetadata(ctx, "jsonl_content_hash", currentHash); err != nil {
// Non-fatal warning: Metadata update failures are intentionally non-fatal to prevent blocking
// successful imports. System degrades gracefully to mtime-based staleness detection if metadata
// is unavailable. This ensures import operations always succeed even if metadata storage fails.
debug.Logf("Warning: failed to update last_import_hash: %v", err)
debug.Logf("Warning: failed to update jsonl_content_hash: %v", err)
}
// Use RFC3339Nano for nanosecond precision to avoid race with file mtime (fixes #399)
importTime := time.Now().Format(time.RFC3339Nano)

View File

@@ -106,9 +106,13 @@ func hasJSONLChanged(ctx context.Context, store storage.Storage, jsonlPath strin
}
// Build metadata keys with optional suffix for per-repo tracking (bd-ar2.10, bd-ar2.11)
hashKey := "last_import_hash"
// Renamed from last_import_hash to jsonl_content_hash (bd-39o) - more accurate name
// since this hash is updated on both import AND export
hashKey := "jsonl_content_hash"
oldHashKey := "last_import_hash" // Migration: check old key if new key missing
if keySuffix != "" {
hashKey += ":" + keySuffix
oldHashKey += ":" + keySuffix
}
// Always compute content hash (bd-v0y fix)
@@ -121,12 +125,16 @@ func hasJSONLChanged(ctx context.Context, store storage.Storage, jsonlPath strin
return false
}
// Get last import hash from metadata
// Get content hash from metadata (try new key first, fall back to old for migration)
lastHash, err := store.GetMetadata(ctx, hashKey)
if err != nil {
// No previous import hash - this is the first run or metadata is missing
// Assume changed to trigger import
return true
if err != nil || lastHash == "" {
// Try old key for migration (bd-39o)
lastHash, err = store.GetMetadata(ctx, oldHashKey)
if err != nil || lastHash == "" {
// No previous hash - this is the first run or metadata is missing
// Assume changed to trigger import
return true
}
}
// Compare hashes

View File

@@ -63,11 +63,12 @@ func TestValidatePreExportSuite(t *testing.T) {
}
// Store hash metadata to indicate JSONL and DB are in sync
// bd-39o: renamed from last_import_hash to jsonl_content_hash
hash, err := computeJSONLHash(jsonlPath)
if err != nil {
t.Fatalf("Failed to compute hash: %v", err)
}
if err := s.SetMetadata(ctx, "last_import_hash", hash); err != nil {
if err := s.SetMetadata(ctx, "jsonl_content_hash", hash); err != nil {
t.Fatalf("Failed to set hash metadata: %v", err)
}
@@ -135,12 +136,12 @@ func TestValidatePreExportSuite(t *testing.T) {
t.Fatalf("Failed to write JSONL: %v", err)
}
// Store hash of original content
// Store hash of original content (bd-39o: renamed to jsonl_content_hash)
hash, err := computeJSONLHash(jsonlPath)
if err != nil {
t.Fatalf("Failed to compute hash: %v", err)
}
if err := s.SetMetadata(ctx, "last_import_hash", hash); err != nil {
if err := s.SetMetadata(ctx, "jsonl_content_hash", hash); err != nil {
t.Fatalf("Failed to set hash: %v", err)
}
@@ -402,7 +403,7 @@ func TestHasJSONLChangedSuite(t *testing.T) {
if err != nil {
t.Fatalf("Failed to compute hash: %v", err)
}
if err := s.SetMetadata(ctx, "last_import_hash:"+keySuffix, hash); err != nil {
if err := s.SetMetadata(ctx, "jsonl_content_hash:"+keySuffix, hash); err != nil {
t.Fatalf("Failed to set metadata: %v", err)
}
@@ -437,7 +438,7 @@ func TestHasJSONLChangedSuite(t *testing.T) {
if err != nil {
t.Fatalf("Failed to compute hash: %v", err)
}
if err := s.SetMetadata(ctx, "last_import_hash:"+keySuffix, hash); err != nil {
if err := s.SetMetadata(ctx, "jsonl_content_hash:"+keySuffix, hash); err != nil {
t.Fatalf("Failed to set metadata: %v", err)
}
@@ -522,7 +523,7 @@ func TestHasJSONLChangedSuite(t *testing.T) {
if err != nil {
t.Fatalf("Failed to compute hash: %v", err)
}
if err := s.SetMetadata(ctx, "last_import_hash:"+keySuffix, hash); err != nil {
if err := s.SetMetadata(ctx, "jsonl_content_hash:"+keySuffix, hash); err != nil {
t.Fatalf("Failed to set hash: %v", err)
}
@@ -560,7 +561,7 @@ func TestHasJSONLChangedSuite(t *testing.T) {
if err != nil {
t.Fatalf("Failed to compute hash: %v", err)
}
if err := s.SetMetadata(ctx, "last_import_hash:"+keySuffix, hash); err != nil {
if err := s.SetMetadata(ctx, "jsonl_content_hash:"+keySuffix, hash); err != nil {
t.Fatalf("Failed to set hash: %v", err)
}

View File

@@ -986,14 +986,15 @@ func exportToJSONL(ctx context.Context, jsonlPath string) error {
// Clear auto-flush state
clearAutoFlushState()
// Update last_import_hash metadata to enable content-based staleness detection (bd-khnb fix)
// Update jsonl_content_hash metadata to enable content-based staleness detection (bd-khnb fix)
// After export, database and JSONL are in sync, so update hash to prevent unnecessary auto-import
// Renamed from last_import_hash (bd-39o) - more accurate since updated on both import AND export
if currentHash, err := computeJSONLHash(jsonlPath); err == nil {
if err := store.SetMetadata(ctx, "last_import_hash", currentHash); err != nil {
if err := store.SetMetadata(ctx, "jsonl_content_hash", currentHash); err != nil {
// Non-fatal warning: Metadata update failures are intentionally non-fatal to prevent blocking
// successful exports. System degrades gracefully to mtime-based staleness detection if metadata
// is unavailable. This ensures export operations always succeed even if metadata storage fails.
fmt.Fprintf(os.Stderr, "Warning: failed to update last_import_hash: %v\n", err)
fmt.Fprintf(os.Stderr, "Warning: failed to update jsonl_content_hash: %v\n", err)
}
// Use RFC3339Nano for nanosecond precision to avoid race with file mtime (fixes #399)
exportTime := time.Now().Format(time.RFC3339Nano)