Fix bd-dvd and bd-ymj: Parent resurrection and export metadata

Bug 1 (bd-dvd): GetNextChildID now attempts parent resurrection from JSONL
before failing. Added TryResurrectParent call to match CreateIssue behavior.

Bug 2 (bd-ymj): Export now updates last_import_hash metadata to prevent
'JSONL content has changed' errors on subsequent exports.

Files changed:
- internal/storage/sqlite/hash_ids.go: Add resurrection attempt
- cmd/bd/daemon_sync.go: Add metadata updates after export
- Tests added for both fixes
- Fixed pre-existing bug in integrity_content_test.go

Follow-up work tracked in epic bd-ar2 (9 issues for improvements).

Fixes GH #334
This commit is contained in:
Steve Yegge
2025-11-21 10:29:30 -05:00
parent ff3ccdd26e
commit 4c5f99c5bd
6 changed files with 230 additions and 20 deletions

View File

@@ -306,6 +306,27 @@ func createExportFunc(ctx context.Context, store storage.Storage, autoCommit, au
}
log.log("Exported to JSONL")
// Update last_import_hash metadata to prevent "content has changed" errors (bd-ymj fix)
// This keeps metadata in sync after export so next export doesn't fail
if currentHash, err := computeJSONLHash(jsonlPath); err == nil {
if err := store.SetMetadata(exportCtx, "last_import_hash", currentHash); err != nil {
log.log("Warning: failed to update last_import_hash: %v", err)
}
exportTime := time.Now().Format(time.RFC3339)
if err := store.SetMetadata(exportCtx, "last_import_time", exportTime); err != nil {
log.log("Warning: failed to update last_import_time: %v", err)
}
// Store mtime for fast-path optimization
if jsonlInfo, statErr := os.Stat(jsonlPath); statErr == nil {
mtimeStr := fmt.Sprintf("%d", jsonlInfo.ModTime().Unix())
if err := store.SetMetadata(exportCtx, "last_import_mtime", mtimeStr); err != nil {
log.log("Warning: failed to update last_import_mtime: %v", err)
}
}
} else {
log.log("Warning: failed to compute JSONL hash for metadata update: %v", err)
}
// Update database mtime to be >= JSONL mtime (fixes #278, #301, #321)
// This prevents validatePreExport from incorrectly blocking on next export
// with "JSONL is newer than database" after daemon auto-export
@@ -496,6 +517,27 @@ func createSyncFunc(ctx context.Context, store storage.Storage, autoCommit, auto
}
log.log("Exported to JSONL")
// Update last_import_hash metadata to prevent "content has changed" errors (bd-ymj fix)
// This keeps metadata in sync after export so next export doesn't fail
if currentHash, err := computeJSONLHash(jsonlPath); err == nil {
if err := store.SetMetadata(syncCtx, "last_import_hash", currentHash); err != nil {
log.log("Warning: failed to update last_import_hash: %v", err)
}
exportTime := time.Now().Format(time.RFC3339)
if err := store.SetMetadata(syncCtx, "last_import_time", exportTime); err != nil {
log.log("Warning: failed to update last_import_time: %v", err)
}
// Store mtime for fast-path optimization
if jsonlInfo, statErr := os.Stat(jsonlPath); statErr == nil {
mtimeStr := fmt.Sprintf("%d", jsonlInfo.ModTime().Unix())
if err := store.SetMetadata(syncCtx, "last_import_mtime", mtimeStr); err != nil {
log.log("Warning: failed to update last_import_mtime: %v", err)
}
}
} else {
log.log("Warning: failed to compute JSONL hash for metadata update: %v", err)
}
// Update database mtime to be >= JSONL mtime (fixes #278, #301, #321)
// This prevents validatePreExport from incorrectly blocking on next export
dbPath := filepath.Join(beadsDir, "beads.db")

View File

@@ -3,6 +3,7 @@ package main
import (
"context"
"encoding/json"
"fmt"
"os"
"path/filepath"
"testing"
@@ -291,3 +292,92 @@ func TestExportImportRoundTrip(t *testing.T) {
t.Errorf("expected label 'bug', got %v", labels)
}
}
// TestExportUpdatesMetadata verifies that export updates last_import_hash metadata (bd-ymj fix).
//
// It simulates the metadata bookkeeping that createExportFunc/createSyncFunc perform
// after an export, then confirms that (a) each metadata key round-trips, and
// (b) a subsequent export and validatePreExport succeed without a spurious
// "JSONL content has changed" error.
func TestExportUpdatesMetadata(t *testing.T) {
	tmpDir := t.TempDir()
	dbPath := filepath.Join(tmpDir, ".beads", "beads.db")
	jsonlPath := filepath.Join(tmpDir, ".beads", "issues.jsonl")

	// Create storage
	store, err := sqlite.New(context.Background(), dbPath)
	if err != nil {
		t.Fatalf("failed to create store: %v", err)
	}
	defer store.Close()

	ctx := context.Background()

	// Set issue_prefix
	if err := store.SetConfig(ctx, "issue_prefix", "test"); err != nil {
		t.Fatalf("failed to set issue_prefix: %v", err)
	}

	// Create test issue
	issue := &types.Issue{
		ID:          "test-1",
		Title:       "Test Issue",
		Description: "Test description",
		IssueType:   types.TypeBug,
		Priority:    1,
		Status:      types.StatusOpen,
		CreatedAt:   time.Now(),
		UpdatedAt:   time.Now(),
	}
	if err := store.CreateIssue(ctx, issue, "test"); err != nil {
		t.Fatalf("failed to create issue: %v", err)
	}

	// First export
	if err := exportToJSONLWithStore(ctx, store, jsonlPath); err != nil {
		t.Fatalf("first export failed: %v", err)
	}

	// Manually update metadata as daemon would (this is what we're testing)
	// Note: In production, createExportFunc and createSyncFunc do this
	currentHash, err := computeJSONLHash(jsonlPath)
	if err != nil {
		t.Fatalf("failed to compute JSONL hash: %v", err)
	}
	if err := store.SetMetadata(ctx, "last_import_hash", currentHash); err != nil {
		t.Fatalf("failed to set last_import_hash: %v", err)
	}
	exportTime := time.Now().Format(time.RFC3339)
	if err := store.SetMetadata(ctx, "last_import_time", exportTime); err != nil {
		t.Fatalf("failed to set last_import_time: %v", err)
	}
	// Fail loudly if the JSONL file can't be stat'd: silently skipping this
	// step would leave last_import_mtime unset and mask a broken export.
	jsonlInfo, statErr := os.Stat(jsonlPath)
	if statErr != nil {
		t.Fatalf("failed to stat JSONL file: %v", statErr)
	}
	// Match production code (daemon_sync.go): store the Unix mtime as a
	// decimal string under last_import_mtime.
	wantMtime := fmt.Sprintf("%d", jsonlInfo.ModTime().Unix())
	if err := store.SetMetadata(ctx, "last_import_mtime", wantMtime); err != nil {
		t.Fatalf("failed to set last_import_mtime: %v", err)
	}

	// Verify metadata was set — compare against the exact values we stored,
	// not just non-emptiness, so a wrong-key or wrong-value bug is caught.
	lastHash, err := store.GetMetadata(ctx, "last_import_hash")
	if err != nil {
		t.Fatalf("failed to get last_import_hash: %v", err)
	}
	if lastHash != currentHash {
		t.Errorf("expected last_import_hash %q after export, got %q", currentHash, lastHash)
	}
	lastTime, err := store.GetMetadata(ctx, "last_import_time")
	if err != nil {
		t.Fatalf("failed to get last_import_time: %v", err)
	}
	if lastTime != exportTime {
		t.Errorf("expected last_import_time %q after export, got %q", exportTime, lastTime)
	}
	lastMtime, err := store.GetMetadata(ctx, "last_import_mtime")
	if err != nil {
		t.Fatalf("failed to get last_import_mtime: %v", err)
	}
	if lastMtime != wantMtime {
		t.Errorf("expected last_import_mtime %q after export, got %q", wantMtime, lastMtime)
	}

	// Second export should succeed without "content has changed" error
	if err := exportToJSONLWithStore(ctx, store, jsonlPath); err != nil {
		t.Fatalf("second export failed (metadata not updated properly): %v", err)
	}

	// Verify validatePreExport doesn't fail with "content has changed"
	if err := validatePreExport(ctx, store, jsonlPath); err != nil {
		t.Fatalf("validatePreExport failed after metadata update: %v", err)
	}
}

View File

@@ -27,6 +27,8 @@ func TestContentBasedComparison(t *testing.T) {
dbPath := filepath.Join(beadsDir, "beads.db")
jsonlPath := filepath.Join(beadsDir, "issues.jsonl")
ctx := context.Background()
// Create and populate database
localStore, err := sqlite.New(ctx, dbPath)
if err != nil {
@@ -34,8 +36,6 @@ func TestContentBasedComparison(t *testing.T) {
}
defer localStore.Close()
ctx := context.Background()
// Initialize database with issue_prefix
if err := localStore.SetConfig(ctx, "issue_prefix", "test"); err != nil {
t.Fatalf("Failed to set issue_prefix: %v", err)
@@ -179,6 +179,8 @@ func TestContentHashComputation(t *testing.T) {
dbPath := filepath.Join(beadsDir, "beads.db")
jsonlPath := filepath.Join(beadsDir, "issues.jsonl")
ctx := context.Background()
// Create and populate database
localStore, err := sqlite.New(ctx, dbPath)
if err != nil {
@@ -186,8 +188,6 @@ func TestContentHashComputation(t *testing.T) {
}
defer localStore.Close()
ctx := context.Background()
if err := localStore.SetConfig(ctx, "issue_prefix", "test"); err != nil {
t.Fatalf("Failed to set issue_prefix: %v", err)
}