fix(export): populate export_hashes after successful export (GH#1278) (#1286)

Child issues created with --parent were missing from the export_hashes table,
an omission that broke integrity tracking and future incremental export features.

This fix ensures SetExportHash() is called for all exported issues:
- Updated ExportResult to include IssueContentHashes map
- Updated finalizeExport() to call SetExportHash() for each exported issue
- Updated exportToJSONLDeferred() to collect content hashes during export
- Updated performIncrementalExport() to collect content hashes for dirty issues
- Updated exportToJSONLWithStore() to call SetExportHash() after export
- Updated daemon's handleExport() to call SetExportHash() after export

Added test TestExportPopulatesExportHashes to verify the fix works for
both regular and hierarchical (child) issue IDs.

Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Scott Nixon
2026-01-24 17:10:02 -08:00
committed by GitHub
parent fde40a79cf
commit 810192157c
4 changed files with 152 additions and 9 deletions

View File

@@ -43,6 +43,10 @@ type ExportResult struct {
// ExportTime is when the export was performed (RFC3339Nano format)
ExportTime string
// IssueContentHashes maps issue IDs to their content hashes (GH#1278)
// Used to populate export_hashes table after successful export
IssueContentHashes map[string]string
}
// finalizeExport updates SQLite metadata after a successful git commit.
@@ -69,6 +73,18 @@ func finalizeExport(ctx context.Context, result *ExportResult) {
}
}
// Update export_hashes for all exported issues (GH#1278)
// This ensures child issues created with --parent are properly registered
// for integrity tracking and incremental export detection.
if len(result.IssueContentHashes) > 0 {
for issueID, contentHash := range result.IssueContentHashes {
if err := store.SetExportHash(ctx, issueID, contentHash); err != nil {
// Non-fatal warning - continue with other issues
fmt.Fprintf(os.Stderr, "Warning: failed to set export hash for %s: %v\n", issueID, err)
}
}
}
// Clear auto-flush state
clearAutoFlushState()
@@ -233,14 +249,19 @@ func exportToJSONLDeferred(ctx context.Context, jsonlPath string) (*ExportResult
_ = os.Remove(tempPath)
}()
// Write JSONL
// Write JSONL and collect content hashes (GH#1278)
encoder := json.NewEncoder(tempFile)
exportedIDs := make([]string, 0, len(issues))
issueContentHashes := make(map[string]string, len(issues))
for _, issue := range issues {
if err := encoder.Encode(issue); err != nil {
return nil, fmt.Errorf("failed to encode issue %s: %w", issue.ID, err)
}
exportedIDs = append(exportedIDs, issue.ID)
// Collect content hash for export_hashes table
if issue.ContentHash != "" {
issueContentHashes[issue.ID] = issue.ContentHash
}
}
// Close temp file before rename (error checked implicitly by Rename success)
@@ -262,10 +283,11 @@ func exportToJSONLDeferred(ctx context.Context, jsonlPath string) (*ExportResult
exportTime := time.Now().Format(time.RFC3339Nano)
return &ExportResult{
JSONLPath: jsonlPath,
ExportedIDs: exportedIDs,
ContentHash: contentHash,
ExportTime: exportTime,
JSONLPath: jsonlPath,
ExportedIDs: exportedIDs,
ContentHash: contentHash,
ExportTime: exportTime,
IssueContentHashes: issueContentHashes,
}, nil
}
@@ -366,8 +388,10 @@ func performIncrementalExport(ctx context.Context, jsonlPath string, dirtyIDs []
}
// Query dirty issues from database and track which IDs were found
// Also collect content hashes for export_hashes table (GH#1278)
dirtyIssues := make([]*types.Issue, 0, len(dirtyIDs))
issueByID := make(map[string]*types.Issue, len(dirtyIDs))
issueContentHashes := make(map[string]string, len(dirtyIDs))
for _, id := range dirtyIDs {
issue, err := store.GetIssue(ctx, id)
if err != nil {
@@ -376,6 +400,9 @@ func performIncrementalExport(ctx context.Context, jsonlPath string, dirtyIDs []
issueByID[id] = issue // Store result (may be nil for deleted issues)
if issue != nil {
dirtyIssues = append(dirtyIssues, issue)
if issue.ContentHash != "" {
issueContentHashes[issue.ID] = issue.ContentHash
}
}
}
@@ -502,10 +529,11 @@ func performIncrementalExport(ctx context.Context, jsonlPath string, dirtyIDs []
// Note: exportedIDs contains ALL IDs in the file, but we only need to clear
// dirty flags for the dirtyIDs (which we received as parameter)
return &ExportResult{
JSONLPath: jsonlPath,
ExportedIDs: dirtyIDs, // Only clear dirty flags for actually dirty issues
ContentHash: contentHash,
ExportTime: exportTime,
JSONLPath: jsonlPath,
ExportedIDs: dirtyIDs, // Only clear dirty flags for actually dirty issues
ContentHash: contentHash,
ExportTime: exportTime,
IssueContentHashes: issueContentHashes,
}, nil
}