diff --git a/cmd/bd/autoflush.go b/cmd/bd/autoflush.go
index 47b32e8c..2c707236 100644
--- a/cmd/bd/autoflush.go
+++ b/cmd/bd/autoflush.go
@@ -454,43 +454,40 @@ func writeJSONLAtomic(jsonlPath string, issues []*types.Issue) ([]string, error)
 		}
 	}()
 
-	// Write all issues as JSONL (with timestamp-only deduplication for bd-159)
-	ctx := context.Background()
+	// Write all issues as JSONL (timestamp-only deduplication DISABLED - bd-160)
 	encoder := json.NewEncoder(f)
 	skippedCount := 0
 	exportedIDs := make([]string, 0, len(issues))
 	for _, issue := range issues {
-		// Check if this is only a timestamp change (bd-159)
-		skip, err := shouldSkipExport(ctx, issue)
-		if err != nil {
-			// Log warning but continue - don't fail export on hash check errors
-			if os.Getenv("BD_DEBUG") != "" {
-				fmt.Fprintf(os.Stderr, "Debug: failed to check if %s should skip: %v\n", issue.ID, err)
-			}
-			skip = false
-		}
-
-		if skip {
-			skippedCount++
-			continue
-		}
+		// DISABLED: timestamp-only deduplication causes data loss (bd-160)
+		// skip, err := shouldSkipExport(ctx, issue)
+		// if err != nil {
+		//	if os.Getenv("BD_DEBUG") != "" {
+		//		fmt.Fprintf(os.Stderr, "Debug: failed to check if %s should skip: %v\n", issue.ID, err)
+		//	}
+		//	skip = false
+		// }
+		// if skip {
+		//	skippedCount++
+		//	continue
+		// }
 		if err := encoder.Encode(issue); err != nil {
 			return nil, fmt.Errorf("failed to encode issue %s: %w", issue.ID, err)
 		}
-		// Save content hash after successful export (bd-159)
-		contentHash, err := computeIssueContentHash(issue)
-		if err != nil {
-			if os.Getenv("BD_DEBUG") != "" {
-				fmt.Fprintf(os.Stderr, "Debug: failed to compute hash for %s: %v\n", issue.ID, err)
-			}
-		} else if err := store.SetExportHash(ctx, issue.ID, contentHash); err != nil {
-			if os.Getenv("BD_DEBUG") != "" {
-				fmt.Fprintf(os.Stderr, "Debug: failed to save export hash for %s: %v\n", issue.ID, err)
-			}
-		}
+		// DISABLED: export hash tracking (bd-160)
+		// contentHash, err := computeIssueContentHash(issue)
+		// if err != nil {
+		//	if os.Getenv("BD_DEBUG") != "" {
+		//		fmt.Fprintf(os.Stderr, "Debug: failed to compute hash for %s: %v\n", issue.ID, err)
+		//	}
+		// } else if err := store.SetExportHash(ctx, issue.ID, contentHash); err != nil {
+		//	if os.Getenv("BD_DEBUG") != "" {
+		//		fmt.Fprintf(os.Stderr, "Debug: failed to save export hash for %s: %v\n", issue.ID, err)
+		//	}
+		// }
 		exportedIDs = append(exportedIDs, issue.ID)
 	}
diff --git a/cmd/bd/export.go b/cmd/bd/export.go
index 432fcae4..bf8f3db4 100644
--- a/cmd/bd/export.go
+++ b/cmd/bd/export.go
@@ -222,36 +222,37 @@ Output to stdout by default, or use -o flag for file output.`,
 			out = tempFile
 		}
 
-		// Write JSONL (with timestamp-only deduplication for bd-164)
+		// Write JSONL (timestamp-only deduplication DISABLED due to bd-160)
 		encoder := json.NewEncoder(out)
 		exportedIDs := make([]string, 0, len(issues))
 		skippedCount := 0
 		for _, issue := range issues {
-			// Check if this is only a timestamp change (bd-164)
-			skip, err := shouldSkipExport(ctx, issue)
-			if err != nil {
-				// Log warning but continue - don't fail export on hash check errors
-				fmt.Fprintf(os.Stderr, "Warning: failed to check if %s should skip: %v\n", issue.ID, err)
-				skip = false
-			}
-
-			if skip {
-				skippedCount++
-				continue
-			}
+			// DISABLED: timestamp-only deduplication causes data loss (bd-160)
+			// The export_hashes table gets out of sync with JSONL after git operations,
+			// causing exports to skip issues that aren't actually in the file.
+			//
+			// skip, err := shouldSkipExport(ctx, issue)
+			// if err != nil {
+			//	fmt.Fprintf(os.Stderr, "Warning: failed to check if %s should skip: %v\n", issue.ID, err)
+			//	skip = false
+			// }
+			// if skip {
+			//	skippedCount++
+			//	continue
+			// }
 			if err := encoder.Encode(issue); err != nil {
 				fmt.Fprintf(os.Stderr, "Error encoding issue %s: %v\n", issue.ID, err)
 				os.Exit(1)
 			}
-			// Save content hash after successful export (bd-164)
-			contentHash, err := computeIssueContentHash(issue)
-			if err != nil {
-				fmt.Fprintf(os.Stderr, "Warning: failed to compute hash for %s: %v\n", issue.ID, err)
-			} else if err := store.SetExportHash(ctx, issue.ID, contentHash); err != nil {
-				fmt.Fprintf(os.Stderr, "Warning: failed to save export hash for %s: %v\n", issue.ID, err)
-			}
+			// DISABLED: export hash tracking (bd-160)
+			// contentHash, err := computeIssueContentHash(issue)
+			// if err != nil {
+			//	fmt.Fprintf(os.Stderr, "Warning: failed to compute hash for %s: %v\n", issue.ID, err)
+			// } else if err := store.SetExportHash(ctx, issue.ID, contentHash); err != nil {
+			//	fmt.Fprintf(os.Stderr, "Warning: failed to save export hash for %s: %v\n", issue.ID, err)
+			// }
 			exportedIDs = append(exportedIDs, issue.ID)
 		}