From 8b0cd1ec665a50cd7b1c8a6badcf43a17b29bdbb Mon Sep 17 00:00:00 2001
From: Steve Yegge
Date: Thu, 18 Dec 2025 11:09:26 -0800
Subject: [PATCH] refactor(import): remove redundant pre-batch deduplication
 layer
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

The early ID deduplication in the main loop (seenIDs check) already
prevents duplicate IDs from reaching newIssues. The pre-batch
deduplication was dead code that could never fire.

Keeping two layers:
1. Early dedup in main loop - catches duplicates in JSONL input
2. INSERT OR IGNORE at DB level - safety net for all code paths

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5
---
 internal/importer/importer.go | 14 --------------
 1 file changed, 14 deletions(-)

diff --git a/internal/importer/importer.go b/internal/importer/importer.go
index 14ed14b1..8522254c 100644
--- a/internal/importer/importer.go
+++ b/internal/importer/importer.go
@@ -710,20 +710,6 @@ if len(newIssues) > 0 {
 		return newIssues[i].ID < newIssues[j].ID // Stable sort
 	})
 
-	// Deduplicate by ID to prevent UNIQUE constraint errors during batch insert
-	// This handles cases where JSONL contains multiple versions of the same issue
-	seenNewIDs := make(map[string]bool)
-	var dedupedNewIssues []*types.Issue
-	for _, issue := range newIssues {
-		if !seenNewIDs[issue.ID] {
-			seenNewIDs[issue.ID] = true
-			dedupedNewIssues = append(dedupedNewIssues, issue)
-		} else {
-			result.Skipped++ // Count duplicates that were skipped
-		}
-	}
-	newIssues = dedupedNewIssues
-
 	// Create in batches by depth level (max depth 3)
 	for depth := 0; depth <= 3; depth++ {
 		var batchForDepth []*types.Issue