refactor(import): remove redundant pre-batch deduplication layer
The early ID deduplication in the main loop (seenIDs check) already
prevents duplicate IDs from reaching newIssues, so the pre-batch
deduplication was dead code that could never fire.

Two layers of protection remain:

1. Early dedup in the main loop - catches duplicates in the JSONL input
2. INSERT OR IGNORE at the DB level - safety net for all code paths

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
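For context, here is a minimal sketch of layer 1 as the commit message describes it, not the project's actual import loop: the `collectNewIssues` function, the bufio-based JSONL scanning, and the `Issue`/`ImportResult` types are illustrative stand-ins; only the `seenIDs` map, `newIssues`, and the skipped-duplicate counting come from the commit.

```go
// Hypothetical sketch of layer 1 (early dedup in the main loop).
// Issue stands in for types.Issue; ImportResult and collectNewIssues
// are illustrative names, not the project's actual code.
package importer

import (
	"bufio"
	"encoding/json"
	"io"
)

// Issue is a stand-in for types.Issue; only the ID field matters here.
type Issue struct {
	ID string `json:"id"`
}

// ImportResult is a stand-in for the importer's result accumulator.
type ImportResult struct {
	Skipped int
}

// collectNewIssues reads JSONL and keeps only the first occurrence of
// each ID, so duplicates never reach newIssues in the first place.
func collectNewIssues(r io.Reader, result *ImportResult) ([]*Issue, error) {
	seenIDs := make(map[string]bool)
	var newIssues []*Issue

	scanner := bufio.NewScanner(r)
	for scanner.Scan() {
		line := scanner.Bytes()
		if len(line) == 0 {
			continue // tolerate blank lines in the input
		}
		var issue Issue
		if err := json.Unmarshal(line, &issue); err != nil {
			return nil, err
		}
		if seenIDs[issue.ID] {
			result.Skipped++ // duplicate ID in the JSONL input
			continue
		}
		seenIDs[issue.ID] = true
		newIssues = append(newIssues, &issue)
	}
	return newIssues, scanner.Err()
}
```

Because this check runs while the input is being read, any later per-batch dedup over the same slice can never find a duplicate, which is exactly why the code removed below was dead.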
```diff
@@ -710,20 +710,6 @@ if len(newIssues) > 0 {
 			return newIssues[i].ID < newIssues[j].ID // Stable sort
 		})
 
-		// Deduplicate by ID to prevent UNIQUE constraint errors during batch insert
-		// This handles cases where JSONL contains multiple versions of the same issue
-		seenNewIDs := make(map[string]bool)
-		var dedupedNewIssues []*types.Issue
-		for _, issue := range newIssues {
-			if !seenNewIDs[issue.ID] {
-				seenNewIDs[issue.ID] = true
-				dedupedNewIssues = append(dedupedNewIssues, issue)
-			} else {
-				result.Skipped++ // Count duplicates that were skipped
-			}
-		}
-		newIssues = dedupedNewIssues
-
 		// Create in batches by depth level (max depth 3)
 		for depth := 0; depth <= 3; depth++ {
 			var batchForDepth []*types.Issue
```
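With the pre-batch layer gone, any duplicate that slips past layer 1 is absorbed at the database instead. A hedged sketch of layer 2 follows, assuming a SQLite-backed `issues` table accessed through database/sql; the table name, column, and `insertIssueIDs` helper are hypothetical, and only the INSERT OR IGNORE clause itself comes from the commit message.

```go
// Hypothetical sketch of layer 2 (the DB-level safety net), assuming a
// SQLite `issues` table reached via database/sql. Table, column, and
// helper names are illustrative; only INSERT OR IGNORE is from the
// commit message.
package importer

import "database/sql"

// insertIssueIDs inserts a batch of issue IDs inside a transaction.
// INSERT OR IGNORE turns a duplicate primary key into a silent no-op
// instead of a UNIQUE constraint error, covering every code path that
// reaches the batch insert.
func insertIssueIDs(tx *sql.Tx, ids []string) error {
	stmt, err := tx.Prepare(`INSERT OR IGNORE INTO issues (id) VALUES (?)`)
	if err != nil {
		return err
	}
	defer stmt.Close()

	for _, id := range ids {
		if _, err := stmt.Exec(id); err != nil {
			return err
		}
	}
	return nil
}
```

Relying on the database constraint rather than a second in-memory pass keeps the two remaining layers independent: one guards the input boundary, the other guards the storage boundary.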