Update issues database and main command
Amp-Thread-ID: https://ampcode.com/threads/T-ad2e485a-ee9a-4055-886d-c875a2824091
Co-authored-by: Amp <amp@ampcode.com>
This commit is contained in:
@@ -622,7 +622,7 @@
|
||||
{"id":"bd-661","title":"Code review follow-up: Post-PR #8 merge improvements","description":"Follow-up tasks from the ultrathink code review of PR #8 merge (bd-62).\n\n**Context:** PR #8 successfully merged atomic counter + dirty tracking. Core functionality is solid but several improvements identified.\n\n**Critical (P0-P1):**\n- bd-64: Fix SyncAllCounters performance bottleneck (P0)\n- bd-65: Add migration for issue_counters table (P1)\n- bd-66: Make import counter sync failure fatal (P1)\n\n**Nice to have (P2-P3):**\n- bd-67: Update test comments (P2)\n- bd-68: Add performance benchmarks (P2)\n- bd-69: Add metrics/logging (P3)\n- bd-70: Add EXPLAIN QUERY PLAN tests (P3)\n\n**Overall assessment:** 4/5 stars - Excellent implementation with one critical performance issue. After bd-64 is fixed, this becomes 5/5.\n\n**Review document:** Available if needed","design":"Command should:\n1. Export pending changes to JSONL\n2. Commit changes to git\n3. Pull from remote (with conflict resolution)\n4. Push local commits to remote\n\nWraps the entire sync workflow in one command for better UX.","acceptance_criteria":"- bd sync command implemented\n- Exports dirty changes to JSONL automatically\n- Commits to git with descriptive message\n- Pulls and handles merge conflicts\n- Auto-imports updated JSONL\n- Pushes to remote\n- Error handling for git failures\n- Tests cover success and failure scenarios\n- Documentation in README.md\n\n---","notes":"All tasks completed: bd-64 (performance), bd-65 (migration), bd-66 (version sync), bd-67 (version script), bd-69 (CI coverage), bd-70 (test coverage). Code review follow-up complete.","status":"closed","priority":1,"issue_type":"epic","created_at":"2025-10-16T18:57:16.327043-07:00","updated_at":"2025-10-16T18:57:16.327043-07:00","closed_at":"2025-10-16T17:49:54.510305-07:00"}
|
||||
{"id":"bd-662","title":"Build collision resolution tooling for distributed branch workflows","description":"When branches diverge and both create issues, auto-incrementing IDs collide on merge. Build excellent tooling to detect collisions during import, auto-renumber issues with fewer dependencies, update all references in descriptions and dependency links, and provide clear user feedback. Goal: keep beautiful brevity of numeric IDs (bd-641) while handling distributed creation gracefully.","status":"closed","priority":1,"issue_type":"feature","created_at":"2025-10-16T18:57:16.330979-07:00","updated_at":"2025-10-16T18:57:16.382685-07:00","closed_at":"2025-10-16T17:49:54.510752-07:00","dependencies":[{"issue_id":"bd-662","depends_on_id":"bd-656","type":"discovered-from","created_at":"2025-10-16T18:57:16.79771-07:00","created_by":"import-remap"}]}
|
||||
{"id":"bd-663","title":"Fix metadata error handling in auto-import","description":"GetMetadata() failure causes auto-import to stop forever. Should treat missing/corrupt metadata as first import (lastHash='') instead of skipping.","status":"closed","priority":0,"issue_type":"bug","created_at":"2025-10-16T19:04:18.00281-07:00","updated_at":"2025-10-16T19:15:13.728892-07:00","closed_at":"2025-10-16T19:15:13.728892-07:00","dependencies":[{"issue_id":"bd-663","depends_on_id":"bd-379","type":"discovered-from","created_at":"2025-10-16T19:04:18.011842-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-664","title":"Ensure last_import_hash update is error-checked","description":"If hash update fails after successful import, auto-import will retry same import forever. Must error-check SetMetadata() and warn user.","status":"open","priority":0,"issue_type":"bug","created_at":"2025-10-16T19:04:18.015002-07:00","updated_at":"2025-10-16T19:04:18.015002-07:00","dependencies":[{"issue_id":"bd-664","depends_on_id":"bd-379","type":"discovered-from","created_at":"2025-10-16T19:04:18.018601-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-664","title":"Ensure last_import_hash update is error-checked","description":"If hash update fails after successful import, auto-import will retry same import forever. Must error-check SetMetadata() and warn user.","status":"closed","priority":0,"issue_type":"bug","created_at":"2025-10-16T19:04:18.015002-07:00","updated_at":"2025-10-16T19:17:40.916559-07:00","closed_at":"2025-10-16T19:17:40.916559-07:00","dependencies":[{"issue_id":"bd-664","depends_on_id":"bd-379","type":"discovered-from","created_at":"2025-10-16T19:04:18.018601-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-666","title":"Fix N+1 query pattern in auto-import","description":"Line 347 calls GetIssue() for every imported issue. With 1000+ issues this is slow. Batch fetch all existing issues upfront.","status":"closed","priority":0,"issue_type":"bug","created_at":"2025-10-16T19:04:18.082894-07:00","updated_at":"2025-10-16T19:13:22.93101-07:00","closed_at":"2025-10-16T19:13:22.93101-07:00","dependencies":[{"issue_id":"bd-666","depends_on_id":"bd-379","type":"discovered-from","created_at":"2025-10-16T19:04:18.085777-07:00","created_by":"stevey"}]}
|
||||
{"id":"bd-667","title":"Implement bd renumber command to compact issue ID space","description":"After closing many duplicates, we have high issue numbers (bd-666) but only 114 open issues. Add command to renumber all issues sequentially to fill gaps.\n\nBehavior:\n- Renumber all issues starting from 1 (or next available after last closed)\n- Update all references: dependencies, parent-child, discovered-from links\n- Update text references in descriptions, notes, acceptance criteria\n- Preserve chronological order by created_at\n- Show mapping report (old ID -\u003e new ID)\n- Require confirmation or --force flag\n- Export clean JSONL after renumber\n\nBenefits:\n- Cleaner ID space after collision remapping creates gaps\n- More human-friendly issue numbers\n- Easier to track project scope\n\nRisks:\n- Breaking external references (GitHub issues, docs, commits)\n- Complexity of updating all references\n- Git history confusion\n\nConsider: Maybe only renumber open issues, leave closed ones untouched?","status":"open","priority":2,"issue_type":"feature","created_at":"2025-10-16T19:11:16.097068-07:00","updated_at":"2025-10-16T19:11:16.097068-07:00"}
|
||||
{"id":"bd-67","title":"Create version bump script","description":"Create scripts/bump-version.sh to automate version syncing across all components.\n\nThe script should:\n1. Take a version number as argument (e.g., ./scripts/bump-version.sh 0.9.3)\n2. Update all version files:\n - cmd/bd/version.go (Version constant)\n - .claude-plugin/plugin.json (version field)\n - .claude-plugin/marketplace.json (plugins[].version)\n - integrations/beads-mcp/pyproject.toml (version field)\n - README.md (Alpha version mention)\n - PLUGIN.md (version requirements)\n3. Validate semantic versioning format\n4. Show diff preview before applying\n5. Optionally create git commit with standard message\n\nThis prevents the version mismatch issue that occurred when only version.go was updated.\n\nRelated: bd-66 (version sync issue)","status":"closed","priority":2,"issue_type":"task","created_at":"2025-10-14T14:43:06.933094-07:00","updated_at":"2025-10-16T14:33:21.515891-07:00","closed_at":"2025-10-15T03:01:29.58971-07:00","dependencies":[{"issue_id":"bd-67","depends_on_id":"bd-389","type":"parent-child","created_at":"2025-10-16T14:57:49.217301-07:00","created_by":"import-remap"},{"issue_id":"bd-67","depends_on_id":"bd-661","type":"parent-child","created_at":"2025-10-16T18:57:16.818769-07:00","created_by":"import-remap"},{"issue_id":"bd-67","depends_on_id":"bd-378","type":"parent-child","created_at":"2025-10-16T18:57:17.072564-07:00","created_by":"auto-import"}]}
|
||||
|
||||
@@ -437,7 +437,10 @@ func autoImportIfNewer() {
|
||||
}
|
||||
|
||||
// Store new hash after successful import
|
||||
_ = store.SetMetadata(ctx, "last_import_hash", currentHash)
|
||||
if err := store.SetMetadata(ctx, "last_import_hash", currentHash); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: failed to update last_import_hash after import: %v\n", err)
|
||||
fmt.Fprintf(os.Stderr, "This may cause auto-import to retry the same import on next operation.\n")
|
||||
}
|
||||
}
|
||||
|
||||
// checkVersionMismatch checks if the binary version matches the database version
|
||||
@@ -699,7 +702,9 @@ func flushToJSONL() {
|
||||
hasher := sha256.New()
|
||||
hasher.Write(jsonlData)
|
||||
exportedHash := hex.EncodeToString(hasher.Sum(nil))
|
||||
_ = store.SetMetadata(ctx, "last_import_hash", exportedHash)
|
||||
if err := store.SetMetadata(ctx, "last_import_hash", exportedHash); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: failed to update last_import_hash after export: %v\n", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Success!
|
||||
|
||||
Reference in New Issue
Block a user