diff --git a/.beads/beads.jsonl b/.beads/beads.jsonl index ea2a203b..7f655ff5 100644 --- a/.beads/beads.jsonl +++ b/.beads/beads.jsonl @@ -34,7 +34,7 @@ {"id":"bd-1vup","content_hash":"99df92e0d2e6f2998d9ef52c60ae1db55a2e52b84c3e935bf371517e5154ad77","title":"Test FK constraint via close","description":"","status":"closed","priority":4,"issue_type":"task","created_at":"2025-11-07T15:06:10.324045-08:00","updated_at":"2025-11-07T15:06:14.289835-08:00","closed_at":"2025-11-07T15:06:14.289835-08:00","source_repo":"."} {"id":"bd-1vv","content_hash":"1db907ddb55edaf7a4c06a566c4e1b8244fcd9ba5d7e2fca4d5c053e424ac515","title":"Add WebSocket support","description":"## Feature Request\n\n[Describe the desired feature]\n\n## Motivation\n\n[Why is this feature needed? What problem does it solve?]\n\n## Use Cases\n\n1. **Use Case 1**: [description]\n2. **Use Case 2**: [description]\n\n## Proposed Solution\n\n[High-level approach to implementing this feature]\n\n## Alternatives Considered\n\n- **Alternative 1**: [description and why not chosen]\n- **Alternative 2**: [description and why not chosen]\n","design":"## Technical Design\n\n[Detailed technical approach]\n\n## API Changes\n\n[New commands, flags, or APIs]\n\n## Data Model Changes\n\n[Database schema changes if any]\n\n## Implementation Notes\n\n- Note 1\n- Note 2\n\n## Testing Strategy\n\n- Unit tests: [scope]\n- Integration tests: [scope]\n- Manual testing: [steps]\n","acceptance_criteria":"- [ ] Feature implements all described use cases\n- [ ] All tests pass\n- [ ] Documentation updated (README, commands)\n- [ ] Examples added if applicable\n- [ ] No performance regressions\n","status":"open","priority":2,"issue_type":"feature","created_at":"2025-11-03T19:56:41.271215-08:00","updated_at":"2025-11-03T19:56:41.271215-08:00","source_repo":".","labels":["feature"]} {"id":"bd-1yi5","content_hash":"f79a57405ce5e0b0a2edba770937fb86df0b955b568bc066e0673845e33e40d5","title":"Use -short flag in CI for PR 
checks","description":"Update CI configuration to use -short flag for PR checks, run full tests nightly.\n\nThe slow tests already support testing.Short() and will be skipped.\n\nExpected savings: ~20 seconds for PR checks (fast tests only)\n\nImplementation:\n- Update .github/workflows/ci.yml to add -short flag for PR tests\n- Create/update nightly workflow for full test runs\n- Update README/docs about test strategy\n\nFile: .github/workflows/ci.yml:30","status":"closed","priority":1,"issue_type":"task","created_at":"2025-11-04T01:24:17.279618-08:00","updated_at":"2025-11-04T10:25:10.616119-08:00","closed_at":"2025-11-04T10:25:10.616119-08:00","source_repo":".","dependencies":[{"issue_id":"bd-1yi5","depends_on_id":"bd-l5gq","type":"blocks","created_at":"2025-11-04T01:24:17.280453-08:00","created_by":"daemon"}]} -{"id":"bd-23a8","content_hash":"7c54bea4624429ff0842a192489979e0a1eafdd872027a0934b1a1d9b0e80d33","title":"Test simple issue","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-11-02T17:11:04.464726-08:00","updated_at":"2025-11-04T11:10:23.529727-08:00","closed_at":"2025-11-04T11:10:23.529731-08:00","source_repo":".","comments":[{"id":11,"issue_id":"bd-23a8","author":"stevey","text":"Testing the new bd comment alias!","created_at":"2025-11-08T00:03:56Z"},{"id":12,"issue_id":"bd-23a8","author":"stevey","text":"Another test with JSON output","created_at":"2025-11-08T00:03:56Z"},{"id":13,"issue_id":"bd-23a8","author":"stevey","text":"Test comment from file\n","created_at":"2025-11-08T00:03:56Z"}]} +{"id":"bd-23a8","content_hash":"7c54bea4624429ff0842a192489979e0a1eafdd872027a0934b1a1d9b0e80d33","title":"Test simple issue","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-11-02T17:11:04.464726-08:00","updated_at":"2025-11-04T11:10:23.529727-08:00","closed_at":"2025-11-04T11:10:23.529731-08:00","source_repo":"."} 
{"id":"bd-248bdc3e","content_hash":"85c98bac3b48e3cc8466d1b60e4a690fe198c4f795160cf175d7add4691749b5","title":"Add optional post-merge git hook example for bd sync","description":"Create example git hook that auto-runs bd sync after git pull/merge.\n\nAdd to examples/git-hooks/:\n- post-merge hook that checks if .beads/issues.jsonl changed\n- If changed: run `bd sync` automatically\n- Make it optional/documented (not auto-installed)\n\nBenefits:\n- Zero-friction sync after git pull\n- Complements auto-detection as belt-and-suspenders\n\nNote: post-merge hook already exists for pre-commit/post-merge. Extend it to support sync.","status":"closed","priority":2,"issue_type":"task","created_at":"2025-10-25T22:47:14.668842-07:00","updated_at":"2025-11-06T20:06:49.217298-08:00","closed_at":"2025-11-06T19:51:37.787964-08:00","source_repo":"."} {"id":"bd-2530","content_hash":"ac8bc0c4d75082ac76200d092b3c1c476fd1523c9e83b890f829ee3dd7dcbb3d","title":"Issue with labels","description":"This is a description","design":"Use MVC pattern","acceptance_criteria":"All tests pass","status":"closed","priority":0,"issue_type":"feature","created_at":"2025-10-31T21:40:34.630173-07:00","updated_at":"2025-11-01T11:11:57.93151-07:00","closed_at":"2025-11-01T11:11:57.93151-07:00","source_repo":".","labels":["bug","critical"]} {"id":"bd-27ea","content_hash":"6fed2225c017a7f060eef560279cf166c7dd4965657de0c036d6ed5db13803eb","title":"Improve cmd/bd test coverage from 21% to 40% (multi-session effort)","description":"Current coverage: 21.0% of statements in cmd/bd\nTarget: 40%\nThis is a multi-session incremental effort.\n\nFocus areas:\n- Command handler tests (create, update, close, list, etc.)\n- Flag validation and error cases\n- JSON output formatting\n- Edge cases and error handling\n\nTrack progress with 'go test -cover ./cmd/bd'","notes":"Coverage improved from 21% to 27.4% (package) and 42.9% (total function coverage).\n\nAdded tests for:\n- compact.go test coverage (eligibility checks, 
dry run scenarios)\n- epic.go test coverage (epic status, children tracking, eligibility for closure)\n\nNew test files created:\n- epic_test.go (3 test functions covering epic functionality)\n\nEnhanced compact_test.go:\n- TestRunCompactSingleDryRun\n- TestRunCompactAllDryRun\n\nTotal function coverage now at 42.9%, exceeding the 40% target.","status":"closed","priority":0,"issue_type":"task","created_at":"2025-10-31T19:35:57.558346-07:00","updated_at":"2025-11-01T12:23:39.158922-07:00","closed_at":"2025-11-01T12:23:39.158926-07:00","source_repo":"."} @@ -105,7 +105,7 @@ {"id":"bd-5ki8","content_hash":"11b2a25596949b81415ec1d49e6e556bd0e6d5138dce26b6220114fa4de03741","title":"Add integration tests for adapter library","description":"Test suite for beads_mail_adapter.py covering all scenarios.\n\nAcceptance Criteria:\n- Test enabled mode (server available)\n- Test disabled mode (server unavailable)\n- Test graceful degradation (server dies mid-operation)\n- Test reservation conflicts\n- Test message sending/receiving\n- Mock HTTP server for testing\n- 90%+ code coverage\n\nFile: lib/test_beads_mail_adapter.py","status":"open","priority":1,"issue_type":"task","created_at":"2025-11-07T22:43:21.294596-08:00","updated_at":"2025-11-07T22:43:21.294596-08:00","source_repo":".","dependencies":[{"issue_id":"bd-5ki8","depends_on_id":"bd-m9th","type":"blocks","created_at":"2025-11-07T22:43:21.296024-08:00","created_by":"daemon"}]} {"id":"bd-5ohb","content_hash":"cfbea29e024295059516ae7feb4e320a48090a23cb72e669327ad176d66f930e","title":"Issue to reopen with reason","description":"","status":"closed","priority":1,"issue_type":"bug","created_at":"2025-11-07T19:00:18.583903-08:00","updated_at":"2025-11-08T00:20:29.949382-08:00","closed_at":"2025-11-08T00:20:29.949382-08:00","source_repo":"."} {"id":"bd-5ots","content_hash":"ba3efab3e7a2b9bb2bd2dba3aace56cfbdd1b67efd1cfc4758d9c79919f632af","title":"SearchIssues N+1 query causes context timeout with 
GetLabels","description":"scanIssues() calls GetLabels in a loop for every issue, causing N+1 queries and context deadline exceeded errors when used with short timeouts or in-memory databases. This is especially problematic since SearchIssues already supports label filtering via SQL WHERE clauses.","acceptance_criteria":"- Optimize scanIssues to batch-load labels for all issues in one query\n- Or make label loading optional/lazy\n- Add test that calls SearchIssues repeatedly with label filters and short context timeouts","status":"closed","priority":1,"issue_type":"bug","created_at":"2025-11-05T19:12:02.245879-08:00","updated_at":"2025-11-05T19:22:11.668682-08:00","closed_at":"2025-11-05T19:22:11.668682-08:00","source_repo":"."} -{"id":"bd-6049","content_hash":"16c54bc547f4ab180aee39efbb197709a47a39047f5bc2dd59e6e6b57ca8bc87","title":"bd doctor --json flag not working","description":"The --json flag on bd doctor command doesn't produce JSON output. It continues to show human-readable output instead. The flag is registered locally on doctorCmd but the code uses the global jsonOutput variable set by PersistentPreRun. Need to investigate why the flag isn't being honored.","status":"closed","priority":2,"issue_type":"bug","created_at":"2025-11-02T17:08:18.170428-08:00","updated_at":"2025-11-02T18:41:01.376783-08:00","closed_at":"2025-11-02T18:41:01.376786-08:00","source_repo":".","comments":[{"id":9,"issue_id":"bd-6049","author":"stevey","text":"Fixed by removing the local --json flag definition in doctor.go that was shadowing the persistent --json flag from main.go. The doctor command now correctly uses the global jsonOutput variable.","created_at":"2025-11-05T08:44:27Z"}]} +{"id":"bd-6049","content_hash":"16c54bc547f4ab180aee39efbb197709a47a39047f5bc2dd59e6e6b57ca8bc87","title":"bd doctor --json flag not working","description":"The --json flag on bd doctor command doesn't produce JSON output. It continues to show human-readable output instead. 
The flag is registered locally on doctorCmd but the code uses the global jsonOutput variable set by PersistentPreRun. Need to investigate why the flag isn't being honored.","status":"closed","priority":2,"issue_type":"bug","created_at":"2025-11-02T17:08:18.170428-08:00","updated_at":"2025-11-02T18:41:01.376783-08:00","closed_at":"2025-11-02T18:41:01.376786-08:00","source_repo":"."} {"id":"bd-627d","content_hash":"5b3d3d69ceac28dcbfbc2c7ea2f7a6ff2a3a02bc58ce02dcf6b05f8469e8bddc","title":"AI-supervised database migrations for safer schema evolution","description":"## Problem\n\nDatabase migrations can lose user data through edge cases that are hard to anticipate (e.g., GH #201 where bd migrate failed to set issue_prefix, or bd-d355a07d false positive data loss warnings). Since beads is designed to be run by AI agents, we should leverage AI to make migrations safer.\n\n## Current State\n\nMigrations run blindly with:\n- No pre-flight validation\n- No data integrity verification\n- No rollback on failure\n- Limited post-migration testing\n\nRecent issues:\n- GH #201: Migration didn't set issue_prefix config, breaking commands\n- bd-d355a07d: False positive \"data loss\" warnings on collision resolution\n- Users reported migration data loss (fixed but broader problem remains)\n\n## Proposal: AI-Supervised Migration Framework\n\nUse AI to supervise migrations through structured verification:\n\n### 1. Pre-Migration Analysis\n- AI reads migration code and current schema\n- Identifies potential data loss scenarios\n- Generates validation queries to verify assumptions\n- Creates snapshot queries for before/after comparison\n\n### 2. Migration Execution\n- Take database backup/snapshot\n- Run validation queries (pre-state)\n- Execute migration in transaction\n- Run validation queries (post-state)\n\n### 3. 
Post-Migration Verification\n- AI compares pre/post snapshots\n- Verifies data integrity invariants\n- Checks for unexpected data loss\n- Validates config completeness (like issue_prefix)\n\n### 4. Rollback on Anomalies\n- If AI detects data loss, rollback transaction\n- Present human-readable error report\n- Suggest fix before retrying\n\n## Example Flow\n\n```\n$ bd migrate\n\n→ Analyzing migration plan...\n→ AI identified 3 potential data loss scenarios\n→ Generating validation queries...\n→ Creating pre-migration snapshot...\n→ Running migration in transaction...\n→ Verifying post-migration state...\n✓ All 247 issues accounted for\n✓ Config table complete (issue_prefix: \"mcp\")\n✓ Dependencies intact (342 relationships verified)\n→ Migration successful!\n```\n\nIf something goes wrong:\n```\n$ bd migrate\n\n→ Analyzing migration plan...\n→ AI identified issue: Missing issue_prefix config after migration\n→ Recommendation: Add prefix detection step\n→ Aborting migration - database unchanged\n```\n\n## Implementation Ideas\n\n### A. Migration Validator Tool\nCreate `bd migrate --validate` that:\n- Simulates migration on copy of database\n- Uses AI to verify data integrity\n- Reports potential issues before real migration\n\n### B. Migration Test Generator\nAI generates test cases for migrations:\n- Edge cases (empty DB, large DB, missing config)\n- Data integrity checks\n- Regression tests\n\n### C. Migration Invariants\nDefine invariants that AI checks:\n- Issue count should not decrease (unless collision resolution)\n- All required config keys present\n- Foreign key relationships intact\n- No orphaned dependencies\n\n### D. Self-Healing Migrations\nAI detects incomplete migrations and suggests fixes:\n- Missing config values (like GH #201)\n- Orphaned data\n- Index inconsistencies\n\n## Benefits\n\n1. **Catch edge cases**: AI explores scenarios humans miss\n2. **Self-documenting**: AI explains what migration does\n3. 
**Agent-friendly**: Agents can run migrations confidently\n4. **Fewer rollbacks**: Detect issues before committing\n5. **Better testing**: AI generates comprehensive test suites\n\n## Open Questions\n\n1. Which AI model? (Fast: Haiku, Thorough: Sonnet/GPT-4)\n2. How to balance safety vs migration speed?\n3. Should AI validation be required or optional?\n4. How to handle offline scenarios (no API access)?\n5. What invariants should always be checked?\n\n## Related Work\n\n- bd-b245: Migration registry (makes migrations introspectable)\n- GH #201: issue_prefix migration bug (motivating example)\n- bd-d355a07d: False positive data loss warnings","design":"## Architecture: Agent-Supervised Migrations (Inversion of Control)\n\n**Key principle:** Beads provides observability and validation primitives. AI agents supervise using their own reasoning. Beads NEVER makes AI API calls.\n\n## Phase 1: Migration Invariants (Pure Validation)\n\nCreate `internal/storage/sqlite/migration_invariants.go`:\n\n```go\ntype MigrationInvariant struct {\n Name string\n Description string\n Check func(*sql.DB, *Snapshot) error\n}\n\ntype Snapshot struct {\n IssueCount int\n ConfigKeys []string\n DependencyCount int\n LabelCount int\n}\n\nvar invariants = []MigrationInvariant{\n {\n Name: \"required_config_present\",\n Description: \"Required config keys must exist\",\n Check: checkRequiredConfig, // Would have caught GH #201\n },\n {\n Name: \"foreign_keys_valid\",\n Description: \"No orphaned dependencies or labels\",\n Check: checkForeignKeys,\n },\n {\n Name: \"issue_count_stable\",\n Description: \"Issue count should not decrease unexpectedly\",\n Check: checkIssueCount,\n },\n}\n\nfunc checkRequiredConfig(db *sql.DB, snapshot *Snapshot) error {\n required := []string{\"issue_prefix\", \"schema_version\"}\n for _, key := range required {\n var value string\n err := db.QueryRow(\"SELECT value FROM config WHERE key = ?\", key).Scan(\u0026value)\n if err != nil || value == \"\" {\n return 
fmt.Errorf(\"required config key missing: %s\", key)\n }\n }\n return nil\n}\n```\n\n## Phase 2: Dry-Run \u0026 Inspection Tools\n\nAdd `bd migrate --dry-run --json`:\n\n```json\n{\n \"pending_migrations\": [\n {\"name\": \"dirty_issues_table\", \"description\": \"Adds dirty_issues table\"},\n {\"name\": \"content_hash_column\", \"description\": \"Adds content_hash for collision resolution\"}\n ],\n \"current_state\": {\n \"schema_version\": \"0.9.9\",\n \"issue_count\": 247,\n \"config\": {\"schema_version\": \"0.9.9\"},\n \"missing_config\": [\"issue_prefix\"]\n },\n \"warnings\": [\n \"issue_prefix config not set - may break commands after migration\"\n ],\n \"invariants_to_check\": [\n \"required_config_present\",\n \"foreign_keys_valid\",\n \"issue_count_stable\"\n ]\n}\n```\n\nAdd `bd info --schema --json`:\n\n```json\n{\n \"tables\": [\"issues\", \"dependencies\", \"labels\", \"config\"],\n \"schema_version\": \"0.9.9\",\n \"config\": {},\n \"sample_issue_ids\": [\"mcp-1\", \"mcp-2\"],\n \"detected_prefix\": \"mcp\"\n}\n```\n\n## Phase 3: Pre/Post Snapshots with Rollback\n\nUpdate `RunMigrations()`:\n\n```go\nfunc RunMigrations(db *sql.DB) error {\n // Capture pre-migration snapshot\n snapshot := captureSnapshot(db)\n \n // Run migrations in transaction\n tx, err := db.Begin()\n if err != nil {\n return err\n }\n defer tx.Rollback()\n \n for _, migration := range migrations {\n if err := migration.Func(tx); err != nil {\n return fmt.Errorf(\"migration %s failed: %w\", migration.Name, err)\n }\n }\n \n // Verify invariants before commit\n if err := verifyInvariants(tx, snapshot); err != nil {\n return fmt.Errorf(\"post-migration validation failed (rolled back): %w\", err)\n }\n \n return tx.Commit()\n}\n```\n\n## Phase 4: MCP Tools for Agent Supervision\n\nAdd to beads-mcp:\n\n```python\n@server.tool()\nasync def inspect_migration(workspace_root: str) -\u003e dict:\n \"\"\"Get migration plan and current state for agent analysis.\n \n Agent should:\n 1. 
Review pending migrations\n 2. Check for warnings (missing config, etc.)\n 3. Verify invariants will pass\n 4. Decide whether to run bd migrate\n \"\"\"\n result = run_bd([\"migrate\", \"--dry-run\", \"--json\"], workspace_root)\n return json.loads(result.stdout)\n\n@server.tool() \nasync def get_schema_info(workspace_root: str) -\u003e dict:\n \"\"\"Get current database schema for migration analysis.\"\"\"\n result = run_bd([\"info\", \"--schema\", \"--json\"], workspace_root)\n return json.loads(result.stdout)\n```\n\n## Agent Workflow Example\n\n```python\n# Agent detects user wants to migrate\nmigration_plan = inspect_migration(\"/path/to/workspace\")\n\n# Agent analyzes (using its own reasoning, no API calls from beads)\nif \"issue_prefix\" in migration_plan[\"missing_config\"]:\n schema = get_schema_info(\"/path/to/workspace\")\n detected_prefix = schema[\"detected_prefix\"]\n \n # Agent fixes issue before migration\n run_bd([\"config\", \"set\", \"issue_prefix\", detected_prefix])\n \n# Now safe to migrate\nrun_bd([\"migrate\"])\n```\n\n## What Beads Provides\n\n✅ Deterministic validation (invariants)\n✅ Structured inspection (--dry-run, --explain)\n✅ Rollback on invariant failure\n✅ JSON output for agent parsing\n\n## What Beads Does NOT Do\n\n❌ No AI API calls\n❌ No external model access\n❌ No agent invocation\n\nAgents supervise migrations using their own reasoning and the inspection tools beads provides.","acceptance_criteria":"Phase 1: Migration invariants implemented and tested, checked after every migration, clear error messages when invariants fail.\n\nPhase 2: Snapshot capture before migrations, comparison after, rollback on verification failure.\n\nPhase 3 (stretch): AI validation optional flag implemented, AI can analyze migration code and generate custom validation queries.\n\nPhase 4 (stretch): Migration test fixtures created, all fixtures pass migrations, CI runs migration tests.","notes":"## Progress\n\n### ✅ Phase 1: Migration Invariants 
(COMPLETED)\n\n**Implemented:**\n- Created internal/storage/sqlite/migration_invariants.go with 3 invariants\n- Updated RunMigrations() to verify invariants after migrations\n- All tests pass ✓\n\n### ✅ Phase 2: Inspection Tools (COMPLETED \u0026 PUSHED)\n\n**Commit:** 1abe4e7 - \"Add migration inspection tools for AI agents (bd-627d Phase 2)\"\n\n**Implemented:**\n1. ✅ bd migrate --inspect --json - Shows migration plan\n2. ✅ bd info --schema --json - Returns schema details\n3. ✅ Migration warnings system\n4. ✅ Documentation updated in AGENTS.md\n5. ✅ All tests pass\n\n### ✅ Phase 3: MCP Tools (COMPLETED \u0026 PUSHED)\n\n**Commit:** 2493693 - \"Add MCP tools for migration inspection (bd-627d Phase 3)\"\n\n**Implemented:**\n1. ✅ inspect_migration(workspace_root) tool in beads-mcp\n2. ✅ get_schema_info(workspace_root) tool in beads-mcp\n3. ✅ Abstract methods in BdClientBase\n4. ✅ CLI client implementations\n5. ✅ All tests pass\n\n**All phases complete!** Migration inspection fully integrated into MCP server.","status":"closed","priority":1,"issue_type":"epic","created_at":"2025-11-02T12:57:10.722048-08:00","updated_at":"2025-11-02T14:31:25.095296-08:00","closed_at":"2025-11-02T14:31:25.095308-08:00","source_repo":"."} {"id":"bd-62a0","content_hash":"b8b2a58a86211a19aed9d21ec5215b4f14ef341ee95d4ed845e1412840d00fd7","title":"Create WASM build infrastructure (Makefile, scripts)","description":"Set up build tooling for WASM compilation:\n- Add GOOS=js GOARCH=wasm build target\n- Copy wasm_exec.js from Go distribution\n- Create wrapper script for Node.js execution\n- Add build task to Makefile or build 
script","status":"closed","priority":1,"issue_type":"task","created_at":"2025-11-02T21:58:07.286826-08:00","updated_at":"2025-11-02T22:23:49.376789-08:00","closed_at":"2025-11-02T22:23:49.376789-08:00","source_repo":".","dependencies":[{"issue_id":"bd-62a0","depends_on_id":"bd-44d0","type":"parent-child","created_at":"2025-11-02T22:23:49.423064-08:00","created_by":"stevey"}]} {"id":"bd-63e9","content_hash":"7c709804b6d15ce63897344b0674dfae6a4fe97e3ae2768585e2a3407484bad0","title":"Fix Nix flake build test failures","description":"Nix build is failing during test phase with same test errors as Windows.\n\n**Error:**\n```\nerror: Cannot build '/nix/store/rgyi1j44dm6ylrzlg2h3z97axmfq9hzr-beads-0.9.9.drv'.\nReason: builder failed with exit code 1.\nFAIL github.com/steveyegge/beads/cmd/bd 16.141s\n```\n\nThis may be related to test environment setup or the same issues affecting Windows tests.","status":"closed","priority":2,"issue_type":"bug","created_at":"2025-11-02T09:29:37.2851-08:00","updated_at":"2025-11-04T11:10:23.531386-08:00","closed_at":"2025-11-04T11:10:23.531389-08:00","source_repo":".","dependencies":[{"issue_id":"bd-63e9","depends_on_id":"bd-1231","type":"blocks","created_at":"2025-11-02T09:29:37.28618-08:00","created_by":"stevey"}]} @@ -149,7 +149,7 @@ {"id":"bd-85487065","content_hash":"637cbd56af122b175ff060b4df050871fe86124c5d883ba7f8a17f2f95479613","title":"Add tests for internal/autoimport package","description":"Currently 0.0% coverage. Need tests for auto-import functionality that detects and imports updated JSONL files.","status":"open","priority":2,"issue_type":"task","created_at":"2025-10-29T14:06:18.154805-07:00","updated_at":"2025-10-30T17:12:58.182987-07:00","source_repo":"."} {"id":"bd-85d1","content_hash":"a82c0064b840eacb4896f68e73650a3e99aaeaffbb2a7269a857b6c4245b5572","title":"Add integration tests for multi-repo sync","description":"Test: Clone A deletes issue, Clone B imports Clone A's JSONL. 
Verify Clone B handles deletion gracefully with resurrection. Test concurrent imports with same orphans (should be idempotent). Test round-trip fidelity (export→delete parent→import→verify structure).","status":"closed","priority":1,"issue_type":"task","created_at":"2025-11-04T12:32:21.410318-08:00","updated_at":"2025-11-05T00:44:27.948465-08:00","closed_at":"2025-11-05T00:44:27.948467-08:00","source_repo":"."} {"id":"bd-879d","content_hash":"9716c230d9b2793bd1e51d9e3c380c06caf7b3e9a0dd20253764af19e3de7ac8","title":"Test issue 1","description":"","status":"closed","priority":2,"issue_type":"task","created_at":"2025-11-02T09:44:12.538697729Z","updated_at":"2025-11-02T09:45:20.76214671Z","closed_at":"2025-11-02T09:45:20.76214671Z","source_repo":".","dependencies":[{"issue_id":"bd-879d","depends_on_id":"bd-d3e5","type":"discovered-from","created_at":"2025-11-02T09:44:22.103468321Z","created_by":"mrdavidlaing"}]} -{"id":"bd-87a0","content_hash":"b6c322852ff360ade9f0d46bb2af29a7cf3d3acc8b7469dcbb5d98bf48050240","title":"Publish @beads/bd package to npm registry","description":"Publish the npm package to the public npm registry:\n\n## Prerequisites\n- npm account created\n- Organization @beads created (or use different namespace)\n- npm login completed locally\n- Package tested locally (bd-f282 completed)\n\n## Publishing steps\n1. Verify package.json version matches current bd version\n2. Run npm pack and inspect tarball contents\n3. Test installation from tarball one more time\n4. Run npm publish --access public\n5. Verify package appears on https://www.npmjs.com/package/@beads/bd\n6. 
Test installation from registry: npm install -g @beads/bd\n\n## Post-publish\n- Add npm badge to README.md\n- Update CHANGELOG.md with npm package release\n- Announce in release notes\n\n## Note\n- May need to choose different name if @beads namespace unavailable\n- Alternative: beads-cli, bd-cli, or unscoped beads-issue-tracker","status":"closed","priority":1,"issue_type":"task","created_at":"2025-11-02T23:40:25.263569-08:00","updated_at":"2025-11-03T10:39:41.772338-08:00","closed_at":"2025-11-03T10:39:41.772338-08:00","source_repo":".","dependencies":[{"issue_id":"bd-87a0","depends_on_id":"bd-febc","type":"parent-child","created_at":"2025-11-02T23:40:33.014043-08:00","created_by":"daemon"}],"comments":[{"id":10,"issue_id":"bd-87a0","author":"stevey","text":"Package is ready to publish. All code complete and tested locally. Next steps: 1) npm login, 2) create @beads org if needed, 3) npm publish --access public. See npm-package/PUBLISHING.md for complete instructions.","created_at":"2025-11-05T08:44:27Z"}]} +{"id":"bd-87a0","content_hash":"b6c322852ff360ade9f0d46bb2af29a7cf3d3acc8b7469dcbb5d98bf48050240","title":"Publish @beads/bd package to npm registry","description":"Publish the npm package to the public npm registry:\n\n## Prerequisites\n- npm account created\n- Organization @beads created (or use different namespace)\n- npm login completed locally\n- Package tested locally (bd-f282 completed)\n\n## Publishing steps\n1. Verify package.json version matches current bd version\n2. Run npm pack and inspect tarball contents\n3. Test installation from tarball one more time\n4. Run npm publish --access public\n5. Verify package appears on https://www.npmjs.com/package/@beads/bd\n6. 
Test installation from registry: npm install -g @beads/bd\n\n## Post-publish\n- Add npm badge to README.md\n- Update CHANGELOG.md with npm package release\n- Announce in release notes\n\n## Note\n- May need to choose different name if @beads namespace unavailable\n- Alternative: beads-cli, bd-cli, or unscoped beads-issue-tracker","status":"closed","priority":1,"issue_type":"task","created_at":"2025-11-02T23:40:25.263569-08:00","updated_at":"2025-11-03T10:39:41.772338-08:00","closed_at":"2025-11-03T10:39:41.772338-08:00","source_repo":".","dependencies":[{"issue_id":"bd-87a0","depends_on_id":"bd-febc","type":"parent-child","created_at":"2025-11-02T23:40:33.014043-08:00","created_by":"daemon"}]} {"id":"bd-8931","content_hash":"409c16d9e6c83c2bf6cccfa6ee6cb18e1e1eee032b522fb99500bb40f2a05649","title":"Daemon gets stuck when auto-import blocked by git conflicts","description":"CRITICAL: The daemon enters a corrupt state that breaks RPC commands when auto-import is triggered but git pull fails due to uncommitted changes.\n\nImpact: This is a data integrity and usability issue that could cause users to lose trust in Beads. The daemon silently fails for certain commands while appearing healthy.\n\nReproduction:\n1. Make local changes to issues (creates uncommitted .beads/beads.jsonl)\n2. Remote has updates (JSONL newer, triggers auto-import)\n3. Daemon tries to pull but fails: 'cannot pull with rebase: You have unstaged changes'\n4. Daemon enters bad state - 'bd show' and other commands return EOF\n5. 
'bd list' still works, daemon process is running, no errors logged\n\nTechnical details:\n- Auto-import check runs in handleRequest() before processing RPC commands\n- When import is blocked, it appears to corrupt daemon state\n- Likely: deadlock, unclosed transaction, or storage handle corruption\n- Panic recovery (server_lifecycle_conn.go:183) didn't catch anything - not a panic\n\nRequired fix:\n- Auto-import must not block RPC command execution\n- Handle git pull failures gracefully without corrupting state\n- Consider: skip auto-import if git is dirty, queue import for later, or use separate goroutine\n- Add timeout/circuit breaker for import operations\n- Log clear warnings when auto-import is skipped\n\nWithout this fix, users in collaborative environments will frequently encounter mysterious EOF errors that require daemon restarts.","design":"Options to fix:\n\n1. Skip auto-import when git is dirty (safest, simplest)\n - Check git status before pull\n - Log warning and continue without import\n - User must manually import after cleaning git state\n\n2. Async import with timeout (better UX)\n - Run auto-import in background goroutine\n - Don't block RPC command execution\n - Timeout after 5s, log error if stuck\n - Use sync.Once or similar to prevent concurrent imports\n\n3. 
Transactional import with rollback\n - Wrap import in database transaction\n - Rollback if git operations fail\n - Ensure storage is never left in bad state\n\nRecommended: Combine #1 and #2\n- Check git status first, skip if dirty\n- If clean, do async import with timeout\n- Add metrics to track import success/failure rates","status":"closed","priority":0,"issue_type":"bug","created_at":"2025-11-02T17:15:25.181425-08:00","updated_at":"2025-11-03T12:08:12.949061-08:00","closed_at":"2025-11-03T12:08:12.949064-08:00","source_repo":".","dependencies":[{"issue_id":"bd-8931","depends_on_id":"bd-1048","type":"blocks","created_at":"2025-11-02T17:15:25.181857-08:00","created_by":"stevey"}]} {"id":"bd-897a","content_hash":"ae488407bf5e71242535f4c35b59b0981d2b8b338d1701f19acba2c8e93049f0","title":"Add UNIQUE constraint on external_ref column","description":"The external_ref column should have a UNIQUE constraint to prevent multiple issues from having the same external reference. This ensures data integrity when syncing from external systems (Jira, GitHub, Linear).\n\nCurrent behavior:\n- Multiple issues can have the same external_ref\n- GetIssueByExternalRef returns first match (non-deterministic with duplicates)\n\nProposed solution:\n- Add UNIQUE constraint to external_ref column\n- Add migration to check for and resolve existing duplicates\n- Update tests to verify constraint enforcement\n\nRelated: bd-1022","status":"closed","priority":2,"issue_type":"task","created_at":"2025-11-02T15:31:54.718005-08:00","updated_at":"2025-11-02T16:01:45.742666-08:00","closed_at":"2025-11-02T16:01:45.742666-08:00","source_repo":"."} {"id":"bd-89e2","content_hash":"c7da1d6c0d28b2fe9a5a1166a7b6712a870b5271fdf21558e4ef639e2675f92f","title":"Daemon race condition: stale export overwrites recent DB changes","description":"**Symptom:**\nMerged bd-fc2d into bd-fb05 in ~/src/beads (commit ce4d756), pushed to remote. 
The ~/src/fred/beads daemon then exported its stale DB state and committed (8cc1bb4), reverting bd-fc2d back to \"open\" status.\n\n**Timeline:**\n1. 21:45:12 - Merge committed from ~/src/beads (ce4d756): bd-fc2d closed\n2. 21:49:42 - Daemon in ~/src/fred/beads exported stale state (8cc1bb4): bd-fc2d open again\n\n**Root cause:**\nThe fred/beads daemon had a stale database (bd-fc2d still open) and didn't auto-import the newer JSONL before exporting. When it exported, it overwrote the merge with its stale state.\n\n**Expected behavior:**\nDaemon should detect that JSONL is newer than its last export and import before exporting.\n\n**Actual behavior:**\nDaemon exported stale DB state, creating a conflicting commit that reverted upstream changes.\n\n**Impact:**\nMulti-workspace setups with daemons can silently lose changes if one daemon has stale state and exports.","status":"closed","priority":0,"issue_type":"bug","created_at":"2025-11-01T21:53:07.930819-07:00","updated_at":"2025-11-01T22:01:25.54126-07:00","closed_at":"2025-11-01T22:01:25.54126-07:00","source_repo":"."} @@ -349,6 +349,7 @@ {"id":"bd-xo6b","content_hash":"a8f6100ae8d6569c75565d5a1aacbc0e55806fab917399ab473fb212fa694b80","title":"Review multi-repo deletion tracking implementation","description":"Thoroughly review the multi-repo deletion tracking fix (bd-4oob):\n\nFiles changed:\n- cmd/bd/deletion_tracking.go: Added getMultiRepoJSONLPaths() helper\n- cmd/bd/daemon_sync.go: Updated snapshot capture/update logic for multi-repo\n- cmd/bd/deletion_tracking_test.go: Added 2 new tests (287 lines)\n\nReview focus areas:\n1. Correctness: Does getMultiRepoJSONLPaths() handle all edge cases?\n2. Performance: Calling getMultiRepoJSONLPaths() 3x per sync (snapshot capture, merge, base update) - should we cache?\n3. Error handling: What if some repos fail snapshot operations but others succeed?\n4. Race conditions: Multiple daemons in different repos?\n5. 
Test coverage: Are TestMultiRepoDeletionTracking and TestMultiRepoSnapshotIsolation sufficient?\n6. Path handling: Absolute vs relative paths, tilde expansion\n\nThis is fresh code - needs careful review before considering deletion tracking production-ready.","notes":"Code review completed. Overall assessment: Core deletion tracking logic is sound, but error handling and path handling issues make this not yet production-ready for multi-repo scenarios.\n\nKey findings:\n\nCRITICAL ISSUES (Priority 1):\n1. Inconsistent error handling in daemon_sync.go - snapshot/merge fail hard but base update warns. Can leave DB in inconsistent state with no rollback. See bd-sjmr.\n2. No path normalization in getMultiRepoJSONLPaths() - tilde expansion, relative paths, duplicates not handled. See bd-iye7.\n\nSHOULD FIX (Priority 2):\n3. Missing test coverage for edge cases - empty paths, duplicates, partial failures. See bd-kdoh.\n4. Performance - getMultiRepoJSONLPaths() called 3x per sync (minor issue). 
See bd-we4p.\n\nWHAT WORKS WELL:\n- Atomic file operations with PID-based temp files\n- Good snapshot isolation between repos\n- Race condition protection via exclusive locks\n- Solid test coverage for happy path scenarios\n\nVERDICT: Address bd-iye7 and bd-sjmr before considering deletion tracking production-ready for multi-repo mode.\n\nDetailed review notes available in conversation history.","status":"closed","priority":1,"issue_type":"task","created_at":"2025-11-06T19:23:52.402949-08:00","updated_at":"2025-11-06T19:32:34.160341-08:00","closed_at":"2025-11-06T19:32:34.160341-08:00","source_repo":".","dependencies":[{"issue_id":"bd-xo6b","depends_on_id":"bd-rbxi","type":"parent-child","created_at":"2025-11-06T19:23:52.403723-08:00","created_by":"daemon"}]} {"id":"bd-xzrv","content_hash":"bc0097eb1382f92d5f2a80189a9a7da19a4cc2a11ac3beda5af4c93a55692227","title":"Write Agent Mail integration guide","description":"Comprehensive guide for setting up and using Agent Mail with Beads.\n\nAcceptance Criteria:\n- Installation instructions\n- Configuration (environment variables)\n- Architecture diagram\n- Benefits and tradeoffs\n- When to use vs not use\n- Troubleshooting section\n- Migration from git-only mode\n\nFile: docs/AGENT_MAIL.md\n\nSections:\n- Quick start\n- How it works\n- Integration points\n- Graceful degradation\n- Multi-machine deployment\n- FAQ","status":"closed","priority":1,"issue_type":"task","created_at":"2025-11-07T22:42:51.231066-08:00","updated_at":"2025-11-08T00:40:38.798162-08:00","closed_at":"2025-11-08T00:40:38.798162-08:00","source_repo":".","dependencies":[{"issue_id":"bd-xzrv","depends_on_id":"bd-fzbg","type":"blocks","created_at":"2025-11-07T22:42:51.232246-08:00","created_by":"daemon"}]} {"id":"bd-yek6","content_hash":"f155913af8c58c0a7ea3da6a7d9e232e8cb29c3825f2d6f272a5417a449692a9","title":"CLI tests (cli_fast_test.go) are slow and should be integration tests","description":"The TestCLI_* tests in cmd/bd/cli_fast_test.go are taking 4-5 
seconds each (40+ seconds total), making them the slowest part of the fast test suite.\n\nCurrent timings:\n- TestCLI_Import: 4.73s\n- TestCLI_Blocked: 4.33s \n- TestCLI_DepTree: 4.15s\n- TestCLI_Close: 3.59s\n- TestCLI_DepAdd: 3.50s\n- etc.\n\nThese tests compile the bd binary once in init(), but then execute it multiple times per test with filesystem operations. Despite being named \"fast\", they're actually end-to-end CLI integration tests.\n\nOptions:\n1. Tag with //go:build integration (move to integration suite)\n2. Optimize: Use in-memory databases, reduce exec calls, better parallelization\n3. Keep as-is but understand they're the baseline for \"fast\" tests\n\nTotal test suite currently: 13.8s (cmd/bd alone is 12.8s, and most of that is these CLI tests)","notes":"Fixed by reusing existing bd binary from repo root instead of rebuilding.\n\nBefore: 15+ minutes (rebuilding binary for every test package)\nAfter: ~12 seconds (reuses pre-built binary)\n\nThe init() function now checks for ../../bd first before falling back to building. This means `go build \u0026\u0026 go test` is now fast.","status":"closed","priority":2,"issue_type":"task","created_at":"2025-11-05T20:19:12.822543-08:00","updated_at":"2025-11-05T20:31:19.321787-08:00","closed_at":"2025-11-05T20:31:19.321787-08:00","source_repo":"."} +{"id":"bd-yuf7","content_hash":"8f96207ab949593a520f1e0bdab992931e48f39097b77b40837372a7d25cd9b9","title":"bd config set succeeds but doesn't persist to config.toml","description":"Commands like `bd config set daemon.auto_push true` return \"Set daemon.auto_push = true\" but the config file is never created and `bd info --json | jq '.config'` returns null.\n\n**Steps to reproduce:**\n1. Run `bd config set daemon.auto_push true`\n2. See success message: \"Set daemon.auto_push = true\"\n3. Check `cat .beads/config.toml` → file doesn't exist\n4. 
Check `bd info --json | jq '.config'` → returns null\n\n**Expected:**\n- .beads/config.toml should be created with the setting\n- bd info should show the config value\n\n**Impact:**\nUsers can't enable auto-push/auto-commit via CLI as documented in AGENTS.md","status":"closed","priority":1,"issue_type":"bug","created_at":"2025-11-08T01:14:58.726198-08:00","updated_at":"2025-11-08T01:17:41.377912-08:00","closed_at":"2025-11-08T01:17:41.377912-08:00","source_repo":"."} {"id":"bd-z3s3","content_hash":"24d99dc1a9a5f35af962137f5709d4b0f1b6a9ec91511c30a2517d790640cce8","title":"Create deployment scripts for GCP","description":"Automated provisioning scripts for GCP Compute Engine deployment.\n\nAcceptance Criteria:\n- Terraform/gcloud scripts\n- Static IP allocation\n- Firewall rules\n- NGINX reverse proxy config\n- TLS setup (Let's Encrypt)\n- Systemd service file\n\nFile: deployment/agent-mail/gcp/","status":"open","priority":3,"issue_type":"task","created_at":"2025-11-07T22:43:43.294839-08:00","updated_at":"2025-11-07T22:43:43.294839-08:00","source_repo":".","dependencies":[{"issue_id":"bd-z3s3","depends_on_id":"bd-9li4","type":"blocks","created_at":"2025-11-07T23:04:27.982336-08:00","created_by":"daemon"}]} {"id":"bd-z528","content_hash":"3f332e9997d2b7eb0af23885820df5f607fe08671a2615cadec941bbe7d36f68","title":"Prevent test pollution in production database","description":"The bd-vxdr cleanup revealed test issues were created during manual testing in the production workspace (Nov 2-4, template feature development).\n\n**Root cause:** Manual testing with `./bd create \"Test issue\"` pollutes the production .beads database.\n\n**Prevention strategies:**\n1. Use TEST_DB environment variable for manual testing\n2. Add warning when creating issues with \"Test\" prefix\n3. Improve developer docs about testing workflow\n4. Consider adding `bd test-mode` command for isolated testing","notes":"**Implementation completed:**\n\n1. 
✅ Added warning when creating issues with \"Test\" prefix in production database\n - Shows yellow warning with ⚠ symbol\n - Suggests using BEADS_DB for isolated testing\n - Warning appears in create.go after title validation\n\n2. ✅ Documented BEADS_DB testing workflow in AGENTS.md\n - Added \"Testing Workflow\" section in Development Guidelines\n - Includes manual testing examples with BEADS_DB\n - Includes automated testing examples with t.TempDir()\n - Clear warning about not polluting production database\n\n3. ⚠️ Decided against bd test-mode command\n - BEADS_DB already provides simple, flexible isolation\n - Additional command would add complexity without much benefit\n - Current approach follows Unix philosophy (env vars for config)\n\n**Files modified:**\n- cmd/bd/create.go - Added Test prefix warning\n- AGENTS.md - Added Testing Workflow section\n\n**Testing:**\n- Verified warning appears when creating \"Test\" prefix issues\n- Verified BEADS_DB isolation works correctly\n- Built successfully with `go build`","status":"closed","priority":2,"issue_type":"task","created_at":"2025-11-07T16:07:28.255289-08:00","updated_at":"2025-11-07T23:18:08.386514-08:00","closed_at":"2025-11-07T22:43:28.669908-08:00","source_repo":"."} {"id":"bd-zbq2","content_hash":"56dfd7f2c09dafd232a1ae26063744c89a1ba317e50a3429ec19b73ee1402993","title":"bd export should verify JSONL line count matches database count","description":"After export completes, bd should verify that the JSONL file line count matches the number of issues exported. 
This would catch silent failures where the export appears to succeed but doesn't actually write all issues.\n\nReal-world scenario from VC project:\n- Ran direct SQL DELETE to remove 240 issues \n- Ran 'bd export -o .beads/issues.jsonl'\n- No error shown, appeared to succeed\n- But JSONL file was not updated (still had old line count)\n- Later session found all 240 issues still in JSONL\n- Had to repeat the cleanup\n\nIf export had verified line count, it would have immediately shown:\n Error: Export verification failed\n Expected: 276 issues\n JSONL file: 516 lines\n Mismatch indicates export failed to write all issues\n\nThis is especially important because:\n1. JSONL is source of truth in git\n2. Silent export failures cause data inconsistency\n3. Users assume export succeeded if no error shown\n4. The verification is cheap (just count lines)\n\nImplementation:\n- After writing JSONL, count lines in file\n- Compare to len(exportedIDs)\n- If mismatch, remove temp file and return error\n- Show clear error message with both counts","design":"In cmd/bd/export.go, after atomic rename (line ~301):\n\n1. Count lines in final JSONL file:\n - Read file and count newlines\n - Or reuse countIssuesInJSONL() helper (already exists)\n\n2. Compare to len(exportedIDs)\n\n3. If mismatch:\n - Log error with both counts\n - Optionally: remove the bad JSONL file (or leave for debugging?)\n - Return error (exit 1)\n\n4. Consider adding --skip-verify flag for edge cases\n\nEdge cases:\n- Partial line writes (corrupted file)\n- File system issues\n- Race conditions (another process modifying JSONL during export)\n\nThe countIssuesInJSONL() function already exists at line 20, can reuse it.","acceptance_criteria":"1. bd export verifies JSONL line count after write\n2. Clear error shown if mismatch detected\n3. Test case that simulates partial write failure\n4. 
Does not affect export performance significantly (line counting is fast)","status":"closed","priority":2,"issue_type":"feature","created_at":"2025-11-05T14:24:56.278249-08:00","updated_at":"2025-11-05T15:09:41.636141-08:00","closed_at":"2025-11-05T14:31:24.494885-08:00","source_repo":"."} diff --git a/AGENTS.md b/AGENTS.md index fc333892..9bcdd2fb 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -534,7 +534,7 @@ bd sync ```bash # One-time setup - run this in each beads workspace -./examples/git-hooks/install.sh +bd hooks install ``` This installs: @@ -542,11 +542,12 @@ This installs: - **pre-commit** - Flushes pending changes immediately before commit (bypasses 30s debounce) - **post-merge** - Imports updated JSONL after pull/merge (guaranteed sync) - **pre-push** - Exports database to JSONL before push (prevents stale JSONL from reaching remote) +- **post-checkout** - Imports JSONL after branch checkout (ensures consistency) **Why git hooks matter:** Without the pre-push hook, you can have database changes committed locally but stale JSONL pushed to remote, causing multi-workspace divergence. The hooks guarantee DB ↔ JSONL consistency. -See [examples/git-hooks/README.md](examples/git-hooks/README.md) for details. +**Note:** Hooks are embedded in the bd binary and work for all bd users (not just source repo users). 
diff --git a/cmd/bd/hooks.go b/cmd/bd/hooks.go index c1de65e4..c684092f 100644 --- a/cmd/bd/hooks.go +++ b/cmd/bd/hooks.go @@ -2,12 +2,34 @@ package main import ( "bufio" + "embed" + "encoding/json" "fmt" "os" "path/filepath" "strings" + + "github.com/spf13/cobra" ) +//go:embed templates/hooks/* +var hooksFS embed.FS + +func getEmbeddedHooks() (map[string]string, error) { + hooks := make(map[string]string) + hookNames := []string{"pre-commit", "post-merge", "pre-push", "post-checkout"} + + for _, name := range hookNames { + content, err := hooksFS.ReadFile("templates/hooks/" + name) + if err != nil { + return nil, fmt.Errorf("failed to read embedded hook %s: %w", name, err) + } + hooks[name] = string(content) + } + + return hooks, nil +} + const hookVersionPrefix = "# bd-hooks-version: " // HookStatus represents the status of a single git hook @@ -91,12 +113,12 @@ func FormatHookWarnings(statuses []HookStatus) string { if missingCount > 0 { warnings = append(warnings, fmt.Sprintf("⚠️ Git hooks not installed (%d missing)", missingCount)) - warnings = append(warnings, " Run: examples/git-hooks/install.sh") + warnings = append(warnings, " Run: bd hooks install") } if outdatedCount > 0 { warnings = append(warnings, fmt.Sprintf("⚠️ Git hooks are outdated (%d hooks)", outdatedCount)) - warnings = append(warnings, " Run: examples/git-hooks/install.sh") + warnings = append(warnings, " Run: bd hooks install") } if len(warnings) > 0 { @@ -105,3 +127,228 @@ func FormatHookWarnings(statuses []HookStatus) string { return "" } + +// Cobra commands + +var hooksCmd = &cobra.Command{ + Use: "hooks", + Short: "Manage git hooks for bd auto-sync", + Long: `Install, uninstall, or list git hooks that provide automatic bd sync. 
+ +The hooks ensure that: +- pre-commit: Flushes pending changes to JSONL before commit +- post-merge: Imports updated JSONL after pull/merge +- pre-push: Prevents pushing stale JSONL +- post-checkout: Imports JSONL after branch checkout`, +} + +var hooksInstallCmd = &cobra.Command{ + Use: "install", + Short: "Install bd git hooks", + Long: `Install git hooks for automatic bd sync. + +Hooks are installed to .git/hooks/ in the current repository. +Existing hooks are backed up with a .backup suffix. + +Installed hooks: + - pre-commit: Flush changes to JSONL before commit + - post-merge: Import JSONL after pull/merge + - pre-push: Prevent pushing stale JSONL + - post-checkout: Import JSONL after branch checkout`, + Run: func(cmd *cobra.Command, args []string) { + force, _ := cmd.Flags().GetBool("force") + + embeddedHooks, err := getEmbeddedHooks() + if err != nil { + if jsonOutput { + output := map[string]interface{}{ + "error": err.Error(), + } + jsonBytes, _ := json.MarshalIndent(output, "", " ") + fmt.Println(string(jsonBytes)) + } else { + fmt.Fprintf(os.Stderr, "Error loading hooks: %v\n", err) + } + os.Exit(1) + } + + if err := installHooks(embeddedHooks, force); err != nil { + if jsonOutput { + output := map[string]interface{}{ + "error": err.Error(), + } + jsonBytes, _ := json.MarshalIndent(output, "", " ") + fmt.Println(string(jsonBytes)) + } else { + fmt.Fprintf(os.Stderr, "Error installing hooks: %v\n", err) + } + os.Exit(1) + } + + if jsonOutput { + output := map[string]interface{}{ + "success": true, + "message": "Git hooks installed successfully", + } + jsonBytes, _ := json.MarshalIndent(output, "", " ") + fmt.Println(string(jsonBytes)) + } else { + fmt.Println("✓ Git hooks installed successfully") + fmt.Println() + fmt.Println("Installed hooks:") + for hookName := range embeddedHooks { + fmt.Printf(" - %s\n", hookName) + } + } + }, +} + +var hooksUninstallCmd = &cobra.Command{ + Use: "uninstall", + Short: "Uninstall bd git hooks", + Long: `Remove bd git 
hooks from .git/hooks/ directory.`, + Run: func(cmd *cobra.Command, args []string) { + if err := uninstallHooks(); err != nil { + if jsonOutput { + output := map[string]interface{}{ + "error": err.Error(), + } + jsonBytes, _ := json.MarshalIndent(output, "", " ") + fmt.Println(string(jsonBytes)) + } else { + fmt.Fprintf(os.Stderr, "Error uninstalling hooks: %v\n", err) + } + os.Exit(1) + } + + if jsonOutput { + output := map[string]interface{}{ + "success": true, + "message": "Git hooks uninstalled successfully", + } + jsonBytes, _ := json.MarshalIndent(output, "", " ") + fmt.Println(string(jsonBytes)) + } else { + fmt.Println("✓ Git hooks uninstalled successfully") + } + }, +} + +var hooksListCmd = &cobra.Command{ + Use: "list", + Short: "List installed git hooks status", + Long: `Show the status of bd git hooks (installed, outdated, missing).`, + Run: func(cmd *cobra.Command, args []string) { + statuses, err := CheckGitHooks() + if err != nil { + if jsonOutput { + output := map[string]interface{}{ + "error": err.Error(), + } + jsonBytes, _ := json.MarshalIndent(output, "", " ") + fmt.Println(string(jsonBytes)) + } else { + fmt.Fprintf(os.Stderr, "Error checking hooks: %v\n", err) + } + os.Exit(1) + } + + if jsonOutput { + output := map[string]interface{}{ + "hooks": statuses, + } + jsonBytes, _ := json.MarshalIndent(output, "", " ") + fmt.Println(string(jsonBytes)) + } else { + fmt.Println("Git hooks status:") + for _, status := range statuses { + if !status.Installed { + fmt.Printf(" ✗ %s: not installed\n", status.Name) + } else if status.Outdated { + fmt.Printf(" ⚠ %s: installed (version %s, current: %s) - outdated\n", + status.Name, status.Version, Version) + } else { + fmt.Printf(" ✓ %s: installed (version %s)\n", status.Name, status.Version) + } + } + } + }, +} + +func installHooks(embeddedHooks map[string]string, force bool) error { + // Check if .git directory exists + gitDir := ".git" + if _, err := os.Stat(gitDir); os.IsNotExist(err) { + return 
fmt.Errorf("not a git repository (no .git directory found)") + } + + hooksDir := filepath.Join(gitDir, "hooks") + + // Create hooks directory if it doesn't exist + if err := os.MkdirAll(hooksDir, 0755); err != nil { + return fmt.Errorf("failed to create hooks directory: %w", err) + } + + // Install each hook + for hookName, hookContent := range embeddedHooks { + hookPath := filepath.Join(hooksDir, hookName) + + // Check if hook already exists + if _, err := os.Stat(hookPath); err == nil { + // Hook exists - back it up unless force is set + if !force { + backupPath := hookPath + ".backup" + if err := os.Rename(hookPath, backupPath); err != nil { + return fmt.Errorf("failed to backup %s: %w", hookName, err) + } + } + } + + // Write hook file + if err := os.WriteFile(hookPath, []byte(hookContent), 0755); err != nil { + return fmt.Errorf("failed to write %s: %w", hookName, err) + } + } + + return nil +} + +func uninstallHooks() error { + hooksDir := filepath.Join(".git", "hooks") + hookNames := []string{"pre-commit", "post-merge", "pre-push", "post-checkout"} + + for _, hookName := range hookNames { + hookPath := filepath.Join(hooksDir, hookName) + + // Check if hook exists + if _, err := os.Stat(hookPath); os.IsNotExist(err) { + continue + } + + // Remove hook + if err := os.Remove(hookPath); err != nil { + return fmt.Errorf("failed to remove %s: %w", hookName, err) + } + + // Restore backup if exists + backupPath := hookPath + ".backup" + if _, err := os.Stat(backupPath); err == nil { + if err := os.Rename(backupPath, hookPath); err != nil { + // Non-fatal - just warn + fmt.Fprintf(os.Stderr, "Warning: failed to restore backup for %s: %v\n", hookName, err) + } + } + } + + return nil +} + +func init() { + hooksInstallCmd.Flags().Bool("force", false, "Overwrite existing hooks without backup") + + hooksCmd.AddCommand(hooksInstallCmd) + hooksCmd.AddCommand(hooksUninstallCmd) + hooksCmd.AddCommand(hooksListCmd) + + rootCmd.AddCommand(hooksCmd) +} diff --git 
a/cmd/bd/hooks_test.go b/cmd/bd/hooks_test.go new file mode 100644 index 00000000..4754072f --- /dev/null +++ b/cmd/bd/hooks_test.go @@ -0,0 +1,246 @@ +package main + +import ( + "os" + "path/filepath" + "testing" +) + +func TestGetEmbeddedHooks(t *testing.T) { + hooks, err := getEmbeddedHooks() + if err != nil { + t.Fatalf("getEmbeddedHooks() failed: %v", err) + } + + expectedHooks := []string{"pre-commit", "post-merge", "pre-push", "post-checkout"} + for _, hookName := range expectedHooks { + content, ok := hooks[hookName] + if !ok { + t.Errorf("Missing hook: %s", hookName) + continue + } + if len(content) == 0 { + t.Errorf("Hook %s has empty content", hookName) + } + // Verify it's a shell script + if content[:2] != "#!" { + t.Errorf("Hook %s doesn't start with shebang: %s", hookName, content[:50]) + } + } +} + +func TestInstallHooks(t *testing.T) { + // Create temp directory with fake .git + tmpDir := t.TempDir() + gitDir := filepath.Join(tmpDir, ".git", "hooks") + if err := os.MkdirAll(gitDir, 0755); err != nil { + t.Fatalf("Failed to create test git dir: %v", err) + } + + // Change to temp directory + oldWd, _ := os.Getwd() + defer os.Chdir(oldWd) + os.Chdir(tmpDir) + + // Get embedded hooks + hooks, err := getEmbeddedHooks() + if err != nil { + t.Fatalf("getEmbeddedHooks() failed: %v", err) + } + + // Install hooks + if err := installHooks(hooks, false); err != nil { + t.Fatalf("installHooks() failed: %v", err) + } + + // Verify hooks were installed + for hookName := range hooks { + hookPath := filepath.Join(gitDir, hookName) + if _, err := os.Stat(hookPath); os.IsNotExist(err) { + t.Errorf("Hook %s was not installed", hookName) + } + // Check it's executable + info, err := os.Stat(hookPath) + if err != nil { + t.Errorf("Failed to stat %s: %v", hookName, err) + continue + } + if info.Mode()&0111 == 0 { + t.Errorf("Hook %s is not executable", hookName) + } + } +} + +func TestInstallHooksBackup(t *testing.T) { + // Create temp directory with fake .git + tmpDir 
:= t.TempDir() + gitDir := filepath.Join(tmpDir, ".git", "hooks") + if err := os.MkdirAll(gitDir, 0755); err != nil { + t.Fatalf("Failed to create test git dir: %v", err) + } + + // Change to temp directory + oldWd, _ := os.Getwd() + defer os.Chdir(oldWd) + os.Chdir(tmpDir) + + // Create an existing hook + existingHook := filepath.Join(gitDir, "pre-commit") + existingContent := "#!/bin/sh\necho old hook\n" + if err := os.WriteFile(existingHook, []byte(existingContent), 0755); err != nil { + t.Fatalf("Failed to create existing hook: %v", err) + } + + // Get embedded hooks + hooks, err := getEmbeddedHooks() + if err != nil { + t.Fatalf("getEmbeddedHooks() failed: %v", err) + } + + // Install hooks (should backup existing) + if err := installHooks(hooks, false); err != nil { + t.Fatalf("installHooks() failed: %v", err) + } + + // Verify backup was created + backupPath := existingHook + ".backup" + if _, err := os.Stat(backupPath); os.IsNotExist(err) { + t.Errorf("Backup was not created") + } + + // Verify backup has original content + backupContent, err := os.ReadFile(backupPath) + if err != nil { + t.Fatalf("Failed to read backup: %v", err) + } + if string(backupContent) != existingContent { + t.Errorf("Backup content mismatch: got %q, want %q", string(backupContent), existingContent) + } +} + +func TestInstallHooksForce(t *testing.T) { + // Create temp directory with fake .git + tmpDir := t.TempDir() + gitDir := filepath.Join(tmpDir, ".git", "hooks") + if err := os.MkdirAll(gitDir, 0755); err != nil { + t.Fatalf("Failed to create test git dir: %v", err) + } + + // Change to temp directory + oldWd, _ := os.Getwd() + defer os.Chdir(oldWd) + os.Chdir(tmpDir) + + // Create an existing hook + existingHook := filepath.Join(gitDir, "pre-commit") + if err := os.WriteFile(existingHook, []byte("old"), 0755); err != nil { + t.Fatalf("Failed to create existing hook: %v", err) + } + + // Get embedded hooks + hooks, err := getEmbeddedHooks() + if err != nil { + 
t.Fatalf("getEmbeddedHooks() failed: %v", err) + } + + // Install hooks with force (should not create backup) + if err := installHooks(hooks, true); err != nil { + t.Fatalf("installHooks() failed: %v", err) + } + + // Verify no backup was created + backupPath := existingHook + ".backup" + if _, err := os.Stat(backupPath); !os.IsNotExist(err) { + t.Errorf("Backup should not have been created with --force") + } +} + +func TestUninstallHooks(t *testing.T) { + // Create temp directory with fake .git + tmpDir := t.TempDir() + gitDir := filepath.Join(tmpDir, ".git", "hooks") + if err := os.MkdirAll(gitDir, 0755); err != nil { + t.Fatalf("Failed to create test git dir: %v", err) + } + + // Change to temp directory + oldWd, _ := os.Getwd() + defer os.Chdir(oldWd) + os.Chdir(tmpDir) + + // Get embedded hooks and install them + hooks, err := getEmbeddedHooks() + if err != nil { + t.Fatalf("getEmbeddedHooks() failed: %v", err) + } + if err := installHooks(hooks, false); err != nil { + t.Fatalf("installHooks() failed: %v", err) + } + + // Uninstall hooks + if err := uninstallHooks(); err != nil { + t.Fatalf("uninstallHooks() failed: %v", err) + } + + // Verify hooks were removed + hookNames := []string{"pre-commit", "post-merge", "pre-push", "post-checkout"} + for _, hookName := range hookNames { + hookPath := filepath.Join(gitDir, hookName) + if _, err := os.Stat(hookPath); !os.IsNotExist(err) { + t.Errorf("Hook %s was not removed", hookName) + } + } +} + +func TestHooksCheckGitHooks(t *testing.T) { + // Create temp directory with fake .git + tmpDir := t.TempDir() + gitDir := filepath.Join(tmpDir, ".git", "hooks") + if err := os.MkdirAll(gitDir, 0755); err != nil { + t.Fatalf("Failed to create test git dir: %v", err) + } + + // Change to temp directory + oldWd, _ := os.Getwd() + defer os.Chdir(oldWd) + os.Chdir(tmpDir) + + // Initially no hooks installed + statuses, err := CheckGitHooks() + if err != nil { + t.Fatalf("CheckGitHooks() failed: %v", err) + } + + for _, status := 
range statuses { + if status.Installed { + t.Errorf("Hook %s should not be installed initially", status.Name) + } + } + + // Install hooks + hooks, err := getEmbeddedHooks() + if err != nil { + t.Fatalf("getEmbeddedHooks() failed: %v", err) + } + if err := installHooks(hooks, false); err != nil { + t.Fatalf("installHooks() failed: %v", err) + } + + // Check again + statuses, err = CheckGitHooks() + if err != nil { + t.Fatalf("CheckGitHooks() failed: %v", err) + } + + for _, status := range statuses { + if !status.Installed { + t.Errorf("Hook %s should be installed", status.Name) + } + if status.Version != Version { + t.Errorf("Hook %s version mismatch: got %s, want %s", status.Name, status.Version, Version) + } + if status.Outdated { + t.Errorf("Hook %s should not be outdated", status.Name) + } + } +} diff --git a/cmd/bd/templates/hooks/post-checkout b/cmd/bd/templates/hooks/post-checkout new file mode 100755 index 00000000..e40c8197 --- /dev/null +++ b/cmd/bd/templates/hooks/post-checkout @@ -0,0 +1,40 @@ +#!/usr/bin/env bash +# bd-hooks-version: 0.22.1 +# +# Beads post-checkout hook +# Automatically imports JSONL to SQLite database after checking out branches +# +# Install: bd hooks install + +# Arguments provided by git: +# $1 = ref of previous HEAD +# $2 = ref of new HEAD +# $3 = flag (1 if branch checkout, 0 if file checkout) + +# Only run on branch checkouts +if [[ "$3" != "1" ]]; then + exit 0 +fi + +set -e + +# Check if bd is installed +if ! command -v bd &> /dev/null; then + exit 0 +fi + +# Check if issues.jsonl exists +if [[ ! -f .beads/issues.jsonl ]]; then + exit 0 +fi + +# Import issues from JSONL +echo "🔗 Importing beads issues from JSONL..." 
+ +if bd import -i .beads/issues.jsonl 2>/dev/null; then + echo "✓ Beads issues imported successfully" +else + echo "Warning: bd import failed" +fi + +exit 0 diff --git a/cmd/bd/templates/hooks/post-merge b/cmd/bd/templates/hooks/post-merge new file mode 100755 index 00000000..6305f0c1 --- /dev/null +++ b/cmd/bd/templates/hooks/post-merge @@ -0,0 +1,42 @@ +#!/bin/sh +# bd-hooks-version: 0.22.1 +# +# bd (beads) post-merge hook +# +# This hook syncs the bd database after a git pull or merge: +# 1. Checks if any .beads/*.jsonl file was updated +# 2. Runs 'bd sync --import-only' to import changes +# +# Installation: +# cp examples/git-hooks/post-merge .git/hooks/post-merge +# chmod +x .git/hooks/post-merge +# +# Or use the install script: +# examples/git-hooks/install.sh + +# Check if bd is available +if ! command -v bd >/dev/null 2>&1; then + echo "Warning: bd command not found, skipping post-merge sync" >&2 + exit 0 +fi + +# Check if we're in a bd workspace +if [ ! -d .beads ]; then + # Not a bd workspace, nothing to do + exit 0 +fi + +# Check if any JSONL file exists in .beads/ +if ! ls .beads/*.jsonl >/dev/null 2>&1; then + exit 0 +fi + +# Run bd sync --import-only to import the updated JSONL +# This is more robust than direct import as it handles all edge cases +if ! bd sync --import-only >/dev/null 2>&1; then + echo "Warning: Failed to sync bd changes after merge" >&2 + echo "Run 'bd sync --import-only' manually" >&2 + # Don't fail the merge, just warn +fi + +exit 0 diff --git a/cmd/bd/templates/hooks/pre-commit b/cmd/bd/templates/hooks/pre-commit new file mode 100755 index 00000000..94ff3f69 --- /dev/null +++ b/cmd/bd/templates/hooks/pre-commit @@ -0,0 +1,44 @@ +#!/bin/sh +# bd-hooks-version: 0.22.1 +# +# bd (beads) pre-commit hook +# +# This hook ensures that any pending bd issue changes are flushed to +# .beads/beads.jsonl before the commit is created, preventing the +# race condition where daemon auto-flush fires after the commit. 
+# +# Installation: +# cp examples/git-hooks/pre-commit .git/hooks/pre-commit +# chmod +x .git/hooks/pre-commit +# +# Or use the install script: +# examples/git-hooks/install.sh + +# Check if bd is available +if ! command -v bd >/dev/null 2>&1; then + echo "Warning: bd command not found, skipping pre-commit flush" >&2 + exit 0 +fi + +# Check if we're in a bd workspace +if [ ! -d .beads ]; then + # Not a bd workspace, nothing to do + exit 0 +fi + +# Flush pending changes to JSONL +# Use --flush-only to skip git operations (we're already in a git hook) +# Suppress output unless there's an error +if ! bd sync --flush-only >/dev/null 2>&1; then + echo "Error: Failed to flush bd changes to JSONL" >&2 + echo "Run 'bd sync --flush-only' manually to diagnose" >&2 + exit 1 +fi + +# Stage both possible JSONL files (backward compatibility) +# git add is harmless if file doesn't exist +for f in .beads/beads.jsonl .beads/issues.jsonl; do + [ -f "$f" ] && git add "$f" 2>/dev/null || true +done + +exit 0 diff --git a/cmd/bd/templates/hooks/pre-push b/cmd/bd/templates/hooks/pre-push new file mode 100755 index 00000000..55f9ec2b --- /dev/null +++ b/cmd/bd/templates/hooks/pre-push @@ -0,0 +1,62 @@ +#!/bin/sh +# bd-hooks-version: 0.22.1 +# +# bd (beads) pre-push hook +# +# This hook prevents pushing stale JSONL by: +# 1. Flushing any pending in-memory changes to JSONL (if bd available) +# 2. Checking for uncommitted changes (staged, unstaged, untracked, deleted) +# 3. Failing the push with clear instructions if changes found +# +# The pre-commit hook already exports changes, but this catches: +# - Changes made between commit and push +# - Pending debounced flushes (5s daemon delay) +# +# Installation: +# cp examples/git-hooks/pre-push .git/hooks/pre-push +# chmod +x .git/hooks/pre-push +# +# Or use the install script: +# examples/git-hooks/install.sh + +# Check if we're in a bd workspace +if [ ! 
-d .beads ]; then + # Not a bd workspace, nothing to do + exit 0 +fi + +# Optionally flush pending bd changes so they surface in JSONL +# This prevents the race where a debounced flush lands after the check +if command -v bd >/dev/null 2>&1; then + bd sync --flush-only >/dev/null 2>&1 || true +fi + +# Collect all tracked or existing JSONL files (supports both old and new names) +FILES="" +for f in .beads/beads.jsonl .beads/issues.jsonl; do + # Include file if it exists in working tree OR is tracked by git (even if deleted) + if git ls-files --error-unmatch "$f" >/dev/null 2>&1 || [ -f "$f" ]; then + FILES="$FILES $f" + fi +done + +# Check for any uncommitted changes using porcelain status +# This catches: staged, unstaged, untracked, deleted, renamed, and conflicts +if [ -n "$FILES" ]; then + # shellcheck disable=SC2086 + if [ -n "$(git status --porcelain -- $FILES 2>/dev/null)" ]; then + echo "❌ Error: Beads JSONL has uncommitted changes" >&2 + echo "" >&2 + echo "You made changes to bd issues between your last commit and this push." 
>&2 + echo "Please commit the updated JSONL before pushing:" >&2 + echo "" >&2 + # shellcheck disable=SC2086 + echo " git add $FILES" >&2 + echo ' git commit -m "Update bd JSONL"' >&2 + echo " git push" >&2 + echo "" >&2 + exit 1 + fi +fi + +exit 0 diff --git a/examples/git-hooks/post-merge b/examples/git-hooks/post-merge index 676cd251..6305f0c1 100755 --- a/examples/git-hooks/post-merge +++ b/examples/git-hooks/post-merge @@ -1,5 +1,5 @@ #!/bin/sh -# bd-hooks-version: 0.22.0 +# bd-hooks-version: 0.22.1 # # bd (beads) post-merge hook # diff --git a/examples/git-hooks/pre-commit b/examples/git-hooks/pre-commit index 556c5f2a..94ff3f69 100755 --- a/examples/git-hooks/pre-commit +++ b/examples/git-hooks/pre-commit @@ -1,5 +1,5 @@ #!/bin/sh -# bd-hooks-version: 0.22.0 +# bd-hooks-version: 0.22.1 # # bd (beads) pre-commit hook # diff --git a/examples/git-hooks/pre-push b/examples/git-hooks/pre-push index fa61e0a4..55f9ec2b 100755 --- a/examples/git-hooks/pre-push +++ b/examples/git-hooks/pre-push @@ -1,5 +1,5 @@ #!/bin/sh -# bd-hooks-version: 0.22.0 +# bd-hooks-version: 0.22.1 # # bd (beads) pre-push hook #