chore: clean up root directory cruft
- Delete duplicate install.sh (scripts/install.sh is canonical) - Delete BD-3S8-CHANGES.md (implementation now in git history) - Delete .saved-stashes/ (3 obsolete patch files) - Move internal dev docs to docs/dev-notes/: - ERROR_HANDLING_AUDIT.md - MAIN_TEST_CLEANUP_PLAN.md - MAIN_TEST_REFACTOR_NOTES.md - TEST_SUITE_AUDIT.md 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
@@ -1,301 +0,0 @@
|
||||
diff --git a/.beads/issues.jsonl b/.beads/issues.jsonl
|
||||
index 7510ce0..9e1b63a 100644
|
||||
--- a/.beads/issues.jsonl
|
||||
+++ b/.beads/issues.jsonl
|
||||
@@ -68,6 +68,7 @@
|
||||
{"id":"bd-16","title":"Add EXPLAIN QUERY PLAN tests for ready work query","description":"Verify that the hierarchical blocking query uses proper indexes and doesn't do full table scans.\n\n**Queries to analyze:**\n1. The recursive CTE (both base case and recursive case)\n2. The final SELECT with NOT EXISTS\n3. Impact of various filters (status, priority, assignee)\n\n**Implementation:**\nAdd test function that:\n- Runs EXPLAIN QUERY PLAN on GetReadyWork query\n- Parses output to verify no SCAN TABLE operations\n- Documents expected query plan in comments\n- Fails if query plan degrades\n\n**Benefits:**\n- Catch performance regressions in tests\n- Document expected query behavior\n- Ensure indexes are being used\n\nRelated to: bd-77 (composite index on depends_on_id, type)","status":"closed","priority":3,"issue_type":"task","created_at":"2025-10-16T20:46:08.971822-07:00","updated_at":"2025-10-18T18:35:11.755001-07:00","closed_at":"2025-10-18T12:47:44.284846-07:00"}
|
||||
{"id":"bd-160","title":"Fix race condition in TestSocketCleanup","description":"Race condition detected in internal/rpc/rpc_test.go:195 in TestSocketCleanup. This is causing CI test failures.\n\nThe race appears to be between goroutines accessing shared state during server startup/shutdown in the socket cleanup test.\n\nLocation: internal/rpc/rpc_test.go:195\nTest output shows DATA RACE between goroutines 83 and 85.","status":"closed","priority":1,"issue_type":"bug","created_at":"2025-10-19T09:11:34.766584-07:00","updated_at":"2025-10-19T09:14:37.781034-07:00","closed_at":"2025-10-19T09:14:37.781034-07:00"}
|
||||
{"id":"bd-161","title":"Fix SQL timestamp scanning error on macOS (GH-88)","description":"User reported 'Scan error on column index 11, name created_at: unsupported Scan, storing driver.Value type string into type *time.Time' on macOS 13.5. \n\nRoot cause: modernc.org/sqlite driver doesn't recognize mattn-style DSN parameters like _journal_mode=WAL and _foreign_keys=ON. When these are present, it ignores _time_format=sqlite on some platforms, causing DATETIME columns to be returned as strings instead of time.Time.\n\nFix: Use modernc's native _pragma syntax for all options:\n- Changed _journal_mode=WAL to _pragma=journal_mode(WAL)\n- Changed _foreign_keys=ON to _pragma=foreign_keys(ON)\n\nThis ensures _time_format=sqlite is properly recognized and DATETIME columns are automatically parsed to time.Time.","status":"closed","priority":0,"issue_type":"bug","created_at":"2025-10-19T14:35:47.467251-07:00","updated_at":"2025-10-19T14:35:52.806252-07:00","closed_at":"2025-10-19T14:35:52.806252-07:00"}
|
||||
+{"id":"bd-162","title":"Add 'bd comments' command to view and manage issue comments","description":"Add support for commenting on issues with a new 'bd comments' command.\n\nCommands:\n- bd comments \u003cissue-id\u003e # List all comments on an issue\n- bd comments add \u003cissue-id\u003e \"text\" # Add a comment\n- bd comments add \u003cissue-id\u003e -f file.txt # Add comment from file\n- bd comments \u003cissue-id\u003e --json # JSON output for agents\n\nUse cases:\n- Track discussion/decisions on issues\n- Add context without cluttering description\n- Record why work was paused/resumed\n- Multi-person collaboration notes\n- Agent can leave progress updates\n\nImplementation:\n- Add comments table to schema (id, issue_id, author, text, timestamp)\n- Store in JSONL as nested array in issue objects\n- Show chronologically with timestamps\n- Include in 'bd show' output (collapsed by default?)\n\nQuestions:\n- Should comments be editable/deletable?\n- Include author field (env var or git config)?\n- Threading/replies to comments?","status":"in_progress","priority":2,"issue_type":"feature","created_at":"2025-10-19T15:58:07.483312-07:00","updated_at":"2025-10-19T16:01:29.473696-07:00"}
|
||||
{"id":"bd-17","title":"Make auto-flush debounce duration configurable","description":"flushDebounce is hardcoded to 5 seconds. Make it configurable via environment variable BEADS_FLUSH_DEBOUNCE (e.g., '500ms', '10s'). Current 5-second value is reasonable for interactive use, but CI/automated scenarios might want faster flush. Add getDebounceDuration() helper function. Located in cmd/bd/main.go:31.","status":"closed","priority":3,"issue_type":"feature","created_at":"2025-10-16T20:46:08.971822-07:00","updated_at":"2025-10-18T18:35:11.755588-07:00","closed_at":"2025-10-18T09:47:43.22126-07:00"}
|
||||
{"id":"bd-18","title":"Optimize auto-flush to use incremental updates","description":"Every flush exports ALL issues and ALL dependencies, even if only one issue changed. For large projects (1000+ issues), this could be expensive. Current approach guarantees consistency, which is fine for MVP, but future optimization could track which issues changed and use incremental updates. Located in cmd/bd/main.go:255-276.","status":"closed","priority":3,"issue_type":"feature","created_at":"2025-10-16T20:46:08.971822-07:00","updated_at":"2025-10-18T18:35:11.755965-07:00","closed_at":"2025-10-14T02:51:52.200141-07:00"}
|
||||
{"id":"bd-19","title":"Refactor duplicate flush logic in PersistentPostRun","description":"PersistentPostRun contains a complete copy of the flush logic instead of calling flushToJSONL(). This violates DRY principle and makes maintenance harder. Refactor to use flushToJSONL() with a force parameter to bypass isDirty check, or extract shared logic into a helper function. Located in cmd/bd/main.go:104-138.","status":"closed","priority":3,"issue_type":"task","created_at":"2025-10-16T20:46:08.971822-07:00","updated_at":"2025-10-18T18:35:11.756336-07:00","closed_at":"2025-10-18T09:44:24.167574-07:00"}
|
||||
diff --git a/cmd/bd/daemon.go b/cmd/bd/daemon.go
|
||||
index 9ac52bf..1756915 100644
|
||||
--- a/cmd/bd/daemon.go
|
||||
+++ b/cmd/bd/daemon.go
|
||||
@@ -664,6 +664,15 @@ func exportToJSONLWithStore(ctx context.Context, store storage.Storage, jsonlPat
|
||||
issue.Labels = labels
|
||||
}
|
||||
|
||||
+ // Populate comments for all issues
|
||||
+ for _, issue := range issues {
|
||||
+ comments, err := store.GetIssueComments(ctx, issue.ID)
|
||||
+ if err != nil {
|
||||
+ return fmt.Errorf("failed to get comments for %s: %w", issue.ID, err)
|
||||
+ }
|
||||
+ issue.Comments = comments
|
||||
+ }
|
||||
+
|
||||
// Create temp file for atomic write
|
||||
dir := filepath.Dir(jsonlPath)
|
||||
base := filepath.Base(jsonlPath)
|
||||
diff --git a/cmd/bd/import_shared.go b/cmd/bd/import_shared.go
|
||||
index 3ae2cf3..049b8df 100644
|
||||
--- a/cmd/bd/import_shared.go
|
||||
+++ b/cmd/bd/import_shared.go
|
||||
@@ -3,6 +3,7 @@ package main
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
+ "time"
|
||||
|
||||
"github.com/steveyegge/beads/internal/storage"
|
||||
"github.com/steveyegge/beads/internal/storage/sqlite"
|
||||
@@ -275,5 +276,39 @@ func importIssuesCore(ctx context.Context, dbPath string, store storage.Storage,
|
||||
}
|
||||
}
|
||||
|
||||
+ // Phase 7: Import comments
|
||||
+ for _, issue := range issues {
|
||||
+ if len(issue.Comments) == 0 {
|
||||
+ continue
|
||||
+ }
|
||||
+
|
||||
+ // Get current comments to avoid duplicates
|
||||
+ currentComments, err := sqliteStore.GetIssueComments(ctx, issue.ID)
|
||||
+ if err != nil {
|
||||
+ return nil, fmt.Errorf("error getting comments for %s: %w", issue.ID, err)
|
||||
+ }
|
||||
+
|
||||
+ // Build a set of existing comments (by author+text+timestamp)
|
||||
+ existingComments := make(map[string]bool)
|
||||
+ for _, c := range currentComments {
|
||||
+ key := fmt.Sprintf("%s:%s:%s", c.Author, c.Text, c.CreatedAt.Format(time.RFC3339))
|
||||
+ existingComments[key] = true
|
||||
+ }
|
||||
+
|
||||
+ // Add missing comments
|
||||
+ for _, comment := range issue.Comments {
|
||||
+ key := fmt.Sprintf("%s:%s:%s", comment.Author, comment.Text, comment.CreatedAt.Format(time.RFC3339))
|
||||
+ if !existingComments[key] {
|
||||
+ if _, err := sqliteStore.AddIssueComment(ctx, issue.ID, comment.Author, comment.Text); err != nil {
|
||||
+ if opts.Strict {
|
||||
+ return nil, fmt.Errorf("error adding comment to %s: %w", issue.ID, err)
|
||||
+ }
|
||||
+ // Non-strict mode: skip this comment
|
||||
+ continue
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
return result, nil
|
||||
}
|
||||
diff --git a/cmd/bd/main.go b/cmd/bd/main.go
|
||||
index a5288c0..49a0e60 100644
|
||||
--- a/cmd/bd/main.go
|
||||
+++ b/cmd/bd/main.go
|
||||
@@ -1731,17 +1731,19 @@ var showCmd = &cobra.Command{
|
||||
}
|
||||
|
||||
if jsonOutput {
|
||||
- // Include labels and dependencies in JSON output
|
||||
+ // Include labels, dependencies, and comments in JSON output
|
||||
type IssueDetails struct {
|
||||
*types.Issue
|
||||
- Labels []string `json:"labels,omitempty"`
|
||||
- Dependencies []*types.Issue `json:"dependencies,omitempty"`
|
||||
- Dependents []*types.Issue `json:"dependents,omitempty"`
|
||||
+ Labels []string `json:"labels,omitempty"`
|
||||
+ Dependencies []*types.Issue `json:"dependencies,omitempty"`
|
||||
+ Dependents []*types.Issue `json:"dependents,omitempty"`
|
||||
+ Comments []*types.Comment `json:"comments,omitempty"`
|
||||
}
|
||||
details := &IssueDetails{Issue: issue}
|
||||
details.Labels, _ = store.GetLabels(ctx, issue.ID)
|
||||
details.Dependencies, _ = store.GetDependencies(ctx, issue.ID)
|
||||
details.Dependents, _ = store.GetDependents(ctx, issue.ID)
|
||||
+ details.Comments, _ = store.GetIssueComments(ctx, issue.ID)
|
||||
outputJSON(details)
|
||||
return
|
||||
}
|
||||
@@ -1835,6 +1837,15 @@ var showCmd = &cobra.Command{
|
||||
}
|
||||
}
|
||||
|
||||
+ // Show comments
|
||||
+ comments, _ := store.GetIssueComments(ctx, issue.ID)
|
||||
+ if len(comments) > 0 {
|
||||
+ fmt.Printf("\nComments (%d):\n", len(comments))
|
||||
+ for _, comment := range comments {
|
||||
+ fmt.Printf(" [%s at %s]\n %s\n\n", comment.Author, comment.CreatedAt.Format("2006-01-02 15:04"), comment.Text)
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
fmt.Println()
|
||||
},
|
||||
}
|
||||
diff --git a/cmd/bd/sync.go b/cmd/bd/sync.go
|
||||
index a65be6c..051572f 100644
|
||||
--- a/cmd/bd/sync.go
|
||||
+++ b/cmd/bd/sync.go
|
||||
@@ -270,6 +270,15 @@ func exportToJSONL(ctx context.Context, jsonlPath string) error {
|
||||
issue.Labels = labels
|
||||
}
|
||||
|
||||
+ // Populate comments for all issues
|
||||
+ for _, issue := range issues {
|
||||
+ comments, err := store.GetIssueComments(ctx, issue.ID)
|
||||
+ if err != nil {
|
||||
+ return fmt.Errorf("failed to get comments for %s: %w", issue.ID, err)
|
||||
+ }
|
||||
+ issue.Comments = comments
|
||||
+ }
|
||||
+
|
||||
// Create temp file for atomic write
|
||||
dir := filepath.Dir(jsonlPath)
|
||||
base := filepath.Base(jsonlPath)
|
||||
diff --git a/internal/storage/sqlite/schema.go b/internal/storage/sqlite/schema.go
|
||||
index 44d6d57..d390e10 100644
|
||||
--- a/internal/storage/sqlite/schema.go
|
||||
+++ b/internal/storage/sqlite/schema.go
|
||||
@@ -56,6 +56,19 @@ CREATE TABLE IF NOT EXISTS labels (
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_labels_label ON labels(label);
|
||||
|
||||
+-- Comments table
|
||||
+CREATE TABLE IF NOT EXISTS comments (
|
||||
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
+ issue_id TEXT NOT NULL,
|
||||
+ author TEXT NOT NULL,
|
||||
+ text TEXT NOT NULL,
|
||||
+ created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
+ FOREIGN KEY (issue_id) REFERENCES issues(id) ON DELETE CASCADE
|
||||
+);
|
||||
+
|
||||
+CREATE INDEX IF NOT EXISTS idx_comments_issue ON comments(issue_id);
|
||||
+CREATE INDEX IF NOT EXISTS idx_comments_created_at ON comments(created_at);
|
||||
+
|
||||
-- Events table (audit trail)
|
||||
CREATE TABLE IF NOT EXISTS events (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
diff --git a/internal/storage/sqlite/sqlite.go b/internal/storage/sqlite/sqlite.go
|
||||
index 8bd72a2..867f95a 100644
|
||||
--- a/internal/storage/sqlite/sqlite.go
|
||||
+++ b/internal/storage/sqlite/sqlite.go
|
||||
@@ -1716,6 +1716,81 @@ func (s *SQLiteStorage) GetMetadata(ctx context.Context, key string) (string, er
|
||||
return value, err
|
||||
}
|
||||
|
||||
+// AddIssueComment adds a comment to an issue
|
||||
+func (s *SQLiteStorage) AddIssueComment(ctx context.Context, issueID, author, text string) (*types.Comment, error) {
|
||||
+ // Verify issue exists
|
||||
+ var exists bool
|
||||
+ err := s.db.QueryRowContext(ctx, `SELECT EXISTS(SELECT 1 FROM issues WHERE id = ?)`, issueID).Scan(&exists)
|
||||
+ if err != nil {
|
||||
+ return nil, fmt.Errorf("failed to check issue existence: %w", err)
|
||||
+ }
|
||||
+ if !exists {
|
||||
+ return nil, fmt.Errorf("issue %s not found", issueID)
|
||||
+ }
|
||||
+
|
||||
+ // Insert comment
|
||||
+ result, err := s.db.ExecContext(ctx, `
|
||||
+ INSERT INTO comments (issue_id, author, text, created_at)
|
||||
+ VALUES (?, ?, ?, CURRENT_TIMESTAMP)
|
||||
+ `, issueID, author, text)
|
||||
+ if err != nil {
|
||||
+ return nil, fmt.Errorf("failed to insert comment: %w", err)
|
||||
+ }
|
||||
+
|
||||
+ // Get the inserted comment ID
|
||||
+ commentID, err := result.LastInsertId()
|
||||
+ if err != nil {
|
||||
+ return nil, fmt.Errorf("failed to get comment ID: %w", err)
|
||||
+ }
|
||||
+
|
||||
+ // Fetch the complete comment
|
||||
+ comment := &types.Comment{}
|
||||
+ err = s.db.QueryRowContext(ctx, `
|
||||
+ SELECT id, issue_id, author, text, created_at
|
||||
+ FROM comments WHERE id = ?
|
||||
+ `, commentID).Scan(&comment.ID, &comment.IssueID, &comment.Author, &comment.Text, &comment.CreatedAt)
|
||||
+ if err != nil {
|
||||
+ return nil, fmt.Errorf("failed to fetch comment: %w", err)
|
||||
+ }
|
||||
+
|
||||
+ // Mark issue as dirty for JSONL export
|
||||
+ if err := s.MarkIssueDirty(ctx, issueID); err != nil {
|
||||
+ return nil, fmt.Errorf("failed to mark issue dirty: %w", err)
|
||||
+ }
|
||||
+
|
||||
+ return comment, nil
|
||||
+}
|
||||
+
|
||||
+// GetIssueComments retrieves all comments for an issue
|
||||
+func (s *SQLiteStorage) GetIssueComments(ctx context.Context, issueID string) ([]*types.Comment, error) {
|
||||
+ rows, err := s.db.QueryContext(ctx, `
|
||||
+ SELECT id, issue_id, author, text, created_at
|
||||
+ FROM comments
|
||||
+ WHERE issue_id = ?
|
||||
+ ORDER BY created_at ASC
|
||||
+ `, issueID)
|
||||
+ if err != nil {
|
||||
+ return nil, fmt.Errorf("failed to query comments: %w", err)
|
||||
+ }
|
||||
+ defer rows.Close()
|
||||
+
|
||||
+ var comments []*types.Comment
|
||||
+ for rows.Next() {
|
||||
+ comment := &types.Comment{}
|
||||
+ err := rows.Scan(&comment.ID, &comment.IssueID, &comment.Author, &comment.Text, &comment.CreatedAt)
|
||||
+ if err != nil {
|
||||
+ return nil, fmt.Errorf("failed to scan comment: %w", err)
|
||||
+ }
|
||||
+ comments = append(comments, comment)
|
||||
+ }
|
||||
+
|
||||
+ if err := rows.Err(); err != nil {
|
||||
+ return nil, fmt.Errorf("error iterating comments: %w", err)
|
||||
+ }
|
||||
+
|
||||
+ return comments, nil
|
||||
+}
|
||||
+
|
||||
// Close closes the database connection
|
||||
func (s *SQLiteStorage) Close() error {
|
||||
return s.db.Close()
|
||||
diff --git a/internal/storage/storage.go b/internal/storage/storage.go
|
||||
index c928353..e3a8809 100644
|
||||
--- a/internal/storage/storage.go
|
||||
+++ b/internal/storage/storage.go
|
||||
@@ -42,6 +42,10 @@ type Storage interface {
|
||||
AddComment(ctx context.Context, issueID, actor, comment string) error
|
||||
GetEvents(ctx context.Context, issueID string, limit int) ([]*types.Event, error)
|
||||
|
||||
+ // Comments
|
||||
+ AddIssueComment(ctx context.Context, issueID, author, text string) (*types.Comment, error)
|
||||
+ GetIssueComments(ctx context.Context, issueID string) ([]*types.Comment, error)
|
||||
+
|
||||
// Statistics
|
||||
GetStatistics(ctx context.Context) (*types.Statistics, error)
|
||||
|
||||
diff --git a/internal/types/types.go b/internal/types/types.go
|
||||
index bd7fb2c..31184aa 100644
|
||||
--- a/internal/types/types.go
|
||||
+++ b/internal/types/types.go
|
||||
@@ -29,6 +29,7 @@ type Issue struct {
|
||||
OriginalSize int `json:"original_size,omitempty"`
|
||||
Labels []string `json:"labels,omitempty"` // Populated only for export/import
|
||||
Dependencies []*Dependency `json:"dependencies,omitempty"` // Populated only for export/import
|
||||
+ Comments []*Comment `json:"comments,omitempty"` // Populated only for export/import
|
||||
}
|
||||
|
||||
// Validate checks if the issue has valid field values
|
||||
@@ -137,6 +138,15 @@ type Label struct {
|
||||
Label string `json:"label"`
|
||||
}
|
||||
|
||||
+// Comment represents a comment on an issue
|
||||
+type Comment struct {
|
||||
+ ID int64 `json:"id"`
|
||||
+ IssueID string `json:"issue_id"`
|
||||
+ Author string `json:"author"`
|
||||
+ Text string `json:"text"`
|
||||
+ CreatedAt time.Time `json:"created_at"`
|
||||
+}
|
||||
+
|
||||
// Event represents an audit trail entry
|
||||
type Event struct {
|
||||
ID int64 `json:"id"`
|
||||
File diff suppressed because one or more lines are too long
@@ -1,26 +0,0 @@
|
||||
diff --git a/integrations/beads-mcp/src/beads_mcp/tools.py b/integrations/beads-mcp/src/beads_mcp/tools.py
|
||||
index 14f97d2..2d7f44b 100644
|
||||
--- a/integrations/beads-mcp/src/beads_mcp/tools.py
|
||||
+++ b/integrations/beads-mcp/src/beads_mcp/tools.py
|
||||
@@ -51,7 +51,7 @@ async def _get_client() -> BdClientBase:
|
||||
|
||||
_client = create_bd_client(
|
||||
prefer_daemon=use_daemon,
|
||||
- workspace_root=workspace_root
|
||||
+ working_dir=workspace_root
|
||||
)
|
||||
|
||||
# Check version once per server lifetime
|
||||
diff --git a/integrations/beads-mcp/uv.lock b/integrations/beads-mcp/uv.lock
|
||||
index 71c6da6..bfc2196 100644
|
||||
--- a/integrations/beads-mcp/uv.lock
|
||||
+++ b/integrations/beads-mcp/uv.lock
|
||||
@@ -48,7 +48,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "beads-mcp"
|
||||
-version = "0.9.9"
|
||||
+version = "0.9.10"
|
||||
source = { editable = "." }
|
||||
dependencies = [
|
||||
{ name = "fastmcp" },
|
||||
@@ -1,190 +0,0 @@
|
||||
# BD-3S8: Multi-Clone Sync Fix
|
||||
|
||||
## Problem
|
||||
|
||||
When multiple clones of a repository both commit to the `beads-sync` branch and one tries to pull, git's merge would fail due to diverged histories. This made multi-clone workflows unreliable.
|
||||
|
||||
## Solution
|
||||
|
||||
Replace git's commit-level merge with a content-based merge that handles divergence gracefully:
|
||||
|
||||
1. **Fetch** (not pull) from remote
|
||||
2. **Detect divergence** using `git rev-list --left-right --count`
|
||||
3. **Extract JSONL** from merge base, local HEAD, and remote
|
||||
4. **Merge content** using bd's 3-way merge algorithm
|
||||
5. **Reset to remote's history** (adopt their commit graph)
|
||||
6. **Commit merged content** on top
|
||||
|
||||
This ensures sync never fails due to git merge conflicts - we handle merging at the JSONL content level where we have semantic understanding of the data.
|
||||
|
||||
## Changes
|
||||
|
||||
### `internal/syncbranch/worktree.go`
|
||||
|
||||
**New functions:**
|
||||
- `getDivergence()` - Detects how many commits local/remote are ahead/behind
|
||||
- `performContentMerge()` - Extracts and merges JSONL content from base/local/remote
|
||||
- `performDeletionsMerge()` - Merges deletions.jsonl by union (keeps all deletions)
|
||||
- `extractJSONLFromCommit()` - Extracts file content from a specific git commit
|
||||
- `copyJSONLToMainRepo()` - Refactored helper for copying JSONL files
|
||||
- `preemptiveFetchAndFastForward()` - Reduces divergence by fetching before commit
|
||||
|
||||
**Modified functions:**
|
||||
- `PullFromSyncBranch()` - Now handles three cases:
|
||||
- Already up-to-date: Remote has nothing new
|
||||
- Fast-forward: Simple `--ff-only` merge
|
||||
- **Diverged**: Content-based merge (the fix)
|
||||
- `CommitToSyncBranch()` - Now fetches and fast-forwards before committing
|
||||
|
||||
**Enhanced structs:**
|
||||
- `PullResult` - Added `Merged` and `FastForwarded` fields
|
||||
|
||||
### `cmd/bd/sync.go`
|
||||
|
||||
- Updated output messages to show merge type (fast-forward vs merged divergent histories)
|
||||
|
||||
### `internal/syncbranch/worktree_divergence_test.go` (new file)
|
||||
|
||||
Test coverage for:
|
||||
- `getDivergence()` - 4 scenarios
|
||||
- `extractJSONLFromCommit()` - 3 scenarios
|
||||
- `performContentMerge()` - 2 scenarios
|
||||
- `performDeletionsMerge()` - 2 scenarios
|
||||
|
||||
## How It Works
|
||||
|
||||
```
|
||||
Clone A commits and pushes: origin/beads-sync = A -- B -- C
|
||||
Clone B commits locally: local beads-sync = A -- B -- D
|
||||
|
||||
When Clone B syncs:
|
||||
1. Fetch: gets C from origin
|
||||
2. Detect divergence: local ahead 1, remote ahead 1
|
||||
3. Find merge base: B
|
||||
4. Extract: base=B's JSONL, local=D's JSONL, remote=C's JSONL
|
||||
5. Content merge: merge JSONL using 3-way algorithm
|
||||
6. Reset to origin: beads-sync = A -- B -- C
|
||||
7. Commit merged: beads-sync = A -- B -- C -- M (merged content)
|
||||
8. Push: no conflict, linear history
|
||||
```
|
||||
|
||||
## Merge Rules
|
||||
|
||||
The 3-way merge uses these rules (from `internal/merge/merge.go`):
|
||||
|
||||
- **New issues**: Added from both sides
|
||||
- **Deleted issues**: Deletion wins over modification
|
||||
- **Modified issues**: Field-level merge
|
||||
- `status`: "closed" always wins over "open"
|
||||
- `updated_at`: Takes the max (latest)
|
||||
- `closed_at`: Only set if status is "closed"
|
||||
- `dependencies`: Union of both sides
|
||||
- Other fields: Standard 3-way merge
|
||||
|
||||
## Edge Cases Handled
|
||||
|
||||
1. **Remote branch doesn't exist** - Nothing to pull, return early
|
||||
2. **No common ancestor** - Use empty base for merge
|
||||
3. **File doesn't exist in commit** - Use empty content
|
||||
4. **Deletions.jsonl missing** - Non-fatal, skip deletion merge
|
||||
5. **True conflicts** - Currently fails with error (manual resolution required)
|
||||
|
||||
## Future Improvements
|
||||
|
||||
### 1. Auto-Resolve All Conflicts (No Manual Resolution Required)
|
||||
|
||||
Currently, true conflicts (both sides changed same field to different values) fail the sync. This should be changed to auto-resolve deterministically:
|
||||
|
||||
| Field | Auto-Resolution Strategy |
|
||||
|-------|-------------------------|
|
||||
| `updated_at` | Already handled - takes max (latest) |
|
||||
| `closed_at` | Already handled - takes max (latest) |
|
||||
| `status` | Already handled - "closed" wins |
|
||||
| `Priority` | Take higher priority (lower number = more urgent) |
|
||||
| `IssueType` | Take left (local wins) |
|
||||
| `Notes` | **Concatenate both** with separator (preserves all contributions) |
|
||||
| `Title` | Take from side with latest `updated_at` on the issue |
|
||||
| `Description` | Take from side with latest `updated_at` on the issue |
|
||||
|
||||
With this strategy, **no conflicts ever require manual resolution** - there's always a deterministic auto-resolution. The merge driver becomes fully automatic.
|
||||
|
||||
### 2. Auto-Push After Merge (Default Behavior)
|
||||
|
||||
Users shouldn't need to review merge diffs on beads metadata. The goal is "one command that just works":
|
||||
|
||||
```
|
||||
bd sync # Should handle everything, including push
|
||||
```
|
||||
|
||||
**Proposed behavior:**
|
||||
- After successful content merge, auto-push by default
|
||||
- Only hold off on push when unsafe conditions detected
|
||||
|
||||
**Safety checks before auto-push:**
|
||||
1. No conflict markers in JSONL (shouldn't happen with full auto-resolve)
|
||||
2. Issue count sanity check - didn't drop to zero unexpectedly
|
||||
3. Reasonable deletion threshold - didn't delete > N% of issues in one sync
|
||||
|
||||
**The deletions manifest problem:**
|
||||
- In multi-clone environments, deletions from one clone propagate to others
|
||||
- This is correct behavior, but can feel like "corruption" when unexpected
|
||||
- Swarms legitimately close/delete all issues sometimes
|
||||
- Hard to distinguish "swarm finished all work" from "corruption"
|
||||
|
||||
**Proposed safeguards:**
|
||||
- Track whether issues were *closed* (status change) vs *deleted* (removed from JSONL)
|
||||
- Closing all issues = legitimate (swarm finished)
|
||||
- Deleting all issues when there were many = suspicious, pause for confirmation
|
||||
- Config option: `sync.auto_push` (default: true, can set to false for paranoid mode)
|
||||
|
||||
**Integration with bd doctor:**
|
||||
- `bd doctor --fix` should also run this recovery logic
|
||||
- But `bd doctor` is for daily/upgrade maintenance, not inner loop
|
||||
- `bd sync` must handle divergence recovery itself
|
||||
|
||||
**The "one nuclear fix" philosophy:**
|
||||
- `bd sync` should just work 99.9% of the time
|
||||
- Auto-resolve all conflicts
|
||||
- Auto-push when safe
|
||||
- Only fail/pause when genuinely dangerous (mass deletion detected)
|
||||
|
||||
### 3. V1 Implementation Plan
|
||||
|
||||
Keep it simple for the first iteration:
|
||||
|
||||
**Auto-push behavior:**
|
||||
1. After successful content merge, auto-push by default
|
||||
2. One safety check: if issue count dropped by >50% AND there were >5 issues before, log a warning but still push
|
||||
3. Config option `sync.require_confirmation_on_mass_delete` (default: false) for paranoid users who want to be prompted
|
||||
|
||||
**Rationale:**
|
||||
- Logging gives forensics if something goes wrong
|
||||
- Doesn't block the happy path (99.9% of syncs)
|
||||
- Users who've been burned can enable confirmation mode
|
||||
- We can tighten safeguards later based on real-world feedback
|
||||
|
||||
**What "mass deletion" means:**
|
||||
- Issues that **vanished** from `issues.jsonl` (not just closed)
|
||||
- `status=closed` is fine - swarm finished legitimately
|
||||
- Issues disappearing entirely is suspicious
|
||||
|
||||
**Future safeguards (not v1):**
|
||||
- Tombstone TTL: Ignore deletions older than N days
|
||||
- Deletion rate limit: Pause if deletions.jsonl suddenly has 100+ new entries
|
||||
- Protected issues: Certain issues can't be deleted via sync
|
||||
|
||||
---
|
||||
|
||||
## Summary of Work Items
|
||||
|
||||
1. **Already implemented (this PR):**
|
||||
- Content-based merge for diverged histories
|
||||
- Pre-emptive fetch before commit
|
||||
- Deletions.jsonl merge
|
||||
- Fast-forward detection
|
||||
|
||||
2. **Still to implement:**
|
||||
- Auto-resolve all field conflicts (no manual resolution)
|
||||
- Auto-push after merge with safety check
|
||||
- Mass deletion warning/logging
|
||||
- Config option for confirmation mode
|
||||
445
install.sh
445
install.sh
@@ -1,445 +0,0 @@
|
||||
#!/usr/bin/env bash
#
# Beads (bd) installation script
# Usage: curl -fsSL https://raw.githubusercontent.com/steveyegge/beads/main/scripts/install.sh | bash
#
# ⚠️ IMPORTANT: This script must be EXECUTED, never SOURCED
# ❌ WRONG: source install.sh (will exit your shell on errors)
# ✅ CORRECT: bash install.sh
# ✅ CORRECT: curl -fsSL ... | bash
#

# Fail fast on unhandled errors. (-u and pipefail are deliberately omitted:
# the script reads possibly-unset vars such as LAST_INSTALL_PATH, and uses
# curl|grep probes that may legitimately fail mid-pipeline.)
set -e

# ANSI color constants — readonly, they are never reassigned in this script.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m' # No Color
|
||||
|
||||
# Print an informational message (blue "==>" prefix) to stdout.
# %b interprets the backslash escapes in the color constants, matching
# the behavior of 'echo -e'.
log_info() {
    printf '%b\n' "${BLUE}==>${NC} $1"
}
|
||||
|
||||
# Print a success message (green "==>" prefix) to stdout.
# %b interprets the backslash escapes in the color constants, matching
# the behavior of 'echo -e'.
log_success() {
    printf '%b\n' "${GREEN}==>${NC} $1"
}
|
||||
|
||||
# Print a warning message (yellow "==>" prefix) to stdout.
# %b interprets the backslash escapes in the color constants, matching
# the behavior of 'echo -e'.
log_warning() {
    printf '%b\n' "${YELLOW}==>${NC} $1"
}
|
||||
|
||||
# Print an error message (red "Error:" prefix) to STDERR.
# %b interprets the backslash escapes in the color constants, matching
# the behavior of 'echo -e'.
log_error() {
    printf '%b\n' "${RED}Error:${NC} $1" >&2
}
|
||||
|
||||
# Detect OS and architecture.
# Prints a "<os>_<arch>" platform string (e.g. "linux_amd64", "darwin_arm64")
# on stdout; exits the script with status 1 on an unsupported system.
detect_platform() {
    local kernel machine os arch
    kernel=$(uname -s)
    machine=$(uname -m)

    case "$kernel" in
        Darwin) os="darwin" ;;
        Linux)  os="linux" ;;
        *)
            log_error "Unsupported operating system: $kernel"
            exit 1
            ;;
    esac

    case "$machine" in
        x86_64|amd64)  arch="amd64" ;;
        aarch64|arm64) arch="arm64" ;;
        *)
            log_error "Unsupported architecture: $machine"
            exit 1
            ;;
    esac

    echo "${os}_${arch}"
}
|
||||
|
||||
# Download and install from GitHub releases.
# Arguments:
#   $1 - platform string, e.g. "linux_amd64" (output of detect_platform)
# Resolves the latest release tag via the GitHub API, downloads and extracts
# the matching tarball in a temp dir, then installs the 'bd' binary to
# /usr/local/bin (if writable) or ~/.local/bin. Uses sudo only when the
# target directory is not writable. Returns 0 on success, 1 on failure.
install_from_release() {
    log_info "Installing bd from GitHub releases..."

    local platform=$1
    local tmp_dir
    tmp_dir=$(mktemp -d)

    # Get latest release version
    log_info "Fetching latest release..."
    local latest_url="https://api.github.com/repos/steveyegge/beads/releases/latest"
    local version

    if command -v curl &> /dev/null; then
        version=$(curl -fsSL "$latest_url" | grep '"tag_name"' | sed -E 's/.*"tag_name": "([^"]+)".*/\1/')
    elif command -v wget &> /dev/null; then
        version=$(wget -qO- "$latest_url" | grep '"tag_name"' | sed -E 's/.*"tag_name": "([^"]+)".*/\1/')
    else
        log_error "Neither curl nor wget found. Please install one of them."
        rm -rf "$tmp_dir"  # BUGFIX: this early return previously leaked the temp dir
        return 1
    fi

    if [ -z "$version" ]; then
        log_error "Failed to fetch latest version"
        rm -rf "$tmp_dir"  # BUGFIX: this early return previously leaked the temp dir
        return 1
    fi

    log_info "Latest version: $version"

    # Download URL (release assets strip the leading 'v' from the tag)
    local archive_name="beads_${version#v}_${platform}.tar.gz"
    local download_url="https://github.com/steveyegge/beads/releases/download/${version}/${archive_name}"

    log_info "Downloading $archive_name..."

    cd "$tmp_dir"
    if command -v curl &> /dev/null; then
        if ! curl -fsSL -o "$archive_name" "$download_url"; then
            log_error "Download failed"
            cd - > /dev/null || cd "$HOME"
            rm -rf "$tmp_dir"
            return 1
        fi
    elif command -v wget &> /dev/null; then
        if ! wget -q -O "$archive_name" "$download_url"; then
            log_error "Download failed"
            cd - > /dev/null || cd "$HOME"
            rm -rf "$tmp_dir"
            return 1
        fi
    fi

    # Extract archive
    log_info "Extracting archive..."
    if ! tar -xzf "$archive_name"; then
        log_error "Failed to extract archive"
        # BUGFIX: cd out of the temp dir before deleting it; the original
        # removed the current working directory here, leaving the shell
        # (and the caller's fallback install paths) in a deleted CWD.
        cd - > /dev/null || cd "$HOME"
        rm -rf "$tmp_dir"
        return 1
    fi

    # Determine install location: system-wide when writable, else per-user.
    local install_dir
    if [[ -w /usr/local/bin ]]; then
        install_dir="/usr/local/bin"
    else
        install_dir="$HOME/.local/bin"
        mkdir -p "$install_dir"
    fi

    # Install binary
    log_info "Installing to $install_dir..."
    if [[ -w "$install_dir" ]]; then
        mv bd "$install_dir/"
    else
        sudo mv bd "$install_dir/"
    fi

    log_success "bd installed to $install_dir/bd"

    # Check if install_dir is in PATH
    if [[ ":$PATH:" != *":$install_dir:"* ]]; then
        log_warning "$install_dir is not in your PATH"
        echo ""
        echo "Add this to your shell profile (~/.bashrc, ~/.zshrc, etc.):"
        echo " export PATH=\"\$PATH:$install_dir\""
        echo ""
    fi

    cd - > /dev/null || cd "$HOME"
    rm -rf "$tmp_dir"
    return 0
}
|
||||
|
||||
# Check if Go is installed and meets the minimum version (1.24).
# Returns 0 when a suitable Go toolchain is on PATH; returns 1 when Go is
# missing, or prints upgrade guidance and returns 1 when it is too old.
check_go() {
    # Guard clause: no Go at all.
    if ! command -v go &> /dev/null; then
        return 1
    fi

    # Split declaration from assignment so the command substitution's exit
    # status is not masked by 'local' (which always succeeds).
    local go_version
    go_version=$(go version | awk '{print $3}' | sed 's/go//')
    log_info "Go detected: $(go version)"

    # Extract major and minor version numbers (e.g. "1.24.1" -> 1, 24).
    local major minor
    major=$(echo "$go_version" | cut -d. -f1)
    minor=$(echo "$go_version" | cut -d. -f2)

    # Only Go 1.x older than 1.24 is rejected; any later major passes.
    if [ "$major" -eq 1 ] && [ "$minor" -lt 24 ]; then
        log_error "Go 1.24 or later is required (found: $go_version)"
        echo ""
        echo "Please upgrade Go:"
        echo " - Download from https://go.dev/dl/"
        echo " - Or use your package manager to update"
        echo ""
        return 1
    fi

    return 0
}
|
||||
|
||||
# Install bd using 'go install' (fallback when no prebuilt release works).
# On success, records the expected binary location in the global
# LAST_INSTALL_PATH (read later by warn_if_multiple_bd) and warns when the
# Go bin directory is not on PATH. Returns 0 on success, 1 on failure.
install_with_go() {
    log_info "Installing bd using 'go install'..."

    # Guard clause keeps the happy path unindented.
    if ! go install github.com/steveyegge/beads/cmd/bd@latest; then
        log_error "go install failed"
        return 1
    fi

    log_success "bd installed successfully via go install"

    # Record where we expect the binary to have been installed:
    # prefer GOBIN if set, otherwise GOPATH/bin.
    # BUGFIX: 'bin_dir' was an accidental global; it is now function-local.
    local gobin bin_dir
    gobin=$(go env GOBIN 2>/dev/null || true)
    if [ -n "$gobin" ]; then
        bin_dir="$gobin"
    else
        bin_dir="$(go env GOPATH)/bin"
    fi
    LAST_INSTALL_PATH="$bin_dir/bd"

    # Check if GOPATH/bin (or GOBIN) is in PATH
    if [[ ":$PATH:" != *":$bin_dir:"* ]]; then
        log_warning "$bin_dir is not in your PATH"
        echo ""
        echo "Add this to your shell profile (~/.bashrc, ~/.zshrc, etc.):"
        echo " export PATH=\"\$PATH:$bin_dir\""
        echo ""
    fi

    return 0
}
|
||||
|
||||
# Build from source (last resort). Requires git and a Go toolchain.
# Clones the repo into a temp dir, builds 'bd', installs it to
# /usr/local/bin (if writable) or ~/.local/bin, and records the location in
# the global LAST_INSTALL_PATH. Returns 0 on success, 1 on failure.
build_from_source() {
    log_info "Building bd from source..."

    # Remember where we started so every exit path can return there before
    # deleting the temp tree. BUGFIX: the original used 'cd -', which after
    # 'cd "$tmp_dir"; cd beads' points OLDPWD *inside* the temp tree — the
    # build-failure path even ran 'cd -' twice — so 'rm -rf "$tmp_dir"'
    # deleted the shell's current working directory.
    local start_dir tmp_dir
    start_dir=$(pwd)
    tmp_dir=$(mktemp -d)

    cd "$tmp_dir"
    log_info "Cloning repository..."

    if ! git clone --depth 1 https://github.com/steveyegge/beads.git; then
        log_error "Failed to clone repository"
        cd "$start_dir" || cd "$HOME"
        rm -rf "$tmp_dir"
        return 1
    fi

    cd beads
    log_info "Building binary..."

    if ! go build -o bd ./cmd/bd; then
        log_error "Build failed"
        cd "$start_dir" || cd "$HOME"
        rm -rf "$tmp_dir"
        return 1
    fi

    # Determine install location: system-wide when writable, else per-user.
    local install_dir
    if [[ -w /usr/local/bin ]]; then
        install_dir="/usr/local/bin"
    else
        install_dir="$HOME/.local/bin"
        mkdir -p "$install_dir"
    fi

    log_info "Installing to $install_dir..."
    if [[ -w "$install_dir" ]]; then
        mv bd "$install_dir/"
    else
        sudo mv bd "$install_dir/"
    fi

    log_success "bd installed to $install_dir/bd"

    # Record where we installed the binary when building from source
    LAST_INSTALL_PATH="$install_dir/bd"

    # Check if install_dir is in PATH
    if [[ ":$PATH:" != *":$install_dir:"* ]]; then
        log_warning "$install_dir is not in your PATH"
        echo ""
        echo "Add this to your shell profile (~/.bashrc, ~/.zshrc, etc.):"
        echo " export PATH=\"\$PATH:$install_dir\""
        echo ""
    fi

    cd "$start_dir" || cd "$HOME"
    rm -rf "$tmp_dir"
    return 0
}
|
||||
|
||||
# Verify installation: confirm 'bd' resolves on PATH and print a
# getting-started banner. Returns 0 when 'bd' is runnable, 1 otherwise.
verify_installation() {
    # If multiple 'bd' binaries exist on PATH, warn the user before verification.
    # '|| true' keeps a warning-path failure from aborting under 'set -e'.
    warn_if_multiple_bd || true

    if command -v bd &> /dev/null; then
        log_success "bd is installed and ready!"
        echo ""
        # Builds without a working 'version' subcommand fall back to a label.
        bd version 2>/dev/null || echo "bd (development build)"
        echo ""
        echo "Get started:"
        echo " cd your-project"
        echo " bd init"
        echo " bd quickstart"
        echo ""
        return 0
    else
        log_error "bd was installed but is not in PATH"
        return 1
    fi
}
|
||||
|
||||
# Print the full path of every distinct 'bd' executable found on PATH,
# one per line, in PATH order (earlier entries take precedence).
# Symlinks are resolved with 'readlink -f' when available so the same
# binary reachable via two PATH entries is reported only once.
get_bd_paths_in_path() {
    local IFS=':'
    local -a entries found
    read -ra entries <<< "$PATH"
    # BUGFIX: resolved/skip/existing/item leaked into the global scope;
    # all scratch variables are now function-local.
    local p resolved skip existing
    for p in "${entries[@]}"; do
        [ -z "$p" ] && continue
        [ -x "$p/bd" ] || continue
        # Resolve symlink if possible
        if command -v readlink >/dev/null 2>&1; then
            resolved=$(readlink -f "$p/bd" 2>/dev/null || printf '%s' "$p/bd")
        else
            resolved="$p/bd"
        fi
        # avoid duplicates
        skip=0
        for existing in ${found[@]+"${found[@]}"}; do
            if [ "$existing" = "$resolved" ]; then skip=1; break; fi
        done
        if [ "$skip" -eq 0 ]; then
            found+=("$resolved")
        fi
    done
    # Print results, one per line. BUGFIX: the original's "${found[@]:-}"
    # expanded to a single empty word when nothing was found, emitting a
    # spurious blank line; print nothing in that case instead.
    if [ "${#found[@]}" -gt 0 ]; then
        printf '%s\n' "${found[@]}"
    fi
}
|
||||
|
||||
# Warn when more than one 'bd' executable is reachable via PATH, listing
# each copy (with its reported version) and explaining how to make the
# freshly installed binary win. Reads the global LAST_INSTALL_PATH set by
# install_with_go / build_from_source; silently returns 0 when at most one
# 'bd' is found.
# NOTE(review): bd_paths and first are not declared 'local' and leak into
# the global scope — presumably harmless here, but verify nothing relies on it.
warn_if_multiple_bd() {
  # Use bash 3.2-compatible approach instead of mapfile (bash 4.0+)
  bd_paths=()
  while IFS= read -r line; do
    bd_paths+=("$line")
  done < <(get_bd_paths_in_path)
  if [ "${#bd_paths[@]}" -le 1 ]; then
    return 0
  fi

  log_warning "Multiple 'bd' executables found on your PATH. An older copy may be executed instead of the one we installed."
  echo "Found the following 'bd' executables (entries earlier in PATH take precedence):"
  local i=1
  for p in "${bd_paths[@]}"; do
    # NOTE(review): re-running 'local ver' on an already-local variable does
    # not reset it, so a stale version string could carry over if the -x
    # check below ever fails — confirm whether that path is reachable.
    local ver
    if [ -x "$p" ]; then
      # '|| true' tolerates binaries without a 'version' subcommand.
      ver=$("$p" version 2>/dev/null || true)
    fi
    if [ -z "$ver" ]; then ver="<unknown version>"; fi
    echo " $i. $p -> $ver"
    i=$((i+1))
  done

  # Only the go-install and source-build paths set LAST_INSTALL_PATH, so it
  # may legitimately be empty after a release-tarball install.
  if [ -n "$LAST_INSTALL_PATH" ]; then
    echo ""
    echo "We installed to: $LAST_INSTALL_PATH"
    # Compare first PATH entry vs installed path
    first="${bd_paths[0]}"
    if [ "$first" != "$LAST_INSTALL_PATH" ]; then
      log_warning "The 'bd' executable that appears first in your PATH is different from the one we installed. To make the newly installed 'bd' the one you get when running 'bd', either:"
      echo " - Remove or rename the older $first from your PATH, or"
      echo " - Reorder your PATH so that $(dirname "$LAST_INSTALL_PATH") appears before $(dirname "$first")"
      echo "After updating PATH, restart your shell and run 'bd version' to confirm."
    else
      echo "The installed 'bd' is first in your PATH.";
    fi
  else
    log_warning "We couldn't determine where we installed 'bd' during this run.";
  fi
}
|
||||
|
||||
# Main installation flow. Tries install methods in order of preference —
# (1) prebuilt GitHub release, (2) 'go install', (3) build from source —
# verifying and exiting 0 after the first success. Exits 1 with manual
# installation guidance when every method fails.
main() {
  echo ""
  echo "🔗 Beads (bd) Installer"
  echo ""

  log_info "Detecting platform..."
  local platform
  platform=$(detect_platform)
  log_info "Platform: $platform"

  # Try downloading from GitHub releases first
  if install_from_release "$platform"; then
    verify_installation
    exit 0
  fi

  log_warning "Failed to install from releases, trying alternative methods..."

  # Try go install as fallback (only when a suitable Go toolchain exists)
  if check_go; then
    if install_with_go; then
      verify_installation
      exit 0
    fi
  fi

  # Try building from source as last resort
  log_warning "Falling back to building from source..."

  # Source build also needs Go; without it, explain how to get it and stop.
  if ! check_go; then
    log_warning "Go is not installed"
    echo ""
    echo "bd requires Go 1.24 or later to build from source. You can:"
    echo " 1. Install Go from https://go.dev/dl/"
    echo " 2. Use your package manager:"
    echo " - macOS: brew install go"
    echo " - Ubuntu/Debian: sudo apt install golang"
    echo " - Other Linux: Check your distro's package manager"
    echo ""
    echo "After installing Go, run this script again."
    exit 1
  fi

  if build_from_source; then
    verify_installation
    exit 0
  fi

  # All methods failed
  log_error "Installation failed"
  echo ""
  echo "Manual installation:"
  echo " 1. Download from https://github.com/steveyegge/beads/releases/latest"
  echo " 2. Extract and move 'bd' to your PATH"
  echo ""
  echo "Or install from source:"
  echo " 1. Install Go from https://go.dev/dl/"
  echo " 2. Run: go install github.com/steveyegge/beads/cmd/bd@latest"
  echo ""
  exit 1
}

main "$@"
|
||||
|
||||
Reference in New Issue
Block a user