Remove spurious and obsolete scripts

Cleaned up scripts/ directory by removing:
- Investigation/one-off tools (collision-calculator, latency benchmarks)
- Test cleanup utilities (cleanup-test-pollution)
- Nix packaging support (update-nix-hash)
- Agent Mail server management scripts (4 scripts, will be replaced)

Retains only actively-used scripts:
- release.sh, bump-version.sh, update-homebrew.sh
- install.sh, install-hooks.sh, hooks/

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
Steve Yegge
2025-11-20 22:02:50 -05:00
parent 57253f93a3
commit f4fbf6f8c1
9 changed files with 0 additions and 659 deletions

View File

@@ -1,70 +0,0 @@
#!/bin/bash
# View Agent Mail server status and active projects
# Usage: ./agent-mail-status.sh
#
# Environment:
#   AGENT_MAIL_PORT  server port (default: 8765)
#   AGENT_MAIL_PID   PID file path (default: $HOME/agent-mail.pid)
#
# Exits 1 when the health endpoint is unreachable; other fetch failures
# are reported inline but do not abort the status report.

PORT="${AGENT_MAIL_PORT:-8765}"
URL="http://127.0.0.1:$PORT"
PID_FILE="${AGENT_MAIL_PID:-$HOME/agent-mail.pid}"

echo "=== Agent Mail Status ==="
echo ""

# Check PID file: running process, stale file, or no file at all.
if [[ -f "$PID_FILE" ]]; then
  PID=$(cat "$PID_FILE")
  if ps -p "$PID" > /dev/null 2>&1; then
    echo "Server Process: ✅ Running (PID: $PID)"
  else
    echo "Server Process: ⚠️ Stale PID file (process $PID not found)"
  fi
else
  echo "Server Process: ⚠️ No PID file found"
fi

# Check health endpoint
echo ""
echo "Server Health:"
if HEALTH=$(curl -sf "$URL/health" 2>/dev/null); then
  # Fix: quote "$HEALTH" so jq receives the JSON intact — the original
  # `echo $HEALTH` word-split and glob-expanded the response body.
  echo "$HEALTH" | jq -r '.status // "OK"'
  echo " URL: $URL"
else
  echo " ❌ UNREACHABLE at $URL"
  echo ""
  echo "Start server with:"
  echo " ./scripts/start-agent-mail-server.sh"
  exit 1
fi

# List projects
echo ""
echo "Active Projects:"
if PROJECTS=$(curl -sf "$URL/api/projects" 2>/dev/null); then
  if [[ $(echo "$PROJECTS" | jq -r '. | length') -eq 0 ]]; then
    echo " (none yet)"
  else
    echo "$PROJECTS" | jq -r '.[] | " • \(.slug)\n Path: \(.human_key)"'
  fi
else
  echo " (failed to fetch)"
fi

# List reservations
echo ""
echo "Active File Reservations:"
if RESERVATIONS=$(curl -sf "$URL/api/file_reservations" 2>/dev/null); then
  if [[ $(echo "$RESERVATIONS" | jq -r '. | length') -eq 0 ]]; then
    echo " (none)"
  else
    echo "$RESERVATIONS" | jq -r '.[] | " • \(.agent_name) → \(.resource_id)\n Project: \(.project_id)\n Expires: \(.expires_at)"'
  fi
else
  echo " (failed to fetch)"
fi

echo ""
echo "Web UI: $URL/mail"
echo ""
echo "Commands:"
echo " Start: ./scripts/start-agent-mail-server.sh"
echo " Stop: ./scripts/stop-agent-mail-server.sh"
echo " Logs: tail -f $HOME/agent-mail.log"

View File

@@ -1,143 +0,0 @@
#!/bin/bash
# Benchmark Agent Mail vs Git Sync latency
# Part of bd-htfk investigation
#
# Writes a markdown report to latency_benchmark_results.md and echoes it.
set -e

RESULTS_FILE="latency_benchmark_results.md"
AGENT_MAIL_URL="http://127.0.0.1:8765"
# `|| true`: under `set -e` a missing .env (or absent BEARER_TOKEN key)
# previously aborted the whole script before the git-sync benchmark ran.
# An empty token only disables the Agent Mail half, which is skipped
# anyway when the server is down.
BEARER_TOKEN=$(grep BEARER_TOKEN ~/src/mcp_agent_mail/.env 2>/dev/null | cut -d= -f2 | tr -d '"' || true)

echo "# Latency Benchmark: Agent Mail vs Git Sync" > "$RESULTS_FILE"
echo "" >> "$RESULTS_FILE"
echo "Date: $(date)" >> "$RESULTS_FILE"
echo "" >> "$RESULTS_FILE"

# Append p50/p95/p99 statistics to RESULTS_FILE.
# Arguments: $1 = iteration count (heading only), $2.. = latency samples (ms).
# Extracted helper: both measure_* functions previously duplicated this block.
write_stats() {
  local iterations=$1
  shift
  local sorted=()
  # mapfile avoids the fragile IFS-assignment/unquoted-expansion idiom.
  mapfile -t sorted < <(printf '%s\n' "$@" | sort -n)
  local count=${#sorted[@]}
  local p50_idx=$((count / 2))
  local p95_idx=$((count * 95 / 100))
  local p99_idx=$((count * 99 / 100))
  echo "" >> "$RESULTS_FILE"
  echo "**Statistics (${iterations} runs):**" >> "$RESULTS_FILE"
  echo "- p50: ${sorted[$p50_idx]}ms" >> "$RESULTS_FILE"
  echo "- p95: ${sorted[$p95_idx]}ms" >> "$RESULTS_FILE"
  echo "- p99: ${sorted[$p99_idx]}ms" >> "$RESULTS_FILE"
  echo "" >> "$RESULTS_FILE"
}

# Measure the full git-sync round trip (update → export → commit → push →
# pull → import) by timing `bd update` + forced `bd sync` on throwaway issues.
measure_git_sync() {
  local iterations=$1
  local times=()
  echo "## Git Sync Latency (bd update → commit → push → pull → import)" >> "$RESULTS_FILE"
  echo "" >> "$RESULTS_FILE"
  for i in $(seq 1 "$iterations"); do
    # Create a test issue
    test_id=$(./bd create "Latency test $i" -p 3 --json | jq -r '.id')
    start=$(date +%s%N)
    # Update issue (normally exported after a 30s debounce) …
    ./bd update "$test_id" --status in_progress >/dev/null 2>&1
    # … then force an immediate sync (bypasses the debounce).
    ./bd sync >/dev/null 2>&1
    end=$(date +%s%N)
    # Nanoseconds → milliseconds
    latency_ms=$(( (end - start) / 1000000 ))
    times+=("$latency_ms")
    echo "Run $i: ${latency_ms}ms" >> "$RESULTS_FILE"
    # Cleanup
    ./bd close "$test_id" --reason "benchmark" >/dev/null 2>&1
  done
  write_stats "$iterations" "${times[@]}"
}

# Measure the Agent Mail round trip (HTTP send_message + fetch_inbox).
# Skips — with a note in the report — when the server is unreachable.
measure_agent_mail() {
  local iterations=$1
  local times=()
  echo "## Agent Mail Latency (send_message → fetch_inbox)" >> "$RESULTS_FILE"
  echo "" >> "$RESULTS_FILE"
  if ! curl -s "$AGENT_MAIL_URL/health" >/dev/null 2>&1; then
    echo "⚠️ Agent Mail server not running. Skipping Agent Mail benchmark." >> "$RESULTS_FILE"
    echo "" >> "$RESULTS_FILE"
    return
  fi
  for i in $(seq 1 "$iterations"); do
    start=$(date +%s%N)
    # Send a message via HTTP API
    curl -s -X POST "$AGENT_MAIL_URL/api/messages" \
      -H "Authorization: Bearer $BEARER_TOKEN" \
      -H "Content-Type: application/json" \
      -d "{
      \"project_id\": \"beads\",
      \"sender\": \"agent-benchmark\",
      \"recipients\": [\"agent-test\"],
      \"subject\": \"Latency test $i\",
      \"body\": \"Benchmark message\",
      \"message_type\": \"notification\"
      }" >/dev/null 2>&1
    # Fetch inbox to complete round-trip
    curl -s "$AGENT_MAIL_URL/api/messages/beads/agent-test" \
      -H "Authorization: Bearer $BEARER_TOKEN" >/dev/null 2>&1
    end=$(date +%s%N)
    latency_ms=$(( (end - start) / 1000000 ))
    times+=("$latency_ms")
    echo "Run $i: ${latency_ms}ms" >> "$RESULTS_FILE"
  done
  write_stats "$iterations" "${times[@]}"
}

# Run benchmarks
ITERATIONS=10
echo "Running benchmarks ($ITERATIONS iterations each)..."
measure_git_sync "$ITERATIONS"
measure_agent_mail "$ITERATIONS"

echo "" >> "$RESULTS_FILE"
echo "## Conclusion" >> "$RESULTS_FILE"
echo "" >> "$RESULTS_FILE"
echo "Benchmark completed. See results above." >> "$RESULTS_FILE"
echo ""
echo "Results written to $RESULTS_FILE"
cat "$RESULTS_FILE"

View File

@@ -1,62 +0,0 @@
#!/bin/bash
# Cleanup test pollution from bd database
# Removes issues created during development/testing
#
# Interactive: lists matching issues, then asks for confirmation before
# deleting them (plus all dependent rows) directly from SQLite.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BD="${SCRIPT_DIR}/../bd"

# Pattern to match test issues (case-insensitive)
# Matches: "Test issue", "Test Epic", "Test child", etc.
TEST_PATTERN="^Test (issue|Epic|child|parent|dependency|label|update|numeric|P1|FK|simple|lowercase)"

echo "=== BD Test Pollution Cleanup ==="
echo ""

# Find test issues
echo "Finding test pollution issues..."
TEST_ISSUES=$("$BD" list --json | jq -r --arg pattern "$TEST_PATTERN" \
  '.[] | select(.title | test($pattern; "i")) | .id')

if [ -z "$TEST_ISSUES" ]; then
  echo "✓ No test pollution found"
  exit 0
fi

COUNT=$(echo "$TEST_ISSUES" | wc -l | tr -d ' ')
echo "Found $COUNT test pollution issues:"
"$BD" list --json | jq -r --arg pattern "$TEST_PATTERN" \
  '.[] | select(.title | test($pattern; "i")) | " - \(.id): \(.title)"'
echo ""

read -p "Delete these $COUNT issues? [y/N] " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
  echo "Aborted"
  exit 1
fi

# Delete via SQLite directly (bd doesn't have delete command yet)
DB="${SCRIPT_DIR}/../.beads/beads.db"
echo ""
echo "Deleting from database..."

# Build one SQL script and run it in a single transaction:
#  - atomic: a crash mid-way no longer leaves half-deleted issues behind
#  - fast: one sqlite3 process instead of nine per issue
SQL="BEGIN TRANSACTION;"
while read -r id; do
  # IDs come from bd's own JSON output, but validate before interpolating
  # into SQL so an unexpected value can never smuggle in a quote.
  if [[ ! "$id" =~ ^[A-Za-z0-9._-]+$ ]]; then
    echo " Skipping suspicious id: $id" >&2
    continue
  fi
  echo " Deleting $id..."
  SQL+="
DELETE FROM labels WHERE issue_id = '$id';
DELETE FROM dependencies WHERE issue_id = '$id' OR depends_on_id = '$id';
DELETE FROM comments WHERE issue_id = '$id';
DELETE FROM events WHERE issue_id = '$id';
DELETE FROM dirty_issues WHERE issue_id = '$id';
DELETE FROM export_hashes WHERE issue_id = '$id';
DELETE FROM issue_snapshots WHERE issue_id = '$id';
DELETE FROM compaction_snapshots WHERE issue_id = '$id';
DELETE FROM issues WHERE id = '$id';"
done <<< "$TEST_ISSUES"
SQL+="
COMMIT;"
sqlite3 "$DB" "$SQL"

echo ""
echo "✓ Cleanup complete"
echo ""
echo "Run 'bd stats' to verify new counts"
echo "Run 'bd sync' to export cleaned database to JSONL"

View File

@@ -1,120 +0,0 @@
package main
import (
"fmt"
"math"
)
// Birthday-paradox helpers for sizing short lowercase-alphanumeric issue IDs.

// collisionProbability approximates P(at least one collision) among
// numIssues random IDs drawn from N = 36^idLength possible values,
// using the birthday-paradox formula 1 - e^(-n²/2N).
func collisionProbability(numIssues int, idLength int) float64 {
	n := float64(numIssues)
	idSpace := math.Pow(36.0, float64(idLength)) // 36 = lowercase alphanumeric alphabet
	return 1.0 - math.Exp(-(n*n)/(2.0*idSpace))
}

// expectedCollisions returns the expected count of colliding ID pairs:
// C(n, 2) pairs, each colliding with probability 36^-idLength.
func expectedCollisions(numIssues int, idLength int) float64 {
	pairCount := float64(numIssues * (numIssues - 1) / 2)
	perPair := 1.0 / math.Pow(36, float64(idLength))
	return pairCount * perPair
}

// optimalIdLength returns the smallest ID length in [3, 12] whose collision
// probability at numIssues stays at or below maxCollisionProb, or 12 when
// even the longest supported length exceeds the threshold.
func optimalIdLength(numIssues int, maxCollisionProb float64) int {
	for candidate := 3; candidate <= 12; candidate++ {
		if collisionProbability(numIssues, candidate) <= maxCollisionProb {
			return candidate
		}
	}
	return 12 // fallback: cap at the longest supported length
}
// main prints four analyses: collision probability per (db size, id length),
// the recommended ID length per probability threshold, expected collision
// counts, and a proposed adaptive scaling strategy at a 25% threshold.
func main() {
	fmt.Println("=== Collision Probability Analysis ===")
	dbSizes := []int{50, 100, 200, 500, 1000, 2000, 5000, 10000}
	idLengths := []int{4, 5, 6, 7, 8}

	// Table 1: collision probability for each (db size, id length) pair.
	fmt.Printf("%-10s", "DB Size")
	for _, idLen := range idLengths {
		fmt.Printf("%8d-char", idLen)
	}
	fmt.Println()
	fmt.Println("----------------------------------------------------------")
	for _, size := range dbSizes {
		fmt.Printf("%-10d", size)
		for _, idLen := range idLengths {
			fmt.Printf("%11.2f%%", collisionProbability(size, idLen)*100)
		}
		fmt.Println()
	}

	// Table 2: smallest acceptable ID length per probability threshold.
	fmt.Println("\n=== Recommended ID Length by Threshold ===")
	thresholds := []float64{0.10, 0.25, 0.50}
	fmt.Printf("%-10s", "DB Size")
	for _, limit := range thresholds {
		fmt.Printf("%10.0f%%", limit*100)
	}
	fmt.Println()
	fmt.Println("----------------------------------")
	for _, size := range dbSizes {
		fmt.Printf("%-10d", size)
		for _, limit := range thresholds {
			fmt.Printf("%10d", optimalIdLength(size, limit))
		}
		fmt.Println()
	}

	// Table 3: expected number of colliding pairs.
	fmt.Println("\n=== Expected Number of Collisions ===")
	fmt.Printf("%-10s", "DB Size")
	for _, idLen := range idLengths {
		fmt.Printf("%10d-char", idLen)
	}
	fmt.Println()
	fmt.Println("----------------------------------------------------------")
	for _, size := range dbSizes {
		fmt.Printf("%-10d", size)
		for _, idLen := range idLengths {
			fmt.Printf("%14.2f", expectedCollisions(size, idLen))
		}
		fmt.Println()
	}

	// Table 4: size bands with the ID length each needs at a 25% threshold.
	fmt.Println("\n=== Adaptive Scaling Strategy ===")
	fmt.Println("Threshold: 25% collision probability")
	fmt.Printf("%-15s %-12s %-20s\n", "DB Size Range", "ID Length", "Collision Prob")
	fmt.Println("-------------------------------------------------------")
	bands := []struct {
		min, max int
	}{
		{0, 50},
		{51, 150},
		{151, 500},
		{501, 1500},
		{1501, 5000},
		{5001, 15000},
	}
	const threshold = 0.25
	for _, band := range bands {
		// Size each band by its worst case (the upper bound).
		bestLen := optimalIdLength(band.max, threshold)
		fmt.Printf("%-15s %-12d %18.2f%%\n",
			fmt.Sprintf("%d-%d", band.min, band.max),
			bestLen,
			collisionProbability(band.max, bestLen)*100)
	}
}

View File

@@ -1,67 +0,0 @@
#!/bin/bash
# Setup Agent Mail configuration for a beads workspace
# Usage: ./setup-agent-mail-workspace.sh [workspace-path]
#
# Writes a .envrc (for direnv) into the workspace with the Agent Mail URL,
# a host-qualified agent name, and the project ID derived from the
# workspace directory name.
set -e

WORKSPACE="${1:-$(pwd)}"
cd "$WORKSPACE"

WORKSPACE_NAME=$(basename "$WORKSPACE")
# Fix: quote the inner $(dirname ...) — a parent path containing spaces
# previously word-split and broke basename.
PARENT=$(basename "$(dirname "$WORKSPACE")")
HOSTNAME=$(hostname -s)

# Map a workspace directory name to its Agent Mail project ID.
# Unknown workspaces fall through to "unknown.dev".
determine_project_id() {
  local ws_name="$1"
  case "$ws_name" in
    beads)
      echo "beads.dev"
      ;;
    vc)
      echo "vc.dev"
      ;;
    wyvern)
      echo "wyvern.dev"
      ;;
    *)
      echo "unknown.dev"
      ;;
  esac
}

PROJECT_ID=$(determine_project_id "$WORKSPACE_NAME")
AGENT_NAME="${PARENT}-${WORKSPACE_NAME}-${HOSTNAME}"

# Create .envrc for direnv
cat > .envrc <<EOF
# Agent Mail Configuration
# Generated: $(date)
# Workspace: $WORKSPACE
# Coupling: $(basename "$PROJECT_ID")
export BEADS_AGENT_MAIL_URL=http://127.0.0.1:8765
export BEADS_AGENT_NAME=$AGENT_NAME
export BEADS_PROJECT_ID=$PROJECT_ID
# Optional: Uncomment for debugging
# export BEADS_AGENT_MAIL_DEBUG=1
EOF

echo "✅ Created .envrc in $WORKSPACE"
echo ""
echo "Configuration:"
echo " BEADS_AGENT_MAIL_URL: http://127.0.0.1:8765"
echo " BEADS_AGENT_NAME: $AGENT_NAME"
echo " BEADS_PROJECT_ID: $PROJECT_ID"
echo ""
echo "Next steps:"
echo " 1. Review .envrc and adjust if needed"
echo " 2. Run: direnv allow"
echo " 3. Test: bd info | grep -i agent"
echo ""
echo "To install direnv:"
echo " brew install direnv"
echo " echo 'eval \"\$(direnv hook zsh)\"' >> ~/.zshrc"
echo " source ~/.zshrc"
View File

@@ -1,34 +0,0 @@
#!/bin/bash
# Simple latency benchmark for bd-htfk
#
# Times ten full create → update → sync round trips through git and
# prints a markdown-ish report to stdout.
set -e

echo "# Latency Benchmark Results"
echo ""
echo "## Git Sync Latency Test (10 runs)"
echo ""

# Each run: create a throwaway issue, update it, force a sync, then close it.
for (( run = 1; run <= 10; run++ )); do
  t0=$(date +%s%N)
  issue=$(bd create "Latency test $run" -p 3 --json 2>/dev/null | jq -r '.id')
  bd update "$issue" --status in_progress >/dev/null 2>&1
  bd sync >/dev/null 2>&1
  t1=$(date +%s%N)
  # Nanoseconds → milliseconds
  echo "Run $run: $(( (t1 - t0) / 1000000 ))ms"
  bd close "$issue" --reason "test" >/dev/null 2>&1
done

echo ""
echo "## Notes"
echo "- Git sync includes: create → update → export → commit → push → pull → import"
echo "- This represents the full round-trip time for issue changes to sync via git"
echo "- Agent Mail latency test skipped (server not running)"
echo "- Expected git latency: 1000-5000ms"
echo "- Expected Agent Mail latency: <100ms (when server running)"

View File

@@ -1,86 +0,0 @@
#!/bin/bash
# Start Agent Mail server in background
# Usage: ./start-agent-mail-server.sh
#
# Environment:
#   AGENT_MAIL_DIR   checkout of mcp_agent_mail (default: ~/src/mcp_agent_mail)
#   AGENT_MAIL_LOG   log file path (default: ~/agent-mail.log)
#   AGENT_MAIL_PID   PID file path (default: ~/agent-mail.pid)
#   AGENT_MAIL_PORT  server port (default: 8765)
set -e

AGENT_MAIL_DIR="${AGENT_MAIL_DIR:-$HOME/src/mcp_agent_mail}"
LOG_FILE="${AGENT_MAIL_LOG:-$HOME/agent-mail.log}"
PID_FILE="${AGENT_MAIL_PID:-$HOME/agent-mail.pid}"
PORT="${AGENT_MAIL_PORT:-8765}"

# Refuse to start twice; remove a stale PID file left by a dead server.
if [[ -f "$PID_FILE" ]]; then
  PID=$(cat "$PID_FILE")
  if ps -p "$PID" > /dev/null 2>&1; then
    echo "⚠️ Agent Mail server already running (PID: $PID)"
    echo " Stop it first: kill $PID"
    exit 1
  else
    echo "🗑️ Removing stale PID file"
    rm -f "$PID_FILE"
  fi
fi

# Check if directory exists
if [[ ! -d "$AGENT_MAIL_DIR" ]]; then
  echo "❌ Agent Mail directory not found: $AGENT_MAIL_DIR"
  echo ""
  echo "Install with:"
  echo " git clone https://github.com/Dicklesworthstone/mcp_agent_mail.git $AGENT_MAIL_DIR"
  echo " cd $AGENT_MAIL_DIR"
  echo " python3 -m venv .venv"
  echo " source .venv/bin/activate"
  echo " pip install -e ."
  exit 1
fi

# Check if venv exists
if [[ ! -d "$AGENT_MAIL_DIR/.venv" ]]; then
  echo "❌ Virtual environment not found in $AGENT_MAIL_DIR/.venv"
  echo ""
  echo "Create with:"
  echo " cd $AGENT_MAIL_DIR"
  echo " python3 -m venv .venv"
  echo " source .venv/bin/activate"
  echo " pip install -e ."
  exit 1
fi

# Start server
echo "🚀 Starting Agent Mail server..."
echo " Directory: $AGENT_MAIL_DIR"
echo " Log file: $LOG_FILE"
echo " Port: $PORT"

cd "$AGENT_MAIL_DIR"
source .venv/bin/activate
nohup python -m mcp_agent_mail.cli serve-http \
  --host 127.0.0.1 \
  --port "$PORT" \
  > "$LOG_FILE" 2>&1 &
echo $! > "$PID_FILE"

# Poll the health endpoint (up to ~10s) rather than a fixed 2s sleep:
# slow machines previously got a false "failed to start" report.
HEALTHY=0
for _ in $(seq 1 20); do
  if curl -sf "http://127.0.0.1:$PORT/health" > /dev/null 2>&1; then
    HEALTHY=1
    break
  fi
  sleep 0.5
done

if [[ "$HEALTHY" -eq 1 ]]; then
  echo "✅ Agent Mail server started successfully!"
  echo " PID: $(cat "$PID_FILE")"
  echo " Health: http://127.0.0.1:$PORT/health"
  echo " Web UI: http://127.0.0.1:$PORT/mail"
  echo ""
  echo "View logs:"
  echo " tail -f $LOG_FILE"
  echo ""
  echo "Stop server:"
  echo " kill $(cat "$PID_FILE")"
else
  echo "❌ Server failed to start"
  echo " Check logs: tail -f $LOG_FILE"
  # Don't leave an orphaned half-started process behind (best-effort).
  kill "$(cat "$PID_FILE")" 2>/dev/null || true
  rm -f "$PID_FILE"
  exit 1
fi

View File

@@ -1,53 +0,0 @@
#!/bin/bash
# Stop Agent Mail server
# Usage: ./stop-agent-mail-server.sh
#
# Kills the server identified by the PID file (TERM first, then KILL after
# a five-second grace period). Without a PID file, falls back to matching
# the server's command line with pkill.

PID_FILE="${AGENT_MAIL_PID:-$HOME/agent-mail.pid}"

# True while the given PID belongs to a live process.
alive() {
  ps -p "$1" > /dev/null 2>&1
}

# No PID file: best-effort kill by process name, then exit successfully.
if [[ ! -f "$PID_FILE" ]]; then
  echo "⚠️ No PID file found: $PID_FILE"
  echo " Attempting to kill by process name..."
  if pkill -f "mcp_agent_mail.cli serve-http"; then
    echo "✅ Killed Agent Mail server by process name"
  else
    echo " No Agent Mail server process found"
  fi
  exit 0
fi

PID=$(cat "$PID_FILE")

# Stale PID file: nothing to stop, just clean up.
if ! alive "$PID"; then
  echo "⚠️ Process $PID not running (stale PID file)"
  rm -f "$PID_FILE"
  exit 0
fi

echo "🛑 Stopping Agent Mail server (PID: $PID)..."
kill "$PID"

# Give the server up to five seconds to exit on its own.
for _ in 1 2 3 4 5; do
  if ! alive "$PID"; then
    echo "✅ Server stopped gracefully"
    rm -f "$PID_FILE"
    exit 0
  fi
  sleep 1
done

# Still running: escalate to SIGKILL.
if alive "$PID"; then
  echo "⚠️ Server didn't stop gracefully, forcing..."
  kill -9 "$PID"
  sleep 1
fi

if ! alive "$PID"; then
  echo "✅ Server stopped (forced)"
  rm -f "$PID_FILE"
else
  echo "❌ Failed to stop server"
  exit 1
fi

View File

@@ -1,24 +0,0 @@
#!/usr/bin/env bash
set -e && cd "$(dirname "$0")/.."

# Update the vendorHash in default.nix after Go dependency changes
# Usage: ./scripts/update-nix-hash.sh
#
# Strategy: plant pkgs.lib.fakeHash so `nix build` fails on purpose and
# prints the correct hash ("got: sha256-..."), then write that hash back.

echo "Getting correct vendorHash from nix..."

# Set pkgs.lib.fakeHash to force nix to output the correct one
printf ',s|vendorHash = ".*";|vendorHash = pkgs.lib.fakeHash;|\nw\n' | ed -s default.nix

# Extract the hash from the (expected) build failure. `exit` after the
# first "got:" line — multiple matches previously corrupted the variable
# with embedded newlines. The pipeline's status is awk's, so the
# intentionally failing `nix build` does not trip `set -e`.
CORRECT_HASH=$(nix build --no-link 2>&1 | awk '/got:/ {print $2; exit}')

if [ -z "$CORRECT_HASH" ]; then
  echo "Error: Could not get hash from nix build" >&2
  # Undo the fakeHash edit so default.nix is left in a working state.
  git restore default.nix
  exit 1
fi

# Update with correct hash. SRI hashes are base64 (A-Za-z0-9+/=), so the
# '|' delimiter and '&' replacement metacharacter cannot appear in them.
printf ',s|vendorHash = pkgs\.lib\.fakeHash;|vendorHash = "%s";|\nw\n' "$CORRECT_HASH" | ed -s default.nix
echo "✓ Updated vendorHash to: $CORRECT_HASH"