Compare commits
bead/nixos ... polecat/ru (95 commits)
| SHA1 |
|---|
| c3ed6c0a26 |
| 53fa89b2e9 |
| 3acf9d2796 |
| 123e7d3b3a |
| 56097aefa4 |
| 21a8b5c5d9 |
| 8f8582b0f3 |
| 94fb5a3e64 |
| 7df68ba8c8 |
| 2799632308 |
| 346c031278 |
| 188d2befb0 |
| 8e8b5f4304 |
| 4098ee3987 |
| e1e37da7c2 |
| a46d11a770 |
| 8553b9826e |
| a0c081e12e |
| d92e4b3ddf |
| 70b40966be |
| 475a633ab7 |
| a39416c9db |
| 63c3f4e84d |
| baf64f7f4a |
| f0b6ede7ed |
| d0cb16391f |
| d872293f19 |
| 07182cfdcf |
| 65e91c20f7 |
| 01e376eac4 |
| 9c5be2e27a |
| d9ffb14db5 |
| 07ea05afab |
| 4f5108c9d9 |
| 9243341ed7 |
| b729ee8c7a |
| ebc28cebd4 |
| c82358d586 |
| 74388e8c24 |
| a98ccddab1 |
| 18570628a5 |
| 0c484b6601 |
| 4853a18474 |
| 8b8453a37a |
| 2b6e289b9a |
| 70d364544f |
| 1ffa8524f0 |
| be3c27e868 |
| c2d286087f |
| 1172818062 |
| 9f63e1430c |
| b14ef1f62a |
| 87719fa9e6 |
| 933612da4c |
| d2c7599267 |
| 3d16824eac |
| 2cdc15163c |
| a77b1230fe |
| 623a387127 |
| 737f2b09e4 |
| cddc9de14a |
| 53e3bbe78f |
| c258eafe34 |
| 03d0b76f97 |
| b5f7233214 |
| 1203662237 |
| 6ad714b57c |
| 5440214295 |
| cc305af899 |
| c06adec7d8 |
| 7903b2dfd0 |
| f472aa9b3d |
| 2e07454ffa |
| daf963b290 |
| c3c8688f31 |
| 1cee1cd365 |
| 66c27da142 |
| 7d6f71f4e4 |
| 7091ee3ad5 |
| d78e089695 |
| 28b7a0fda9 |
| b7bccb0b40 |
| 2d03714934 |
| 3f0e381de2 |
| 1d9fd0aee9 |
| 16f6dfcec7 |
| 90ef70eb2e |
| 667f5b28dc |
| 4bb71d0b7e |
| 0bc134f557 |
| 1b9df3926e |
| bd98793528 |
| d78637cf13 |
| 08d16bd2c9 |
| a14ff9be4d |
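The same comparison can be reproduced from a local clone; a minimal sketch, assuming both refs are fetched (the two-dot range is an assumption about how this compare is defined):

```bash
git fetch origin
# Expected to report 95 commits for this compare
git rev-list --count bead/nixos..polecat/ru
git log --oneline bead/nixos..polecat/ru
```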
.beads/.gitignore (vendored, 5 lines changed)
```diff
@@ -32,6 +32,11 @@ beads.left.meta.json
 beads.right.jsonl
 beads.right.meta.json
 
+# Sync state (local-only, per-machine)
+# These files are machine-specific and should not be shared across clones
+.sync.lock
+sync_base.jsonl
+
 # NOTE: Do NOT add negation patterns (e.g., !issues.jsonl) here.
 # They would override fork protection in .git/info/exclude, allowing
 # contributors to accidentally commit upstream issue databases.
```
```diff
@@ -6,7 +6,7 @@
 # Issue prefix for this repository (used by bd init)
 # If not set, bd init will auto-detect from directory name
 # Example: issue-prefix: "myproject" creates issues like "myproject-1", "myproject-2", etc.
-# issue-prefix: ""
+issue-prefix: "x"
 
 # Use no-db mode: load from JSONL, no SQLite, write back after each command
 # When true, bd will use .beads/issues.jsonl as the source of truth
@@ -59,4 +59,6 @@ sync-branch: "beads-sync"
 # - linear.url
 # - linear.api-key
 # - github.org
 # - github.repo
+
+routing.mode: "explicit"
```

```diff
@@ -1,4 +0,0 @@
-{
-  "database": "beads.db",
-  "jsonl_export": "sync_base.jsonl"
-}
```
```diff
@@ -10,9 +10,11 @@ jobs:
   check:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v6
 
-      - uses: https://git.johnogle.info/johno/gitea-actions/nix-setup@main
+      - uses: https://git.johnogle.info/johno/gitea-actions/nix-setup@v1
 
       - name: Check flake
         run: nix flake check
+        env:
+          NIX_CONFIG: "access-tokens = git.johnogle.info=${{ secrets.GITEA_ACCESS_TOKEN }}"
```
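To run the same check outside CI against the private Gitea host, the token can be supplied the same way the workflow does; a minimal sketch, where `GITEA_TOKEN` is a hypothetical shell variable holding a personal access token:

```bash
# Mirrors the workflow's env block for a local shell session
export NIX_CONFIG="access-tokens = git.johnogle.info=${GITEA_TOKEN}"
nix flake check
```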
.gitignore (vendored, 5 lines changed)
```diff
@@ -1,3 +1,8 @@
 result
 thoughts
 .beads
+
+# Gas Town (added by gt)
+.runtime/
+.claude/
+.logs/
```
```diff
@@ -9,7 +9,7 @@ Directory Structure:
 ----------------------
 • packages/ - Custom Nix packages leveraged across various configurations.
 • roles/ - Role-based configurations (e.g., kodi, bluetooth) each with its own module (default.nix) for inclusion in machine setups.
-• machines/ - Machine-specific configurations (e.g., nix-book, z790prors, boxy, wixos) including configuration.nix and hardware-configuration.nix tailored for each hardware.
+• machines/ - Machine-specific configurations (e.g., nix-book, zix790prors, boxy) including configuration.nix and hardware-configuration.nix tailored for each hardware.
 • home/ - Home-manager configurations for personal environments and application settings (e.g., home-nix-book.nix, home-z790prors.nix).
 
 Design Principles:
```
```diff
@@ -14,7 +14,7 @@ This repository uses `beads` for issue tracking and management. Run `bd quicksta
 
 ### Flake Structure
 - **flake.nix**: Main entry point defining inputs (nixpkgs, home-manager, plasma-manager, etc.) and outputs for multiple NixOS configurations
-- **Machines**: `nix-book`, `boxy`, `wixos` (WSL configuration), `zix790prors`, `live-usb`, `johno-macbookpro` (Darwin/macOS)
+- **Machines**: `nix-book`, `boxy`, `zix790prors`, `live-usb`, `johno-macbookpro` (Darwin/macOS)
 - **Home configurations**: Standalone home-manager configuration for user `johno`
 
 ### Directory Structure
@@ -78,7 +78,6 @@ The repository also uses a modular home-manager role system for user-space confi
 - **nix-book**: Compact laptop → excludes office/media roles due to SSD space constraints
 - **boxy**: Living room media center → optimized for media consumption, excludes sync/office (shared machine)
 - **zix790prors**: All-purpose workstation → full desktop experience with all roles enabled
-- **wixos**: WSL2 development → full desktop experience, inherits from zix790prors Windows host
 - **live-usb**: Temporary environment → only base + desktop roles, no persistent services
 - **johno-macbookpro**: macOS work laptop → Darwin-specific configuration with development tools
 
@@ -111,7 +110,6 @@ darwin-rebuild build --flake .#johno-macbookpro
 - `nix-book`: Compact laptop with storage constraints, uses `home/home-laptop-compact.nix`
 - `boxy`: Shared living room media center/gaming desktop with AMD GPU, uses `home/home-media-center.nix`
 - `zix790prors`: Powerful all-purpose workstation (gaming, 3D modeling, development), dual-boots Windows 11 with shared btrfs /games partition, uses `home/home-desktop.nix`
-- `wixos`: WSL2 development environment running in Windows partition of zix790prors, uses `home/home-desktop.nix`
 - `live-usb`: Bootable ISO configuration, uses `home/home-live-usb.nix`
 - `johno-macbookpro`: macOS work laptop, uses `home/home-darwin-work.nix`
 
```
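The hunk header above already names the Darwin rebuild entry point; for completeness, a sketch of applying the updated flake on each platform (standard nixos-rebuild/darwin-rebuild usage):

```bash
# On a NixOS machine, e.g. the workstation
sudo nixos-rebuild switch --flake .#zix790prors

# On the macOS work laptop (command taken from the hunk header above)
darwin-rebuild build --flake .#johno-macbookpro
```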
flake.lock (generated, 195 lines changed)
```diff
@@ -8,11 +8,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1767911810,
-        "narHash": "sha256-0L4ATr01UsmBC0rSW62VIMVVSUihAQu2+ZOoHk9BQnA=",
+        "lastModified": 1769405733,
+        "narHash": "sha256-WpROnW0dRi5ub0SlpKrMBs3pYlSBY4xw22hnTNvBMgI=",
         "owner": "steveyegge",
         "repo": "beads",
-        "rev": "28ff9fe9919a9665a0f00f5b3fcd084b43fb6cc3",
+        "rev": "6e82d1e2eea121ce5dc0964d554879f8b0c08563",
         "type": "github"
       },
       "original": {
@@ -24,11 +24,11 @@
     "doomemacs": {
       "flake": false,
       "locked": {
-        "lastModified": 1767773143,
-        "narHash": "sha256-QL/t9v2kFNxBDyNJb/s411o3mxujan+QX5IZglTdpTk=",
+        "lastModified": 1768984347,
+        "narHash": "sha256-VvC4rgAAaFnYLCdcUoz7dTE3kuBNuHIc+GlXOrPCxpg=",
         "owner": "doomemacs",
         "repo": "doomemacs",
-        "rev": "3e15fb36d7f94f0a218bda977be4d3f5da983a71",
+        "rev": "57818a6da90fbef39ff80d62fab2cd319496c3b9",
         "type": "github"
       },
       "original": {
@@ -47,11 +47,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1768011937,
-        "narHash": "sha256-SnU2XTo34vwVaijs+4VwcXTNwMWO4nwzzs08N39UagA=",
+        "lastModified": 1769848312,
+        "narHash": "sha256-ggBocPd1L4l5MFNV0Fw9aSGZZO4aGzCfgh4e6hQ77RE=",
         "owner": "nix-community",
         "repo": "emacs-overlay",
-        "rev": "79abf71d9897cf3b5189f7175cda1b1102abc65c",
+        "rev": "be0b4f4f28f69be61e9174807250e3235ee11d50",
         "type": "github"
       },
       "original": {
@@ -60,22 +60,6 @@
         "type": "github"
       }
     },
-    "flake-compat": {
-      "flake": false,
-      "locked": {
-        "lastModified": 1765121682,
-        "narHash": "sha256-4VBOP18BFeiPkyhy9o4ssBNQEvfvv1kXkasAYd0+rrA=",
-        "owner": "edolstra",
-        "repo": "flake-compat",
-        "rev": "65f23138d8d09a92e30f1e5c87611b23ef451bf3",
-        "type": "github"
-      },
-      "original": {
-        "owner": "edolstra",
-        "repo": "flake-compat",
-        "type": "github"
-      }
-    },
     "flake-utils": {
       "inputs": {
         "systems": "systems"
@@ -94,6 +78,22 @@
         "type": "github"
       }
     },
+    "gastown": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1769538736,
+        "narHash": "sha256-A33gyS/ERUCFcaFG9PJdIHfIOafguqkRe+DuIZteH5s=",
+        "owner": "steveyegge",
+        "repo": "gastown",
+        "rev": "177094a2335786d1d450fd9e14b935877291c004",
+        "type": "github"
+      },
+      "original": {
+        "owner": "steveyegge",
+        "repo": "gastown",
+        "type": "github"
+      }
+    },
     "google-cookie-retrieval": {
       "inputs": {
         "nixpkgs": [
@@ -101,11 +101,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1761423376,
-        "narHash": "sha256-pMy3cnUFfue4vz/y0jx71BfcPGxZf+hk/DtnzWvfU0c=",
+        "lastModified": 1768846578,
+        "narHash": "sha256-82f/+e8HAwmBukiLlr7I3HYvM/2GCd5SOc+BC+qzsOQ=",
         "ref": "refs/heads/main",
-        "rev": "a1f695665771841a988afc965526cbf99160cd77",
-        "revCount": 11,
+        "rev": "c11ff9d3c67372a843a0fa6bf23132e986bd6955",
+        "revCount": 14,
         "type": "git",
         "url": "https://git.johnogle.info/johno/google-cookie-retrieval.git"
       },
@@ -121,11 +121,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1767514898,
-        "narHash": "sha256-ONYqnKrPzfKEEPChoJ9qPcfvBqW9ZgieDKD7UezWPg4=",
+        "lastModified": 1768949235,
+        "narHash": "sha256-TtjKgXyg1lMfh374w5uxutd6Vx2P/hU81aEhTxrO2cg=",
         "owner": "nix-community",
         "repo": "home-manager",
-        "rev": "7a06e8a2f844e128d3b210a000a62716b6040b7f",
+        "rev": "75ed713570ca17427119e7e204ab3590cc3bf2a5",
         "type": "github"
       },
       "original": {
@@ -142,11 +142,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1767556355,
-        "narHash": "sha256-RDTUBDQBi9D4eD9iJQWtUDN/13MDLX+KmE+TwwNUp2s=",
+        "lastModified": 1769397130,
+        "narHash": "sha256-TTM4KV9IHwa181X7afBRbhLJIrgynpDjAXJFMUOWfyU=",
         "owner": "nix-community",
         "repo": "home-manager",
-        "rev": "f894bc4ffde179d178d8deb374fcf9855d1a82b7",
+        "rev": "c37679d37bdbecf11bbe3c5eb238d89ca4f60641",
         "type": "github"
       },
       "original": {
@@ -164,11 +164,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1767082077,
-        "narHash": "sha256-2tL1mRb9uFJThUNfuDm/ehrnPvImL/QDtCxfn71IEz4=",
+        "lastModified": 1769273817,
+        "narHash": "sha256-+iyLihi/ynJokMgJZMRXuMuI6DPGUQRajz5ztNCHgnI=",
         "owner": "Jovian-Experiments",
         "repo": "Jovian-NixOS",
-        "rev": "efd4b22e6fdc6d7fb4e186ae333a4b74e03da440",
+        "rev": "98f988ad46e31f9956c5f6874dfb3580a7ff3969",
         "type": "github"
       },
       "original": {
@@ -184,11 +184,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1765066094,
-        "narHash": "sha256-0YSU35gfRFJzx/lTGgOt6ubP8K6LeW0vaywzNNqxkl4=",
+        "lastModified": 1767634391,
+        "narHash": "sha256-owcSz2ICqTSvhBbhPP+1eWzi88e54rRZtfCNE5E/wwg=",
         "owner": "nix-darwin",
         "repo": "nix-darwin",
-        "rev": "688427b1aab9afb478ca07989dc754fa543e03d5",
+        "rev": "08585aacc3d6d6c280a02da195fdbd4b9cf083c2",
         "type": "github"
       },
       "original": {
@@ -206,11 +206,11 @@
         "systems": "systems_2"
       },
       "locked": {
-        "lastModified": 1768034604,
-        "narHash": "sha256-62pIZMvGHhYJmMiiBsxHqZt/dFyENPcFHlJq5NJF3Sw=",
+        "lastModified": 1769849328,
+        "narHash": "sha256-BjH1Ge6O8ObN6Z97un2U87pl4POO99Q8RSsgIuTZq8Q=",
         "owner": "marienz",
         "repo": "nix-doom-emacs-unstraightened",
-        "rev": "9b3b8044fe4ccdcbb2d6f733d7dbe4d5feea18bc",
+        "rev": "fc1d7190c49558cdc6af20d7657075943a500a93",
         "type": "github"
       },
       "original": {
@@ -241,65 +241,13 @@
         "type": "github"
       }
     },
-    "nixos-wsl": {
-      "inputs": {
-        "flake-compat": "flake-compat",
-        "nixpkgs": "nixpkgs"
-      },
-      "locked": {
-        "lastModified": 1765841014,
-        "narHash": "sha256-55V0AJ36V5Egh4kMhWtDh117eE3GOjwq5LhwxDn9eHg=",
-        "owner": "nix-community",
-        "repo": "NixOS-WSL",
-        "rev": "be4af8042e7a61fa12fda58fe9a3b3babdefe17b",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nix-community",
-        "ref": "main",
-        "repo": "NixOS-WSL",
-        "type": "github"
-      }
-    },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1765472234,
-        "narHash": "sha256-9VvC20PJPsleGMewwcWYKGzDIyjckEz8uWmT0vCDYK0=",
-        "owner": "NixOS",
-        "repo": "nixpkgs",
-        "rev": "2fbfb1d73d239d2402a8fe03963e37aab15abe8b",
-        "type": "github"
-      },
-      "original": {
-        "owner": "NixOS",
-        "ref": "nixos-unstable",
-        "repo": "nixpkgs",
-        "type": "github"
-      }
-    },
-    "nixpkgs-unstable": {
-      "locked": {
-        "lastModified": 1767379071,
-        "narHash": "sha256-EgE0pxsrW9jp9YFMkHL9JMXxcqi/OoumPJYwf+Okucw=",
+        "lastModified": 1769089682,
+        "narHash": "sha256-9yA/LIuAVQq0lXelrZPjLuLVuZdm03p8tfmHhnDIkms=",
         "owner": "nixos",
         "repo": "nixpkgs",
-        "rev": "fb7944c166a3b630f177938e478f0378e64ce108",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nixos",
-        "ref": "nixos-unstable",
-        "repo": "nixpkgs",
-        "type": "github"
-      }
-    },
-    "nixpkgs_2": {
-      "locked": {
-        "lastModified": 1767480499,
-        "narHash": "sha256-8IQQUorUGiSmFaPnLSo2+T+rjHtiNWc+OAzeHck7N48=",
-        "owner": "nixos",
-        "repo": "nixpkgs",
-        "rev": "30a3c519afcf3f99e2c6df3b359aec5692054d92",
+        "rev": "078d69f03934859a181e81ba987c2bb033eebfc5",
         "type": "github"
       },
       "original": {
@@ -309,6 +257,38 @@
         "type": "github"
       }
     },
+    "nixpkgs-unstable": {
+      "locked": {
+        "lastModified": 1769170682,
+        "narHash": "sha256-oMmN1lVQU0F0W2k6OI3bgdzp2YOHWYUAw79qzDSjenU=",
+        "owner": "nixos",
+        "repo": "nixpkgs",
+        "rev": "c5296fdd05cfa2c187990dd909864da9658df755",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nixos",
+        "ref": "nixos-unstable",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "perles": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1769460725,
+        "narHash": "sha256-zM2jw+emxe8+mNyR1ebMWkQiEx8uSmhoqqI0IxXLDgs=",
+        "owner": "zjrosen",
+        "repo": "perles",
+        "rev": "57b20413eea461452b59e13f5a4a367953b1f768",
+        "type": "github"
+      },
+      "original": {
+        "owner": "zjrosen",
+        "repo": "perles",
+        "type": "github"
+      }
+    },
     "plasma-manager": {
       "inputs": {
         "home-manager": [
@@ -319,11 +299,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1763909441,
-        "narHash": "sha256-56LwV51TX/FhgX+5LCG6akQ5KrOWuKgcJa+eUsRMxsc=",
+        "lastModified": 1767662275,
+        "narHash": "sha256-d5Q1GmQ+sW1Bt8cgDE0vOihzLaswsm8cSdg8124EqXE=",
         "owner": "nix-community",
         "repo": "plasma-manager",
-        "rev": "b24ed4b272256dfc1cc2291f89a9821d5f9e14b4",
+        "rev": "51816be33a1ff0d4b22427de83222d5bfa96d30e",
         "type": "github"
       },
       "original": {
@@ -342,11 +322,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1763909441,
-        "narHash": "sha256-56LwV51TX/FhgX+5LCG6akQ5KrOWuKgcJa+eUsRMxsc=",
+        "lastModified": 1767662275,
+        "narHash": "sha256-d5Q1GmQ+sW1Bt8cgDE0vOihzLaswsm8cSdg8124EqXE=",
         "owner": "nix-community",
         "repo": "plasma-manager",
-        "rev": "b24ed4b272256dfc1cc2291f89a9821d5f9e14b4",
+        "rev": "51816be33a1ff0d4b22427de83222d5bfa96d30e",
         "type": "github"
       },
       "original": {
@@ -358,15 +338,16 @@
     "root": {
       "inputs": {
         "beads": "beads",
+        "gastown": "gastown",
         "google-cookie-retrieval": "google-cookie-retrieval",
         "home-manager": "home-manager",
         "home-manager-unstable": "home-manager-unstable",
         "jovian": "jovian",
         "nix-darwin": "nix-darwin",
         "nix-doom-emacs-unstraightened": "nix-doom-emacs-unstraightened",
-        "nixos-wsl": "nixos-wsl",
-        "nixpkgs": "nixpkgs_2",
+        "nixpkgs": "nixpkgs",
         "nixpkgs-unstable": "nixpkgs-unstable",
+        "perles": "perles",
         "plasma-manager": "plasma-manager",
         "plasma-manager-unstable": "plasma-manager-unstable"
       }
```
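Pin bumps like the ones above are normally generated rather than hand-edited; a sketch of the two usual invocations:

```bash
# Refresh every input and rewrite flake.lock
nix flake update

# Refresh a single input, e.g. the beads pin bumped in this diff
nix flake lock --update-input beads
```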
flake.nix (57 lines changed)
```diff
@@ -4,8 +4,7 @@
   inputs = {
     nixpkgs.url = "github:nixos/nixpkgs/nixos-25.11";
     nixpkgs-unstable.url = "github:nixos/nixpkgs/nixos-unstable";
-    nixos-wsl.url = "github:nix-community/NixOS-WSL/main";
 
     nix-darwin = {
       url = "github:nix-darwin/nix-darwin/nix-darwin-25.11";
       inputs.nixpkgs.follows = "nixpkgs";
@@ -48,6 +47,16 @@
       inputs.nixpkgs.follows = "nixpkgs-unstable";
     };
 
+    gastown = {
+      url = "github:steveyegge/gastown";
+      flake = false; # No flake.nix upstream yet
+    };
+
+    perles = {
+      url = "github:zjrosen/perles";
+      flake = false; # No flake.nix upstream yet
+    };
+
     nix-doom-emacs-unstraightened = {
       url = "github:marienz/nix-doom-emacs-unstraightened";
       # Don't follow nixpkgs to avoid rebuild issues with emacs-overlay
@@ -55,7 +64,7 @@
     };
   };
 
-  outputs = { self, nixpkgs, nixpkgs-unstable, nixos-wsl, ... } @ inputs: let
+  outputs = { self, nixpkgs, nixpkgs-unstable, ... } @ inputs: let
     # Shared overlay function to reduce duplication across module sets
     # Parameters:
     #   unstableOverlays: Additional overlays to apply when importing nixpkgs-unstable
@@ -84,11 +93,21 @@
       };
     };
 
+
+    # Shared unstable overlays for custom package builds
+    customUnstableOverlays = [
+      # Override claude-code in unstable to use our custom GCS-based build
+      # (needed for corporate networks that block npm registry)
+      (ufinal: uprev: {
+        claude-code = uprev.callPackage ./packages/claude-code {};
+      })
+    ];
+
     nixosModules = [
       ./roles
       inputs.home-manager.nixosModules.home-manager
       {
-        nixpkgs.overlays = [ (mkBaseOverlay {}) ];
+        nixpkgs.overlays = [ (mkBaseOverlay { unstableOverlays = customUnstableOverlays; }) ];
       }
       (mkHomeManagerConfig {
         sharedModules = [ inputs.plasma-manager.homeModules.plasma-manager ];
@@ -101,7 +120,7 @@
       inputs.home-manager-unstable.nixosModules.home-manager
       inputs.jovian.nixosModules.jovian
       {
-        nixpkgs.overlays = [ (mkBaseOverlay {}) ];
+        nixpkgs.overlays = [ (mkBaseOverlay { unstableOverlays = customUnstableOverlays; }) ];
       }
       (mkHomeManagerConfig {
         sharedModules = [ inputs.plasma-manager-unstable.homeModules.plasma-manager ];
@@ -112,17 +131,7 @@
       ./roles/darwin.nix
       inputs.home-manager.darwinModules.home-manager
       {
-        nixpkgs.overlays = [
-          (mkBaseOverlay {
-            # Override claude-code in unstable to use our custom GCS-based build
-            # (needed for corporate networks that block npm registry)
-            unstableOverlays = [
-              (ufinal: uprev: {
-                claude-code = uprev.callPackage ./packages/claude-code {};
-              })
-            ];
-          })
-        ];
+        nixpkgs.overlays = [ (mkBaseOverlay { unstableOverlays = customUnstableOverlays; }) ];
       }
       (mkHomeManagerConfig { sharedModules = []; })
     ];
@@ -157,24 +166,10 @@
       ];
     };
 
-    nixosConfigurations.wixos = nixpkgs.lib.nixosSystem rec {
-      system = "x86_64-linux";
-      modules = nixosModules ++ [
-        nixos-wsl.nixosModules.default
-        ./machines/wixos/configuration.nix
-        inputs.home-manager.nixosModules.home-manager
-        {
-          home-manager.users.johno = import ./home/home-desktop.nix;
-          home-manager.extraSpecialArgs = { inherit system; };
-        }
-      ];
-    };
-
     nixosConfigurations.zix790prors = nixpkgs.lib.nixosSystem rec {
       system = "x86_64-linux";
       modules = nixosModules ++ [
         ./machines/zix790prors/configuration.nix
-        inputs.home-manager.nixosModules.home-manager
         {
           home-manager.users.johno = import ./home/home-desktop.nix;
           home-manager.extraSpecialArgs = { inherit system; };
@@ -220,7 +215,7 @@
     };
 
     # Darwin/macOS configurations
-    darwinConfigurations."blkfv4yf49kt7" = inputs.nix-darwin.lib.darwinSystem rec {
+    darwinConfigurations."BLKFV4YF49KT7" = inputs.nix-darwin.lib.darwinSystem rec {
       system = "aarch64-darwin";
       modules = darwinModules ++ [
         ./machines/johno-macbookpro/configuration.nix
```
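A quick, build-free way to confirm the overlay refactor and the wixos removal still evaluate; a sketch using standard nix CLI invocations:

```bash
# Enumerate outputs; wixos should be gone from nixosConfigurations
nix flake show

# Dry-run one system closure to surface overlay evaluation errors early
nix build --dry-run .#nixosConfigurations.zix790prors.config.system.build.toplevel
```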
```diff
@@ -107,7 +107,7 @@
   aerospace = {
     enable = true;
     leader = "cmd";
-    ctrlShortcuts.enable = true;
+    ctrlShortcuts.enable = false;
     sketchybar.enable = true;
     # Optional: Add per-machine userSettings overrides
     # userSettings = {
```
```diff
@@ -10,6 +10,7 @@
   home.roles = {
     "3d-printing".enable = true;
     base.enable = true;
+    gaming.enable = true;
     desktop.enable = true;
    emacs.enable = true;
     email.enable = true;
```

```diff
@@ -12,6 +12,7 @@
   home.roles = {
     base.enable = true;
     desktop.enable = true;
+    gaming.enable = true;
     development.enable = true;
     communication.enable = true;
     email.enable = true;
```

```diff
@@ -12,6 +12,7 @@
   home.roles = {
     base.enable = true;
     desktop.enable = true;
+    gaming.enable = true;
     media.enable = true;
     communication.enable = true;
     kdeconnect.enable = true;
```
```diff
@@ -632,7 +632,9 @@ in
       text = ''
         #!/bin/bash
 
-        DISK_USAGE=$(df -H / | grep -v Filesystem | awk '{print $5}')
+        # Monitor /System/Volumes/Data which contains user data on APFS
+        # The root / is a read-only snapshot with minimal usage
+        DISK_USAGE=$(df -H /System/Volumes/Data | grep -v Filesystem | awk '{print $5}')
 
         ${pkgs.sketchybar}/bin/sketchybar --set $NAME label="$DISK_USAGE"
       '';
```
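The new comments are easy to confirm interactively on macOS, where / is a sealed APFS snapshot and the Data volume holds the data that actually grows:

```bash
# Compare the two mount points side by side
df -H / /System/Volumes/Data

# The script's own extraction of the use% column
df -H /System/Volumes/Data | grep -v Filesystem | awk '{print $5}'
```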
```diff
@@ -22,6 +22,7 @@ in
     shellcheck
     tmux
     tree
+    watch
   ];
 
   # Automatic garbage collection for user profile (home-manager generations).
```
home/roles/development/beads-search-query-optimization.patch (new file, 44 lines)
```diff
diff --git a/internal/storage/dolt/queries.go b/internal/storage/dolt/queries.go
index 7d8214ee..8acdaae2 100644
--- a/internal/storage/dolt/queries.go
+++ b/internal/storage/dolt/queries.go
@@ -212,8 +212,21 @@ func (s *DoltStore) SearchIssues(ctx context.Context, query string, filter types
 	}
 
 	// nolint:gosec // G201: whereSQL contains column comparisons with ?, limitSQL is a safe integer
+	// Performance fix: SELECT all columns directly instead of id-only + WHERE IN (all_ids)
+	// See: hq-ihwsj - bd list uses inefficient WHERE IN (all_ids) query pattern
 	querySQL := fmt.Sprintf(`
-		SELECT id FROM issues
+		SELECT id, content_hash, title, description, design, acceptance_criteria, notes,
+		       status, priority, issue_type, assignee, estimated_minutes,
+		       created_at, created_by, owner, updated_at, closed_at, external_ref,
+		       compaction_level, compacted_at, compacted_at_commit, original_size, source_repo, close_reason,
+		       deleted_at, deleted_by, delete_reason, original_type,
+		       sender, ephemeral, pinned, is_template, crystallizes,
+		       await_type, await_id, timeout_ns, waiters,
+		       hook_bead, role_bead, agent_state, last_activity, role_type, rig, mol_type,
+		       event_kind, actor, target, payload,
+		       due_at, defer_until,
+		       quality_score, work_type, source_system
+		FROM issues
 		%s
 		ORDER BY priority ASC, created_at DESC
 		%s
@@ -225,7 +238,15 @@ func (s *DoltStore) SearchIssues(ctx context.Context, query string, filter types
 	}
 	defer rows.Close()
 
-	return s.scanIssueIDs(ctx, rows)
+	var issues []*types.Issue
+	for rows.Next() {
+		issue, err := scanIssueRow(rows)
+		if err != nil {
+			return nil, err
+		}
+		issues = append(issues, issue)
+	}
+	return issues, rows.Err()
 }
 
 // GetReadyWork returns issues that are ready to work on (not blocked)
```
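Before vendoring, a patch like this can be sanity-checked against an upstream checkout; a sketch, with paths that are purely illustrative:

```bash
# In a local checkout of the upstream beads repository (path is hypothetical)
cd ~/src/beads
git apply --check ~/nixos-configs/home/roles/development/beads-search-query-optimization.patch &&
  echo "patch applies cleanly"
```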
home/roles/development/commands/beads_batch_research_plan.md (new file, 317 lines)
````markdown
---
description: Batch research and planning for multiple beads with interactive question review
model: opus
---

# Beads Batch Research+Plan

This skill automates the common workflow of:
1. Running /beads_research in parallel for multiple beads
2. Presenting open questions interactively for user input (bead-by-bead)
3. Running /beads_plan for all researched beads (plus any spawned from splits)

## When to Use

- You have multiple beads ready for work
- You want to research and plan them efficiently before implementation
- You prefer to batch your question-answering rather than context-switching between skills

## Phase 1: Selection

1. **Get ready beads**: Run `bd ready --limit=20` to list beads with no blockers

2. **Filter already-researched beads**:
   For each ready bead, check if it already has research:
   ```bash
   ls thoughts/beads-{bead-id}/research.md 2>/dev/null
   ```

   Categorize beads:
   - **Needs research**: No `research.md` exists
   - **Has research, needs plan**: `research.md` exists but no `plan.md`
   - **Already planned**: Both `research.md` and `plan.md` exist

3. **Present selection**:
   ```
   Ready beads available for batch research+plan:

   NEEDS RESEARCH:
   - {bead-id}: {title} (type: {type})
   - ...

   HAS RESEARCH (plan only):
   - {bead-id}: {title} (type: {type})
   - ...

   ALREADY PLANNED (skip):
   - {bead-id}: {title}

   Which beads would you like to process?
   ```

4. **Use AskUserQuestion** with `multiSelect: true`:
   - Include bead ID and title for each option
   - Separate options by category
   - Allow selection across categories

## Phase 2: Parallel Research

For each selected bead that NEEDS RESEARCH, launch a research subagent.

### Subagent Instructions Template

```
Research bead [BEAD_ID]: [BEAD_TITLE]

1. **Load bead context**:
   ```bash
   bd show [BEAD_ID]
   ```

2. **Create artifact directory**:
   ```bash
   mkdir -p thoughts/beads-[BEAD_ID]
   ```

3. **Conduct research** following beads_research.md patterns:
   - Analyze and decompose the research question
   - Spawn parallel sub-agent tasks (codebase-locator, codebase-analyzer, etc.)
   - Synthesize findings

4. **Write research document** to `thoughts/beads-[BEAD_ID]/research.md`:
   - Include frontmatter with metadata
   - Document findings with file:line references
   - **CRITICAL**: Include "## Open Questions" section listing any unresolved items

5. **Return summary**:
   - Research status (complete/partial)
   - Number of open questions
   - Key findings summary (2-3 bullet points)
   - List of open questions verbatim
```

### Launching Subagents

Use `subagent_type: "opus"` for research subagents (matches beads_research model setting).

Launch ALL research subagents in a single message for parallel execution:
```
<Task calls for each selected bead needing research - all in one message>
```

### Collecting Results

Wait for ALL research subagents to complete. Collect:
- Bead ID
- Research status
- Open questions list
- Any errors encountered

## Phase 3: Interactive Question Review

Present each bead's open questions sequentially for user input.

### For Each Bead (in order):

1. **Present research summary**:
   ```
   ## Bead {N}/{total}: {bead-id} - {title}

   Research complete. Key findings:
   - {finding 1}
   - {finding 2}

   Open questions requiring your input:
   1. {question 1}
   2. {question 2}

   Additionally:
   - Should this bead be split into multiple beads? (y/n)
   - If split, describe the split:
   ```

2. **Collect user responses**:
   - Answers to open questions
   - Split decision (yes/no)
   - If split: new bead titles and how to divide the work

3. **Handle splits**:
   If user indicates a split:
   ```bash
   # Create new beads for split work
   bd create --title="{split title 1}" --type={type} --priority={priority} \
     --description="{description based on user input}"

   # Update original bead if scope narrowed
   bd update {original-bead-id} --description="{updated description}"
   ```

   Track new bead IDs for inclusion in planning phase.

4. **Update research document**:
   Append user answers to `thoughts/beads-{id}/research.md`:
   ```markdown
   ## User Clarifications [{timestamp}]

   Q: {question 1}
   A: {user answer 1}

   Q: {question 2}
   A: {user answer 2}

   ## Bead Splits
   {If split: description of split and new bead IDs}
   ```

### Progress Tracking

After each bead's questions are answered, confirm before moving to next:
```
Questions answered for {bead-id}. {N-1} beads remaining.
Continue to next bead? (y/n)
```

### Beads with No Questions

If a bead's research had no open questions:
```
## Bead {N}/{total}: {bead-id} - {title}

Research complete with no open questions.

Key findings:
- {finding 1}
- {finding 2}

Should this bead be split? (y/n)
```

## Phase 4: Parallel Planning

After all questions answered, launch planning subagents for all beads.

### Beads to Plan

Include:
- Original beads that were researched
- Beads that had existing research (from selection phase)
- New beads spawned from splits

### Subagent Instructions Template

```
Create implementation plan for bead [BEAD_ID]: [BEAD_TITLE]

1. **Load context**:
   ```bash
   bd show [BEAD_ID]
   ```

2. **Read research** (it exists and has user clarifications):
   Read `thoughts/beads-[BEAD_ID]/research.md` FULLY

3. **Create plan** following beads_plan.md patterns:
   - Context gathering via sub-agents
   - Design approach based on research findings and user clarifications
   - **Skip interactive questions** - they were already answered in research review

4. **Write plan** to `thoughts/beads-[BEAD_ID]/plan.md`:
   - Full plan structure with phases
   - Success criteria (automated and manual)
   - References to research document

5. **Update bead**:
   ```bash
   bd update [BEAD_ID] --notes="Plan created: thoughts/beads-[BEAD_ID]/plan.md"
   ```

6. **Return summary**:
   - Plan status (complete/failed)
   - Number of phases
   - Estimated complexity (small/medium/large)
   - Any issues encountered
```

### Launching Subagents

Use `subagent_type: "opus"` for planning subagents (matches beads_plan model setting).

Launch ALL planning subagents in a single message:
```
<Task calls for each bead to plan - all in one message>
```

### Handling Beads Without Research

For beads that had existing research but user didn't review questions:
- Planning subagent reads existing research
- If research has unresolved open questions, subagent should flag this in its return

## Phase 5: Summary

After all planning completes, present final summary.

### Summary Format

```
## Batch Research+Plan Complete

### Successfully Processed:
| Bead | Title | Research | Plan | Phases | Complexity |
|------|-------|----------|------|--------|------------|
| {id} | {title} | Complete | Complete | 3 | medium |
| {id} | {title} | Complete | Complete | 2 | small |

### New Beads (from splits):
| Bead | Title | Parent | Status |
|------|-------|--------|--------|
| {new-id} | {title} | {parent-id} | Planned |

### Failed:
| Bead | Title | Phase Failed | Error |
|------|-------|--------------|-------|
| {id} | {title} | Research | Timeout |

### Next Steps:
1. Review plans at `thoughts/beads-{id}/plan.md`
2. Run `/parallel_beads` to implement all planned beads
3. Or run `/beads_implement {id}` for individual implementation

### Artifacts Created:
- Research: thoughts/beads-{id}/research.md (x{N} files)
- Plans: thoughts/beads-{id}/plan.md (x{N} files)
```

## Error Handling

### Research Subagent Failure
- Log the failure with bead ID and error
- Continue with other beads
- Exclude failed beads from question review and planning
- Report in final summary

### Planning Subagent Failure
- Log the failure with bead ID and error
- Research still valid - can retry planning manually
- Report in final summary

### User Cancellation During Question Review
- Save progress to bead notes
- Report which beads were completed
- User can resume with remaining beads in new session

### Split Bead Creation Failure
- Report error but continue with original bead
- User can manually create split beads later

## Resource Limits

- Maximum concurrent research subagents: 5
- Maximum concurrent planning subagents: 5
- If more beads selected, process in batches

## Notes

- This skill is designed for the "research+plan before implementation" workflow
- Pairs well with `/parallel_beads` for subsequent implementation
- Run `/reconcile_beads` after implementation PRs merge
````
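A minimal shell sketch of the Phase 1 categorization described above, assuming the candidate IDs have already been collected from `bd ready --limit=20` (its output format is not shown here, so the list is hard-coded):

```bash
# Placeholder IDs; the document's own examples use this naming scheme
BEAD_IDS="nixos-configs-abc nixos-configs-xyz"

for id in $BEAD_IDS; do
  dir="thoughts/beads-$id"
  if [ ! -f "$dir/research.md" ]; then
    echo "$id: needs research"
  elif [ ! -f "$dir/plan.md" ]; then
    echo "$id: has research, needs plan"
  else
    echo "$id: already planned"
  fi
done
```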
````diff
@@ -54,6 +54,8 @@ When this command is invoked:
    - Read `thoughts/beads-{bead-id}/plan.md` FULLY
    - Check for any existing checkmarks (- [x]) indicating partial progress
    - Read any research at `thoughts/beads-{bead-id}/research.md`
+   - If plan's Success Criteria references contribution guidelines (e.g., "Per CONTRIBUTING.md:"),
+     verify the original CONTRIBUTING.md still exists and requirements are current
 
 5. **Mark bead in progress** (if not already):
    ```bash
@@ -127,6 +129,10 @@ All phases completed and automated verification passed:
 - {List manual verification items from plan}
 
 Let me know when manual testing is complete so I can close the bead.
+
+**Contribution guidelines compliance:**
+- {List any contribution guideline requirements that were part of Success Criteria}
+- {Note if any requirements could not be automated and need manual review}
 ```
 
 **STOP HERE and wait for user confirmation.**
````
````diff
@@ -51,13 +51,32 @@ When this command is invoked:
    - Any linked tickets or docs
    - Use Read tool WITHOUT limit/offset
 
-2. **Spawn initial research tasks**:
+2. **Check for contribution guidelines**:
+
+   ```bash
+   # Check standard locations for contribution guidelines
+   for f in CONTRIBUTING.md .github/CONTRIBUTING.md docs/CONTRIBUTING.md; do
+     if [ -f "$f" ]; then
+       echo "Found: $f"
+       break
+     fi
+   done
+   ```
+
+   If found:
+   - Read the file fully
+   - Extract actionable requirements (testing, code style, documentation, PR conventions)
+   - These requirements MUST be incorporated into the plan's Success Criteria
+
+   If not found, note "No contribution guidelines found" and proceed.
+
+3. **Spawn initial research tasks**:
    - **codebase-locator**: Find all files related to the task
    - **codebase-analyzer**: Understand current implementation
    - **codebase-pattern-finder**: Find similar features to model after
    - **thoughts-locator**: Find any existing plans or decisions
 
-3. **Read all files identified by research**:
+4. **Read all files identified by research**:
    - Read them FULLY into main context
    - Cross-reference with requirements
 
@@ -273,6 +292,12 @@ Always separate into two categories:
 - Performance under real conditions
 - Edge cases hard to automate
 
+**From Contribution Guidelines** (if CONTRIBUTING.md exists):
+- Include any testing requirements specified in guidelines
+- Include any code style/linting requirements
+- Include any documentation requirements
+- Reference the guideline: "Per CONTRIBUTING.md: {requirement}"
+
 ## Example Invocation
 
 ```
````
````diff
@@ -51,6 +51,18 @@ When this command is invoked:
    - Use the Read tool WITHOUT limit/offset parameters
    - Read these files yourself in the main context before spawning sub-tasks
 
+### Step 1.5: Check for contribution guidelines
+
+Before spawning sub-agents, check if the repository has contribution guidelines:
+
+```bash
+for f in CONTRIBUTING.md .github/CONTRIBUTING.md docs/CONTRIBUTING.md; do
+  if [ -f "$f" ]; then echo "Found: $f"; break; fi
+done
+```
+
+If found, read the file and note key requirements. These should be included in the research document under a "## Contribution Guidelines" section if relevant to the research question.
+
 ### Step 2: Analyze and decompose the research question
 - Break down the query into composable research areas
 - Identify specific components, patterns, or concepts to investigate
@@ -143,6 +155,10 @@ status: complete
 ## Architecture Documentation
 {Current patterns, conventions found in codebase}
 
+## Contribution Guidelines
+{If CONTRIBUTING.md exists, summarize key requirements relevant to the research topic}
+{If no guidelines found, omit this section}
+
 ## Historical Context (from thoughts/)
 {Relevant insights from thoughts/ with references}
 
````
@@ -42,7 +42,46 @@ AskUserQuestion with:
|
|||||||
- options from filtered bd ready output
|
- options from filtered bd ready output
|
||||||
```
|
```
|
||||||
|
|
||||||
## Phase 2: Parallel Implementation
|
## Phase 2: Worktree Setup
|
||||||
|
|
||||||
|
Before launching implementation subagents, create worktrees for all selected beads:
|
||||||
|
|
||||||
|
1. **Get repository name**:
|
||||||
|
```bash
|
||||||
|
REPO_NAME=$(git remote get-url origin | sed 's|.*/||' | sed 's/\.git$//')
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **For each selected bead**, create its worktree:
|
||||||
|
```bash
|
||||||
|
BEAD_ID="[bead-id]"
|
||||||
|
# Check if worktree already exists
|
||||||
|
if [ -d "$HOME/wt/${REPO_NAME}/${BEAD_ID}" ]; then
|
||||||
|
echo "Worktree already exists: ~/wt/${REPO_NAME}/${BEAD_ID}"
|
||||||
|
# Ask user: remove and recreate, or skip this bead?
|
||||||
|
else
|
||||||
|
git worktree add -b "bead/${BEAD_ID}" "$HOME/wt/${REPO_NAME}/${BEAD_ID}"
|
||||||
|
fi
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Track created worktrees**:
|
||||||
|
Maintain a list of (bead_id, worktree_path) pairs for use in subagent instructions.
|
||||||
|
|
||||||
|
4. **Report status**:
|
||||||
|
```
|
||||||
|
Created worktrees:
|
||||||
|
- nixos-configs-abc → ~/wt/nixos-configs/nixos-configs-abc (branch: bead/nixos-configs-abc)
|
||||||
|
- nixos-configs-xyz → ~/wt/nixos-configs/nixos-configs-xyz (branch: bead/nixos-configs-xyz)
|
||||||
|
|
||||||
|
Skipped (existing worktree):
|
||||||
|
- nixos-configs-123 → Ask user for resolution
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note**: If a worktree or branch already exists, ask the user before proceeding:
|
||||||
|
- Remove existing worktree and branch, then recreate
|
||||||
|
- Skip this bead
|
||||||
|
- Use existing worktree as-is (risky - branch may have diverged)
|
||||||
|
|
||||||
|
## Phase 3: Parallel Implementation
|
||||||
|
|
||||||
For each selected bead, launch a subagent using the Task tool. All subagents should be launched in parallel (single message with multiple Task tool calls).
|
For each selected bead, launch a subagent using the Task tool. All subagents should be launched in parallel (single message with multiple Task tool calls).
|
||||||
|
|
||||||
@@ -53,50 +92,92 @@ Each implementation subagent should receive these instructions:
|
|||||||
```
|
```
|
||||||
Work on bead [BEAD_ID]: [BEAD_TITLE]
|
Work on bead [BEAD_ID]: [BEAD_TITLE]
|
||||||
|
|
||||||
1. **Create worktree**:
|
Worktree path: [WORKTREE_PATH]
|
||||||
- Branch name: `bead/[BEAD_ID]`
|
|
||||||
- Worktree path: `~/wt/[REPO_NAME]/[BEAD_ID]`
|
|
||||||
- Command: `git worktree add -b bead/[BEAD_ID] ~/wt/[REPO_NAME]/[BEAD_ID]`
|
|
||||||
|
|
||||||
2. **Review the bead requirements**:
|
## CRITICAL: Branch Verification (MUST DO FIRST)
|
||||||
|
|
||||||
|
1. **Navigate to worktree**:
|
||||||
|
```bash
|
||||||
|
cd [WORKTREE_PATH]
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Verify branch** (MANDATORY before ANY modifications):
|
||||||
|
```bash
|
||||||
|
CURRENT_BRANCH=$(git branch --show-current)
|
||||||
|
echo "Current branch: $CURRENT_BRANCH"
|
||||||
|
pwd
|
||||||
|
```
|
||||||
|
|
||||||
|
**ABORT CONDITIONS** - If ANY of these are true, STOP IMMEDIATELY:
|
||||||
|
- Branch is `main` or `master`
|
||||||
|
- Branch does not match `bead/[BEAD_ID]`
|
||||||
|
|
||||||
|
If you detect any abort condition:
|
||||||
|
```
|
||||||
|
ABORTING: Branch verification failed.
|
||||||
|
Expected branch: bead/[BEAD_ID]
|
||||||
|
Actual branch: [CURRENT_BRANCH]
|
||||||
|
Working directory: [pwd output]
|
||||||
|
|
||||||
|
DO NOT PROCEED. Report this error to the orchestrator.
|
||||||
|
```
|
||||||
|
|
||||||
## After Verification Passes

3. **Review the bead requirements**:
   - Run `bd show [BEAD_ID]` to understand the acceptance criteria
   - Note any external issue references (GitHub issues, Linear tickets, etc.)

4. **Extract validation criteria** (a small extraction sketch follows this list):
   - Check for a plan: `thoughts/beads-[BEAD_ID]/plan.md`
   - If plan exists:
     - Read the plan and find the "Automated Verification" section
     - Extract each verification command (lines starting with `- [ ]` followed by a command)
     - Example: `- [ ] Tests pass: \`make test\`` → extract `make test`
     - Note any "Per CONTRIBUTING.md:" requirements for additional validation
     - Also read the "Manual Verification" section from the plan if present
     - Save manual verification items for inclusion in the PR description (they won't be executed)
   - If no plan exists, use best-effort validation:
     - Check if `Makefile` exists → try `make test` and `make lint`
     - Check if `flake.nix` exists → try `nix flake check`
     - Check if `package.json` exists → try `npm test`
     - **Check for CONTRIBUTING.md** → read and extract testing/linting requirements
       - Track which requirements can be automated vs need manual review
       - Automated: commands that can be run (e.g., "run `make test`")
       - Manual: qualitative checks (e.g., "ensure documentation is updated")
   - If none found, note "No validation criteria found"
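A rough sketch of that extraction, assuming GNU grep with `-P` support and the checklist convention shown above:

```bash
# Sketch: list the backticked commands under "Automated Verification"
PLAN="thoughts/beads-[BEAD_ID]/plan.md"
if [ -f "$PLAN" ]; then
  # Print the section, then pull the command out of each "- [ ] Label: `cmd`" line
  sed -n '/Automated Verification/,/^#/p' "$PLAN" \
    | grep -oP '^- \[ \] [^`]*`\K[^`]+' \
    || echo "No validation criteria found"
fi
```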
5. **Implement the changes**:
   - Work in the worktree directory
   - Complete all acceptance criteria listed in the bead

   After implementation, run validation:
   - Execute each validation command from step 4
   - Track results in this format:
     ```
     VALIDATION_RESULTS:
     - make test: PASS
     - make lint: FAIL (exit code 1: src/foo.ts:23 - missing semicolon)
     - nix flake check: SKIP (not applicable - no flake.nix)
     - cargo test: ERROR (command not found)
     ```

   **Status definitions:**
   - **PASS**: Check executed successfully with no issues
   - **FAIL**: Check executed but found issues that need attention
   - **SKIP**: Check not applicable to this project (e.g., no Makefile for `make test`)
   - **ERROR**: Check could not execute (missing tool, permission error, command not found)
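A sketch of a runner that produces these statuses (SKIP is decided before calling, when a check does not apply to the project):

```bash
# Sketch: run one validation command and classify the outcome
run_check() {
  local cmd="$1" output rc
  # ERROR: the tool itself is missing
  if ! command -v "${cmd%% *}" >/dev/null 2>&1; then
    echo "- $cmd: ERROR (command not found)"
    return
  fi
  output=$($cmd 2>&1); rc=$?
  if [ "$rc" -eq 0 ]; then
    echo "- $cmd: PASS"
  else
    echo "- $cmd: FAIL (exit code $rc: $(printf '%s' "$output" | head -n 1))"
  fi
}

run_check "make test"
run_check "make lint"
```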
   - If any validation fails:
     - Continue with PR creation (don't block)
     - Document failures in bead notes: `bd update [BEAD_ID] --notes="Validation failures: [list]"`

6. **Commit and push**:
   - Stage all changes: `git add -A`
   - Create a descriptive commit message
   - Push the branch: `git push -u origin bead/[BEAD_ID]`

7. **Create a PR**:
   - Detect hosting provider from origin URL: `git remote get-url origin` (sketched below)
   - If URL contains `github.com`, use `gh`; otherwise use `tea` (Gitea/Forgejo)
   - PR title: "[BEAD_ID] [BEAD_TITLE]"
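The provider detection could look like this sketch:

```bash
# Sketch: pick gh for GitHub remotes, tea for anything else (Gitea/Forgejo)
ORIGIN_URL=$(git remote get-url origin)
case "$ORIGIN_URL" in
  *github.com*) PR_TOOL="gh" ;;
  *)            PR_TOOL="tea" ;;
esac
echo "Using $PR_TOOL for PR creation"
```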
@@ -119,14 +200,27 @@ Work on bead [BEAD_ID]: [BEAD_TITLE]

## Changes
- [List of changes made]

## Validation Steps Completed

### Automated Checks
| Check | Status | Details |
|-------|--------|---------|
| make test | PASS | |
| make lint | FAIL | src/foo.ts:23 - missing semicolon |
| nix flake check | SKIP | not applicable - no flake.nix |
| cargo test | ERROR | command not found |

### Manual Verification Required
[If plan has Manual Verification items, list them as unchecked boxes:]
- [ ] Verify UI changes match design mockups
- [ ] Test on mobile viewport sizes
[If no manual verification items: "None specified in plan."]

### CONTRIBUTING.md Compliance
[If CONTRIBUTING.md requirements were extracted:]
- [x] Tests pass (verified via `make test`)
- [ ] Documentation updated (needs manual review)
[If no CONTRIBUTING.md: "No contribution guidelines found."]
EOF
)"
```
@@ -146,44 +240,66 @@ Work on bead [BEAD_ID]: [BEAD_TITLE]

## Changes
- [List of changes made]

## Validation Steps Completed

### Automated Checks
| Check | Status | Details |
|-------|--------|---------|
| make test | PASS | |
| make lint | FAIL | src/foo.ts:23 - missing semicolon |
| nix flake check | SKIP | not applicable - no flake.nix |
| cargo test | ERROR | command not found |

### Manual Verification Required
[If plan has Manual Verification items, list them as unchecked boxes:]
- [ ] Verify UI changes match design mockups
- [ ] Test on mobile viewport sizes
[If no manual verification items: None specified in plan.]

### CONTRIBUTING.md Compliance
[If CONTRIBUTING.md requirements were extracted:]
- [x] Tests pass (verified via make test)
- [ ] Documentation updated (needs manual review)
[If no CONTRIBUTING.md: No contribution guidelines found.]"
```
8. **Update bead status**:
   - Mark the bead as "in_review": `bd update [BEAD_ID] --status=in_review`
   - Add the PR URL to the bead notes (a defensive sketch follows): `bd update [BEAD_ID] --notes="$(bd show [BEAD_ID] --json | jq -r '.notes')

     PR: [PR_URL]"`
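The command above appends rather than overwrites; a sketch of the same append with a null-notes guard (the `// ""` fallback is an addition, not part of the original command):

```bash
# Sketch: append the PR URL while preserving whatever notes already exist
EXISTING_NOTES=$(bd show [BEAD_ID] --json | jq -r '.notes // ""')
bd update [BEAD_ID] --notes="$EXISTING_NOTES

PR: [PR_URL]"
```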
9. **Report results**:
   - Return:
     - PR URL
     - Bead ID
     - Implementation status (success/failure/blocked)
     - Validation summary: `X passed, Y failed, Z skipped, W errors`
     - List of any validation failures or errors with details
   - If blocked or unable to complete, explain what's blocking progress
   - If validation failed, include the specific failures so the main agent can summarize them for the user
```

### Launching Subagents

For each bead, substitute into the template:

- `[BEAD_ID]` - the bead ID
- `[BEAD_TITLE]` - the bead title
- `[WORKTREE_PATH]` - the worktree path created in Phase 2

Use `subagent_type: "general-purpose"` for implementation subagents. Launch all selected beads' subagents in a single message for parallel execution:

```
<Task calls for each selected bead - all in one message>
```

**Important**: The worktree paths were created in Phase 2. Use the exact paths that were created, e.g.:

- `~/wt/nixos-configs/nixos-configs-abc`
- `~/wt/nixos-configs/nixos-configs-xyz`

Collect results from all subagents before proceeding.

## Phase 4: Parallel Review

After all implementation subagents complete, launch review subagents for each PR.
@@ -218,7 +334,7 @@ Review PR for bead [BEAD_ID]

Launch all review subagents in parallel.

## Phase 5: Cleanup and Summary

After reviews complete:
@@ -264,9 +380,21 @@ Example output:

## Error Handling

- **Worktree creation failures** (Phase 2):
  - If `git worktree add` fails (branch exists, path exists), prompt the user:
    - Remove existing and retry
    - Skip this bead
    - Use existing (with a warning about potential divergence)
  - Do NOT proceed to subagent launch until the worktree is confirmed

- **Branch verification failures** (subagent reports):
  - If a subagent reports it's on `main` or `master`, do NOT retry
  - Mark the bead as failed with reason "Branch verification failed"
  - Continue with other beads but flag this as a critical issue
  - Investigation required: the worktree may have been corrupted or not created properly

- **Subagent failures**: If a subagent fails or times out, note it in the summary but continue with other beads
- **PR creation failures**: Report the error but continue with reviews of successful PRs

## Resource Limits
@@ -4,12 +4,13 @@ description: Reconcile beads with merged PRs and close completed beads

# Reconcile Beads Workflow

This skill reconciles beads that are in `in_review` status with their corresponding PRs. If a PR has been merged, the bead is closed and any linked Gitea issue is also closed.

## Prerequisites

- Custom status `in_review` must be configured: `bd config set status.custom "in_review"`
- Beads in `in_review` status should have a PR URL in their notes
- `tea` CLI must be configured for closing Gitea issues

## Workflow
@@ -52,6 +53,34 @@ If the PR is merged:

bd close [BEAD_ID] --reason="PR merged: [PR_URL]"
```

### Step 3.1: Close corresponding Gitea issue (if any)

After closing a bead, check if it has a linked Gitea issue:

1. **Check for Gitea issue URL in bead notes**:

   Look for the pattern `Gitea issue: <URL>` in the notes. Extract the URL.

2. **Extract issue number from URL**:

   ```bash
   # Example: https://git.johnogle.info/johno/nixos-configs/issues/16 -> 16
   echo "$GITEA_URL" | grep -oP '/issues/\K\d+'
   ```

3. **Close the Gitea issue**:

   ```bash
   tea issues close [ISSUE_NUMBER]
   ```

4. **Handle errors gracefully**:
   - If issue is already closed: Log warning, continue
   - If issue not found: Log warning, continue
   - If `tea` fails: Log error, continue with other beads

   Example warning output:

   ```
   Warning: Could not close Gitea issue #16: issue already closed
   ```
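Steps 1-4 combine into a short sketch (assuming the bead's notes are available via `bd show --json`, as elsewhere in this skill):

```bash
# Sketch: close a linked Gitea issue, tolerating already-closed or missing issues
NOTES=$(bd show "$BEAD_ID" --json | jq -r '.notes // ""')
GITEA_URL=$(echo "$NOTES" | grep -oP 'Gitea issue: \K\S+' || true)
if [ -n "$GITEA_URL" ]; then
  ISSUE_NUMBER=$(echo "$GITEA_URL" | grep -oP '/issues/\K\d+')
  if ! tea issues close "$ISSUE_NUMBER"; then
    echo "Warning: Could not close Gitea issue #$ISSUE_NUMBER" >&2
  fi
fi
```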
### Step 4: Report summary

Present results:
@@ -60,10 +89,17 @@ Present results:

## Beads Reconciliation Summary

### Closed (PR Merged)
| Bead | PR | Gitea Issue | Title |
|------|-----|-------------|-------|
| beads-abc | #123 | #16 closed | Feature X |
| beads-xyz | #456 | (none) | Bug fix Y |

### Gitea Issues Closed
| Issue | Bead | Status |
|-------|------|--------|
| #16 | beads-abc | Closed successfully |
| #17 | beads-def | Already closed (skipped) |
| #99 | beads-ghi | Error: issue not found |

### Still in Review
| Bead | PR | Status | Title |
@@ -80,9 +116,14 @@ Present results:

- **Missing PR URL**: Skip the bead and report it
- **PR not found**: Report the error but continue with other beads
- **API errors**: Report and continue
- **Gitea issue already closed**: Log warning, continue (not an error)
- **Gitea issue not found**: Log warning, continue (issue may have been deleted)
- **No Gitea issue linked**: Normal case, no action needed
- **tea command fails**: Log error with output, continue with other beads

## Notes

- This skill complements `/parallel_beads`, which sets beads to `in_review` status
- Run this skill periodically or after merging PRs to keep beads in sync
- Beads with closed (but not merged) PRs are not automatically closed - they may need rework
- Gitea issues are only closed for beads that have a `Gitea issue: <URL>` in their notes
@@ -5,6 +5,91 @@ with lib;

let
  cfg = config.home.roles.development;

  # FIXME: Temporary override for upstream beads vendorHash mismatch
  # Remove after upstream fix: https://github.com/steveyegge/beads/issues/XXX
  beadsPackage = globalInputs.beads.packages.${system}.default.overrideAttrs (old: {
    vendorHash = "sha256-YU+bRLVlWtHzJ1QPzcKJ70f+ynp8lMoIeFlm+29BNPE=";

    # Performance fix: avoid WHERE IN (8000+ IDs) query pattern that hammers Dolt CPU
    # See: hq-ihwsj - bd list uses inefficient WHERE IN (all_ids) query pattern
    # The fix changes SearchIssues to SELECT all columns directly instead of:
    # 1. SELECT id FROM issues WHERE ... -> collect IDs
    # 2. SELECT * FROM issues WHERE id IN (all_ids) -> 8000+ placeholder IN clause
    patches = (old.patches or []) ++ [
      ./beads-search-query-optimization.patch
    ];
  });

  # Gastown - multi-agent workspace manager (no upstream flake.nix yet)
  # Source is tracked via flake input for renovate updates
  gastownRev = builtins.substring 0 8 (globalInputs.gastown.rev or "unknown");
  gastownPackage = pkgs.buildGoModule {
    pname = "gastown";
    version = "unstable-${gastownRev}";
    src = globalInputs.gastown;
    vendorHash = "sha256-ripY9vrYgVW8bngAyMLh0LkU/Xx1UUaLgmAA7/EmWQU=";
    subPackages = [ "cmd/gt" ];
    doCheck = false;

    # Must match ldflags from gastown Makefile - BuiltProperly=1 is required
    # or gt will error with "This binary was built with 'go build' directly"
    ldflags = [
      "-X github.com/steveyegge/gastown/internal/cmd.Version=${gastownRev}"
      "-X github.com/steveyegge/gastown/internal/cmd.Commit=${gastownRev}"
      "-X github.com/steveyegge/gastown/internal/cmd.BuildTime=nix-build"
      "-X github.com/steveyegge/gastown/internal/cmd.BuiltProperly=1"
    ];

    # Bug fixes not yet merged upstream
    # Each patch is stored in a separate file for clarity and maintainability
    patches = [
      # Fix validateRecipient bug: normalize addresses before comparison
      ./gastown-fix-validate-recipient.patch
      # Fix agentBeadToAddress to use title field for hq- prefixed beads
      ./gastown-fix-agent-bead-address-title.patch
      # Fix agentBeadToAddress to handle rig-specific prefixes (j-, sc-, etc.)
      ./gastown-fix-agent-bead-rig-prefix.patch
      # Fix crew/polecat home paths: remove incorrect /rig suffix
      ./gastown-fix-role-home-paths.patch
      # Fix town root detection: don't map to Mayor (causes spurious mismatch warnings)
      ./gastown-fix-town-root-detection.patch
      # Fix copyDir to handle symlinks (broken symlinks cause "no such file" errors)
      ./gastown-fix-copydir-symlinks.patch
      # Statusline optimization: skip expensive beads queries for detached sessions
      # Reduces Dolt CPU from ~70% to ~20% by caching and early-exit
      ./gastown-statusline-optimization.patch
    ];

    meta = with lib; {
      description = "Gas Town - multi-agent workspace manager by Steve Yegge";
      homepage = "https://github.com/steveyegge/gastown";
      license = licenses.mit;
      mainProgram = "gt";
    };
  };

  # Perles - TUI for beads issue tracking (no upstream flake.nix yet)
  # Source is tracked via flake input for renovate updates
  perlesRev = builtins.substring 0 8 (globalInputs.perles.rev or "unknown");
  perlesPackage = pkgs.buildGoModule {
    pname = "perles";
    version = "unstable-${perlesRev}";
    src = globalInputs.perles;
    vendorHash = "sha256-JHERJDzbiqgjWXwRhXVjgDEiDQ3AUXRIONotfPF21B0=";
    doCheck = false;

    ldflags = [
      "-X main.version=${perlesRev}"
    ];

    meta = with lib; {
      description = "Perles - Terminal UI for beads issue tracking";
      homepage = "https://github.com/zjrosen/perles";
      license = licenses.mit;
      mainProgram = "perles";
    };
  };

  # Fetch the claude-plugins repository (for humanlayer commands/agents)
  # Update the rev to get newer versions of the commands
  claudePluginsRepo = builtins.fetchGit {
@@ -37,10 +122,14 @@ in

  config = mkIf cfg.enable {
    home.packages = [
      beadsPackage
      gastownPackage
      perlesPackage
      pkgs.unstable.claude-code
      pkgs.unstable.claude-code-router
      pkgs.unstable.codex
      pkgs.dolt
      pkgs.sqlite

      # Custom packages
      pkgs.custom.tea-rbw
@@ -61,12 +150,14 @@ in

        if [ -f "$file" ]; then
          filename=$(basename "$file" .md)
          dest="$HOME/.claude/commands/humanlayer:''${filename}.md"
          rm -f "$dest" 2>/dev/null || true

          # Copy file and conditionally remove the "model:" line from frontmatter
          ${if cfg.allowArbitraryClaudeCodeModelSelection
            then "cp \"$file\" \"$dest\""
            else "${pkgs.gnused}/bin/sed '/^model:/d' \"$file\" > \"$dest\""
          }
          chmod u+w "$dest" 2>/dev/null || true
        fi
      done
@@ -75,17 +166,19 @@ in

        if [ -f "$file" ]; then
          filename=$(basename "$file" .md)
          dest="$HOME/.claude/agents/humanlayer:''${filename}.md"
          rm -f "$dest" 2>/dev/null || true

          # Copy file and conditionally remove the "model:" line from frontmatter
          ${if cfg.allowArbitraryClaudeCodeModelSelection
            then "cp \"$file\" \"$dest\""
            else "${pkgs.gnused}/bin/sed '/^model:/d' \"$file\" > \"$dest\""
          }
          chmod u+w "$dest" 2>/dev/null || true
        fi
      done

      # Copy local commands from this repo (with retry for race conditions with running Claude)
      for file in ${./commands}/*.md; do
        if [ -f "$file" ]; then
          filename=$(basename "$file" .md)
          dest="$HOME/.claude/commands/''${filename}.md"
@@ -95,14 +188,47 @@ in

            sleep 0.5
            cp "$file" "$dest" || echo "Warning: Failed to copy $filename.md to commands"
          fi
          chmod u+w "$dest" 2>/dev/null || true
        fi
      done

      # Copy local skills (reference materials) to skills subdirectory
      mkdir -p ~/.claude/commands/skills
      for file in ${./skills}/*.md; do
        if [ -f "$file" ]; then
          filename=$(basename "$file" .md)
          dest="$HOME/.claude/commands/skills/''${filename}.md"
          rm -f "$dest" 2>/dev/null || true
          if ! cp "$file" "$dest" 2>/dev/null; then
            sleep 0.5
            cp "$file" "$dest" || echo "Warning: Failed to copy $filename.md to skills"
          fi
          chmod u+w "$dest" 2>/dev/null || true
        fi
      done

      # Copy micro-skills (compact reusable knowledge referenced by formulas)
      for file in ${./skills/micro}/*.md; do
        if [ -f "$file" ]; then
          dest="$HOME/.claude/commands/skills/$(basename "$file")"
          rm -f "$dest" 2>/dev/null || true
          cp "$file" "$dest"
          chmod u+w "$dest" 2>/dev/null || true
        fi
      done

      # Install beads formulas to user-level formula directory
      mkdir -p ~/.beads/formulas
      for file in ${./formulas}/*.formula.toml; do
        if [ -f "$file" ]; then
          dest="$HOME/.beads/formulas/$(basename "$file")"
          rm -f "$dest" 2>/dev/null || true
          cp "$file" "$dest"
          chmod u+w "$dest" 2>/dev/null || true
        fi
      done

      $DRY_RUN_CMD echo "Claude Code plugins installed: humanlayer commands/agents + local commands + local skills + formulas"
    '';

    # Set up beads Claude Code integration (hooks for SessionStart/PreCompact)
@@ -110,11 +236,51 @@ in

    home.activation.claudeCodeBeadsSetup = lib.hm.dag.entryAfter ["writeBoundary" "claudeCodeCommands"] ''
      # Run bd setup claude to install hooks into ~/.claude/settings.json
      # This is idempotent - safe to run multiple times
      ${beadsPackage}/bin/bd setup claude 2>/dev/null || true

      $DRY_RUN_CMD echo "Claude Code beads integration configured (hooks installed)"
    '';

    # Beads timer gate checker (Linux only - uses systemd)
    # Runs every 5 minutes to auto-resolve expired timer gates across all beads projects
    # This enables self-scheduling molecules (watchers, patrols, etc.)
    systemd.user.services.beads-gate-check = lib.mkIf pkgs.stdenv.isLinux {
      Unit = {
        Description = "Check and resolve expired beads timer gates";
      };
      Service = {
        Type = "oneshot";
        # Check gates in all workspaces that have running daemons
        ExecStart = pkgs.writeShellScript "beads-gate-check-all" ''
          # Get list of workspaces from daemon registry
          workspaces=$(${beadsPackage}/bin/bd daemon list --json 2>/dev/null | ${pkgs.jq}/bin/jq -r '.[].workspace // empty' 2>/dev/null)

          if [ -z "$workspaces" ]; then
            exit 0  # No beads workspaces, nothing to do
          fi

          for ws in $workspaces; do
            if [ -d "$ws" ]; then
              cd "$ws" && ${beadsPackage}/bin/bd gate check --type=timer --quiet 2>/dev/null || true
            fi
          done
        '';
      };
    };

    systemd.user.timers.beads-gate-check = lib.mkIf pkgs.stdenv.isLinux {
      Unit = {
        Description = "Periodic beads timer gate check";
      };
      Timer = {
        OnBootSec = "5min";
        OnUnitActiveSec = "5min";
      };
      Install = {
        WantedBy = [ "timers.target" ];
      };
    };

    # Note: modules must be imported at top-level home config
  };
}
home/roles/development/formulas/quick-fix.formula.toml (115 lines, new file)
@@ -0,0 +1,115 @@

# Quick Fix Formula
#
# Streamlined workflow for well-understood bugs and small fixes.
# Skips the deep research and planning phases of RPI - get in, fix, get out.
#
# Use when:
# - Bug is well-understood (you know what's broken)
# - Fix is straightforward (no architectural decisions)
# - Change is small (< 100 lines)
#
# Use RPI instead when:
# - Root cause is unclear
# - Multiple approaches possible
# - Significant design decisions needed

formula = "quick-fix"
description = """
Streamlined workflow for bugs and small fixes.

A faster alternative to RPI for well-understood issues:
1. Quick investigation to confirm understanding
2. Implement the fix
3. Verify with tests
4. Commit and close

No human gates - designed for quick turnaround on obvious fixes.
"""
version = 1
type = "workflow"

# === Variables ===

[vars.title]
required = true
description = "Brief description of the bug/fix"

[vars.bead_id]
description = "Existing bead ID (creates new if not provided)"

[vars.test_cmd]
default = "make test"
description = "Command to verify the fix"

# === Steps ===

[[steps]]
id = "investigate"
title = "Investigate: {{title}}"
description = """
Quick investigation to confirm understanding of the bug.

Goals:
- Locate the problematic code
- Confirm root cause matches expectations
- Identify files that need changes

This is NOT deep research - spend 5-10 minutes max.
If the bug is more complex than expected, pivot to RPI workflow.

Output: Mental model of what to fix (no artifact needed).
"""

[[steps]]
id = "fix"
title = "Fix: {{title}}"
needs = ["investigate"]
description = """
Implement the fix.

Guidelines:
- Make minimal changes to fix the issue
- Follow existing code patterns
- Add/update tests if appropriate
- Keep changes focused (no drive-by refactors)

If the fix grows beyond expectations, pause and consider:
- Should this be an RPI workflow instead?
- Should we split into multiple changes?
"""

[[steps]]
id = "verify"
title = "Verify fix"
needs = ["fix"]
description = """
Verify the fix works correctly.

Run: {{test_cmd}}

Also check:
- Bug is actually fixed (manual verification)
- No obvious regressions introduced
- Code compiles/builds cleanly

If tests fail, iterate on the fix step.
"""

[[steps]]
id = "commit"
title = "Commit and close"
needs = ["verify"]
description = """
Commit the fix and close the bead.

Actions:
1. Stage changes: git add -A
2. Commit with descriptive message: git commit -m "fix: {{title}}"
3. Push to remote: git push
4. Close the bead: bd close {{bead_id}}

Commit message should explain:
- What was broken
- How it was fixed
- Any relevant context
"""
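Instantiating this formula presumably follows the same pattern the rpi formula documents below; a hypothetical invocation (values are illustrative):

```bash
# Sketch: pour the quick-fix formula for a known bug
bd pour quick-fix --var title="Fix off-by-one in pagination" --var test_cmd="make test"
```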
home/roles/development/formulas/rpi.formula.toml (124 lines, new file)
@@ -0,0 +1,124 @@

# RPI Formula - Research -> Plan -> Implement
#
# Universal workflow for feature development with human gates.

formula = "rpi"
description = """
Research -> Plan -> Implement workflow.

Usage:
  bd pour rpi --var title="Add user preferences"
  bd pour rpi --var title="Auth" --var bead_id="project-abc" --var test_cmd="nix flake check"
"""
version = 1
type = "workflow"

# ─── Variables ───

[vars.title]
required = true
description = "What are we building?"

[vars.bead_id]
description = "Existing bead ID (creates new if not provided)"

[vars.test_cmd]
default = "make test"
description = "Command to run tests"

[vars.lint_cmd]
default = "make lint"
description = "Command to run linting"

# ─── Research Phase ───

[[steps]]
id = "research"
title = "Research: {{title}}"
skill = "research-agents"
description = """
Conduct comprehensive codebase research.

Goals:
- Understand current implementation
- Identify patterns to follow
- Find relevant files and dependencies
- Document key discoveries

Output: thoughts/beads-{{bead_id}}/research.md
"""

# ─── Planning Phase ───

[[steps]]
id = "plan"
title = "Plan: {{title}}"
needs = ["research"]
type = "human"
skill = "planning"
description = """
Create detailed implementation plan based on research.

Goals:
- Present understanding and clarify requirements
- Propose design options with tradeoffs
- Define phases with success criteria
- Identify what we're NOT doing

Output: thoughts/beads-{{bead_id}}/plan.md
"""

[steps.gate]
type = "human"
reason = "Plan approval before implementation"

# ─── Implementation Phase ───

[[steps]]
id = "implement"
title = "Implement: {{title}}"
needs = ["plan"]
description = """
Execute the approved plan phase by phase.

For each phase:
1. Make the changes
2. Run verification: {{test_cmd}}, {{lint_cmd}}
3. Update plan checkboxes for resumability

Stop and ask if encountering unexpected issues.
"""

# ─── Verification Phase ───

[[steps]]
id = "verify"
title = "Manual verification"
needs = ["implement"]
type = "human"
description = """
Human confirms implementation works correctly.

Check: feature works, edge cases handled, no regressions.
Tests: {{test_cmd}} | Lint: {{lint_cmd}}
"""

[steps.gate]
type = "human"
reason = "Confirm implementation is correct"

# ─── Completion ───

[[steps]]
id = "complete"
title = "Close bead"
needs = ["verify"]
skill = "artifact-format"
description = """
Mark work as complete.

Actions:
- bd update {{bead_id}} --notes="Implementation complete"
- bd close {{bead_id}} --reason="Completed: {{title}}"
- bd sync && git push
"""
@@ -0,0 +1,15 @@

diff --git a/internal/mail/router.go b/internal/mail/router.go
--- a/internal/mail/router.go
+++ b/internal/mail/router.go
@@ -326,7 +326,10 @@ func agentBeadToAddress(bead *agentBead) string {
 	}
 
 	// Fall back to parsing description for role_type and rig
-	return parseAgentAddressFromDescription(bead.Description)
+	if bead.Title != "" && strings.Contains(bead.Title, "/") {
+		return bead.Title
+	}
+	return parseAgentAddressFromDescription(bead.Description)
 }
 
 // Handle gt- prefixed IDs (legacy format)
@@ -0,0 +1,35 @@

diff --git a/internal/mail/router.go b/internal/mail/router.go
--- a/internal/mail/router.go
+++ b/internal/mail/router.go
@@ -330,8 +330,29 @@ func agentBeadToAddress(bead *agentBead) string {
 	}
 
 	// Handle gt- prefixed IDs (legacy format)
-	if !strings.HasPrefix(id, "gt-") {
-		return "" // Not a valid agent bead ID
+	// Handle rig-specific prefixes: <prefix>-<rig>-<role>-<name>
+	// Examples: j-java-crew-americano -> java/crew/americano
+	idParts := strings.Split(id, "-")
+	if len(idParts) >= 3 {
+		for i, part := range idParts {
+			if part == "crew" || part == "polecat" || part == "polecats" {
+				if i >= 1 && i < len(idParts)-1 {
+					rig := idParts[i-1]
+					name := strings.Join(idParts[i+1:], "-")
+					return rig + "/" + part + "/" + name
+				}
+			}
+			if part == "witness" || part == "refinery" {
+				if i >= 1 {
+					return idParts[i-1] + "/" + part
+				}
+			}
+		}
+	}
+
+	// Handle gt- prefixed IDs (legacy format)
+	if !strings.HasPrefix(id, "gt-") {
+		return "" // Not a valid agent bead ID
 	}
 
 	// Strip prefix
home/roles/development/gastown-fix-copydir-symlinks.patch (25 lines, new file)
@@ -0,0 +1,25 @@

diff --git a/internal/git/git.go b/internal/git/git.go
--- a/internal/git/git.go
+++ b/internal/git/git.go
@@ -73,7 +73,19 @@ func copyDir(src, dest string) error {
 		srcPath := filepath.Join(src, entry.Name())
 		destPath := filepath.Join(dest, entry.Name())
 
-		if entry.IsDir() {
+		// Handle symlinks (recreate them, do not follow)
+		if entry.Type()&os.ModeSymlink != 0 {
+			linkTarget, err := os.Readlink(srcPath)
+			if err != nil {
+				return err
+			}
+			if err := os.Symlink(linkTarget, destPath); err != nil {
+				return err
+			}
+			continue
+		}
+
+		if entry.IsDir() {
 			if err := copyDir(srcPath, destPath); err != nil {
 				return err
 			}
home/roles/development/gastown-fix-role-home-paths.patch (18 lines, new file)
@@ -0,0 +1,18 @@

diff --git a/internal/cmd/role.go b/internal/cmd/role.go
--- a/internal/cmd/role.go
+++ b/internal/cmd/role.go
@@ -326,11 +326,11 @@ func getRoleHome(role Role, rig, polecat, townRoot string) string {
 		if rig == "" || polecat == "" {
 			return ""
 		}
-		return filepath.Join(townRoot, rig, "polecats", polecat, "rig")
+		return filepath.Join(townRoot, rig, "polecats", polecat)
 	case RoleCrew:
 		if rig == "" || polecat == "" {
 			return ""
 		}
-		return filepath.Join(townRoot, rig, "crew", polecat, "rig")
+		return filepath.Join(townRoot, rig, "crew", polecat)
 	default:
 		return ""
 	}
home/roles/development/gastown-fix-town-root-detection.patch (18 lines, new file)
@@ -0,0 +1,18 @@

diff --git a/internal/cmd/prime.go b/internal/cmd/prime.go
--- a/internal/cmd/prime.go
+++ b/internal/cmd/prime.go
@@ -276,11 +276,12 @@ func detectRole(cwd, townRoot string) RoleInfo {
 
 	// Check for mayor role
 	// At town root, or in mayor/ or mayor/rig/
 	if relPath == "." || relPath == "" {
-		ctx.Role = RoleMayor
-		return ctx
+		return ctx // RoleUnknown - town root is shared space
 	}
+
+	// Check for mayor role: mayor/ or mayor/rig/
 	if len(parts) >= 1 && parts[0] == "mayor" {
 		ctx.Role = RoleMayor
 		return ctx
 	}
home/roles/development/gastown-fix-validate-recipient.patch (13 lines, new file)
@@ -0,0 +1,13 @@

diff --git a/internal/mail/router.go b/internal/mail/router.go
index b864c069..4b6a045b 100644
--- a/internal/mail/router.go
+++ b/internal/mail/router.go
@@ -646,7 +646,7 @@ func (r *Router) validateRecipient(identity string) error {
 	}
 
 	for _, agent := range agents {
-		if agentBeadToAddress(agent) == identity {
+		if AddressToIdentity(agentBeadToAddress(agent)) == AddressToIdentity(identity) {
 			return nil // Found matching agent
 		}
 	}
home/roles/development/gastown-statusline-optimization.patch (135 lines, new file)
@@ -0,0 +1,135 @@

diff --git a/internal/cmd/statusline.go b/internal/cmd/statusline.go
index 2edf1be8..00253eea 100644
--- a/internal/cmd/statusline.go
+++ b/internal/cmd/statusline.go
@@ -6,6 +6,7 @@ import (
 	"path/filepath"
 	"sort"
 	"strings"
+	"time"
 
 	"github.com/spf13/cobra"
 	"github.com/steveyegge/gastown/internal/beads"
@@ -14,6 +15,37 @@ import (
 	"github.com/steveyegge/gastown/internal/tmux"
 	"github.com/steveyegge/gastown/internal/workspace"
 )
+
+// statusLineCacheTTL is how long cached status output remains valid.
+const statusLineCacheTTL = 10 * time.Second
+
+// statusLineCachePath returns the cache file path for a session.
+func statusLineCachePath(session string) string {
+	return filepath.Join(os.TempDir(), fmt.Sprintf("gt-status-%s", session))
+}
+
+// getStatusLineCache returns cached status if fresh, empty string otherwise.
+func getStatusLineCache(session string) string {
+	path := statusLineCachePath(session)
+	info, err := os.Stat(path)
+	if err != nil {
+		return ""
+	}
+	if time.Since(info.ModTime()) > statusLineCacheTTL {
+		return ""
+	}
+	data, err := os.ReadFile(path)
+	if err != nil {
+		return ""
+	}
+	return string(data)
+}
+
+// setStatusLineCache writes status to cache file.
+func setStatusLineCache(session, status string) {
+	path := statusLineCachePath(session)
+	_ = os.WriteFile(path, []byte(status), 0644)
+}
 
 var (
 	statusLineSession string
@@ -34,6 +66,19 @@ func init() {
 func runStatusLine(cmd *cobra.Command, args []string) error {
 	t := tmux.NewTmux()
 
+	// Optimization: skip expensive beads queries for detached sessions
+	if statusLineSession != "" {
+		if !t.IsSessionAttached(statusLineSession) {
+			fmt.Print("○ |")
+			return nil
+		}
+		// Check cache for attached sessions too
+		if cached := getStatusLineCache(statusLineSession); cached != "" {
+			fmt.Print(cached)
+			return nil
+		}
+	}
+
 	// Get session environment
 	var rigName, polecat, crew, issue, role string
 
@@ -150,7 +195,11 @@ func runWorkerStatusLine(t *tmux.Tmux, session, rigName, polecat, crew, issue st
 
 	// Output
 	if len(parts) > 0 {
-		fmt.Print(strings.Join(parts, " | ") + " |")
+		output := strings.Join(parts, " | ") + " |"
+		if statusLineSession != "" {
+			setStatusLineCache(statusLineSession, output)
+		}
+		fmt.Print(output)
 	}
 
 	return nil
@@ -389,7 +438,11 @@ func runMayorStatusLine(t *tmux.Tmux) error {
 		}
 	}
 
-	fmt.Print(strings.Join(parts, " | ") + " |")
+	output := strings.Join(parts, " | ") + " |"
+	if statusLineSession != "" {
+		setStatusLineCache(statusLineSession, output)
+	}
+	fmt.Print(output)
 	return nil
 }
 
@@ -458,7 +511,11 @@ func runDeaconStatusLine(t *tmux.Tmux) error {
 		}
 	}
 
-	fmt.Print(strings.Join(parts, " | ") + " |")
+	output := strings.Join(parts, " | ") + " |"
+	if statusLineSession != "" {
+		setStatusLineCache(statusLineSession, output)
+	}
+	fmt.Print(output)
 	return nil
 }
 
@@ -526,7 +583,11 @@ func runWitnessStatusLine(t *tmux.Tmux, rigName string) error {
 		}
 	}
 
-	fmt.Print(strings.Join(parts, " | ") + " |")
+	output := strings.Join(parts, " | ") + " |"
+	if statusLineSession != "" {
+		setStatusLineCache(statusLineSession, output)
+	}
+	fmt.Print(output)
 	return nil
 }
 
@@ -617,7 +678,11 @@ func runRefineryStatusLine(t *tmux.Tmux, rigName string) error {
 		}
 	}
 
-	fmt.Print(strings.Join(parts, " | ") + " |")
+	output := strings.Join(parts, " | ") + " |"
+	if statusLineSession != "" {
+		setStatusLineCache(statusLineSession, output)
+	}
+	fmt.Print(output)
 	return nil
 }
home/roles/development/skills/bd_workflow.md (230 lines, new file)
@@ -0,0 +1,230 @@

---
description: How to use the bd (beads) CLI for issue tracking, dependencies, and workflow orchestration
---

# BD Workflow

The `bd` CLI is a git-backed issue tracker with first-class dependency support. Use it for multi-session work, blocking relationships, and persistent memory across conversation compaction.

## When to Use BD vs TodoWrite

| Use BD | Use TodoWrite |
|--------|---------------|
| Work spans multiple sessions | Single-session tasks |
| Dependencies between tasks | Independent subtasks |
| Need audit trail in git | Ephemeral tracking |
| Cross-repo coordination | Local project only |
| Resuming after compaction | Simple task lists |

## Core Commands

### Creating Issues

```bash
bd create "Issue title"                          # Basic task
bd create "Bug title" --type=bug --priority=1    # P1 bug
bd create "Feature" --type=feature -d "Details"  # With description
bd q "Quick capture"                             # Output only ID
```

### Managing Issues

```bash
bd show <id>              # View issue details
bd show <id> --children   # View children of epic
bd list                   # List open issues (default 50)
bd list --all             # Include closed
bd list -s in_progress    # Filter by status
bd list -t bug -p 0       # P0 bugs
bd list --pretty          # Tree format
```

### Updating Issues

```bash
bd update <id> --status=in_progress   # Start work
bd update <id> --status=blocked       # Mark blocked
bd update <id> --claim                # Claim atomically
bd update <id> --add-label=urgent     # Add label
bd update <id> -d "New description"   # Update description
```

### Closing Issues

```bash
bd close <id>                  # Close issue
bd close <id> --continue       # Auto-advance to next step
bd close <id> --suggest-next   # Show newly unblocked
```

## Finding Work

```bash
bd ready                   # Ready issues (no blockers)
bd ready --mol <mol-id>    # Ready steps in molecule
bd ready -n 5              # Limit to 5
bd ready --assignee me     # Assigned to me
bd blocked                 # Show blocked issues
bd blocked --parent <id>   # Blocked within epic
```

## Dependency Management

### Creating Dependencies

```bash
bd dep <blocker> --blocks <blocked>   # A blocks B
bd dep add <blocked> <blocker>        # Same as above
bd dep relate <id1> <id2>             # Bidirectional link
```

### Viewing Dependencies

```bash
bd dep list <id>   # Show dependencies
bd dep tree <id>   # Dependency tree
bd dep cycles      # Detect cycles
```

### Removing Dependencies

```bash
bd dep remove <blocked> <blocker>   # Remove dependency
bd dep unrelate <id1> <id2>         # Remove relation
```

## Sync Workflow

BD syncs issues via git. The daemon handles this automatically, but manual sync is available:

```bash
bd sync                  # Full sync (pull, merge, push)
bd sync --flush-only     # Export to JSONL only
bd sync --import-only    # Import from JSONL only
bd sync --status         # Show sync branch diff
bd sync --squash         # Accumulate without commit
```

## Formula and Molecule Workflow

Formulas are reusable workflow templates. Molecules are instantiated workflows.

### Formulas

```bash
bd formula list                     # List available formulas
bd formula list --type=workflow     # Filter by type
bd formula show <name>              # Show formula details
bd cook <formula>                   # Compile to proto (stdout)
bd cook <formula> --var name=auth   # With variable substitution
bd cook <formula> --dry-run         # Preview steps
bd cook <formula> --persist         # Save to database
```

### Molecules: Pour vs Wisp

| pour (persistent) | wisp (ephemeral) |
|-------------------|------------------|
| Feature implementations | Release workflows |
| Multi-session work | Patrol cycles |
| Audit trail needed | Health checks |
| Git-synced | Local only |

```bash
# Persistent molecule (liquid phase)
bd mol pour <proto> --var name=auth

# Ephemeral molecule (vapor phase)
bd mol wisp <proto> --var version=1.0
bd mol wisp list   # List wisps
bd mol wisp gc     # Garbage collect
```

### Tracking Molecule Progress

```bash
bd mol show <mol-id>              # Show structure
bd mol show <mol-id> --parallel   # Parallelizable steps
bd mol current                    # Where am I?
bd mol current <mol-id>           # Status for molecule
bd mol progress <mol-id>          # Progress summary + ETA
```

### Molecule Lifecycle

```bash
bd mol squash <mol-id>     # Condense to digest
bd mol burn <mol-id>       # Delete wisp
bd mol distill <epic-id>   # Extract formula from epic
```

## Gates and Human Checkpoints

Gates are async wait conditions that block workflow steps:

| Gate Type | Wait Condition |
|-----------|---------------|
| human | Manual `bd close` |
| timer | Timeout expires |
| gh:run | GitHub workflow completes |
| gh:pr | PR merges |
| bead | Cross-rig bead closes |

```bash
bd gate list                # Show open gates
bd gate list --all          # Include closed
bd gate check               # Evaluate all gates
bd gate check --type=bead   # Check bead gates only
bd gate resolve <id>        # Close manually
```

## Common Patterns

### Starting Work on a Bead

```bash
bd update <id> --status=in_progress
# ... do work ...
bd close <id>
```

### Creating Related Issues

```bash
bd create "Main task" --deps "blocks:<other-id>"
bd dep add <new-id> <blocker-id>
```

### Working Through a Molecule

```bash
bd mol pour my-workflow --var name=feature
bd ready --mol <mol-id>         # Find next step
bd update <step-id> --claim     # Claim step
# ... do work ...
bd close <step-id> --continue   # Close and advance
```

### Quick Status Check

```bash
bd ready -n 3            # Top 3 ready items
bd list -s in_progress   # What's in flight?
bd blocked               # What's stuck?
```

## Useful Flags

| Flag | Effect |
|------|--------|
| `--json` | JSON output for scripting |
| `--quiet` | Suppress non-essential output |
| `--dry-run` | Preview without executing |
| `--pretty` | Tree format display |

## Integration Notes

- BD auto-syncs via daemon (check with `bd info`)
- Issues stored in `.beads/` directory
- JSONL files sync through git
- Use `bd doctor` if something seems wrong
123
home/roles/development/skills/micro/artifact-format.md
Normal file
123
home/roles/development/skills/micro/artifact-format.md
Normal file
@@ -0,0 +1,123 @@
---
description: How to structure research and plan artifacts in thoughts/
---

# Artifact Format

Standardized format for thoughts/ artifacts. All beads-related artifacts should follow these conventions for consistency and machine parseability.

## Frontmatter (Required)

Every artifact MUST include YAML frontmatter:

```yaml
---
date: 2026-01-15T10:00:00-08:00   # ISO 8601 with timezone
bead_id: project-abc              # Bead identifier
bead_title: "Title of the bead"   # Human-readable title
author: claude                    # Who created this
git_commit: abc123def             # Commit hash at creation
branch: main                      # Branch name
repository: repo-name             # Repository name
status: draft|complete            # Artifact status
---
```

### Gathering Metadata

```bash
git rev-parse HEAD                         # Current commit
git branch --show-current                  # Current branch
basename $(git rev-parse --show-toplevel)  # Repo name
date -Iseconds                             # ISO timestamp
```
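These four commands combine into a one-shot generator for the frontmatter block. A sketch (not part of the file as committed; the bead id, title, and author are placeholders you would substitute):

```bash
#!/usr/bin/env bash
# Emit the standard artifact frontmatter using live git metadata.
# BEAD_ID, BEAD_TITLE, and AUTHOR are hypothetical inputs, not bd output.
BEAD_ID="project-abc"
BEAD_TITLE="Title of the bead"
AUTHOR="claude"
cat <<EOF
---
date: $(date -Iseconds)
bead_id: $BEAD_ID
bead_title: "$BEAD_TITLE"
author: $AUTHOR
git_commit: $(git rev-parse HEAD)
branch: $(git branch --show-current)
repository: $(basename "$(git rev-parse --show-toplevel)")
status: draft
---
EOF
```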
## Research Artifact Structure

Location: `thoughts/beads-{bead-id}/research.md`

```markdown
# Research: {bead title}

**Bead**: {bead-id}
**Date**: {timestamp}
**Git Commit**: {commit hash}

## Research Question
{Original question from bead description}

## Summary
{2-3 sentence overview answering the research question}

## Key Discoveries
- {Finding with file:line reference}
- {Pattern or convention found}
- {Architectural decision documented}

## Architecture
{Current patterns and conventions in the codebase}

## Code References
- `path/to/file.py:123` - Description of relevance
- `another/file.ts:45-67` - Description of relevance

## Open Questions
{Areas needing further investigation or human clarification}
```

## Plan Artifact Structure

Location: `thoughts/beads-{bead-id}/plan.md`

```markdown
# {Title} Implementation Plan

## Overview
{What we're implementing and why - 1-2 sentences}

## Current State
{What exists now, key constraints discovered}

### Key Discoveries
- {Finding with file:line reference}
- {Pattern to follow}

## Desired End State
{Specification of what success looks like}

## What We're NOT Doing
{Explicitly list out-of-scope items}

## Phase 1: {Descriptive Name}

### Overview
{What this phase accomplishes}

### Changes
- [ ] {Specific change with file path}
- [ ] {Another change}

### Success Criteria

#### Automated
- [ ] Tests pass: `{test command}`
- [ ] Lint passes: `{lint command}`

#### Manual
- [ ] {Human verification step}

## Phase 2: {Descriptive Name}
{Repeat structure...}

## References
- Bead: {bead-id}
- Research: `thoughts/beads-{bead-id}/research.md`
```

## Key Principles

1. **Always include file:line references** - Makes artifacts actionable
2. **Separate automated vs manual verification** - Enables agent autonomy
3. **Use checkboxes for phases** - Enables resumability after interruption
4. **Keep frontmatter machine-parseable** - Enables tooling integration
5. **Link related artifacts** - Research links to plan, plan links to bead
home/roles/development/skills/micro/planning.md (new file, 121 lines)
@@ -0,0 +1,121 @@
---
description: How to create effective implementation plans with phased delivery and clear success criteria
---

# Planning

Create implementation plans that enable incremental, verifiable progress.

## Core Principles

1. **Incremental delivery**: Each phase should produce working, testable changes
2. **Clear checkpoints**: Success criteria that can be verified without ambiguity
3. **Buy-in before detail**: Confirm understanding and approach before writing specifics
4. **Explicit scope**: State what we're NOT doing to prevent scope creep

## Plan Document Structure

```markdown
# {Feature} Implementation Plan

## Overview
{1-2 sentences: what we're building and why}

## Current State Analysis
{What exists now, key constraints, file:line references}

## Desired End State
{Specification of outcome and how to verify it}

## What We're NOT Doing
{Explicit out-of-scope items}

## Phase 1: {Descriptive Name}

### Overview
{What this phase accomplishes - should be independently valuable}

### Changes Required
{Specific files and modifications with code snippets}

### Success Criteria

#### Automated Verification
- [ ] Tests pass: `{test command}`
- [ ] Lint passes: `{lint command}`

#### Manual Verification
- [ ] {Human-observable outcome}

## Testing Strategy
{Unit tests, integration tests, manual testing steps}

## References
{Links to research, related files, similar implementations}
```

## Phase Design

Good phases are:

- **Self-contained**: Completable in one session
- **Testable**: Has clear pass/fail criteria
- **Reversible**: Can be rolled back if needed
- **Incremental**: Builds on previous phases without requiring all phases

Bad phases are:

- "Refactor everything" (too broad)
- "Add helper function" (too granular)
- Phases that only work if ALL phases complete

## Success Criteria Guidelines

**Automated Verification** (agent-runnable):

- Test commands: `make test`, `npm test`, `nix flake check`
- Lint/format: `make lint`, `cargo fmt --check`
- Type checking: `make typecheck`, `tsc --noEmit`
- Build verification: `make build`, `nix build`

**Manual Verification** (requires human):

- UI/UX functionality and appearance
- Performance under realistic conditions
- Edge cases hard to automate
- Integration with external systems

**From Contribution Guidelines** (if CONTRIBUTING.md exists):

- Include any testing requirements specified
- Reference the guideline: "Per CONTRIBUTING.md: {requirement}"

## Presenting Understanding

Before writing the plan, confirm alignment:

```
Based on the requirements and my research, I understand we need to [summary].

I've found that:
- [Current implementation detail with file:line]
- [Relevant pattern or constraint]
- [Potential complexity identified]

Questions my research couldn't answer:
- [Specific technical question requiring judgment]
```

Only ask questions you genuinely cannot answer through code investigation.

## Design Options Pattern

When multiple approaches exist:

```
**Design Options:**
1. [Option A] - [1-sentence description]
   - Pro: [benefit]
   - Con: [drawback]

2. [Option B] - [1-sentence description]
   - Pro: [benefit]
   - Con: [drawback]

Which approach aligns best with [relevant consideration]?
```

Get buy-in on approach before detailing phases.
home/roles/development/skills/micro/pr-description.md (new file, 68 lines)
@@ -0,0 +1,68 @@
---
description: How to write comprehensive PR descriptions that help reviewers understand changes
---

# PR Description

Write PR descriptions that help reviewers understand what changed and why.

## Structure

Use this standard structure for PR descriptions:

```markdown
## Summary
<1-3 bullet points of what changed and why>

## Context
<Why this change was needed - the problem being solved>
<Link to related issues/tickets>

## Changes
<Detailed breakdown by area/component>
- Area 1: What changed and why
- Area 2: What changed and why

## Testing
<How this was verified>
- Automated: Tests added/updated, CI status
- Manual: Steps to verify functionality

## Screenshots (if UI changes)
<Before/after screenshots if applicable>
```

## Guidelines

### Lead with WHY, not WHAT
- The diff shows WHAT changed - your description explains WHY
- Start with the problem being solved
- Explain the approach chosen and alternatives considered

### Link to context
- Reference related issues: `Fixes #123` or `Relates to #456`
- Link to design docs or discussions
- Mention dependent PRs if any

### Call out review areas
- Highlight areas needing careful review
- Note any tricky or non-obvious code
- Point out architectural decisions

### Note breaking changes prominently
- Use a dedicated "Breaking Changes" section if applicable
- Explain the migration path for consumers
- List any deprecations

### Be scannable
- Use bullet points over paragraphs
- Keep sections focused and concise
- Put the most important info first

## Anti-patterns to Avoid

- Empty descriptions or just "fixes bug"
- Repeating the commit messages verbatim
- Including irrelevant implementation details
- Missing context on why the change was made
- Forgetting to mention breaking changes
home/roles/development/skills/micro/research-agents.md (new file, 49 lines)
@@ -0,0 +1,49 @@
---
description: How to spawn and coordinate research sub-agents
---

# Research Agents

Use parallel sub-agents for efficient codebase research.

## Available Agents

| Agent | Purpose |
|-------|---------|
| codebase-locator | Find WHERE files and components live |
| codebase-analyzer | Understand HOW specific code works |
| codebase-pattern-finder | Find examples of existing patterns |
| thoughts-locator | Discover relevant documents in thoughts/ |

## Spawning Protocol

1. **Decompose** - Break the research question into 3-5 specific questions
2. **Spawn parallel** - Use one Task call with multiple agents
3. **Be specific** - Include directories and file patterns in prompts
4. **Wait for all** - Do not synthesize until ALL agents complete
5. **Synthesize** - Combine findings into a coherent summary with file:line references

## Example

```
Task(codebase-locator, "Find all files related to authentication in src/")
Task(codebase-analyzer, "Explain how JWT tokens are validated in src/auth/")
Task(codebase-pattern-finder, "Find examples of middleware patterns in src/")
Task(thoughts-locator, "Find documents about auth design decisions in thoughts/")
```

## Key Principles

- **Parallel when different** - Run agents in parallel when searching for different things
- **WHAT not HOW** - Each agent knows its job; tell it what you need, not how to search
- **Document, don't evaluate** - Agents should describe what exists, not critique it
- **Specific directories** - Always scope searches to relevant directories
- **File references** - Include specific file:line references in synthesis

## Agent Prompts

When spawning agents, include:

- The specific question or goal
- Relevant directories to search
- A reminder to document (not evaluate) what they find
- A request for file:line references in findings
config.el (Doom Emacs)

@@ -53,6 +53,22 @@
 ;; change `org-directory'. It must be set before org loads!
 (setq org-directory "~/org/")
 (after! org
+  ;; Skip recurring events past their CALDAV_UNTIL date
+  ;; org-caldav ignores UNTIL from RRULE, so we store it as a property
+  ;; and filter here in the agenda
+  (defun my/skip-if-past-until ()
+    "Return non-nil if entry has CALDAV_UNTIL and current date is past it."
+    (let ((until-str (org-entry-get nil "CALDAV_UNTIL")))
+      (when (and until-str
+                 (string-match "^\\([0-9]\\{4\\}\\)\\([0-9]\\{2\\}\\)\\([0-9]\\{2\\}\\)" until-str))
+        (let* ((until-year (string-to-number (match-string 1 until-str)))
+               (until-month (string-to-number (match-string 2 until-str)))
+               (until-day (string-to-number (match-string 3 until-str)))
+               (until-time (encode-time 0 0 0 until-day until-month until-year))
+               (today (current-time)))
+          (when (time-less-p until-time today)
+            (org-end-of-subtree t))))))
+
   (setq org-agenda-span 'week
         org-agenda-start-with-log-mode t
         my-agenda-dirs '("projects" "roam")
@@ -61,6 +77,7 @@
                "\.org$"))
             my-agenda-dirs))
         org-log-done 'time
+        org-agenda-skip-function-global #'my/skip-if-past-until
         org-agenda-custom-commands '(("n" "Agenda"
                                       ((agenda "")
                                        (tags-todo "-someday-recurring")))
@@ -83,25 +100,135 @@
       "d" #'org-agenda-day-view
       "w" #'org-agenda-week-view))
 
-;; (use-package! org-caldav
-;;   :defer t
-;;   :config
-;;   (setq org-caldav-url "https://nextcloud.johnogle.info/remote.php/dav/calendars/johno"
-;;         org-caldav-calendar-id "personal"
-;;         org-icalendar-timezone "America/Los_Angeles"
-;;         org-caldav-inbox "~/org/calendar.org"
-;;         org-caldav-files nil
-;;         org-caldav-sync-direction 'cal->org))
-
-(defun my/get-rbw-password (alias)
-  "Return the password for ALIAS via rbw, unlocking the vault only if needed."
-  (let* ((cmd (format "rbw get %s 2>&1" alias))
-         (output (shell-command-to-string cmd)))
-    (string-trim output)))
+;; org-caldav: Sync Org entries with Nextcloud CalDAV
+;; Setup requirements:
+;; 1. Create Nextcloud app password: Settings -> Security -> Devices & sessions
+;; 2. Store in rbw: rbw add nextcloud-caldav (put app password as the secret)
+;; 3. Run: doom sync
+;; 4. Test: M-x my/org-caldav-sync-with-rbw (or SPC o a s)
+;;
+;; Note: Conflict resolution is "Org always wins" - treat Org as source of truth
+;; for entries that originated in Org.
+
+;; Define sync wrapper before use-package (so keybinding works)
+(defun my/org-caldav-sync-with-rbw ()
+  "Run org-caldav-sync with credentials from rbw embedded in URL."
+  (interactive)
+  (require 'org)
+  (require 'org-caldav)
+  (let* ((password (my/get-rbw-password "nextcloud-caldav"))
+         ;; Embed credentials in URL (url-encode password in case of special chars)
+         (encoded-pass (url-hexify-string password)))
+    (setq org-caldav-url
+          (format "https://johno:%s@nextcloud.johnogle.info/remote.php/dav/calendars/johno"
+                  encoded-pass))
+    (org-caldav-sync)))
+
+(use-package! org-caldav
+  :after org
+  :commands (org-caldav-sync my/org-caldav-sync-with-rbw)
+  :init
+  (map! :leader
+        (:prefix ("o" . "open")
+         (:prefix ("a" . "agenda/calendar")
+          :desc "Sync CalDAV" "s" #'my/org-caldav-sync-with-rbw)))
+  :config
+  ;; Nextcloud CalDAV base URL (credentials added dynamically by sync wrapper)
+  (setq org-caldav-url "https://nextcloud.johnogle.info/remote.php/dav/calendars/johno")
+
+  ;; Timezone for iCalendar export
+  (setq org-icalendar-timezone "America/Los_Angeles")
+
+  ;; Sync state storage (in org directory for multi-machine sync)
+  (setq org-caldav-save-directory (expand-file-name ".org-caldav/" org-directory))
+
+  ;; Backup file for entries before modification
+  (setq org-caldav-backup-file (expand-file-name ".org-caldav/backup.org" org-directory))
+
+  ;; Limit past events to 30 days (avoids uploading years of scheduled tasks)
+  (setq org-caldav-days-in-past 30)
+
+  ;; Sync behavior: bidirectional by default
+  (setq org-caldav-sync-direction 'twoway)
+
+  ;; What changes from calendar sync back to Org (conservative: title and timestamp only)
+  (setq org-caldav-sync-changes-to-org 'title-and-timestamp)
+
+  ;; Deletion handling: never auto-delete to prevent accidental mass deletion
+  (setq org-caldav-delete-calendar-entries 'never)
+  (setq org-caldav-delete-org-entries 'never)
+
+  ;; Enable TODO/VTODO sync
+  (setq org-icalendar-include-todo 'all)
+  (setq org-caldav-sync-todo t)
+
+  ;; Map VTODO percent-complete to org-todo-keywords
+  ;; Format: (PERCENT "KEYWORD") - percent thresholds map to states
+  (setq org-caldav-todo-percent-states
+        '((0 "TODO")
+          (25 "WAIT")
+          (50 "IN-PROGRESS")
+          (100 "DONE")
+          (100 "KILL")))
+
+  ;; Allow export with broken links (mu4e links can't be resolved during export)
+  (setq org-export-with-broken-links 'mark)
+
+  ;; Calendar-specific configuration
+  (setq org-caldav-calendars
+        '(;; Personal calendar: two-way sync with family-shared Nextcloud calendar
+          (:calendar-id "personal"
+           :inbox "~/org/personal-calendar.org"
+           :files ("~/org/personal-calendar.org"))
+
+          ;; Tasks calendar: one-way sync (org → calendar only)
+          ;; SCHEDULED/DEADLINE items from todo.org push to private Tasks calendar.
+          ;; No inbox = no download from calendar (effectively one-way).
+          ;; Note: Create 'tasks' calendar in Nextcloud first, keep it private.
+          (:calendar-id "tasks"
+           :files ("~/org/todo.org"))))
+
+  ;; Handle UNTIL in recurring events
+  ;; org-caldav ignores UNTIL from RRULE - events repeat forever.
+  ;; This advice extracts UNTIL and stores it as a property for agenda filtering.
+  (defun my/org-caldav-add-until-property (orig-fun eventdata-alist)
+    "Advice to store CALDAV_UNTIL property for recurring events."
+    (let ((result (funcall orig-fun eventdata-alist)))
+      (let* ((rrule-props (alist-get 'rrule-props eventdata-alist))
+             (until-str (cadr (assoc 'UNTIL rrule-props)))
+             (summary (alist-get 'summary eventdata-alist)))
+        ;; Debug: log what we're seeing
+        (message "CALDAV-DEBUG: %s | rrule-props: %S | until: %s"
+                 (or summary "?") rrule-props until-str)
+        (when until-str
+          (save-excursion
+            (org-back-to-heading t)
+            (org-entry-put nil "CALDAV_UNTIL" until-str))))
+      result))
+
+  (advice-add 'org-caldav-insert-org-event-or-todo
+              :around #'my/org-caldav-add-until-property)
+  )
+
+(defun my/get-rbw-password (alias &optional no-error)
+  "Return the password for ALIAS via rbw, unlocking the vault only if needed.
+If NO-ERROR is non-nil, return nil instead of signaling an error when
+rbw is unavailable or the entry is not found."
+  (if (not (executable-find "rbw"))
+      (if no-error
+          nil
+        (user-error "rbw: not installed or not in PATH"))
+    (let* ((cmd (format "rbw get %s 2>/dev/null" (shell-quote-argument alias)))
+           (output (string-trim (shell-command-to-string cmd))))
+      (if (string-empty-p output)
+          (if no-error
+              nil
+            (user-error "rbw: no entry found for '%s' - run: rbw add %s" alias alias))
+        output))))
 
 (after! gptel
   :config
-  (setq! gptel-api-key (my/get-rbw-password "openai-api-key-chatgpt-el")
+  (setq! gptel-api-key (my/get-rbw-password "openai-api-key-chatgpt-el" t)
         gptel-default-mode 'org-mode
         gptel-use-tools t
         gptel-confirm-tool-calls 'always
@@ -225,11 +352,16 @@
   mu4e-headers-time-format "%H:%M")
 
 ;; Sending mail via msmtp
-(setq message-send-mail-function 'message-send-mail-with-sendmail
-      sendmail-program (executable-find "msmtp")
-      message-sendmail-envelope-from 'header
-      mail-envelope-from 'header
-      mail-specify-envelope-from t))
+;; NOTE: message-sendmail-f-is-evil and --read-envelope-from are required
+;; to prevent msmtp from stripping the email body when processing headers.
+;; Without these, multipart messages (especially from org-msg) may arrive
+;; with empty bodies.
+(setq sendmail-program (executable-find "msmtp")
+      send-mail-function #'message-send-mail-with-sendmail
+      message-send-mail-function #'message-send-mail-with-sendmail
+      message-sendmail-f-is-evil t
+      message-sendmail-extra-arguments '("--read-envelope-from")
+      message-sendmail-envelope-from 'header))
 
 ;; Whenever you reconfigure a package, make sure to wrap your config in an
 ;; `after!' block, otherwise Doom's defaults may override your settings. E.g.
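The sync wrapper above assumes the `nextcloud-caldav` entry already exists in the vault. A minimal first-time setup following the steps in the comments (all three are standard rbw subcommands; the entry name matches the config):

```bash
rbw add nextcloud-caldav   # store the Nextcloud app password as this entry's secret
rbw sync                   # refresh the local vault cache
rbw get nextcloud-caldav   # verify retrieval before M-x my/org-caldav-sync-with-rbw
```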
packages.el (Doom Emacs)

@@ -49,7 +49,7 @@
 ;; ...Or *all* packages (NOT RECOMMENDED; will likely break things)
 ;; (unpin! t)
 
-;; (package! org-caldav)
+(package! org-caldav)
 
 ;; Note: Packages with custom recipes must be pinned for nix-doom-emacs-unstraightened
 ;; to build deterministically. Update pins when upgrading packages.
@@ -12,9 +12,7 @@ in
 
   config = mkIf cfg.enable {
     home.packages = with pkgs; [
-      # Gaming applications would go here
-      # This role is created for future expansion
-      # moonlight-qt is currently in media role but could be moved here
+      custom.mcrcon-rbw
     ];
   };
 }
@@ -90,6 +90,8 @@ with lib;
     htop
     tmux
     zfs
+    rclone
+    custom.rclone-torbox-setup # Helper script to set up TorBox credentials via rbw
   ];
 
   # Enable SSH
@@ -126,6 +128,26 @@ with lib;
 
   roles.virtualisation.enable = true;
 
+  # TorBox WebDAV mount for rdt-client and Jellyfin
+  roles.rclone-mount = {
+    enable = true;
+    mounts.torbox = {
+      webdavUrl = "https://webdav.torbox.app";
+      username = "john@ogle.fyi"; # TorBox account email
+      mountPoint = "/media/media/torbox-rclone";
+      environmentFile = "/etc/rclone/torbox.env";
+      vfsCacheMode = "full"; # Best for streaming media
+      dirCacheTime = "5m";
+      extraArgs = [
+        "--buffer-size=64M"
+        "--vfs-read-chunk-size=32M"
+        "--vfs-read-chunk-size-limit=off"
+      ];
+      # Wait for ZFS media pool to be mounted before starting
+      requiresMountsFor = [ "/media" ];
+    };
+  };
+
   # Time zone
   time.timeZone = "America/Los_Angeles"; # Adjust as needed
@@ -23,12 +23,12 @@
     printing.enable = true;
     remote-build.builders = [
       {
-        hostName = "zix790prors";
+        hostName = "zix790prors.oglehome";
         maxJobs = 16;
         speedFactor = 3;
       }
       {
-        hostName = "john-endesktop";
+        hostName = "john-endesktop.oglehome";
         maxJobs = 1;
         speedFactor = 1;
       }
@@ -19,11 +19,18 @@
       desktopSession = "plasma";
     };
   };
-  remote-build.builders = [{
-    hostName = "zix790prors";
-    maxJobs = 16;
-    speedFactor = 4; # Prefer remote heavily on Steam Deck
-  }];
+  remote-build.builders = [
+    {
+      hostName = "zix790prors.oglehome";
+      maxJobs = 16;
+      speedFactor = 4;
+    }
+    {
+      hostName = "john-endesktop.oglehome";
+      maxJobs = 1;
+      speedFactor = 2;
+    }
+  ];
   users = {
     enable = true;
     extraGroups = [ "video" ];
(deleted file: NixOS-WSL host "wixos")

@@ -1,56 +0,0 @@
-# Edit this configuration file to define what should be installed on
-# your system. Help is available in the configuration.nix(5) man page, on
-# https://search.nixos.org/options and in the NixOS manual (`nixos-help`).
-
-# NixOS-WSL specific options are documented on the NixOS-WSL repository:
-# https://github.com/nix-community/NixOS-WSL
-
-{ config, lib, pkgs, ... }:
-
-{
-  imports = [
-  ];
-
-  roles = {
-    audio.enable = true;
-    desktop = {
-      enable = true;
-      wayland = true;
-    };
-    nvidia = {
-      enable = true;
-      package = "latest";
-      graphics.extraPackages = with pkgs; [
-        mesa
-        libvdpau-va-gl
-        libva-vdpau-driver
-      ];
-    };
-    users.enable = true;
-  };
-
-  networking.hostName = "wixos";
-
-  wsl.enable = true;
-  wsl.defaultUser = "johno";
-  wsl.startMenuLaunchers = true;
-  wsl.useWindowsDriver = true;
-  wsl.wslConf.network.hostname = "wixos";
-  wsl.wslConf.user.default = "johno";
-
-  # WSL-specific environment variables for graphics
-  environment.sessionVariables = {
-    LD_LIBRARY_PATH = [
-      "/usr/lib/wsl/lib"
-      "/run/opengl-driver/lib"
-    ];
-  };
-
-  # This value determines the NixOS release from which the default
-  # settings for stateful data, like file locations and database versions
-  # on your system were taken. It's perfectly fine and recommended to leave
-  # this value at the release version of the first install of this system.
-  # Before changing this value read the documentation for this option
-  # (e.g. man configuration.nix or on https://nixos.org/nixos/options.html).
-  system.stateVersion = "24.05"; # Did you read the comment?
-}
packages/claude-code/default.nix (path inferred)

@@ -1,28 +1,29 @@
 { lib
 , stdenv
 , fetchurl
-, autoPatchelfHook
+, patchelf
+, glibc
 }:
 
 let
-  version = "2.0.76";
+  version = "2.1.19";
 
   srcs = {
     aarch64-darwin = {
       url = "https://storage.googleapis.com/claude-code-dist-86c565f3-f756-42ad-8dfa-d59b1c096819/claude-code-releases/${version}/darwin-arm64/claude";
-      sha256 = "b76f6d4d09233e67295897b0a1ed2e22d7afa406431529d8b1b532b63b8cbcbd";
+      sha256 = "d386ac8f6d1479f85d31f369421c824135c10249c32087017d05a5f428852c41";
     };
     x86_64-darwin = {
       url = "https://storage.googleapis.com/claude-code-dist-86c565f3-f756-42ad-8dfa-d59b1c096819/claude-code-releases/${version}/darwin-x64/claude";
-      sha256 = "9d94582f0af5d2201f1c907bf24ff8d216104b897ee0b24795a6c081f40e08d7";
+      sha256 = "be266b3a952f483d8358ad141e2afe661170386506f479ead992319e4fdc38ac";
     };
     x86_64-linux = {
       url = "https://storage.googleapis.com/claude-code-dist-86c565f3-f756-42ad-8dfa-d59b1c096819/claude-code-releases/${version}/linux-x64/claude";
-      sha256 = "5dcdb480f91ba0df0bc8bd6aff148d3dfd3883f0899eeb5b9427a8b0abe7a687";
+      sha256 = "4e2a1c73871ecf3b133376b57ded03333a7a6387f2d2a3a6279bb90a07f7a944";
     };
     aarch64-linux = {
       url = "https://storage.googleapis.com/claude-code-dist-86c565f3-f756-42ad-8dfa-d59b1c096819/claude-code-releases/${version}/linux-arm64/claude";
-      sha256 = "f64a994c8e5bfb84d7242cebbec75d6919db2ee46d50b8fc7a88d5066db193f9";
+      sha256 = "8c4b61b24ca760d6f7aa2f19727163d122e9fd0c3ce91f106a21b6918a7b1bbb";
     };
   };
 
@@ -38,8 +39,14 @@ in stdenv.mkDerivation {
 
   dontUnpack = true;
   dontBuild = true;
+  # Bun standalone binaries have JS code appended after the ELF sections;
+  # stripping/patching would remove or corrupt this appended data
+  dontStrip = true;
+  dontPatchELF = true;
 
-  nativeBuildInputs = lib.optionals stdenv.isLinux [ autoPatchelfHook ];
+  # Don't use autoPatchelfHook - it rewrites the ELF and strips the appended
+  # bun bundle (the JS code is appended after the ELF sections)
+  nativeBuildInputs = lib.optionals stdenv.isLinux [ patchelf ];
 
   installPhase = ''
     runHook preInstall
@@ -49,6 +56,14 @@ in stdenv.mkDerivation {
     runHook postInstall
   '';
 
+  # Manually patch the interpreter for bun standalone binaries.
+  # patchelf --set-interpreter modifies in-place without rewriting the entire ELF,
+  # preserving the appended JS bundle that bun needs at runtime
+  postFixup = lib.optionalString stdenv.isLinux ''
+    interpreter="${glibc}/lib/${if stdenv.hostPlatform.system == "aarch64-linux" then "ld-linux-aarch64.so.1" else "ld-linux-x86-64.so.2"}"
+    patchelf --set-interpreter "$interpreter" $out/bin/claude
+  '';
+
   meta = with lib; {
     description = "Terminal-based AI coding assistant from Anthropic";
     homepage = "https://www.anthropic.com/claude-code";
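A quick way to confirm the interpreter patch took hold on the installed binary (a sketch; `--print-interpreter` is a standard patchelf flag, and `claude --version` is assumed to be a valid invocation of the CLI):

```bash
# Should print a glibc loader path from the Nix store, e.g. .../ld-linux-x86-64.so.2
patchelf --print-interpreter "$(command -v claude)"
# And the appended bun bundle should still launch:
claude --version
```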
packages/default.nix (path inferred)

@@ -3,4 +3,6 @@
   tea-rbw = pkgs.callPackage ./tea-rbw {};
   app-launcher-server = pkgs.callPackage ./app-launcher-server {};
   claude-code = pkgs.callPackage ./claude-code {};
+  mcrcon-rbw = pkgs.callPackage ./mcrcon-rbw {};
+  rclone-torbox-setup = pkgs.callPackage ./rclone-torbox-setup {};
 }
packages/mcrcon-rbw/default.nix (new file, 40 lines)
@@ -0,0 +1,40 @@
{ pkgs, ... }:

pkgs.writeShellScriptBin "mcrcon" ''
  set -euo pipefail

  # Configuration - can be overridden with environment variables
  MINECRAFT_RCON_HOST="''${MCRCON_HOST:-10.0.0.165}"
  MINECRAFT_RCON_PORT="''${MCRCON_PORT:-25575}"
  RBW_ENTRY="minecraft-rcon"

  # Check if rbw is available
  if ! command -v rbw &> /dev/null; then
    echo "Error: rbw is not available. Please ensure rbw is installed and configured."
    exit 1
  fi

  # Retrieve password from Bitwarden
  if ! MCRCON_PASS=$(rbw get "$RBW_ENTRY" 2>/dev/null); then
    echo "Error: Failed to retrieve RCON password from rbw entry '$RBW_ENTRY'"
    echo "Please ensure the entry exists in Bitwarden and rbw is synced."
    echo ""
    echo "To create the entry:"
    echo "  1. Add 'minecraft-rcon' to Bitwarden with the RCON password"
    echo "  2. Run 'rbw sync' to refresh the local cache"
    exit 1
  fi

  # Export for mcrcon
  export MCRCON_HOST="$MINECRAFT_RCON_HOST"
  export MCRCON_PORT="$MINECRAFT_RCON_PORT"
  export MCRCON_PASS

  # If no arguments provided, start interactive terminal mode
  if [[ $# -eq 0 ]]; then
    exec ${pkgs.mcrcon}/bin/mcrcon -t
  fi

  # Execute mcrcon with all provided arguments
  exec ${pkgs.mcrcon}/bin/mcrcon "$@"
''
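Once the `minecraft-rcon` entry exists in the vault, the wrapper behaves like plain mcrcon with credentials injected. A usage sketch (`list` is a standard Minecraft RCON command; host and port fall back to the defaults above):

```bash
rbw add minecraft-rcon                    # store the server's RCON password first
mcrcon "list"                             # one-shot command against 10.0.0.165:25575
MCRCON_HOST=10.0.0.200 mcrcon "say hi"    # override the default host per-invocation
mcrcon                                    # no args: interactive terminal mode (-t)
```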
packages/rclone-torbox-setup/default.nix (new file, 98 lines)
@@ -0,0 +1,98 @@
{ pkgs, ... }:

pkgs.writeShellScriptBin "rclone-torbox-setup" ''
  set -euo pipefail

  # Default values
  RBW_ENTRY="''${1:-torbox}"
  ENV_FILE="''${2:-/etc/rclone/torbox.env}"

  usage() {
    echo "Usage: rclone-torbox-setup [rbw-entry] [env-file]"
    echo ""
    echo "Sets up rclone credentials for TorBox WebDAV mount."
    echo "Retrieves password from rbw (Bitwarden), obscures it for rclone,"
    echo "and writes it to the environment file for the systemd service."
    echo ""
    echo "Arguments:"
    echo "  rbw-entry  Name of the Bitwarden entry containing the password (default: torbox)"
    echo "  env-file   Path to write the environment file (default: /etc/rclone/torbox.env)"
    echo ""
    echo "The Bitwarden entry should contain your TorBox password as the password field."
    echo ""
    echo "Example:"
    echo "  rclone-torbox-setup torbox-password /etc/rclone/torbox.env"
    exit 1
  }

  if [[ "''${1:-}" == "-h" ]] || [[ "''${1:-}" == "--help" ]]; then
    usage
  fi

  echo "rclone TorBox credential setup"
  echo "=============================="
  echo ""

  # Check if rbw is available
  if ! command -v rbw &> /dev/null; then
    echo "Error: rbw is not available. Please ensure rbw is installed and configured."
    exit 1
  fi

  # Check if rclone is available
  if ! command -v rclone &> /dev/null; then
    echo "Error: rclone is not available. Please ensure rclone is installed."
    exit 1
  fi

  echo "Retrieving password from rbw entry: $RBW_ENTRY"

  # Retrieve password from Bitwarden
  if ! TORBOX_PASS=$(rbw get "$RBW_ENTRY" 2>/dev/null); then
    echo ""
    echo "Error: Failed to retrieve password from rbw entry '$RBW_ENTRY'"
    echo ""
    echo "Please ensure:"
    echo "  1. The entry '$RBW_ENTRY' exists in Bitwarden"
    echo "  2. rbw is unlocked: rbw unlock"
    echo "  3. rbw is synced: rbw sync"
    echo ""
    echo "To create the entry in Bitwarden:"
    echo "  - Name: $RBW_ENTRY"
    echo "  - Password: Your TorBox password"
    exit 1
  fi

  echo "Password retrieved successfully"

  # Obscure the password for rclone
  echo "Obscuring password for rclone..."
  if ! OBSCURED_PASS=$(echo -n "$TORBOX_PASS" | rclone obscure -); then
    echo "Error: Failed to obscure password with rclone"
    exit 1
  fi

  # Create the directory if needed (requires sudo)
  ENV_DIR=$(dirname "$ENV_FILE")
  if [[ ! -d "$ENV_DIR" ]]; then
    echo "Creating directory $ENV_DIR (requires sudo)..."
    sudo mkdir -p "$ENV_DIR"
  fi

  # Write the environment file
  echo "Writing environment file to $ENV_FILE (requires sudo)..."
  echo "RCLONE_WEBDAV_PASS=$OBSCURED_PASS" | sudo tee "$ENV_FILE" > /dev/null
  sudo chmod 600 "$ENV_FILE"

  echo ""
  echo "Setup complete!"
  echo ""
  echo "The environment file has been created at: $ENV_FILE"
  echo "The rclone-mount-torbox systemd service will use this file."
  echo ""
  echo "To activate the mount after NixOS rebuild:"
  echo "  sudo systemctl start rclone-mount-torbox"
  echo ""
  echo "To check status:"
  echo "  sudo systemctl status rclone-mount-torbox"
''
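End to end, bringing the mount up on the server looks roughly like this (a sketch assembled from the script's own instructions; the flake path is a placeholder):

```bash
rbw add torbox                         # store the TorBox password in Bitwarden
rclone-torbox-setup                    # writes /etc/rclone/torbox.env
sudo nixos-rebuild switch --flake .    # installs the rclone-mount-torbox unit
sudo systemctl start rclone-mount-torbox
ls /media/media/torbox-rclone          # the WebDAV root should now be visible
```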
renovate.json (new file, 90 lines)
@@ -0,0 +1,90 @@
{
  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
  "timezone": "America/Los_Angeles",
  "gitAuthor": "Renovate Bot <renovate@ogle.fyi>",
  "nix": {
    "enabled": true
  },
  "github-actions": {
    "managerFilePatterns": [
      "/.gitea/workflows/.+\\.ya?ml$/"
    ]
  },
  "lockFileMaintenance": {
    "enabled": true,
    "schedule": [
      "before 5am on monday"
    ]
  },
  "dependencyDashboard": true,
  "dependencyDashboardAutoclose": false,
  "dependencyDashboardTitle": "NixOS Configs Dependency Dashboard",
  "packageRules": [
    {
      "description": "Group all GitHub Actions updates",
      "matchManagers": [
        "github-actions"
      ],
      "groupName": "github-actions"
    },
    {
      "description": "Group stable NixOS ecosystem inputs",
      "matchManagers": [
        "nix"
      ],
      "groupName": "nix-stable-ecosystem",
      "matchPackageNames": [
        "/^nixpkgs$/",
        "/^home-manager$/",
        "/^nix-darwin$/"
      ]
    },
    {
      "description": "Group unstable NixOS ecosystem inputs",
      "matchManagers": [
        "nix"
      ],
      "groupName": "nix-unstable-ecosystem",
      "matchPackageNames": [
        "/nixpkgs-unstable/",
        "/home-manager-unstable/"
      ]
    },
    {
      "description": "Ignore private Gitea inputs (handle separately)",
      "matchManagers": [
        "nix"
      ],
      "enabled": false,
      "matchPackageNames": [
        "/google-cookie-retrieval/"
      ]
    },
    {
      "description": "Gastown is under active development - check for updates daily",
      "matchManagers": [
        "nix"
      ],
      "matchPackageNames": [
        "/gastown/"
      ],
      "schedule": [
        "before 6am every day"
      ],
      "automerge": false
    },
    {
      "description": "Beads is under active development - check for updates daily",
      "matchManagers": [
        "nix"
      ],
      "matchPackageNames": [
        "/beads/"
      ],
      "schedule": [
        "before 6am every day"
      ],
      "automerge": false
    }
  ]
}
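Renovate ships a config validator that can sanity-check this file before the bot picks it up. One common invocation, assuming a Node environment is available (the npx flags are an assumption about your npm version):

```bash
npx -y -p renovate renovate-config-validator renovate.json
```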
@@ -21,6 +21,8 @@ in
 
     services.pipewire = {
       enable = true;
+      alsa.enable = true;
+      alsa.support32Bit = true;
       pulse.enable = true;
     };
 
@@ -8,11 +8,12 @@
   environment.systemPackages = with pkgs; [
     git
     glances
-    ghostty.terminfo # So tmux works when SSH'ing from ghostty
     pciutils
     tree
     usbutils
     vim
+  ] ++ lib.optionals pkgs.stdenv.isLinux [
+    ghostty.terminfo # So tmux works when SSH'ing from ghostty
   ];
 
   nix = {
roles/default.nix (path inferred)

@@ -14,6 +14,7 @@ with lib;
     ./nfs-mounts
     ./nvidia
     ./printing
+    ./rclone-mount
     ./remote-build
     ./spotifyd
     ./users
|
|||||||
{
|
{
|
||||||
options.roles.nfs-mounts = {
|
options.roles.nfs-mounts = {
|
||||||
enable = mkEnableOption "Enable default NFS mounts";
|
enable = mkEnableOption "Enable default NFS mounts";
|
||||||
|
server = mkOption {
|
||||||
|
type = types.str;
|
||||||
|
default = "10.0.0.43";
|
||||||
|
description = "IP address or hostname of the NFS server";
|
||||||
|
};
|
||||||
|
remotePath = mkOption {
|
||||||
|
type = types.str;
|
||||||
|
default = "/media";
|
||||||
|
description = "Remote path to mount from the NFS server";
|
||||||
|
};
|
||||||
|
mountPoint = mkOption {
|
||||||
|
type = types.str;
|
||||||
|
default = "/media";
|
||||||
|
description = "Local mount point for the NFS share";
|
||||||
|
};
|
||||||
# TODO: implement requireMount
|
# TODO: implement requireMount
|
||||||
requireMount = mkOption {
|
requireMount = mkOption {
|
||||||
type = types.bool;
|
type = types.bool;
|
||||||
@@ -18,8 +33,8 @@ in
|
|||||||
|
|
||||||
config = mkIf cfg.enable
|
config = mkIf cfg.enable
|
||||||
{
|
{
|
||||||
fileSystems."/media" = {
|
fileSystems.${cfg.mountPoint} = {
|
||||||
device = "10.0.0.43:/media";
|
device = "${cfg.server}:${cfg.remotePath}";
|
||||||
fsType = "nfs";
|
fsType = "nfs";
|
||||||
options = [
|
options = [
|
||||||
"defaults"
|
"defaults"
|
||||||
|
|||||||
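A quick way to confirm the server actually exports the configured path before rebuilding (showmount ships with the standard NFS utilities; the address is the module default above):

```bash
showmount -e 10.0.0.43   # should list /media among the exports
```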
roles/printing/default.nix (path inferred)

@@ -8,6 +8,21 @@ in
 {
   options.roles.printing = {
     enable = mkEnableOption "Enable default printing setup";
+    printerName = mkOption {
+      type = types.str;
+      default = "MFC-L8900CDW_series";
+      description = "Name for the default printer";
+    };
+    printerUri = mkOption {
+      type = types.str;
+      default = "ipp://brother.oglehome/ipp/print";
+      description = "Device URI for the default printer (e.g., ipp://hostname/ipp/print)";
+    };
+    printerModel = mkOption {
+      type = types.str;
+      default = "everywhere";
+      description = "PPD model for the printer (use 'everywhere' for driverless IPP)";
+    };
   };
 
   config = mkIf cfg.enable
@@ -21,11 +36,11 @@ in
     };
 
     hardware.printers.ensurePrinters = [{
-      name = "MFC-L8900CDW_series";
-      deviceUri = "ipp://brother.oglehome/ipp/print";
-      model = "everywhere";
+      name = cfg.printerName;
+      deviceUri = cfg.printerUri;
+      model = cfg.printerModel;
     }];
-    hardware.printers.ensureDefaultPrinter = "MFC-L8900CDW_series";
+    hardware.printers.ensureDefaultPrinter = cfg.printerName;
 
     # Fix ensure-printers service to wait for network availability
    systemd.services.ensure-printers = {
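After a rebuild, CUPS should report the printer and mark it as the default (lpstat is part of CUPS; the expected name is the module default):

```bash
lpstat -p -d   # lists printers and the system default; expect MFC-L8900CDW_series
```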
roles/rclone-mount/default.nix (new file, 149 lines)
@@ -0,0 +1,149 @@
{ config, lib, pkgs, ... }:

with lib;

let
  cfg = config.roles.rclone-mount;

  # Generate systemd service for a single mount
  mkMountService = name: mountCfg: {
    description = "rclone mount for ${name}";
    after = [ "network-online.target" ];
    wants = [ "network-online.target" ];
    wantedBy = [ "multi-user.target" ];

    # Wait for parent mount points (e.g., ZFS pools) to be available
    unitConfig = mkIf (mountCfg.requiresMountsFor != []) {
      RequiresMountsFor = mountCfg.requiresMountsFor;
    };

    serviceConfig = {
      Type = "notify";
      ExecStartPre = "${pkgs.coreutils}/bin/mkdir -p ${mountCfg.mountPoint}";
      ExecStart = concatStringsSep " " ([
        "${pkgs.rclone}/bin/rclone mount"
        ":webdav:${mountCfg.remotePath}"
        "${mountCfg.mountPoint}"
        "--webdav-url=${mountCfg.webdavUrl}"
        "--webdav-vendor=${mountCfg.webdavVendor}"
        "--webdav-user=${mountCfg.username}"
        "--allow-other"
        "--vfs-cache-mode=${mountCfg.vfsCacheMode}"
        "--dir-cache-time=${mountCfg.dirCacheTime}"
        "--poll-interval=${mountCfg.pollInterval}"
        "--log-level=${mountCfg.logLevel}"
      ] ++ mountCfg.extraArgs);
      ExecStop = "${pkgs.fuse}/bin/fusermount -uz ${mountCfg.mountPoint}";
      Restart = "on-failure";
      RestartSec = "10s";
      EnvironmentFile = mountCfg.environmentFile;
    };
  };
in
{
  options.roles.rclone-mount = {
    enable = mkEnableOption "Enable rclone WebDAV mounts";

    mounts = mkOption {
      type = types.attrsOf (types.submodule {
        options = {
          webdavUrl = mkOption {
            type = types.str;
            description = "WebDAV server URL (e.g., https://webdav.torbox.app)";
          };

          webdavVendor = mkOption {
            type = types.enum [ "other" "nextcloud" "owncloud" "sharepoint" "sharepoint-ntlm" "fastmail" ];
            default = "other";
            description = "WebDAV server vendor for optimizations";
          };

          username = mkOption {
            type = types.str;
            description = "WebDAV username (often email address)";
          };

          environmentFile = mkOption {
            type = types.path;
            description = ''
              Path to environment file containing RCLONE_WEBDAV_PASS.
              The password should be obscured using: rclone obscure <password>
              File format: RCLONE_WEBDAV_PASS=<obscured_password>
            '';
          };

          mountPoint = mkOption {
            type = types.str;
            description = "Local mount point path";
          };

          remotePath = mkOption {
            type = types.str;
            default = "/";
            description = "Remote path on WebDAV server to mount";
          };

          vfsCacheMode = mkOption {
            type = types.enum [ "off" "minimal" "writes" "full" ];
            default = "full";
            description = ''
              VFS cache mode. For streaming media, 'full' is recommended.
              - off: No caching (direct reads/writes)
              - minimal: Cache open files only
              - writes: Cache writes and open files
              - full: Full caching of all files
            '';
          };

          dirCacheTime = mkOption {
            type = types.str;
            default = "5m";
            description = "Time to cache directory entries";
          };

          pollInterval = mkOption {
            type = types.str;
            default = "1m";
            description = "Poll interval for remote changes";
          };

          logLevel = mkOption {
            type = types.enum [ "DEBUG" "INFO" "NOTICE" "ERROR" ];
            default = "INFO";
            description = "rclone log level";
          };

          extraArgs = mkOption {
            type = types.listOf types.str;
            default = [];
            description = "Extra arguments to pass to rclone mount";
          };

          requiresMountsFor = mkOption {
            type = types.listOf types.str;
            default = [];
            description = ''
              List of mount points that must be available before this service starts.
              Use this when the mount point's parent is on a ZFS pool or other filesystem
              that may not be mounted at boot time.
              Example: [ "/media" ] to wait for the media ZFS pool to mount.
            '';
          };
        };
      });
      default = {};
      description = "Attribute set of rclone WebDAV mounts to configure";
    };
  };

  config = mkIf cfg.enable {
    # Ensure FUSE is available
    environment.systemPackages = [ pkgs.rclone pkgs.fuse ];
    programs.fuse.userAllowOther = true;

    # Create systemd services for each mount
    systemd.services = mapAttrs' (name: mountCfg:
      nameValuePair "rclone-mount-${name}" (mkMountService name mountCfg)
    ) cfg.mounts;
  };
}
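Each attribute under `mounts` becomes a `rclone-mount-<name>` unit via the `mapAttrs'` at the bottom, so a mount named `torbox` can be inspected like any other service (a sketch; all three commands are standard systemd/util-linux tooling):

```bash
systemctl status rclone-mount-torbox   # the unit generated for mounts.torbox
journalctl -u rclone-mount-torbox -f   # follow rclone's log output
findmnt -t fuse.rclone                 # confirm the FUSE mount is live
```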
@@ -35,12 +35,12 @@
 # a) Configure builders in configuration.nix:
 #    roles.remote-build.builders = [
 #      {
-#        hostName = "zix790prors";
+#        hostName = "zix790prors.oglehome";
 #        maxJobs = 16;     # Number of parallel build jobs
 #        speedFactor = 3;  # Higher = prefer this builder
 #      }
 #      {
-#        hostName = "john-endesktop";
+#        hostName = "john-endesktop.oglehome";
 #        maxJobs = 1;      # Conservative for busy machines
 #        speedFactor = 1;
 #      }
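Since the builder host names switched to fully qualified `.oglehome` names, it is worth confirming the daemon can actually reach them over SSH (a sketch; `nix store ping` is a standard nix subcommand, assuming nix-command is enabled, as it is in a flakes setup):

```bash
nix store ping --store ssh://zix790prors.oglehome     # should report the remote store
nix store ping --store ssh://john-endesktop.oglehome
```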
roles/virtualisation/default.nix (path inferred)

@@ -8,6 +8,11 @@ in
 {
   options.roles.virtualisation = {
     enable = mkEnableOption "Enable virtualisation";
+    dockerUsers = mkOption {
+      type = types.listOf types.str;
+      default = [ "johno" ];
+      description = "List of users to add to the docker group";
+    };
   };
 
   config = mkIf cfg.enable
@@ -15,6 +20,6 @@ in
     virtualisation.libvirtd.enable = true;
     programs.virt-manager.enable = true;
     virtualisation.docker.enable = true;
-    users.extraGroups.docker.members = [ "johno" ];
+    users.extraGroups.docker.members = cfg.dockerUsers;
   };
 }
@@ -1,6 +1,30 @@
 #!/usr/bin/env bash
 set -euo pipefail
 
+# Parse arguments
+while [[ $# -gt 0 ]]; do
+  case $1 in
+    --help|-h)
+      echo "Usage: $0 [OPTIONS]"
+      echo ""
+      echo "Rotate to the next wallpaper in the configured list."
+      echo ""
+      echo "This script increments the currentIndex in home/wallpapers/default.nix,"
+      echo "cycling through available wallpapers. Rebuild your system to apply"
+      echo "the new wallpaper."
+      echo ""
+      echo "Options:"
+      echo "  --help, -h  Show this help message"
+      exit 0
+      ;;
+    *)
+      echo "Unknown option: $1"
+      echo "Use --help for usage information"
+      exit 1
+      ;;
+  esac
+done
+
 # Colors for output
 RED='\033[0;31m'
 GREEN='\033[0;32m'
@@ -1,6 +1,30 @@
 #!/usr/bin/env bash
 set -euo pipefail
 
+# Parse arguments
+while [[ $# -gt 0 ]]; do
+  case $1 in
+    --help|-h)
+      echo "Usage: $0 [OPTIONS]"
+      echo ""
+      echo "Update Doom Emacs to the latest commit from the doomemacs repository."
+      echo ""
+      echo "This script fetches the latest commit SHA from the default branch,"
+      echo "updates the rev and sha256 in home/roles/emacs/default.nix, and"
+      echo "prepares the configuration for a system rebuild."
+      echo ""
+      echo "Options:"
+      echo "  --help, -h  Show this help message"
+      exit 0
+      ;;
+    *)
+      echo "Unknown option: $1"
+      echo "Use --help for usage information"
+      exit 1
+      ;;
+  esac
+done
+
 # Colors for output
 RED='\033[0;31m'
 GREEN='\033[0;32m'
@@ -1,6 +1,35 @@
 #!/usr/bin/env bash
 set -euo pipefail
 
+# Parse arguments
+while [[ $# -gt 0 ]]; do
+  case $1 in
+    --help|-h)
+      echo "Usage: $0 [OPTIONS]"
+      echo ""
+      echo "Perform a major upgrade of the NixOS configuration."
+      echo ""
+      echo "This script runs the following steps:"
+      echo "  1. Update all flake inputs (nix flake update)"
+      echo "  2. Update Doom Emacs to the latest commit"
+      echo "  3. Update Claude Code to the latest version"
+      echo "  4. Rotate to the next wallpaper"
+      echo ""
+      echo "After completion, review changes with 'git diff' and rebuild"
+      echo "your system with 'sudo nixos-rebuild switch --flake .'"
+      echo ""
+      echo "Options:"
+      echo "  --help, -h  Show this help message"
+      exit 0
+      ;;
+    *)
+      echo "Unknown option: $1"
+      echo "Use --help for usage information"
+      exit 1
+      ;;
+  esac
+done
+
 # Colors for output
 RED='\033[0;31m'
 GREEN='\033[0;32m'