Compare commits: `bead/nixos`...`polecat/sh` (96 commits)
| SHA1 |
|---|
| 9eb1cd8e6c |
| 1b585847ab |
| e7906331dc |
| dc722843a9 |
| 03f169284d |
| 8908500073 |
| 87f6d5c759 |
| a851c2551c |
| 6cf63e86c1 |
| c3ed6c0a26 |
| 53fa89b2e9 |
| 3acf9d2796 |
| 123e7d3b3a |
| 56097aefa4 |
| 21a8b5c5d9 |
| 8f8582b0f3 |
| 94fb5a3e64 |
| 7df68ba8c8 |
| 2799632308 |
| 346c031278 |
| 188d2befb0 |
| 8e8b5f4304 |
| 4098ee3987 |
| e1e37da7c2 |
| a46d11a770 |
| 8553b9826e |
| a0c081e12e |
| d92e4b3ddf |
| 70b40966be |
| 475a633ab7 |
| a39416c9db |
| 63c3f4e84d |
| baf64f7f4a |
| f0b6ede7ed |
| d0cb16391f |
| d872293f19 |
| 07182cfdcf |
| 65e91c20f7 |
| 01e376eac4 |
| 9c5be2e27a |
| d9ffb14db5 |
| 07ea05afab |
| 4f5108c9d9 |
| 9243341ed7 |
| b729ee8c7a |
| ebc28cebd4 |
| c82358d586 |
| 74388e8c24 |
| a98ccddab1 |
| 18570628a5 |
| 0c484b6601 |
| 4853a18474 |
| 8b8453a37a |
| 2b6e289b9a |
| 70d364544f |
| 1ffa8524f0 |
| be3c27e868 |
| c2d286087f |
| 1172818062 |
| 9f63e1430c |
| b14ef1f62a |
| 87719fa9e6 |
| 933612da4c |
| d2c7599267 |
| 3d16824eac |
| 2cdc15163c |
| a77b1230fe |
| 623a387127 |
| 737f2b09e4 |
| cddc9de14a |
| 53e3bbe78f |
| c258eafe34 |
| 03d0b76f97 |
| b5f7233214 |
| 1203662237 |
| 6ad714b57c |
| 5440214295 |
| cc305af899 |
| c06adec7d8 |
| 7903b2dfd0 |
| f472aa9b3d |
| 2e07454ffa |
| daf963b290 |
| c3c8688f31 |
| 1cee1cd365 |
| 66c27da142 |
| 7d6f71f4e4 |
| 7091ee3ad5 |
| d78e089695 |
| 28b7a0fda9 |
| b7bccb0b40 |
| 2d03714934 |
| 3f0e381de2 |
| 1d9fd0aee9 |
| 16f6dfcec7 |
| 90ef70eb2e |
`.beads/.gitignore` (vendored, 5 lines changed)
```diff
@@ -32,6 +32,11 @@ beads.left.meta.json
 beads.right.jsonl
 beads.right.meta.json
+
+# Sync state (local-only, per-machine)
+# These files are machine-specific and should not be shared across clones
+.sync.lock
+sync_base.jsonl

 # NOTE: Do NOT add negation patterns (e.g., !issues.jsonl) here.
 # They would override fork protection in .git/info/exclude, allowing
 # contributors to accidentally commit upstream issue databases.
```
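The NOTE above leans on git's per-clone exclude file. As a rough sketch of the mechanism it protects (the `issues.jsonl` entry is an assumption based on the comment, not taken from the repo):

```bash
# .git/info/exclude is local to one clone and never committed, so it
# survives upstream .gitignore changes. A negation pattern such as
# !issues.jsonl in .gitignore would outrank it and re-expose the file.
echo "issues.jsonl" >> .git/info/exclude
git check-ignore -v issues.jsonl   # shows which rule currently wins
```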
```diff
@@ -6,7 +6,7 @@
 # Issue prefix for this repository (used by bd init)
 # If not set, bd init will auto-detect from directory name
 # Example: issue-prefix: "myproject" creates issues like "myproject-1", "myproject-2", etc.
-# issue-prefix: ""
+issue-prefix: "x"

 # Use no-db mode: load from JSONL, no SQLite, write back after each command
 # When true, bd will use .beads/issues.jsonl as the source of truth
```
```diff
@@ -59,4 +59,6 @@ sync-branch: "beads-sync"
 # - linear.url
 # - linear.api-key
 # - github.org
 # - github.repo
+
+routing.mode: "explicit"
```
```diff
@@ -1,4 +0,0 @@
-{
-  "database": "beads.db",
-  "jsonl_export": "sync_base.jsonl"
-}
```
```diff
@@ -10,9 +10,11 @@ jobs:
   check:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v6
+
-      - uses: https://git.johnogle.info/johno/gitea-actions/nix-setup@main
+      - uses: https://git.johnogle.info/johno/gitea-actions/nix-setup@v1
+
       - name: Check flake
         run: nix flake check
         env:
           NIX_CONFIG: "access-tokens = git.johnogle.info=${{ secrets.GITEA_ACCESS_TOKEN }}"
```
`.gitignore` (vendored, 5 lines changed)
```diff
@@ -1,3 +1,8 @@
 result
 thoughts
 .beads
+
+# Gas Town (added by gt)
+.runtime/
+.claude/
+.logs/
```
`flake.lock` (generated, 132 lines changed)
```diff
@@ -8,27 +8,28 @@
       ]
     },
     "locked": {
-      "lastModified": 1767911810,
-      "narHash": "sha256-0L4ATr01UsmBC0rSW62VIMVVSUihAQu2+ZOoHk9BQnA=",
+      "lastModified": 1769840331,
+      "narHash": "sha256-Yp0K4JoXX8EcHp1juH4OZ7dcCmkopDu4VvAgZEOxgL8=",
       "owner": "steveyegge",
       "repo": "beads",
-      "rev": "28ff9fe9919a9665a0f00f5b3fcd084b43fb6cc3",
+      "rev": "93965b4abeed920a4701e03571d1b6bb75810722",
       "type": "github"
     },
     "original": {
       "owner": "steveyegge",
       "repo": "beads",
+      "rev": "93965b4abeed920a4701e03571d1b6bb75810722",
       "type": "github"
     }
   },
   "doomemacs": {
     "flake": false,
     "locked": {
-      "lastModified": 1767773143,
-      "narHash": "sha256-QL/t9v2kFNxBDyNJb/s411o3mxujan+QX5IZglTdpTk=",
+      "lastModified": 1768984347,
+      "narHash": "sha256-VvC4rgAAaFnYLCdcUoz7dTE3kuBNuHIc+GlXOrPCxpg=",
       "owner": "doomemacs",
       "repo": "doomemacs",
-      "rev": "3e15fb36d7f94f0a218bda977be4d3f5da983a71",
+      "rev": "57818a6da90fbef39ff80d62fab2cd319496c3b9",
       "type": "github"
     },
     "original": {
@@ -47,11 +48,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1768011937,
-      "narHash": "sha256-SnU2XTo34vwVaijs+4VwcXTNwMWO4nwzzs08N39UagA=",
+      "lastModified": 1769848312,
+      "narHash": "sha256-ggBocPd1L4l5MFNV0Fw9aSGZZO4aGzCfgh4e6hQ77RE=",
       "owner": "nix-community",
       "repo": "emacs-overlay",
-      "rev": "79abf71d9897cf3b5189f7175cda1b1102abc65c",
+      "rev": "be0b4f4f28f69be61e9174807250e3235ee11d50",
       "type": "github"
     },
     "original": {
@@ -78,6 +79,22 @@
       "type": "github"
     }
   },
+  "gastown": {
+    "flake": false,
+    "locked": {
+      "lastModified": 1770098007,
+      "narHash": "sha256-CFlN57BXlR5FobTChdE2GgdIGx4xJcFFCk1E5Q98cSQ=",
+      "owner": "steveyegge",
+      "repo": "gastown",
+      "rev": "13461161063bf7b2365fe5fd4df88e32c3ba2a28",
+      "type": "github"
+    },
+    "original": {
+      "owner": "steveyegge",
+      "repo": "gastown",
+      "type": "github"
+    }
+  },
   "google-cookie-retrieval": {
     "inputs": {
       "nixpkgs": [
@@ -85,11 +102,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1761423376,
-      "narHash": "sha256-pMy3cnUFfue4vz/y0jx71BfcPGxZf+hk/DtnzWvfU0c=",
+      "lastModified": 1768846578,
+      "narHash": "sha256-82f/+e8HAwmBukiLlr7I3HYvM/2GCd5SOc+BC+qzsOQ=",
       "ref": "refs/heads/main",
-      "rev": "a1f695665771841a988afc965526cbf99160cd77",
-      "revCount": 11,
+      "rev": "c11ff9d3c67372a843a0fa6bf23132e986bd6955",
+      "revCount": 14,
       "type": "git",
       "url": "https://git.johnogle.info/johno/google-cookie-retrieval.git"
     },
@@ -105,11 +122,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1767514898,
-      "narHash": "sha256-ONYqnKrPzfKEEPChoJ9qPcfvBqW9ZgieDKD7UezWPg4=",
+      "lastModified": 1768949235,
+      "narHash": "sha256-TtjKgXyg1lMfh374w5uxutd6Vx2P/hU81aEhTxrO2cg=",
       "owner": "nix-community",
       "repo": "home-manager",
-      "rev": "7a06e8a2f844e128d3b210a000a62716b6040b7f",
+      "rev": "75ed713570ca17427119e7e204ab3590cc3bf2a5",
       "type": "github"
     },
     "original": {
@@ -126,11 +143,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1767556355,
-      "narHash": "sha256-RDTUBDQBi9D4eD9iJQWtUDN/13MDLX+KmE+TwwNUp2s=",
+      "lastModified": 1769397130,
+      "narHash": "sha256-TTM4KV9IHwa181X7afBRbhLJIrgynpDjAXJFMUOWfyU=",
       "owner": "nix-community",
       "repo": "home-manager",
-      "rev": "f894bc4ffde179d178d8deb374fcf9855d1a82b7",
+      "rev": "c37679d37bdbecf11bbe3c5eb238d89ca4f60641",
       "type": "github"
     },
     "original": {
@@ -148,11 +165,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1767082077,
-      "narHash": "sha256-2tL1mRb9uFJThUNfuDm/ehrnPvImL/QDtCxfn71IEz4=",
+      "lastModified": 1769273817,
+      "narHash": "sha256-+iyLihi/ynJokMgJZMRXuMuI6DPGUQRajz5ztNCHgnI=",
       "owner": "Jovian-Experiments",
       "repo": "Jovian-NixOS",
-      "rev": "efd4b22e6fdc6d7fb4e186ae333a4b74e03da440",
+      "rev": "98f988ad46e31f9956c5f6874dfb3580a7ff3969",
       "type": "github"
     },
     "original": {
@@ -168,11 +185,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1765066094,
-      "narHash": "sha256-0YSU35gfRFJzx/lTGgOt6ubP8K6LeW0vaywzNNqxkl4=",
+      "lastModified": 1767634391,
+      "narHash": "sha256-owcSz2ICqTSvhBbhPP+1eWzi88e54rRZtfCNE5E/wwg=",
       "owner": "nix-darwin",
       "repo": "nix-darwin",
-      "rev": "688427b1aab9afb478ca07989dc754fa543e03d5",
+      "rev": "08585aacc3d6d6c280a02da195fdbd4b9cf083c2",
       "type": "github"
     },
     "original": {
@@ -190,11 +207,11 @@
       "systems": "systems_2"
     },
     "locked": {
-      "lastModified": 1768034604,
-      "narHash": "sha256-62pIZMvGHhYJmMiiBsxHqZt/dFyENPcFHlJq5NJF3Sw=",
+      "lastModified": 1769849328,
+      "narHash": "sha256-BjH1Ge6O8ObN6Z97un2U87pl4POO99Q8RSsgIuTZq8Q=",
       "owner": "marienz",
       "repo": "nix-doom-emacs-unstraightened",
-      "rev": "9b3b8044fe4ccdcbb2d6f733d7dbe4d5feea18bc",
+      "rev": "fc1d7190c49558cdc6af20d7657075943a500a93",
       "type": "github"
     },
     "original": {
@@ -227,11 +244,27 @@
   },
   "nixpkgs": {
     "locked": {
-      "lastModified": 1767480499,
-      "narHash": "sha256-8IQQUorUGiSmFaPnLSo2+T+rjHtiNWc+OAzeHck7N48=",
+      "lastModified": 1769089682,
+      "narHash": "sha256-9yA/LIuAVQq0lXelrZPjLuLVuZdm03p8tfmHhnDIkms=",
       "owner": "nixos",
       "repo": "nixpkgs",
-      "rev": "30a3c519afcf3f99e2c6df3b359aec5692054d92",
+      "rev": "078d69f03934859a181e81ba987c2bb033eebfc5",
       "type": "github"
     },
     "original": {
       "owner": "nixos",
       "ref": "nixos-25.11",
       "repo": "nixpkgs",
       "type": "github"
     }
   },
+  "nixpkgs-qt": {
+    "locked": {
+      "lastModified": 1770464364,
+      "narHash": "sha256-z5NJPSBwsLf/OfD8WTmh79tlSU8XgIbwmk6qB1/TFzY=",
+      "owner": "nixos",
+      "repo": "nixpkgs",
+      "rev": "23d72dabcb3b12469f57b37170fcbc1789bd7457",
+      "type": "github"
+    },
+    "original": {
@@ -243,11 +276,11 @@
   },
   "nixpkgs-unstable": {
     "locked": {
-      "lastModified": 1767379071,
-      "narHash": "sha256-EgE0pxsrW9jp9YFMkHL9JMXxcqi/OoumPJYwf+Okucw=",
+      "lastModified": 1769170682,
+      "narHash": "sha256-oMmN1lVQU0F0W2k6OI3bgdzp2YOHWYUAw79qzDSjenU=",
       "owner": "nixos",
       "repo": "nixpkgs",
-      "rev": "fb7944c166a3b630f177938e478f0378e64ce108",
+      "rev": "c5296fdd05cfa2c187990dd909864da9658df755",
       "type": "github"
     },
     "original": {
@@ -257,6 +290,22 @@
       "type": "github"
     }
   },
+  "perles": {
+    "flake": false,
+    "locked": {
+      "lastModified": 1769460725,
+      "narHash": "sha256-zM2jw+emxe8+mNyR1ebMWkQiEx8uSmhoqqI0IxXLDgs=",
+      "owner": "zjrosen",
+      "repo": "perles",
+      "rev": "57b20413eea461452b59e13f5a4a367953b1f768",
+      "type": "github"
+    },
+    "original": {
+      "owner": "zjrosen",
+      "repo": "perles",
+      "type": "github"
+    }
+  },
   "plasma-manager": {
     "inputs": {
       "home-manager": [
@@ -267,11 +316,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1763909441,
-      "narHash": "sha256-56LwV51TX/FhgX+5LCG6akQ5KrOWuKgcJa+eUsRMxsc=",
+      "lastModified": 1767662275,
+      "narHash": "sha256-d5Q1GmQ+sW1Bt8cgDE0vOihzLaswsm8cSdg8124EqXE=",
       "owner": "nix-community",
       "repo": "plasma-manager",
-      "rev": "b24ed4b272256dfc1cc2291f89a9821d5f9e14b4",
+      "rev": "51816be33a1ff0d4b22427de83222d5bfa96d30e",
       "type": "github"
     },
     "original": {
@@ -290,11 +339,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1763909441,
-      "narHash": "sha256-56LwV51TX/FhgX+5LCG6akQ5KrOWuKgcJa+eUsRMxsc=",
+      "lastModified": 1767662275,
+      "narHash": "sha256-d5Q1GmQ+sW1Bt8cgDE0vOihzLaswsm8cSdg8124EqXE=",
       "owner": "nix-community",
       "repo": "plasma-manager",
-      "rev": "b24ed4b272256dfc1cc2291f89a9821d5f9e14b4",
+      "rev": "51816be33a1ff0d4b22427de83222d5bfa96d30e",
       "type": "github"
     },
     "original": {
@@ -306,6 +355,7 @@
   "root": {
     "inputs": {
       "beads": "beads",
+      "gastown": "gastown",
       "google-cookie-retrieval": "google-cookie-retrieval",
       "home-manager": "home-manager",
       "home-manager-unstable": "home-manager-unstable",
@@ -313,7 +363,9 @@
       "nix-darwin": "nix-darwin",
       "nix-doom-emacs-unstraightened": "nix-doom-emacs-unstraightened",
       "nixpkgs": "nixpkgs",
+      "nixpkgs-qt": "nixpkgs-qt",
       "nixpkgs-unstable": "nixpkgs-unstable",
+      "perles": "perles",
       "plasma-manager": "plasma-manager",
       "plasma-manager-unstable": "plasma-manager-unstable"
     }
```
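Lock churn like the above normally comes from flake update commands rather than hand edits. A sketch, assuming a recent Nix with flakes enabled (input names taken from this flake.lock):

```bash
# Refresh every input and rewrite flake.lock wholesale
nix flake update

# Or bump a single input, e.g. the beads pin
# (newer Nix also accepts: nix flake update beads)
nix flake lock --update-input beads
```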
`flake.nix` (49 lines changed)
```diff
@@ -4,6 +4,9 @@
   inputs = {
     nixpkgs.url = "github:nixos/nixpkgs/nixos-25.11";
     nixpkgs-unstable.url = "github:nixos/nixpkgs/nixos-unstable";
+    # Separate nixpkgs for qt5webengine-dependent packages (jellyfin-media-player, etc.)
+    # Updates on separate Renovate schedule to avoid massive qt rebuilds
+    nixpkgs-qt.url = "github:nixos/nixpkgs/nixos-25.11";

     nix-darwin = {
       url = "github:nix-darwin/nix-darwin/nix-darwin-25.11";
@@ -43,10 +46,22 @@
     };

     beads = {
-      url = "github:steveyegge/beads";
+      # v0.49.1 has dolt server mode support (gt-1mf.3)
+      # Pinned to 259ddd92 - uses Go 1.24 compatible with nixpkgs
+      url = "github:steveyegge/beads/93965b4abeed920a4701e03571d1b6bb75810722";
       inputs.nixpkgs.follows = "nixpkgs-unstable";
     };

+    gastown = {
+      url = "github:steveyegge/gastown";
+      flake = false; # No flake.nix upstream yet
+    };
+
+    perles = {
+      url = "github:zjrosen/perles";
+      flake = false; # No flake.nix upstream yet
+    };
+
     nix-doom-emacs-unstraightened = {
       url = "github:marienz/nix-doom-emacs-unstraightened";
       # Don't follow nixpkgs to avoid rebuild issues with emacs-overlay
@@ -64,6 +79,11 @@
         config.allowUnfree = true;
         overlays = unstableOverlays;
       };
+      # Separate nixpkgs for qt5webengine-heavy packages to avoid rebuild churn
+      qt-pinned = import inputs.nixpkgs-qt {
+        system = prev.stdenv.hostPlatform.system;
+        config.allowUnfree = true;
+      };
       custom = prev.callPackage ./packages {};
       # Compatibility: bitwarden renamed to bitwarden-desktop in unstable
       bitwarden-desktop = prev.bitwarden-desktop or prev.bitwarden;
@@ -84,11 +104,20 @@
     };


+    # Shared unstable overlays for custom package builds
+    customUnstableOverlays = [
+      # Override claude-code in unstable to use our custom GCS-based build
+      # (needed for corporate networks that block npm registry)
+      (ufinal: uprev: {
+        claude-code = uprev.callPackage ./packages/claude-code {};
+      })
+    ];
+
     nixosModules = [
       ./roles
       inputs.home-manager.nixosModules.home-manager
       {
-        nixpkgs.overlays = [ (mkBaseOverlay {}) ];
+        nixpkgs.overlays = [ (mkBaseOverlay { unstableOverlays = customUnstableOverlays; }) ];
       }
       (mkHomeManagerConfig {
         sharedModules = [ inputs.plasma-manager.homeModules.plasma-manager ];
@@ -101,7 +130,7 @@
       inputs.home-manager-unstable.nixosModules.home-manager
       inputs.jovian.nixosModules.jovian
       {
-        nixpkgs.overlays = [ (mkBaseOverlay {}) ];
+        nixpkgs.overlays = [ (mkBaseOverlay { unstableOverlays = customUnstableOverlays; }) ];
       }
       (mkHomeManagerConfig {
         sharedModules = [ inputs.plasma-manager-unstable.homeModules.plasma-manager ];
@@ -112,17 +141,7 @@
       ./roles/darwin.nix
       inputs.home-manager.darwinModules.home-manager
       {
-        nixpkgs.overlays = [
-          (mkBaseOverlay {
-            # Override claude-code in unstable to use our custom GCS-based build
-            # (needed for corporate networks that block npm registry)
-            unstableOverlays = [
-              (ufinal: uprev: {
-                claude-code = uprev.callPackage ./packages/claude-code {};
-              })
-            ];
-          })
-        ];
+        nixpkgs.overlays = [ (mkBaseOverlay { unstableOverlays = customUnstableOverlays; }) ];
       }
       (mkHomeManagerConfig { sharedModules = []; })
     ];
@@ -206,7 +225,7 @@
     };

     # Darwin/macOS configurations
-    darwinConfigurations."blkfv4yf49kt7" = inputs.nix-darwin.lib.darwinSystem rec {
+    darwinConfigurations."BLKFV4YF49KT7" = inputs.nix-darwin.lib.darwinSystem rec {
       system = "aarch64-darwin";
       modules = darwinModules ++ [
         ./machines/johno-macbookpro/configuration.nix
```
```diff
@@ -107,7 +107,7 @@
     aerospace = {
       enable = true;
       leader = "cmd";
-      ctrlShortcuts.enable = true;
+      ctrlShortcuts.enable = false;
       sketchybar.enable = true;
       # Optional: Add per-machine userSettings overrides
       # userSettings = {
```
```diff
@@ -10,6 +10,7 @@
   home.roles = {
     "3d-printing".enable = true;
     base.enable = true;
+    gaming.enable = true;
     desktop.enable = true;
     emacs.enable = true;
     email.enable = true;
```
```diff
@@ -12,6 +12,7 @@
   home.roles = {
     base.enable = true;
     plasma-manager-kodi.enable = true;
+    kdeconnect.enable = true;
   };

   home.packages = with pkgs; [
```
```diff
@@ -12,6 +12,7 @@
   home.roles = {
     base.enable = true;
     desktop.enable = true;
+    gaming.enable = true;
     development.enable = true;
     communication.enable = true;
     email.enable = true;
```
```diff
@@ -12,6 +12,7 @@
   home.roles = {
     base.enable = true;
     desktop.enable = true;
+    gaming.enable = true;
     media.enable = true;
     communication.enable = true;
     kdeconnect.enable = true;
```
```diff
@@ -632,7 +632,9 @@ in
       text = ''
         #!/bin/bash

-        DISK_USAGE=$(df -H / | grep -v Filesystem | awk '{print $5}')
+        # Monitor /System/Volumes/Data which contains user data on APFS
+        # The root / is a read-only snapshot with minimal usage
+        DISK_USAGE=$(df -H /System/Volumes/Data | grep -v Filesystem | awk '{print $5}')

         ${pkgs.sketchybar}/bin/sketchybar --set $NAME label="$DISK_USAGE"
       '';
```
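A quick way to see why this change matters on APFS, where `/` is a sealed read-only snapshot and user data lives on the data volume (exact numbers will vary per machine):

```bash
# Compare the read-only system snapshot with the writable data volume
df -H / /System/Volumes/Data
```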
```diff
@@ -22,6 +22,7 @@ in
     shellcheck
     tmux
     tree
+    watch
   ];

   # Automatic garbage collection for user profile (home-manager generations).
```
`home/roles/development/beads-search-query-optimization.patch` (new file, 44 lines)
```diff
diff --git a/internal/storage/dolt/queries.go b/internal/storage/dolt/queries.go
index 7d8214ee..8acdaae2 100644
--- a/internal/storage/dolt/queries.go
+++ b/internal/storage/dolt/queries.go
@@ -212,8 +212,21 @@ func (s *DoltStore) SearchIssues(ctx context.Context, query string, filter types
 	}

 	// nolint:gosec // G201: whereSQL contains column comparisons with ?, limitSQL is a safe integer
+	// Performance fix: SELECT all columns directly instead of id-only + WHERE IN (all_ids)
+	// See: hq-ihwsj - bd list uses inefficient WHERE IN (all_ids) query pattern
 	querySQL := fmt.Sprintf(`
-		SELECT id FROM issues
+		SELECT id, content_hash, title, description, design, acceptance_criteria, notes,
+		       status, priority, issue_type, assignee, estimated_minutes,
+		       created_at, created_by, owner, updated_at, closed_at, external_ref,
+		       compaction_level, compacted_at, compacted_at_commit, original_size, source_repo, close_reason,
+		       deleted_at, deleted_by, delete_reason, original_type,
+		       sender, ephemeral, pinned, is_template, crystallizes,
+		       await_type, await_id, timeout_ns, waiters,
+		       hook_bead, role_bead, agent_state, last_activity, role_type, rig, mol_type,
+		       event_kind, actor, target, payload,
+		       due_at, defer_until,
+		       quality_score, work_type, source_system
+		FROM issues
 		%s
 		ORDER BY priority ASC, created_at DESC
 		%s
@@ -225,7 +238,15 @@ func (s *DoltStore) SearchIssues(ctx context.Context, query string, filter types
 	}
 	defer rows.Close()

-	return s.scanIssueIDs(ctx, rows)
+	var issues []*types.Issue
+	for rows.Next() {
+		issue, err := scanIssueRow(rows)
+		if err != nil {
+			return nil, err
+		}
+		issues = append(issues, issue)
+	}
+	return issues, rows.Err()
 }

 // GetReadyWork returns issues that are ready to work on (not blocked)
```
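The patch collapses the previous two-step pattern (fetch ids, then hydrate them via a WHERE IN over every id) into a single SELECT. A rough sanity check after rebuilding, assuming a workspace with enough issues to show the difference:

```bash
# The patched build should skip the second WHERE IN (all_ids) round trip,
# so wall-clock time for a plain listing should drop noticeably.
time bd list > /dev/null
```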
`home/roles/development/commands/beads_batch_research_plan.md` (new file, 317 lines)
````markdown
---
description: Batch research and planning for multiple beads with interactive question review
model: opus
---

# Beads Batch Research+Plan

This skill automates the common workflow of:
1. Running /beads_research in parallel for multiple beads
2. Presenting open questions interactively for user input (bead-by-bead)
3. Running /beads_plan for all researched beads (plus any spawned from splits)

## When to Use

- You have multiple beads ready for work
- You want to research and plan them efficiently before implementation
- You prefer to batch your question-answering rather than context-switching between skills

## Phase 1: Selection

1. **Get ready beads**: Run `bd ready --limit=20` to list beads with no blockers

2. **Filter already-researched beads**:
   For each ready bead, check if it already has research:
   ```bash
   ls thoughts/beads-{bead-id}/research.md 2>/dev/null
   ```

   Categorize beads:
   - **Needs research**: No `research.md` exists
   - **Has research, needs plan**: `research.md` exists but no `plan.md`
   - **Already planned**: Both `research.md` and `plan.md` exist

3. **Present selection**:
   ```
   Ready beads available for batch research+plan:

   NEEDS RESEARCH:
   - {bead-id}: {title} (type: {type})
   - ...

   HAS RESEARCH (plan only):
   - {bead-id}: {title} (type: {type})
   - ...

   ALREADY PLANNED (skip):
   - {bead-id}: {title}

   Which beads would you like to process?
   ```

4. **Use AskUserQuestion** with `multiSelect: true`:
   - Include bead ID and title for each option
   - Separate options by category
   - Allow selection across categories

## Phase 2: Parallel Research

For each selected bead that NEEDS RESEARCH, launch a research subagent.

### Subagent Instructions Template

```
Research bead [BEAD_ID]: [BEAD_TITLE]

1. **Load bead context**:
   ```bash
   bd show [BEAD_ID]
   ```

2. **Create artifact directory**:
   ```bash
   mkdir -p thoughts/beads-[BEAD_ID]
   ```

3. **Conduct research** following beads_research.md patterns:
   - Analyze and decompose the research question
   - Spawn parallel sub-agent tasks (codebase-locator, codebase-analyzer, etc.)
   - Synthesize findings

4. **Write research document** to `thoughts/beads-[BEAD_ID]/research.md`:
   - Include frontmatter with metadata
   - Document findings with file:line references
   - **CRITICAL**: Include "## Open Questions" section listing any unresolved items

5. **Return summary**:
   - Research status (complete/partial)
   - Number of open questions
   - Key findings summary (2-3 bullet points)
   - List of open questions verbatim
```

### Launching Subagents

Use `subagent_type: "opus"` for research subagents (matches beads_research model setting).

Launch ALL research subagents in a single message for parallel execution:
```
<Task calls for each selected bead needing research - all in one message>
```

### Collecting Results

Wait for ALL research subagents to complete. Collect:
- Bead ID
- Research status
- Open questions list
- Any errors encountered

## Phase 3: Interactive Question Review

Present each bead's open questions sequentially for user input.

### For Each Bead (in order):

1. **Present research summary**:
   ```
   ## Bead {N}/{total}: {bead-id} - {title}

   Research complete. Key findings:
   - {finding 1}
   - {finding 2}

   Open questions requiring your input:
   1. {question 1}
   2. {question 2}

   Additionally:
   - Should this bead be split into multiple beads? (y/n)
   - If split, describe the split:
   ```

2. **Collect user responses**:
   - Answers to open questions
   - Split decision (yes/no)
   - If split: new bead titles and how to divide the work

3. **Handle splits**:
   If user indicates a split:
   ```bash
   # Create new beads for split work
   bd create --title="{split title 1}" --type={type} --priority={priority} \
     --description="{description based on user input}"

   # Update original bead if scope narrowed
   bd update {original-bead-id} --description="{updated description}"
   ```

   Track new bead IDs for inclusion in planning phase.

4. **Update research document**:
   Append user answers to `thoughts/beads-{id}/research.md`:
   ```markdown
   ## User Clarifications [{timestamp}]

   Q: {question 1}
   A: {user answer 1}

   Q: {question 2}
   A: {user answer 2}

   ## Bead Splits
   {If split: description of split and new bead IDs}
   ```

### Progress Tracking

After each bead's questions are answered, confirm before moving to next:
```
Questions answered for {bead-id}. {N-1} beads remaining.
Continue to next bead? (y/n)
```

### Beads with No Questions

If a bead's research had no open questions:
```
## Bead {N}/{total}: {bead-id} - {title}

Research complete with no open questions.

Key findings:
- {finding 1}
- {finding 2}

Should this bead be split? (y/n)
```

## Phase 4: Parallel Planning

After all questions answered, launch planning subagents for all beads.

### Beads to Plan

Include:
- Original beads that were researched
- Beads that had existing research (from selection phase)
- New beads spawned from splits

### Subagent Instructions Template

```
Create implementation plan for bead [BEAD_ID]: [BEAD_TITLE]

1. **Load context**:
   ```bash
   bd show [BEAD_ID]
   ```

2. **Read research** (it exists and has user clarifications):
   Read `thoughts/beads-[BEAD_ID]/research.md` FULLY

3. **Create plan** following beads_plan.md patterns:
   - Context gathering via sub-agents
   - Design approach based on research findings and user clarifications
   - **Skip interactive questions** - they were already answered in research review

4. **Write plan** to `thoughts/beads-[BEAD_ID]/plan.md`:
   - Full plan structure with phases
   - Success criteria (automated and manual)
   - References to research document

5. **Update bead**:
   ```bash
   bd update [BEAD_ID] --notes="Plan created: thoughts/beads-[BEAD_ID]/plan.md"
   ```

6. **Return summary**:
   - Plan status (complete/failed)
   - Number of phases
   - Estimated complexity (small/medium/large)
   - Any issues encountered
```

### Launching Subagents

Use `subagent_type: "opus"` for planning subagents (matches beads_plan model setting).

Launch ALL planning subagents in a single message:
```
<Task calls for each bead to plan - all in one message>
```

### Handling Beads Without Research

For beads that had existing research but user didn't review questions:
- Planning subagent reads existing research
- If research has unresolved open questions, subagent should flag this in its return

## Phase 5: Summary

After all planning completes, present final summary.

### Summary Format

```
## Batch Research+Plan Complete

### Successfully Processed:
| Bead | Title | Research | Plan | Phases | Complexity |
|------|-------|----------|------|--------|------------|
| {id} | {title} | Complete | Complete | 3 | medium |
| {id} | {title} | Complete | Complete | 2 | small |

### New Beads (from splits):
| Bead | Title | Parent | Status |
|------|-------|--------|--------|
| {new-id} | {title} | {parent-id} | Planned |

### Failed:
| Bead | Title | Phase Failed | Error |
|------|-------|--------------|-------|
| {id} | {title} | Research | Timeout |

### Next Steps:
1. Review plans at `thoughts/beads-{id}/plan.md`
2. Run `/parallel_beads` to implement all planned beads
3. Or run `/beads_implement {id}` for individual implementation

### Artifacts Created:
- Research: thoughts/beads-{id}/research.md (x{N} files)
- Plans: thoughts/beads-{id}/plan.md (x{N} files)
```

## Error Handling

### Research Subagent Failure
- Log the failure with bead ID and error
- Continue with other beads
- Exclude failed beads from question review and planning
- Report in final summary

### Planning Subagent Failure
- Log the failure with bead ID and error
- Research still valid - can retry planning manually
- Report in final summary

### User Cancellation During Question Review
- Save progress to bead notes
- Report which beads were completed
- User can resume with remaining beads in new session

### Split Bead Creation Failure
- Report error but continue with original bead
- User can manually create split beads later

## Resource Limits

- Maximum concurrent research subagents: 5
- Maximum concurrent planning subagents: 5
- If more beads selected, process in batches

## Notes

- This skill is designed for the "research+plan before implementation" workflow
- Pairs well with `/parallel_beads` for subsequent implementation
- Run `/reconcile_beads` after implementation PRs merge
````
````diff
@@ -54,6 +54,8 @@ When this command is invoked:
    - Read `thoughts/beads-{bead-id}/plan.md` FULLY
    - Check for any existing checkmarks (- [x]) indicating partial progress
    - Read any research at `thoughts/beads-{bead-id}/research.md`
+   - If plan's Success Criteria references contribution guidelines (e.g., "Per CONTRIBUTING.md:"),
+     verify the original CONTRIBUTING.md still exists and requirements are current

 5. **Mark bead in progress** (if not already):
    ```bash
@@ -127,6 +129,10 @@ All phases completed and automated verification passed:
 - {List manual verification items from plan}

 Let me know when manual testing is complete so I can close the bead.
+
+**Contribution guidelines compliance:**
+- {List any contribution guideline requirements that were part of Success Criteria}
+- {Note if any requirements could not be automated and need manual review}
 ```

 **STOP HERE and wait for user confirmation.**
````
````diff
@@ -51,13 +51,32 @@ When this command is invoked:
    - Any linked tickets or docs
    - Use Read tool WITHOUT limit/offset

-2. **Spawn initial research tasks**:
+2. **Check for contribution guidelines**:
+
+   ```bash
+   # Check standard locations for contribution guidelines
+   for f in CONTRIBUTING.md .github/CONTRIBUTING.md docs/CONTRIBUTING.md; do
+     if [ -f "$f" ]; then
+       echo "Found: $f"
+       break
+     fi
+   done
+   ```
+
+   If found:
+   - Read the file fully
+   - Extract actionable requirements (testing, code style, documentation, PR conventions)
+   - These requirements MUST be incorporated into the plan's Success Criteria
+
+   If not found, note "No contribution guidelines found" and proceed.
+
+3. **Spawn initial research tasks**:
    - **codebase-locator**: Find all files related to the task
    - **codebase-analyzer**: Understand current implementation
    - **codebase-pattern-finder**: Find similar features to model after
    - **thoughts-locator**: Find any existing plans or decisions

-3. **Read all files identified by research**:
+4. **Read all files identified by research**:
    - Read them FULLY into main context
    - Cross-reference with requirements

@@ -273,6 +292,12 @@ Always separate into two categories:
 - Performance under real conditions
 - Edge cases hard to automate

+**From Contribution Guidelines** (if CONTRIBUTING.md exists):
+- Include any testing requirements specified in guidelines
+- Include any code style/linting requirements
+- Include any documentation requirements
+- Reference the guideline: "Per CONTRIBUTING.md: {requirement}"
+
 ## Example Invocation

 ```
````
````diff
@@ -51,6 +51,18 @@ When this command is invoked:
    - Use the Read tool WITHOUT limit/offset parameters
    - Read these files yourself in the main context before spawning sub-tasks

+### Step 1.5: Check for contribution guidelines
+
+Before spawning sub-agents, check if the repository has contribution guidelines:
+
+```bash
+for f in CONTRIBUTING.md .github/CONTRIBUTING.md docs/CONTRIBUTING.md; do
+  if [ -f "$f" ]; then echo "Found: $f"; break; fi
+done
+```
+
+If found, read the file and note key requirements. These should be included in the research document under a "## Contribution Guidelines" section if relevant to the research question.
+
 ### Step 2: Analyze and decompose the research question
 - Break down the query into composable research areas
 - Identify specific components, patterns, or concepts to investigate
@@ -143,6 +155,10 @@ status: complete
 ## Architecture Documentation
 {Current patterns, conventions found in codebase}

+## Contribution Guidelines
+{If CONTRIBUTING.md exists, summarize key requirements relevant to the research topic}
+{If no guidelines found, omit this section}
+
 ## Historical Context (from thoughts/)
 {Relevant insights from thoughts/ with references}
````
````diff
@@ -42,7 +42,46 @@ AskUserQuestion with:
 - options from filtered bd ready output
 ```

-## Phase 2: Parallel Implementation
+## Phase 2: Worktree Setup
+
+Before launching implementation subagents, create worktrees for all selected beads:
+
+1. **Get repository name**:
+   ```bash
+   REPO_NAME=$(git remote get-url origin | sed 's|.*/||' | sed 's/\.git$//')
+   ```
+
+2. **For each selected bead**, create its worktree:
+   ```bash
+   BEAD_ID="[bead-id]"
+   # Check if worktree already exists
+   if [ -d "$HOME/wt/${REPO_NAME}/${BEAD_ID}" ]; then
+     echo "Worktree already exists: ~/wt/${REPO_NAME}/${BEAD_ID}"
+     # Ask user: remove and recreate, or skip this bead?
+   else
+     git worktree add -b "bead/${BEAD_ID}" "$HOME/wt/${REPO_NAME}/${BEAD_ID}"
+   fi
+   ```
+
+3. **Track created worktrees**:
+   Maintain a list of (bead_id, worktree_path) pairs for use in subagent instructions.
+
+4. **Report status**:
+   ```
+   Created worktrees:
+   - nixos-configs-abc → ~/wt/nixos-configs/nixos-configs-abc (branch: bead/nixos-configs-abc)
+   - nixos-configs-xyz → ~/wt/nixos-configs/nixos-configs-xyz (branch: bead/nixos-configs-xyz)
+
+   Skipped (existing worktree):
+   - nixos-configs-123 → Ask user for resolution
+   ```
+
+**Note**: If a worktree or branch already exists, ask the user before proceeding:
+- Remove existing worktree and branch, then recreate
+- Skip this bead
+- Use existing worktree as-is (risky - branch may have diverged)
+
+## Phase 3: Parallel Implementation

 For each selected bead, launch a subagent using the Task tool. All subagents should be launched in parallel (single message with multiple Task tool calls).

@@ -53,50 +92,92 @@ Each implementation subagent should receive these instructions:
 ```
 Work on bead [BEAD_ID]: [BEAD_TITLE]

-1. **Create worktree**:
-   - Branch name: `bead/[BEAD_ID]`
-   - Worktree path: `~/wt/[REPO_NAME]/[BEAD_ID]`
-   - Command: `git worktree add -b bead/[BEAD_ID] ~/wt/[REPO_NAME]/[BEAD_ID]`
+Worktree path: [WORKTREE_PATH]

-2. **Review the bead requirements**:
+## CRITICAL: Branch Verification (MUST DO FIRST)
+
+1. **Navigate to worktree**:
+   ```bash
+   cd [WORKTREE_PATH]
+   ```
+
+2. **Verify branch** (MANDATORY before ANY modifications):
+   ```bash
+   CURRENT_BRANCH=$(git branch --show-current)
+   echo "Current branch: $CURRENT_BRANCH"
+   pwd
+   ```
+
+   **ABORT CONDITIONS** - If ANY of these are true, STOP IMMEDIATELY:
+   - Branch is `main` or `master`
+   - Branch does not match `bead/[BEAD_ID]`
+
+   If you detect any abort condition:
+   ```
+   ABORTING: Branch verification failed.
+   Expected branch: bead/[BEAD_ID]
+   Actual branch: [CURRENT_BRANCH]
+   Working directory: [pwd output]
+
+   DO NOT PROCEED. Report this error to the orchestrator.
+   ```
+
+## After Verification Passes
+
+3. **Review the bead requirements**:
    - Run `bd show [BEAD_ID]` to understand the acceptance criteria
    - Note any external issue references (GitHub issues, Linear tickets, etc.)

-3. **Extract validation criteria**:
+4. **Extract validation criteria**:
    - Check for a plan: `thoughts/beads-[BEAD_ID]/plan.md`
    - If plan exists:
     - Read the plan and find the "Automated Verification" section
     - Extract each verification command (lines starting with `- [ ]` followed by a command)
     - Example: `- [ ] Tests pass: \`make test\`` → extract `make test`
     - Note any "Per CONTRIBUTING.md:" requirements for additional validation
     - Also read the "Manual Verification" section from the plan if present
     - Save manual verification items for inclusion in the PR description (they won't be executed)
   - If no plan exists, use best-effort validation:
     - Check if `Makefile` exists → try `make test` and `make lint`
     - Check if `flake.nix` exists → try `nix flake check`
     - Check if `package.json` exists → try `npm test`
     - **Check for CONTRIBUTING.md** → read and extract testing/linting requirements
       - Track which requirements can be automated vs need manual review
       - Automated: commands that can be run (e.g., "run `make test`")
       - Manual: qualitative checks (e.g., "ensure documentation is updated")
   - If none found, note "No validation criteria found"

-4. **Implement the changes**:
+5. **Implement the changes**:
    - Work in the worktree directory
    - Complete all acceptance criteria listed in the bead

    After implementation, run validation:
-   - Execute each validation command from step 3
+   - Execute each validation command from step 4
    - Track results in this format:
     ```
     VALIDATION_RESULTS:
     - make test: PASS
     - make lint: FAIL (exit code 1: src/foo.ts:23 - missing semicolon)
-    - nix flake check: SKIP (command not found)
+    - nix flake check: SKIP (not applicable - no flake.nix)
+    - cargo test: ERROR (command not found)
     ```

+   **Status definitions:**
+   - **PASS**: Check executed successfully with no issues
+   - **FAIL**: Check executed but found issues that need attention
+   - **SKIP**: Check not applicable to this project (e.g., no Makefile for `make test`)
+   - **ERROR**: Check could not execute (missing tool, permission error, command not found)
+
    - If any validation fails:
     - Continue with PR creation (don't block)
     - Document failures in bead notes: `bd update [BEAD_ID] --notes="Validation failures: [list]"`

-5. **Commit and push**:
+6. **Commit and push**:
    - Stage all changes: `git add -A`
    - Create a descriptive commit message
    - Push the branch: `git push -u origin bead/[BEAD_ID]`

-6. **Create a PR**:
+7. **Create a PR**:
    - Detect hosting provider from origin URL: `git remote get-url origin`
    - If URL contains `github.com`, use `gh`; otherwise use `tea` (Gitea/Forgejo)
    - PR title: "[BEAD_ID] [BEAD_TITLE]"
@@ -119,14 +200,27 @@ Work on bead [BEAD_ID]: [BEAD_TITLE]
    ## Changes
    - [List of changes made]

-   ## Validation
-   [Include validation results from step 4]
+   ## Validation Steps Completed
+
+   ### Automated Checks
+   | Check | Status | Details |
+   |-------|--------|---------|
+   | make test | PASS | |
+   | make lint | FAIL | src/foo.ts:23 - missing semicolon |
-   | nix flake check | SKIP | command not found |
+   | nix flake check | SKIP | not applicable - no flake.nix |
+   | cargo test | ERROR | command not found |
+
+   ### Manual Verification Required
+   [If plan has Manual Verification items, list them as unchecked boxes:]
+   - [ ] Verify UI changes match design mockups
+   - [ ] Test on mobile viewport sizes
+   [If no manual verification items: "None specified in plan."]
+
+   ### CONTRIBUTING.md Compliance
+   [If CONTRIBUTING.md requirements were extracted:]
+   - [x] Tests pass (verified via `make test`)
+   - [ ] Documentation updated (needs manual review)
+   [If no CONTRIBUTING.md: "No contribution guidelines found."]
    EOF
    )"
    ```
@@ -146,44 +240,66 @@ Work on bead [BEAD_ID]: [BEAD_TITLE]
    ## Changes
    - [List of changes made]

-   ## Validation
-   [Include validation results from step 4]
+   ## Validation Steps Completed
+
+   ### Automated Checks
+   | Check | Status | Details |
+   |-------|--------|---------|
+   | make test | PASS | |
+   | make lint | FAIL | src/foo.ts:23 - missing semicolon |
-   | nix flake check | SKIP | command not found |"
+   | nix flake check | SKIP | not applicable - no flake.nix |
+   | cargo test | ERROR | command not found |
+
+   ### Manual Verification Required
+   [If plan has Manual Verification items, list them as unchecked boxes:]
+   - [ ] Verify UI changes match design mockups
+   - [ ] Test on mobile viewport sizes
+   [If no manual verification items: None specified in plan.]
+
+   ### CONTRIBUTING.md Compliance
+   [If CONTRIBUTING.md requirements were extracted:]
+   - [x] Tests pass (verified via make test)
+   - [ ] Documentation updated (needs manual review)
+   [If no CONTRIBUTING.md: No contribution guidelines found.]"
    ```

-7. **Update bead status**:
+8. **Update bead status**:
    - Mark the bead as "in_review": `bd update [BEAD_ID] --status=in_review`
    - Add the PR URL to the bead notes: `bd update [BEAD_ID] --notes="$(bd show [BEAD_ID] --json | jq -r '.notes')

      PR: [PR_URL]"`

-8. **Report results**:
+9. **Report results**:
    - Return:
     - PR URL
     - Bead ID
     - Implementation status (success/failure/blocked)
-    - Validation summary: `X passed, Y failed, Z skipped`
-    - List of any validation failures with details
+    - Validation summary: `X passed, Y failed, Z skipped, W errors`
+    - List of any validation failures or errors with details
    - If blocked or unable to complete, explain what's blocking progress
    - If validation failed, include the specific failures so the main agent can summarize them for the user
 ```

 ### Launching Subagents

 For each bead, substitute into the template:
 - `[BEAD_ID]` - the bead ID
 - `[BEAD_TITLE]` - the bead title
+- `[WORKTREE_PATH]` - the worktree path created in Phase 2

 Use `subagent_type: "general-purpose"` for implementation subagents. Launch all selected beads' subagents in a single message for parallel execution:

 ```
 <Task calls for each selected bead - all in one message>
 ```

+**Important**: The worktree paths were created in Phase 2. Use the exact paths that were created, e.g.:
+- `~/wt/nixos-configs/nixos-configs-abc`
+- `~/wt/nixos-configs/nixos-configs-xyz`
+
 Collect results from all subagents before proceeding.

-## Phase 3: Parallel Review
+## Phase 4: Parallel Review

 After all implementation subagents complete, launch review subagents for each PR.

@@ -218,7 +334,7 @@ Review PR for bead [BEAD_ID]

 Launch all review subagents in parallel.

-## Phase 4: Cleanup and Summary
+## Phase 5: Cleanup and Summary

 After reviews complete:

@@ -264,9 +380,21 @@ Example output:

 ## Error Handling

+- **Worktree creation failures** (Phase 2):
+  - If `git worktree add` fails (branch exists, path exists), prompt user:
+    - Remove existing and retry
+    - Skip this bead
+    - Use existing (with warning about potential divergence)
+  - Do NOT proceed to subagent launch until worktree is confirmed
+
+- **Branch verification failures** (subagent reports):
+  - If subagent reports it's on `main` or `master`, do NOT retry
+  - Mark bead as failed with reason "Branch verification failed"
+  - Continue with other beads but flag this as a critical issue
+  - Investigation required: the worktree may have been corrupted or not created properly
+
 - **Subagent failures**: If a subagent fails or times out, note it in the summary but continue with other beads
 - **PR creation failures**: Report the error but continue with reviews of successful PRs
 - **Worktree conflicts**: If a worktree already exists, ask the user if they want to remove it or skip that bead

 ## Resource Limits
````
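Phase 5's cleanup is not spelled out in the hunks above; a minimal sketch of tearing down one bead's worktree after its PR merges, following the Phase 2 path convention (the ids are the hypothetical examples from the status report):

```bash
BEAD_ID="nixos-configs-abc"
REPO_NAME="nixos-configs"

# Remove the checkout, then the local branch, then any stale metadata
git worktree remove "$HOME/wt/${REPO_NAME}/${BEAD_ID}"
git branch -D "bead/${BEAD_ID}"
git worktree prune
```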
@@ -5,6 +5,92 @@ with lib;
|
||||
let
|
||||
cfg = config.home.roles.development;
|
||||
|
||||
# Build beads from flake input with corrected vendorHash
|
||||
# The upstream default.nix has stale vendorHash for commits with server mode
|
||||
beadsRev = builtins.substring 0 8 (globalInputs.beads.rev or "unknown");
|
||||
beadsPackage = pkgs.buildGoModule {
|
||||
pname = "beads";
|
||||
version = "0.49.1-${beadsRev}";
|
||||
src = globalInputs.beads;
|
||||
subPackages = [ "cmd/bd" ];
|
||||
doCheck = false;
|
||||
# Regenerated vendorHash for commit 93965b4a (has dolt server mode, Go 1.24)
|
||||
vendorHash = "sha256-gwxGv8y4+1+k0741CnOYcyJPTJ5vTrynqPoO8YS9fbQ=";
|
||||
nativeBuildInputs = [ pkgs.git ];
|
||||
meta = with lib; {
|
||||
description = "beads (bd) - An issue tracker designed for AI-supervised coding workflows";
|
||||
homepage = "https://github.com/steveyegge/beads";
|
||||
license = licenses.mit;
|
||||
mainProgram = "bd";
|
||||
};
|
||||
};
|
||||
|
||||
# Gastown - multi-agent workspace manager (no upstream flake.nix yet)
|
||||
# Source is tracked via flake input for renovate updates
|
||||
gastownRev = builtins.substring 0 8 (globalInputs.gastown.rev or "unknown");
|
||||
gastownPackage = pkgs.buildGoModule {
|
||||
pname = "gastown";
|
||||
version = "unstable-${gastownRev}";
|
||||
src = globalInputs.gastown;
|
||||
vendorHash = "sha256-ripY9vrYgVW8bngAyMLh0LkU/Xx1UUaLgmAA7/EmWQU=";
|
||||
subPackages = [ "cmd/gt" ];
|
||||
doCheck = false;
|
||||
|
||||
# Must match ldflags from gastown Makefile - BuiltProperly=1 is required
|
||||
# or gt will error with "This binary was built with 'go build' directly"
|
||||
ldflags = [
|
||||
"-X github.com/steveyegge/gastown/internal/cmd.Version=${gastownRev}"
|
||||
"-X github.com/steveyegge/gastown/internal/cmd.Commit=${gastownRev}"
|
||||
"-X github.com/steveyegge/gastown/internal/cmd.BuildTime=nix-build"
|
||||
"-X github.com/steveyegge/gastown/internal/cmd.BuiltProperly=1"
|
||||
];
|
||||
|
||||
# Bug fixes not yet merged upstream
|
||||
# Each patch is stored in a separate file for clarity and maintainability
|
||||
patches = [
|
||||
# Fix validateRecipient bug: normalize addresses before comparison
|
||||
./gastown-fix-validate-recipient.patch
|
||||
# Fix agentBeadToAddress to use title field for hq- prefixed beads
|
||||
./gastown-fix-agent-bead-address-title.patch
|
||||
# Fix crew/polecat home paths: remove incorrect /rig suffix
|
||||
./gastown-fix-role-home-paths.patch
|
||||
# Fix town root detection: don't map to Mayor (causes spurious mismatch warnings)
|
||||
./gastown-fix-town-root-detection.patch
|
||||
# Statusline optimization: skip expensive beads queries for detached sessions
|
||||
# Reduces Dolt CPU from ~70% to ~20% by caching and early-exit
|
||||
./gastown-statusline-optimization.patch
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
description = "Gas Town - multi-agent workspace manager by Steve Yegge";
|
||||
homepage = "https://github.com/steveyegge/gastown";
|
||||
license = licenses.mit;
|
||||
mainProgram = "gt";
|
||||
};
|
||||
};
|
||||
|
||||
# Perles - TUI for beads issue tracking (no upstream flake.nix yet)
|
||||
# Source is tracked via flake input for renovate updates
|
||||
perlesRev = builtins.substring 0 8 (globalInputs.perles.rev or "unknown");
|
||||
perlesPackage = pkgs.buildGoModule {
|
||||
pname = "perles";
|
||||
version = "unstable-${perlesRev}";
|
||||
src = globalInputs.perles;
|
||||
vendorHash = "sha256-JHERJDzbiqgjWXwRhXVjgDEiDQ3AUXRIONotfPF21B0=";
|
||||
doCheck = false;
|
||||
|
||||
ldflags = [
|
||||
"-X main.version=${perlesRev}"
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
description = "Perles - Terminal UI for beads issue tracking";
|
||||
homepage = "https://github.com/zjrosen/perles";
|
||||
license = licenses.mit;
|
||||
mainProgram = "perles";
|
||||
};
|
||||
};
|
||||
|
||||
# Fetch the claude-plugins repository (for humanlayer commands/agents)
|
||||
# Update the rev to get newer versions of the commands
|
||||
claudePluginsRepo = builtins.fetchGit {
|
||||
@@ -37,10 +123,14 @@ in
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
home.packages = [
|
||||
globalInputs.beads.packages.${system}.default
|
||||
beadsPackage
|
||||
gastownPackage
|
||||
perlesPackage
|
||||
pkgs.unstable.claude-code
|
||||
pkgs.unstable.claude-code-router
|
||||
pkgs.unstable.codex
|
||||
pkgs.dolt
|
||||
pkgs.sqlite
|
||||
|
||||
# Custom packages
|
||||
pkgs.custom.tea-rbw
|
||||
@@ -61,12 +151,14 @@ in
|
||||
if [ -f "$file" ]; then
|
||||
filename=$(basename "$file" .md)
|
||||
dest="$HOME/.claude/commands/humanlayer:''${filename}.md"
|
||||
rm -f "$dest" 2>/dev/null || true
|
||||
|
||||
# Copy file and conditionally remove the "model:" line from frontmatter
|
||||
${if cfg.allowArbitraryClaudeCodeModelSelection
|
||||
then "cp \"$file\" \"$dest\""
|
||||
else "${pkgs.gnused}/bin/sed '/^model:/d' \"$file\" > \"$dest\""
|
||||
}
|
||||
chmod u+w "$dest" 2>/dev/null || true
|
||||
fi
|
||||
done
|
||||
|
||||
@@ -75,17 +167,19 @@ in
|
||||
if [ -f "$file" ]; then
|
||||
filename=$(basename "$file" .md)
dest="$HOME/.claude/agents/humanlayer:''${filename}.md"
rm -f "$dest" 2>/dev/null || true

# Copy file and conditionally remove the "model:" line from frontmatter
${if cfg.allowArbitraryClaudeCodeModelSelection
then "cp \"$file\" \"$dest\""
else "${pkgs.gnused}/bin/sed '/^model:/d' \"$file\" > \"$dest\""
}
chmod u+w "$dest" 2>/dev/null || true
fi
done

# Copy local skills from this repo (with retry for race conditions with running Claude)
for file in ${./skills}/*.md; do
# Copy local commands from this repo (with retry for race conditions with running Claude)
for file in ${./commands}/*.md; do
if [ -f "$file" ]; then
filename=$(basename "$file" .md)
dest="$HOME/.claude/commands/''${filename}.md"
@@ -95,14 +189,47 @@ in
sleep 0.5
cp "$file" "$dest" || echo "Warning: Failed to copy $filename.md to commands"
fi
chmod u+w "$dest" 2>/dev/null || true
fi
done

$DRY_RUN_CMD echo "Claude Code humanlayer commands and agents installed successfully${
if cfg.allowArbitraryClaudeCodeModelSelection
then " (model specifications preserved)"
else " (model selection removed)"
} + local skills"
# Copy local skills (reference materials) to skills subdirectory
mkdir -p ~/.claude/commands/skills
for file in ${./skills}/*.md; do
if [ -f "$file" ]; then
filename=$(basename "$file" .md)
dest="$HOME/.claude/commands/skills/''${filename}.md"
rm -f "$dest" 2>/dev/null || true
if ! cp "$file" "$dest" 2>/dev/null; then
sleep 0.5
cp "$file" "$dest" || echo "Warning: Failed to copy $filename.md to skills"
fi
chmod u+w "$dest" 2>/dev/null || true
fi
done

# Copy micro-skills (compact reusable knowledge referenced by formulas)
for file in ${./skills/micro}/*.md; do
if [ -f "$file" ]; then
dest="$HOME/.claude/commands/skills/$(basename "$file")"
rm -f "$dest" 2>/dev/null || true
cp "$file" "$dest"
chmod u+w "$dest" 2>/dev/null || true
fi
done

# Install beads formulas to user-level formula directory
mkdir -p ~/.beads/formulas
for file in ${./formulas}/*.formula.toml; do
if [ -f "$file" ]; then
dest="$HOME/.beads/formulas/$(basename "$file")"
rm -f "$dest" 2>/dev/null || true
cp "$file" "$dest"
chmod u+w "$dest" 2>/dev/null || true
fi
done

$DRY_RUN_CMD echo "Claude Code plugins installed: humanlayer commands/agents + local commands + local skills + formulas"
'';

# Set up beads Claude Code integration (hooks for SessionStart/PreCompact)
@@ -110,11 +237,51 @@ in
home.activation.claudeCodeBeadsSetup = lib.hm.dag.entryAfter ["writeBoundary" "claudeCodeCommands"] ''
# Run bd setup claude to install hooks into ~/.claude/settings.json
# This is idempotent - safe to run multiple times
${globalInputs.beads.packages.${system}.default}/bin/bd setup claude 2>/dev/null || true
${beadsPackage}/bin/bd setup claude 2>/dev/null || true

$DRY_RUN_CMD echo "Claude Code beads integration configured (hooks installed)"
'';

# Beads timer gate checker (Linux only - uses systemd)
# Runs every 5 minutes to auto-resolve expired timer gates across all beads projects
# This enables self-scheduling molecules (watchers, patrols, etc.)
systemd.user.services.beads-gate-check = lib.mkIf pkgs.stdenv.isLinux {
Unit = {
Description = "Check and resolve expired beads timer gates";
};
Service = {
Type = "oneshot";
# Check gates in all workspaces that have running daemons
ExecStart = pkgs.writeShellScript "beads-gate-check-all" ''
# Get list of workspaces from daemon registry
workspaces=$(${beadsPackage}/bin/bd daemon list --json 2>/dev/null | ${pkgs.jq}/bin/jq -r '.[].workspace // empty' 2>/dev/null)

if [ -z "$workspaces" ]; then
exit 0 # No beads workspaces, nothing to do
fi

for ws in $workspaces; do
if [ -d "$ws" ]; then
cd "$ws" && ${beadsPackage}/bin/bd gate check --type=timer --quiet 2>/dev/null || true
fi
done
'';
};
};

systemd.user.timers.beads-gate-check = lib.mkIf pkgs.stdenv.isLinux {
Unit = {
Description = "Periodic beads timer gate check";
};
Timer = {
OnBootSec = "5min";
OnUnitActiveSec = "5min";
};
Install = {
WantedBy = [ "timers.target" ];
};
};

# Note: modules must be imported at top-level home config
};
}
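
After a home-manager switch, the timer can be checked from a shell (a quick sketch; unit names follow the definitions above, and `systemctl --user` assumes an active user session):

```bash
# Is the timer scheduled?
systemctl --user list-timers beads-gate-check.timer

# Trigger one run now and read its output
systemctl --user start beads-gate-check.service
journalctl --user -u beads-gate-check.service -n 20
```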
115
home/roles/development/formulas/quick-fix.formula.toml
Normal file
@@ -0,0 +1,115 @@
# Quick Fix Formula
#
# Streamlined workflow for well-understood bugs and small fixes.
# Skips the deep research and planning phases of RPI - get in, fix, get out.
#
# Use when:
# - Bug is well-understood (you know what's broken)
# - Fix is straightforward (no architectural decisions)
# - Change is small (< 100 lines)
#
# Use RPI instead when:
# - Root cause is unclear
# - Multiple approaches possible
# - Significant design decisions needed

formula = "quick-fix"
description = """
Streamlined workflow for bugs and small fixes.

A faster alternative to RPI for well-understood issues:
1. Quick investigation to confirm understanding
2. Implement the fix
3. Verify with tests
4. Commit and close

No human gates - designed for quick turnaround on obvious fixes.
"""
version = 1
type = "workflow"

# === Variables ===

[vars.title]
required = true
description = "Brief description of the bug/fix"

[vars.bead_id]
description = "Existing bead ID (creates new if not provided)"

[vars.test_cmd]
default = "make test"
description = "Command to verify the fix"

# === Steps ===

[[steps]]
id = "investigate"
title = "Investigate: {{title}}"
description = """
Quick investigation to confirm understanding of the bug.

Goals:
- Locate the problematic code
- Confirm root cause matches expectations
- Identify files that need changes

This is NOT deep research - spend 5-10 minutes max.
If the bug is more complex than expected, pivot to RPI workflow.

Output: Mental model of what to fix (no artifact needed).
"""

[[steps]]
id = "fix"
title = "Fix: {{title}}"
needs = ["investigate"]
description = """
Implement the fix.

Guidelines:
- Make minimal changes to fix the issue
- Follow existing code patterns
- Add/update tests if appropriate
- Keep changes focused (no drive-by refactors)

If the fix grows beyond expectations, pause and consider:
- Should this be an RPI workflow instead?
- Should we split into multiple changes?
"""

[[steps]]
id = "verify"
title = "Verify fix"
needs = ["fix"]
description = """
Verify the fix works correctly.

Run: {{test_cmd}}

Also check:
- Bug is actually fixed (manual verification)
- No obvious regressions introduced
- Code compiles/builds cleanly

If tests fail, iterate on the fix step.
"""

[[steps]]
id = "commit"
title = "Commit and close"
needs = ["verify"]
description = """
Commit the fix and close the bead.

Actions:
1. Stage changes: git add -A
2. Commit with descriptive message: git commit -m "fix: {{title}}"
3. Push to remote: git push
4. Close the bead: bd close {{bead_id}}

Commit message should explain:
- What was broken
- How it was fixed
- Any relevant context
"""
124
home/roles/development/formulas/rpi.formula.toml
Normal file
@@ -0,0 +1,124 @@
# RPI Formula - Research -> Plan -> Implement
#
# Universal workflow for feature development with human gates.

formula = "rpi"
description = """
Research -> Plan -> Implement workflow.

Usage:
  bd pour rpi --var title="Add user preferences"
  bd pour rpi --var title="Auth" --var bead_id="project-abc" --var test_cmd="nix flake check"
"""
version = 1
type = "workflow"

# ─── Variables ───

[vars.title]
required = true
description = "What are we building?"

[vars.bead_id]
description = "Existing bead ID (creates new if not provided)"

[vars.test_cmd]
default = "make test"
description = "Command to run tests"

[vars.lint_cmd]
default = "make lint"
description = "Command to run linting"

# ─── Research Phase ───

[[steps]]
id = "research"
title = "Research: {{title}}"
skill = "research-agents"
description = """
Conduct comprehensive codebase research.

Goals:
- Understand current implementation
- Identify patterns to follow
- Find relevant files and dependencies
- Document key discoveries

Output: thoughts/beads-{{bead_id}}/research.md
"""

# ─── Planning Phase ───

[[steps]]
id = "plan"
title = "Plan: {{title}}"
needs = ["research"]
type = "human"
skill = "planning"
description = """
Create detailed implementation plan based on research.

Goals:
- Present understanding and clarify requirements
- Propose design options with tradeoffs
- Define phases with success criteria
- Identify what we're NOT doing

Output: thoughts/beads-{{bead_id}}/plan.md
"""

[steps.gate]
type = "human"
reason = "Plan approval before implementation"

# ─── Implementation Phase ───

[[steps]]
id = "implement"
title = "Implement: {{title}}"
needs = ["plan"]
description = """
Execute the approved plan phase by phase.

For each phase:
1. Make the changes
2. Run verification: {{test_cmd}}, {{lint_cmd}}
3. Update plan checkboxes for resumability

Stop and ask if encountering unexpected issues.
"""

# ─── Verification Phase ───

[[steps]]
id = "verify"
title = "Manual verification"
needs = ["implement"]
type = "human"
description = """
Human confirms implementation works correctly.

Check: feature works, edge cases handled, no regressions.
Tests: {{test_cmd}} | Lint: {{lint_cmd}}
"""

[steps.gate]
type = "human"
reason = "Confirm implementation is correct"

# ─── Completion ───

[[steps]]
id = "complete"
title = "Close bead"
needs = ["verify"]
skill = "artifact-format"
description = """
Mark work as complete.

Actions:
- bd update {{bead_id}} --notes="Implementation complete"
- bd close {{bead_id}} --reason="Completed: {{title}}"
- bd sync && git push
"""
@@ -0,0 +1,15 @@
diff --git a/internal/mail/router.go b/internal/mail/router.go
--- a/internal/mail/router.go
+++ b/internal/mail/router.go
@@ -315,7 +315,10 @@ func agentBeadToAddress(bead *agentBead) string {
}

// For other hq- agents, fall back to description parsing
- return parseAgentAddressFromDescription(bead.Description)
+ if bead.Title != "" && strings.Contains(bead.Title, "/") {
+ return bead.Title
+ }
+ return parseAgentAddressFromDescription(bead.Description)
}

// Handle gt- prefixed IDs (legacy format)
@@ -0,0 +1,35 @@
diff --git a/internal/mail/router.go b/internal/mail/router.go
--- a/internal/mail/router.go
+++ b/internal/mail/router.go
@@ -330,8 +330,29 @@ func agentBeadToAddress(bead *agentBead) string {
}

// Handle gt- prefixed IDs (legacy format)
- if !strings.HasPrefix(id, "gt-") {
- return "" // Not a valid agent bead ID
+ // Handle rig-specific prefixes: <prefix>-<rig>-<role>-<name>
+ // Examples: j-java-crew-americano -> java/crew/americano
+ idParts := strings.Split(id, "-")
+ if len(idParts) >= 3 {
+ for i, part := range idParts {
+ if part == "crew" || part == "polecat" || part == "polecats" {
+ if i >= 1 && i < len(idParts)-1 {
+ rig := idParts[i-1]
+ name := strings.Join(idParts[i+1:], "-")
+ return rig + "/" + part + "/" + name
+ }
+ }
+ if part == "witness" || part == "refinery" {
+ if i >= 1 {
+ return idParts[i-1] + "/" + part
+ }
+ }
+ }
+ }
+
+ // Handle gt- prefixed IDs (legacy format)
+ if !strings.HasPrefix(id, "gt-") {
+ return "" // Not a valid agent bead ID
}

// Strip prefix
25
home/roles/development/gastown-fix-copydir-symlinks.patch
Normal file
@@ -0,0 +1,25 @@
diff --git a/internal/git/git.go b/internal/git/git.go
--- a/internal/git/git.go
+++ b/internal/git/git.go
@@ -73,7 +73,19 @@ func copyDir(src, dest string) error {
srcPath := filepath.Join(src, entry.Name())
destPath := filepath.Join(dest, entry.Name())

- if entry.IsDir() {
+ // Handle symlinks (recreate them, do not follow)
+ if entry.Type()&os.ModeSymlink != 0 {
+ linkTarget, err := os.Readlink(srcPath)
+ if err != nil {
+ return err
+ }
+ if err := os.Symlink(linkTarget, destPath); err != nil {
+ return err
+ }
+ continue
+ }
+
+ if entry.IsDir() {
if err := copyDir(srcPath, destPath); err != nil {
return err
}
18
home/roles/development/gastown-fix-role-home-paths.patch
Normal file
@@ -0,0 +1,18 @@
diff --git a/internal/cmd/role.go b/internal/cmd/role.go
--- a/internal/cmd/role.go
+++ b/internal/cmd/role.go
@@ -326,11 +326,11 @@ func getRoleHome(role Role, rig, polecat, townRoot string) string {
if rig == "" || polecat == "" {
return ""
}
- return filepath.Join(townRoot, rig, "polecats", polecat, "rig")
+ return filepath.Join(townRoot, rig, "polecats", polecat)
case RoleCrew:
if rig == "" || polecat == "" {
return ""
}
- return filepath.Join(townRoot, rig, "crew", polecat, "rig")
+ return filepath.Join(townRoot, rig, "crew", polecat)
default:
return ""
}
18
home/roles/development/gastown-fix-town-root-detection.patch
Normal file
@@ -0,0 +1,18 @@
diff --git a/internal/cmd/prime.go b/internal/cmd/prime.go
--- a/internal/cmd/prime.go
+++ b/internal/cmd/prime.go
@@ -276,11 +276,12 @@ func detectRole(cwd, townRoot string) RoleInfo {

// Check for mayor role
// At town root, or in mayor/ or mayor/rig/
if relPath == "." || relPath == "" {
- ctx.Role = RoleMayor
- return ctx
+ return ctx // RoleUnknown - town root is shared space
}
+
+ // Check for mayor role: mayor/ or mayor/rig/
if len(parts) >= 1 && parts[0] == "mayor" {
ctx.Role = RoleMayor
return ctx
}
13
home/roles/development/gastown-fix-validate-recipient.patch
Normal file
@@ -0,0 +1,13 @@
diff --git a/internal/mail/router.go b/internal/mail/router.go
index b864c069..4b6a045b 100644
--- a/internal/mail/router.go
+++ b/internal/mail/router.go
@@ -646,7 +646,7 @@ func (r *Router) validateRecipient(identity string) error {
}

for _, agent := range agents {
- if agentBeadToAddress(agent) == identity {
+ if AddressToIdentity(agentBeadToAddress(agent)) == AddressToIdentity(identity) {
return nil // Found matching agent
}
}
135
home/roles/development/gastown-statusline-optimization.patch
Normal file
@@ -0,0 +1,135 @@
diff --git a/internal/cmd/statusline.go b/internal/cmd/statusline.go
index 2edf1be8..00253eea 100644
--- a/internal/cmd/statusline.go
+++ b/internal/cmd/statusline.go
@@ -6,6 +6,7 @@ import (
"path/filepath"
"sort"
"strings"
+ "time"

"github.com/spf13/cobra"
"github.com/steveyegge/gastown/internal/beads"
@@ -14,6 +15,37 @@ import (
"github.com/steveyegge/gastown/internal/tmux"
"github.com/steveyegge/gastown/internal/workspace"
)
+// statusLineCacheTTL is how long cached status output remains valid.
+const statusLineCacheTTL = 10 * time.Second
+
+// statusLineCachePath returns the cache file path for a session.
+func statusLineCachePath(session string) string {
+ return filepath.Join(os.TempDir(), fmt.Sprintf("gt-status-%s", session))
+}
+
+// getStatusLineCache returns cached status if fresh, empty string otherwise.
+func getStatusLineCache(session string) string {
+ path := statusLineCachePath(session)
+ info, err := os.Stat(path)
+ if err != nil {
+ return ""
+ }
+ if time.Since(info.ModTime()) > statusLineCacheTTL {
+ return ""
+ }
+ data, err := os.ReadFile(path)
+ if err != nil {
+ return ""
+ }
+ return string(data)
+}
+
+// setStatusLineCache writes status to cache file.
+func setStatusLineCache(session, status string) {
+ path := statusLineCachePath(session)
+ _ = os.WriteFile(path, []byte(status), 0644)
+}
+

var (
statusLineSession string
@@ -34,6 +66,19 @@ func init() {
func runStatusLine(cmd *cobra.Command, args []string) error {
t := tmux.NewTmux()

+ // Optimization: skip expensive beads queries for detached sessions
+ if statusLineSession != "" {
+ if !t.IsSessionAttached(statusLineSession) {
+ fmt.Print("○ |")
+ return nil
+ }
+ // Check cache for attached sessions too
+ if cached := getStatusLineCache(statusLineSession); cached != "" {
+ fmt.Print(cached)
+ return nil
+ }
+ }
+
// Get session environment
var rigName, polecat, crew, issue, role string

@@ -150,7 +195,11 @@ func runWorkerStatusLine(t *tmux.Tmux, session, rigName, polecat, crew, issue st

// Output
if len(parts) > 0 {
- fmt.Print(strings.Join(parts, " | ") + " |")
+ output := strings.Join(parts, " | ") + " |"
+ if statusLineSession != "" {
+ setStatusLineCache(statusLineSession, output)
+ }
+ fmt.Print(output)
}

return nil
@@ -389,7 +438,11 @@ func runMayorStatusLine(t *tmux.Tmux) error {
}
}

- fmt.Print(strings.Join(parts, " | ") + " |")
+ output := strings.Join(parts, " | ") + " |"
+ if statusLineSession != "" {
+ setStatusLineCache(statusLineSession, output)
+ }
+ fmt.Print(output)
return nil
}

@@ -458,7 +511,11 @@ func runDeaconStatusLine(t *tmux.Tmux) error {
}
}

- fmt.Print(strings.Join(parts, " | ") + " |")
+ output := strings.Join(parts, " | ") + " |"
+ if statusLineSession != "" {
+ setStatusLineCache(statusLineSession, output)
+ }
+ fmt.Print(output)
return nil
}

@@ -526,7 +583,11 @@ func runWitnessStatusLine(t *tmux.Tmux, rigName string) error {
}
}

- fmt.Print(strings.Join(parts, " | ") + " |")
+ output := strings.Join(parts, " | ") + " |"
+ if statusLineSession != "" {
+ setStatusLineCache(statusLineSession, output)
+ }
+ fmt.Print(output)
return nil
}

@@ -617,7 +678,11 @@ func runRefineryStatusLine(t *tmux.Tmux, rigName string) error {
}
}

- fmt.Print(strings.Join(parts, " | ") + " |")
+ output := strings.Join(parts, " | ") + " |"
+ if statusLineSession != "" {
+ setStatusLineCache(statusLineSession, output)
+ }
+ fmt.Print(output)
return nil
}
230
home/roles/development/skills/bd_workflow.md
Normal file
@@ -0,0 +1,230 @@
---
description: How to use the bd (beads) CLI for issue tracking, dependencies, and workflow orchestration
---

# BD Workflow

The `bd` CLI is a git-backed issue tracker with first-class dependency support. Use it for multi-session work, blocking relationships, and persistent memory across conversation compaction.

## When to Use BD vs TodoWrite

| Use BD | Use TodoWrite |
|--------|---------------|
| Work spans multiple sessions | Single-session tasks |
| Dependencies between tasks | Independent subtasks |
| Need audit trail in git | Ephemeral tracking |
| Cross-repo coordination | Local project only |
| Resuming after compaction | Simple task lists |

## Core Commands

### Creating Issues

```bash
bd create "Issue title"                         # Basic task
bd create "Bug title" --type=bug --priority=1   # P1 bug
bd create "Feature" --type=feature -d "Details" # With description
bd q "Quick capture"                            # Output only ID
```

### Managing Issues

```bash
bd show <id>            # View issue details
bd show <id> --children # View children of epic
bd list                 # List open issues (default 50)
bd list --all           # Include closed
bd list -s in_progress  # Filter by status
bd list -t bug -p 0     # P0 bugs
bd list --pretty        # Tree format
```

### Updating Issues

```bash
bd update <id> --status=in_progress # Start work
bd update <id> --status=blocked     # Mark blocked
bd update <id> --claim              # Claim atomically
bd update <id> --add-label=urgent   # Add label
bd update <id> -d "New description" # Update description
```

### Closing Issues

```bash
bd close <id>                # Close issue
bd close <id> --continue     # Auto-advance to next step
bd close <id> --suggest-next # Show newly unblocked
```

## Finding Work

```bash
bd ready                 # Ready issues (no blockers)
bd ready --mol <mol-id>  # Ready steps in molecule
bd ready -n 5            # Limit to 5
bd ready --assignee me   # Assigned to me
bd blocked               # Show blocked issues
bd blocked --parent <id> # Blocked within epic
```

## Dependency Management

### Creating Dependencies

```bash
bd dep <blocker> --blocks <blocked> # A blocks B
bd dep add <blocked> <blocker>      # Same as above
bd dep relate <id1> <id2>           # Bidirectional link
```

### Viewing Dependencies

```bash
bd dep list <id> # Show dependencies
bd dep tree <id> # Dependency tree
bd dep cycles    # Detect cycles
```

### Removing Dependencies

```bash
bd dep remove <blocked> <blocker> # Remove dependency
bd dep unrelate <id1> <id2>       # Remove relation
```

## Sync Workflow

BD syncs issues via git. The daemon handles this automatically, but manual sync is available:

```bash
bd sync               # Full sync (pull, merge, push)
bd sync --flush-only  # Export to JSONL only
bd sync --import-only # Import from JSONL only
bd sync --status      # Show sync branch diff
bd sync --squash      # Accumulate without commit
```

## Formula and Molecule Workflow

Formulas are reusable workflow templates. Molecules are instantiated workflows.

### Formulas

```bash
bd formula list                   # List available formulas
bd formula list --type=workflow   # Filter by type
bd formula show <name>            # Show formula details
bd cook <formula>                 # Compile to proto (stdout)
bd cook <formula> --var name=auth # With variable substitution
bd cook <formula> --dry-run       # Preview steps
bd cook <formula> --persist       # Save to database
```

### Molecules: Pour vs Wisp

| pour (persistent) | wisp (ephemeral) |
|-------------------|------------------|
| Feature implementations | Release workflows |
| Multi-session work | Patrol cycles |
| Audit trail needed | Health checks |
| Git-synced | Local only |

```bash
# Persistent molecule (liquid phase)
bd mol pour <proto> --var name=auth

# Ephemeral molecule (vapor phase)
bd mol wisp <proto> --var version=1.0
bd mol wisp list # List wisps
bd mol wisp gc   # Garbage collect
```

### Tracking Molecule Progress

```bash
bd mol show <mol-id>            # Show structure
bd mol show <mol-id> --parallel # Parallelizable steps
bd mol current                  # Where am I?
bd mol current <mol-id>         # Status for molecule
bd mol progress <mol-id>        # Progress summary + ETA
```

### Molecule Lifecycle

```bash
bd mol squash <mol-id>   # Condense to digest
bd mol burn <mol-id>     # Delete wisp
bd mol distill <epic-id> # Extract formula from epic
```

## Gates and Human Checkpoints

Gates are async wait conditions that block workflow steps:

| Gate Type | Wait Condition |
|-----------|---------------|
| human | Manual `bd close` |
| timer | Timeout expires |
| gh:run | GitHub workflow completes |
| gh:pr | PR merges |
| bead | Cross-rig bead closes |

```bash
bd gate list              # Show open gates
bd gate list --all        # Include closed
bd gate check             # Evaluate all gates
bd gate check --type=bead # Check bead gates only
bd gate resolve <id>      # Close manually
```

## Common Patterns

### Starting Work on a Bead

```bash
bd update <id> --status=in_progress
# ... do work ...
bd close <id>
```

### Creating Related Issues

```bash
bd create "Main task" --deps "blocks:<other-id>"
bd dep add <new-id> <blocker-id>
```

### Working Through a Molecule

```bash
bd mol pour my-workflow --var name=feature
bd ready --mol <mol-id>       # Find next step
bd update <step-id> --claim   # Claim step
# ... do work ...
bd close <step-id> --continue # Close and advance
```

### Quick Status Check

```bash
bd ready -n 3          # Top 3 ready items
bd list -s in_progress # What's in flight?
bd blocked             # What's stuck?
```

## Useful Flags

| Flag | Effect |
|------|--------|
| `--json` | JSON output for scripting |
| `--quiet` | Suppress non-essential output |
| `--dry-run` | Preview without executing |
| `--pretty` | Tree format display |

## Integration Notes

- BD auto-syncs via daemon (check with `bd info`)
- Issues stored in `.beads/` directory
- JSONL files sync through git
- Use `bd doctor` if something seems wrong
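
A quick triage sequence when something looks wrong (a sketch built from the commands above; output details vary by bd version):

```bash
bd info           # daemon status and which database is in use
bd sync --status  # what differs between local state and the sync branch
bd doctor         # diagnose and suggest fixes for common problems
```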
123
home/roles/development/skills/micro/artifact-format.md
Normal file
@@ -0,0 +1,123 @@
---
description: How to structure research and plan artifacts in thoughts/
---

# Artifact Format

Standardized format for thoughts/ artifacts. All beads-related artifacts should follow these conventions for consistency and machine parseability.

## Frontmatter (Required)

Every artifact MUST include YAML frontmatter:

```yaml
---
date: 2026-01-15T10:00:00-08:00 # ISO 8601 with timezone
bead_id: project-abc            # Bead identifier
bead_title: "Title of the bead" # Human-readable title
author: claude                  # Who created this
git_commit: abc123def           # Commit hash at creation
branch: main                    # Branch name
repository: repo-name           # Repository name
status: draft|complete          # Artifact status
---
```

### Gathering Metadata

```bash
git rev-parse HEAD                        # Current commit
git branch --show-current                 # Current branch
basename $(git rev-parse --show-toplevel) # Repo name
date -Iseconds                            # ISO timestamp
```
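
A minimal sketch stitching these commands into a frontmatter block (the `frontmatter.sh` name and its two arguments are hypothetical, not existing tooling):

```bash
#!/usr/bin/env bash
# Usage: frontmatter.sh <bead-id> <bead-title>   (hypothetical helper)
set -euo pipefail
cat <<EOF
---
date: $(date -Iseconds)
bead_id: $1
bead_title: "$2"
author: claude
git_commit: $(git rev-parse HEAD)
branch: $(git branch --show-current)
repository: $(basename "$(git rev-parse --show-toplevel)")
status: draft
---
EOF
```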

## Research Artifact Structure

Location: `thoughts/beads-{bead-id}/research.md`

```markdown
# Research: {bead title}

**Bead**: {bead-id}
**Date**: {timestamp}
**Git Commit**: {commit hash}

## Research Question
{Original question from bead description}

## Summary
{2-3 sentence overview answering the research question}

## Key Discoveries
- {Finding with file:line reference}
- {Pattern or convention found}
- {Architectural decision documented}

## Architecture
{Current patterns and conventions in the codebase}

## Code References
- `path/to/file.py:123` - Description of relevance
- `another/file.ts:45-67` - Description of relevance

## Open Questions
{Areas needing further investigation or human clarification}
```

## Plan Artifact Structure

Location: `thoughts/beads-{bead-id}/plan.md`

```markdown
# {Title} Implementation Plan

## Overview
{What we're implementing and why - 1-2 sentences}

## Current State
{What exists now, key constraints discovered}

### Key Discoveries
- {Finding with file:line reference}
- {Pattern to follow}

## Desired End State
{Specification of what success looks like}

## What We're NOT Doing
{Explicitly list out-of-scope items}

## Phase 1: {Descriptive Name}

### Overview
{What this phase accomplishes}

### Changes
- [ ] {Specific change with file path}
- [ ] {Another change}

### Success Criteria

#### Automated
- [ ] Tests pass: `{test command}`
- [ ] Lint passes: `{lint command}`

#### Manual
- [ ] {Human verification step}

## Phase 2: {Descriptive Name}
{Repeat structure...}

## References
- Bead: {bead-id}
- Research: `thoughts/beads-{bead-id}/research.md`
```

## Key Principles

1. **Always include file:line references** - Makes artifacts actionable
2. **Separate automated vs manual verification** - Enables agent autonomy
3. **Use checkboxes for phases** - Enables resumability after interruption
4. **Keep frontmatter machine-parseable** - Enables tooling integration
5. **Link related artifacts** - Research links to plan, plan links to bead
121
home/roles/development/skills/micro/planning.md
Normal file
@@ -0,0 +1,121 @@
---
description: How to create effective implementation plans with phased delivery and clear success criteria
---

# Planning

Create implementation plans that enable incremental, verifiable progress.

## Core Principles

1. **Incremental delivery**: Each phase should produce working, testable changes
2. **Clear checkpoints**: Success criteria that can be verified without ambiguity
3. **Buy-in before detail**: Confirm understanding and approach before writing specifics
4. **Explicit scope**: State what we're NOT doing to prevent scope creep

## Plan Document Structure

```markdown
# {Feature} Implementation Plan

## Overview
{1-2 sentences: what we're building and why}

## Current State Analysis
{What exists now, key constraints, file:line references}

## Desired End State
{Specification of outcome and how to verify it}

## What We're NOT Doing
{Explicit out-of-scope items}

## Phase 1: {Descriptive Name}
### Overview
{What this phase accomplishes - should be independently valuable}

### Changes Required
{Specific files and modifications with code snippets}

### Success Criteria
#### Automated Verification
- [ ] Tests pass: `{test command}`
- [ ] Lint passes: `{lint command}`

#### Manual Verification
- [ ] {Human-observable outcome}

## Testing Strategy
{Unit tests, integration tests, manual testing steps}

## References
{Links to research, related files, similar implementations}
```

## Phase Design

Good phases are:
- **Self-contained**: Completable in one session
- **Testable**: Has clear pass/fail criteria
- **Reversible**: Can be rolled back if needed
- **Incremental**: Builds on previous phases without requiring all phases

Bad phases are:
- "Refactor everything" (too broad)
- "Add helper function" (too granular)
- Phases that only work if ALL phases complete

## Success Criteria Guidelines

**Automated Verification** (agent-runnable):
- Test commands: `make test`, `npm test`, `nix flake check`
- Lint/format: `make lint`, `cargo fmt --check`
- Type checking: `make typecheck`, `tsc --noEmit`
- Build verification: `make build`, `nix build`

**Manual Verification** (requires human):
- UI/UX functionality and appearance
- Performance under realistic conditions
- Edge cases hard to automate
- Integration with external systems

**From Contribution Guidelines** (if CONTRIBUTING.md exists):
- Include any testing requirements specified
- Reference the guideline: "Per CONTRIBUTING.md: {requirement}"

## Presenting Understanding

Before writing the plan, confirm alignment:

```
Based on the requirements and my research, I understand we need to [summary].

I've found that:
- [Current implementation detail with file:line]
- [Relevant pattern or constraint]
- [Potential complexity identified]

Questions my research couldn't answer:
- [Specific technical question requiring judgment]
```

Only ask questions you genuinely cannot answer through code investigation.

## Design Options Pattern

When multiple approaches exist:

```
**Design Options:**
1. [Option A] - [1-sentence description]
   - Pro: [benefit]
   - Con: [drawback]

2. [Option B] - [1-sentence description]
   - Pro: [benefit]
   - Con: [drawback]

Which approach aligns best with [relevant consideration]?
```

Get buy-in on approach before detailing phases.
68
home/roles/development/skills/micro/pr-description.md
Normal file
@@ -0,0 +1,68 @@
---
description: How to write comprehensive PR descriptions that help reviewers understand changes
---

# PR Description

Write PR descriptions that help reviewers understand what changed and why.

## Structure

Use this standard structure for PR descriptions:

```markdown
## Summary
<1-3 bullet points of what changed and why>

## Context
<Why this change was needed - the problem being solved>
<Link to related issues/tickets>

## Changes
<Detailed breakdown by area/component>
- Area 1: What changed and why
- Area 2: What changed and why

## Testing
<How this was verified>
- Automated: Tests added/updated, CI status
- Manual: Steps to verify functionality

## Screenshots (if UI changes)
<Before/after screenshots if applicable>
```

## Guidelines

### Lead with WHY, not WHAT
- The diff shows WHAT changed - your description explains WHY
- Start with the problem being solved
- Explain the approach chosen and alternatives considered

### Link to context
- Reference related issues: `Fixes #123` or `Relates to #456`
- Link to design docs or discussions
- Mention dependent PRs if any

### Call out review areas
- Highlight areas needing careful review
- Note any tricky or non-obvious code
- Point out architectural decisions

### Note breaking changes prominently
- Use a dedicated "Breaking Changes" section if applicable
- Explain migration path for consumers
- List any deprecations

### Be scannable
- Use bullet points over paragraphs
- Keep sections focused and concise
- Put the most important info first

## Anti-patterns to Avoid

- Empty descriptions or just "fixes bug"
- Repeating the commit messages verbatim
- Including irrelevant implementation details
- Missing context on why the change was made
- Forgetting to mention breaking changes
49
home/roles/development/skills/micro/research-agents.md
Normal file
@@ -0,0 +1,49 @@
---
description: How to spawn and coordinate research sub-agents
---

# Research Agents

Use parallel sub-agents for efficient codebase research.

## Available Agents

| Agent | Purpose |
|-------|---------|
| codebase-locator | Find WHERE files and components live |
| codebase-analyzer | Understand HOW specific code works |
| codebase-pattern-finder | Find examples of existing patterns |
| thoughts-locator | Discover relevant documents in thoughts/ |

## Spawning Protocol

1. **Decompose** - Break the research question into 3-5 specific questions
2. **Spawn parallel** - Use one Task call with multiple agents
3. **Be specific** - Include directories and file patterns in prompts
4. **Wait for all** - Do not synthesize until ALL agents complete
5. **Synthesize** - Combine findings into coherent summary with file:line references

## Example

```
Task(codebase-locator, "Find all files related to authentication in src/")
Task(codebase-analyzer, "Explain how JWT tokens are validated in src/auth/")
Task(codebase-pattern-finder, "Find examples of middleware patterns in src/")
Task(thoughts-locator, "Find documents about auth design decisions in thoughts/")
```

## Key Principles

- **Parallel when different** - Run agents in parallel when searching for different things
- **WHAT not HOW** - Each agent knows its job; tell it what you need, not how to search
- **Document, don't evaluate** - Agents should describe what exists, not critique it
- **Specific directories** - Always scope searches to relevant directories
- **File references** - Include specific file:line references in synthesis

## Agent Prompts

When spawning agents, include:
- The specific question or goal
- Relevant directories to search
- Reminder to document (not evaluate) what they find
- Request for file:line references in findings
@@ -53,6 +53,22 @@
;; change `org-directory'. It must be set before org loads!
(setq org-directory "~/org/")
(after! org
  ;; Skip recurring events past their CALDAV_UNTIL date
  ;; org-caldav ignores UNTIL from RRULE, so we store it as a property
  ;; and filter here in the agenda
  (defun my/skip-if-past-until ()
    "Return non-nil if entry has CALDAV_UNTIL and current date is past it."
    (let ((until-str (org-entry-get nil "CALDAV_UNTIL")))
      (when (and until-str
                 (string-match "^\\([0-9]\\{4\\}\\)\\([0-9]\\{2\\}\\)\\([0-9]\\{2\\}\\)" until-str))
        (let* ((until-year (string-to-number (match-string 1 until-str)))
               (until-month (string-to-number (match-string 2 until-str)))
               (until-day (string-to-number (match-string 3 until-str)))
               (until-time (encode-time 0 0 0 until-day until-month until-year))
               (today (current-time)))
          (when (time-less-p until-time today)
            (org-end-of-subtree t))))))

  (setq org-agenda-span 'week
        org-agenda-start-with-log-mode t
        my-agenda-dirs '("projects" "roam")
@@ -61,6 +77,7 @@
        "\.org$"))
        my-agenda-dirs))
        org-log-done 'time
        org-agenda-skip-function-global #'my/skip-if-past-until
        org-agenda-custom-commands '(("n" "Agenda"
                                      ((agenda "")
                                       (tags-todo "-someday-recurring")))
@@ -83,25 +100,135 @@
  "d" #'org-agenda-day-view
  "w" #'org-agenda-week-view))

;; (use-package! org-caldav
;;   :defer t
;;   :config
;;   (setq org-caldav-url "https://nextcloud.johnogle.info/remote.php/dav/calendars/johno"
;;         org-caldav-calendar-id "personal"
;;         org-icalendar-timezone "America/Los_Angeles"
;;         org-caldav-inbox "~/org/calendar.org"
;;         org-caldav-files nil
;;         org-caldav-sync-direction 'cal->org))
;; org-caldav: Sync Org entries with Nextcloud CalDAV
;; Setup requirements:
;; 1. Create Nextcloud app password: Settings -> Security -> Devices & sessions
;; 2. Store in rbw: rbw add nextcloud-caldav (put app password as the secret)
;; 3. Run: doom sync
;; 4. Test: M-x my/org-caldav-sync-with-rbw (or SPC o a s)
;;
;; Note: Conflict resolution is "Org always wins" - treat Org as source of truth
;; for entries that originated in Org.

(defun my/get-rbw-password (alias)
  "Return the password for ALIAS via rbw, unlocking the vault only if needed."
  (let* ((cmd (format "rbw get %s 2>&1" alias))
         (output (shell-command-to-string cmd)))
    (string-trim output)))
;; Define sync wrapper before use-package (so keybinding works)
(defun my/org-caldav-sync-with-rbw ()
  "Run org-caldav-sync with credentials from rbw embedded in URL."
  (interactive)
  (require 'org)
  (require 'org-caldav)
  (let* ((password (my/get-rbw-password "nextcloud-caldav"))
         ;; Embed credentials in URL (url-encode password in case of special chars)
         (encoded-pass (url-hexify-string password)))
    (setq org-caldav-url
          (format "https://johno:%s@nextcloud.johnogle.info/remote.php/dav/calendars/johno"
                  encoded-pass))
    (org-caldav-sync)))

(use-package! org-caldav
  :after org
  :commands (org-caldav-sync my/org-caldav-sync-with-rbw)
  :init
  (map! :leader
        (:prefix ("o" . "open")
         (:prefix ("a" . "agenda/calendar")
          :desc "Sync CalDAV" "s" #'my/org-caldav-sync-with-rbw)))
  :config
  ;; Nextcloud CalDAV base URL (credentials added dynamically by sync wrapper)
  (setq org-caldav-url "https://nextcloud.johnogle.info/remote.php/dav/calendars/johno")

  ;; Timezone for iCalendar export
  (setq org-icalendar-timezone "America/Los_Angeles")

  ;; Sync state storage (in org directory for multi-machine sync)
  (setq org-caldav-save-directory (expand-file-name ".org-caldav/" org-directory))

  ;; Backup file for entries before modification
  (setq org-caldav-backup-file (expand-file-name ".org-caldav/backup.org" org-directory))

  ;; Limit past events to 30 days (avoids uploading years of scheduled tasks)
  (setq org-caldav-days-in-past 30)

  ;; Sync behavior: bidirectional by default
  (setq org-caldav-sync-direction 'twoway)

  ;; What changes from calendar sync back to Org (conservative: title and timestamp only)
  (setq org-caldav-sync-changes-to-org 'title-and-timestamp)

  ;; Deletion handling: never auto-delete to prevent accidental mass deletion
  (setq org-caldav-delete-calendar-entries 'never)
  (setq org-caldav-delete-org-entries 'never)

  ;; Enable TODO/VTODO sync
  (setq org-icalendar-include-todo 'all)
  (setq org-caldav-sync-todo t)

  ;; Map VTODO percent-complete to org-todo-keywords
  ;; Format: (PERCENT "KEYWORD") - percent thresholds map to states
  (setq org-caldav-todo-percent-states
        '((0 "TODO")
          (25 "WAIT")
          (50 "IN-PROGRESS")
          (100 "DONE")
          (100 "KILL")))

  ;; Allow export with broken links (mu4e links can't be resolved during export)
  (setq org-export-with-broken-links 'mark)

  ;; Calendar-specific configuration
  (setq org-caldav-calendars
        '(;; Personal calendar: two-way sync with family-shared Nextcloud calendar
          (:calendar-id "personal"
           :inbox "~/org/personal-calendar.org"
           :files ("~/org/personal-calendar.org"))

          ;; Tasks calendar: one-way sync (org → calendar only)
          ;; SCHEDULED/DEADLINE items from todo.org push to private Tasks calendar.
          ;; No inbox = no download from calendar (effectively one-way).
          ;; Note: Create 'tasks' calendar in Nextcloud first, keep it private.
          (:calendar-id "tasks"
           :files ("~/org/todo.org"))))

  ;; Handle UNTIL in recurring events
  ;; org-caldav ignores UNTIL from RRULE - events repeat forever.
  ;; This advice extracts UNTIL and stores it as a property for agenda filtering.
  (defun my/org-caldav-add-until-property (orig-fun eventdata-alist)
    "Advice to store CALDAV_UNTIL property for recurring events."
    (let ((result (funcall orig-fun eventdata-alist)))
      (let* ((rrule-props (alist-get 'rrule-props eventdata-alist))
             (until-str (cadr (assoc 'UNTIL rrule-props)))
             (summary (alist-get 'summary eventdata-alist)))
        ;; Debug: log what we're seeing
        (message "CALDAV-DEBUG: %s | rrule-props: %S | until: %s"
                 (or summary "?") rrule-props until-str)
        (when until-str
          (save-excursion
            (org-back-to-heading t)
            (org-entry-put nil "CALDAV_UNTIL" until-str))))
      result))

  (advice-add 'org-caldav-insert-org-event-or-todo
              :around #'my/org-caldav-add-until-property)
  )

(defun my/get-rbw-password (alias &optional no-error)
  "Return the password for ALIAS via rbw, unlocking the vault only if needed.
If NO-ERROR is non-nil, return nil instead of signaling an error when
rbw is unavailable or the entry is not found."
  (if (not (executable-find "rbw"))
      (if no-error
          nil
        (user-error "rbw: not installed or not in PATH"))
    (let* ((cmd (format "rbw get %s 2>/dev/null" (shell-quote-argument alias)))
           (output (string-trim (shell-command-to-string cmd))))
      (if (string-empty-p output)
          (if no-error
              nil
            (user-error "rbw: no entry found for '%s' - run: rbw add %s" alias alias))
        output))))

(after! gptel
  :config
  (setq! gptel-api-key (my/get-rbw-password "openai-api-key-chatgpt-el")
  (setq! gptel-api-key (my/get-rbw-password "openai-api-key-chatgpt-el" t)
         gptel-default-mode 'org-mode
         gptel-use-tools t
         gptel-confirm-tool-calls 'always
@@ -49,7 +49,7 @@
;; ...Or *all* packages (NOT RECOMMENDED; will likely break things)
;; (unpin! t)

;; (package! org-caldav)
(package! org-caldav)

;; Note: Packages with custom recipes must be pinned for nix-doom-emacs-unstraightened
;; to build deterministically. Update pins when upgrading packages.
@@ -12,9 +12,7 @@ in
config = mkIf cfg.enable {
home.packages = with pkgs; [
# Gaming applications would go here
# This role is created for future expansion
# moonlight-qt is currently in media role but could be moved here
custom.mcrcon-rbw
];
};
}
@@ -90,6 +90,8 @@ with lib;
htop
tmux
zfs
rclone
custom.rclone-torbox-setup # Helper script to set up TorBox credentials via rbw
];

# Enable SSH
@@ -126,6 +128,26 @@ with lib;

roles.virtualisation.enable = true;

# TorBox WebDAV mount for rdt-client and Jellyfin
roles.rclone-mount = {
enable = true;
mounts.torbox = {
webdavUrl = "https://webdav.torbox.app";
username = "john@ogle.fyi"; # TorBox account email
mountPoint = "/media/media/torbox-rclone";
environmentFile = "/etc/rclone/torbox.env";
vfsCacheMode = "full"; # Best for streaming media
dirCacheTime = "5m";
extraArgs = [
"--buffer-size=64M"
"--vfs-read-chunk-size=32M"
"--vfs-read-chunk-size-limit=off"
];
# Wait for ZFS media pool to be mounted before starting
requiresMountsFor = [ "/media" ];
};
};

# Time zone
time.timeZone = "America/Los_Angeles"; # Adjust as needed
@@ -23,12 +23,12 @@
printing.enable = true;
remote-build.builders = [
{
hostName = "zix790prors";
hostName = "zix790prors.oglehome";
maxJobs = 16;
speedFactor = 3;
}
{
hostName = "john-endesktop";
hostName = "john-endesktop.oglehome";
maxJobs = 1;
speedFactor = 1;
}
@@ -19,11 +19,18 @@
desktopSession = "plasma";
};
};
remote-build.builders = [{
hostName = "zix790prors";
maxJobs = 16;
speedFactor = 4; # Prefer remote heavily on Steam Deck
}];
remote-build.builders = [
{
hostName = "zix790prors.oglehome";
maxJobs = 16;
speedFactor = 4;
}
{
hostName = "john-endesktop.oglehome";
maxJobs = 1;
speedFactor = 2;
}
];
users = {
enable = true;
extraGroups = [ "video" ];
@@ -1,28 +1,29 @@
{ lib
, stdenv
, fetchurl
, autoPatchelfHook
, patchelf
, glibc
}:

let
version = "2.0.76";
version = "2.1.30";

srcs = {
aarch64-darwin = {
url = "https://storage.googleapis.com/claude-code-dist-86c565f3-f756-42ad-8dfa-d59b1c096819/claude-code-releases/${version}/darwin-arm64/claude";
sha256 = "b76f6d4d09233e67295897b0a1ed2e22d7afa406431529d8b1b532b63b8cbcbd";
sha256 = "3ccc14f322b1e8da0cd58afc254fd5100eee066fa14729f30745e67a3f7979f7";
};
x86_64-darwin = {
url = "https://storage.googleapis.com/claude-code-dist-86c565f3-f756-42ad-8dfa-d59b1c096819/claude-code-releases/${version}/darwin-x64/claude";
sha256 = "9d94582f0af5d2201f1c907bf24ff8d216104b897ee0b24795a6c081f40e08d7";
sha256 = "8a083696006483b8382ec0e47cd8f2e3223f3d2cab1a21c524fa08c082b5600e";
};
x86_64-linux = {
url = "https://storage.googleapis.com/claude-code-dist-86c565f3-f756-42ad-8dfa-d59b1c096819/claude-code-releases/${version}/linux-x64/claude";
sha256 = "5dcdb480f91ba0df0bc8bd6aff148d3dfd3883f0899eeb5b9427a8b0abe7a687";
sha256 = "ada8f1cf9272965d38b10f1adb6cea885e621c83f7e7bb233008c721f43fad54";
};
aarch64-linux = {
url = "https://storage.googleapis.com/claude-code-dist-86c565f3-f756-42ad-8dfa-d59b1c096819/claude-code-releases/${version}/linux-arm64/claude";
sha256 = "f64a994c8e5bfb84d7242cebbec75d6919db2ee46d50b8fc7a88d5066db193f9";
sha256 = "45fbf35a1011b06f86170b20beb64c599db0658aac70e2de2410c45d15775596";
};
};

@@ -38,8 +39,14 @@ in stdenv.mkDerivation {

dontUnpack = true;
dontBuild = true;
# Bun standalone binaries have JS code appended after the ELF sections
# stripping/patching would remove or corrupt this appended data
dontStrip = true;
dontPatchELF = true;

nativeBuildInputs = lib.optionals stdenv.isLinux [ autoPatchelfHook ];
# Don't use autoPatchelfHook - it rewrites the ELF and strips the appended
# bun bundle (the JS code is appended after the ELF sections)
nativeBuildInputs = lib.optionals stdenv.isLinux [ patchelf ];

installPhase = ''
runHook preInstall
@@ -49,6 +56,14 @@ in stdenv.mkDerivation {
runHook postInstall
'';

# Manually patch the interpreter for bun standalone binaries
# patchelf --set-interpreter modifies in-place without rewriting the entire ELF,
# preserving the appended JS bundle that bun needs at runtime
postFixup = lib.optionalString stdenv.isLinux ''
interpreter="${glibc}/lib/${if stdenv.hostPlatform.system == "aarch64-linux" then "ld-linux-aarch64.so.1" else "ld-linux-x86-64.so.2"}"
patchelf --set-interpreter "$interpreter" $out/bin/claude
'';

meta = with lib; {
description = "Terminal-based AI coding assistant from Anthropic";
homepage = "https://www.anthropic.com/claude-code";
@@ -3,4 +3,6 @@
tea-rbw = pkgs.callPackage ./tea-rbw {};
app-launcher-server = pkgs.callPackage ./app-launcher-server {};
claude-code = pkgs.callPackage ./claude-code {};
mcrcon-rbw = pkgs.callPackage ./mcrcon-rbw {};
rclone-torbox-setup = pkgs.callPackage ./rclone-torbox-setup {};
}

40
packages/mcrcon-rbw/default.nix
Normal file
@@ -0,0 +1,40 @@
{ pkgs, ... }:

pkgs.writeShellScriptBin "mcrcon" ''
set -euo pipefail

# Configuration - can be overridden with environment variables
MINECRAFT_RCON_HOST="''${MCRCON_HOST:-10.0.0.165}"
MINECRAFT_RCON_PORT="''${MCRCON_PORT:-25575}"
RBW_ENTRY="minecraft-rcon"

# Check if rbw is available
if ! command -v rbw &> /dev/null; then
echo "Error: rbw is not available. Please ensure rbw is installed and configured."
exit 1
fi

# Retrieve password from Bitwarden
if ! MCRCON_PASS=$(rbw get "$RBW_ENTRY" 2>/dev/null); then
echo "Error: Failed to retrieve RCON password from rbw entry '$RBW_ENTRY'"
echo "Please ensure the entry exists in Bitwarden and rbw is synced."
echo ""
echo "To create the entry:"
echo " 1. Add 'minecraft-rcon' to Bitwarden with the RCON password"
echo " 2. Run 'rbw sync' to refresh the local cache"
exit 1
fi

# Export for mcrcon
export MCRCON_HOST="$MINECRAFT_RCON_HOST"
export MCRCON_PORT="$MINECRAFT_RCON_PORT"
export MCRCON_PASS

# If no arguments provided, start interactive terminal mode
if [[ $# -eq 0 ]]; then
exec ${pkgs.mcrcon}/bin/mcrcon -t
fi

# Execute mcrcon with all provided arguments
exec ${pkgs.mcrcon}/bin/mcrcon "$@"
''
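
Example usage once the package is installed (a sketch; the host override is hypothetical, the default host is baked in above):

```bash
mcrcon "list"                  # one-shot RCON command against the default host
MCRCON_HOST=10.0.0.42 mcrcon   # interactive session (-t) against another host
```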
98
packages/rclone-torbox-setup/default.nix
Normal file
@@ -0,0 +1,98 @@
{ pkgs, ... }:

pkgs.writeShellScriptBin "rclone-torbox-setup" ''
  set -euo pipefail

  # Default values
  RBW_ENTRY="''${1:-torbox}"
  ENV_FILE="''${2:-/etc/rclone/torbox.env}"

  usage() {
    echo "Usage: rclone-torbox-setup [rbw-entry] [env-file]"
    echo ""
    echo "Sets up rclone credentials for the TorBox WebDAV mount."
    echo "Retrieves the password from rbw (Bitwarden), obscures it for rclone,"
    echo "and writes it to the environment file used by the systemd service."
    echo ""
    echo "Arguments:"
    echo "  rbw-entry  Name of the Bitwarden entry containing the password (default: torbox)"
    echo "  env-file   Path to write the environment file (default: /etc/rclone/torbox.env)"
    echo ""
    echo "The Bitwarden entry should contain your TorBox password in its password field."
    echo ""
    echo "Example:"
    echo "  rclone-torbox-setup torbox-password /etc/rclone/torbox.env"
    exit 1
  }

  if [[ "''${1:-}" == "-h" ]] || [[ "''${1:-}" == "--help" ]]; then
    usage
  fi

  echo "rclone TorBox credential setup"
  echo "=============================="
  echo ""

  # Check that rbw is available
  if ! command -v rbw &> /dev/null; then
    echo "Error: rbw is not available. Please ensure rbw is installed and configured."
    exit 1
  fi

  # Check that rclone is available
  if ! command -v rclone &> /dev/null; then
    echo "Error: rclone is not available. Please ensure rclone is installed."
    exit 1
  fi

  echo "Retrieving password from rbw entry: $RBW_ENTRY"

  # Retrieve the password from Bitwarden
  if ! TORBOX_PASS=$(rbw get "$RBW_ENTRY" 2>/dev/null); then
    echo ""
    echo "Error: Failed to retrieve password from rbw entry '$RBW_ENTRY'"
    echo ""
    echo "Please ensure:"
    echo "  1. The entry '$RBW_ENTRY' exists in Bitwarden"
    echo "  2. rbw is unlocked: rbw unlock"
    echo "  3. rbw is synced: rbw sync"
    echo ""
    echo "To create the entry in Bitwarden:"
    echo "  - Name: $RBW_ENTRY"
    echo "  - Password: Your TorBox password"
    exit 1
  fi

  echo "Password retrieved successfully"

  # Obscure the password for rclone
  echo "Obscuring password for rclone..."
  if ! OBSCURED_PASS=$(echo -n "$TORBOX_PASS" | rclone obscure -); then
    echo "Error: Failed to obscure password with rclone"
    exit 1
  fi

  # Create the directory if needed (requires sudo)
  ENV_DIR=$(dirname "$ENV_FILE")
  if [[ ! -d "$ENV_DIR" ]]; then
    echo "Creating directory $ENV_DIR (requires sudo)..."
    sudo mkdir -p "$ENV_DIR"
  fi

  # Write the environment file
  echo "Writing environment file to $ENV_FILE (requires sudo)..."
  echo "RCLONE_WEBDAV_PASS=$OBSCURED_PASS" | sudo tee "$ENV_FILE" > /dev/null
  sudo chmod 600 "$ENV_FILE"

  echo ""
  echo "Setup complete!"
  echo ""
  echo "The environment file has been created at: $ENV_FILE"
  echo "The rclone-mount-torbox systemd service will use this file."
  echo ""
  echo "To activate the mount after a NixOS rebuild:"
  echo "  sudo systemctl start rclone-mount-torbox"
  echo ""
  echo "To check status:"
  echo "  sudo systemctl status rclone-mount-torbox"
''
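For reference, a typical run of the helper above using its built-in defaults might look like this (a sketch; it assumes rbw is already configured for the vault that holds the TorBox entry):

    # Unlock and sync the local Bitwarden cache, then run the setup with
    # the default entry name ('torbox') and default env file path:
    rbw unlock && rbw sync
    rclone-torbox-setup torbox /etc/rclone/torbox.env

    # The resulting file holds one line: RCLONE_WEBDAV_PASS=<obscured_password>
    sudo cat /etc/rclone/torbox.env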
108
renovate.json
Normal file
@@ -0,0 +1,108 @@
{
  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
  "timezone": "America/Los_Angeles",
  "gitAuthor": "Renovate Bot <renovate@ogle.fyi>",
  "nix": {
    "enabled": true
  },
  "github-actions": {
    "managerFilePatterns": [
      "/.gitea/workflows/.+\\.ya?ml$/"
    ]
  },
  "lockFileMaintenance": {
    "enabled": true,
    "schedule": [
      "after 2pm and before 4pm on Saturday"
    ]
  },
  "dependencyDashboard": true,
  "dependencyDashboardAutoclose": false,
  "dependencyDashboardTitle": "NixOS Configs Dependency Dashboard",
  "packageRules": [
    {
      "description": "Group all GitHub Actions updates",
      "matchManagers": [
        "github-actions"
      ],
      "groupName": "github-actions"
    },
    {
      "description": "Group stable NixOS ecosystem inputs",
      "matchManagers": [
        "nix"
      ],
      "groupName": "nix-stable-ecosystem",
      "matchPackageNames": [
        "/^nixpkgs$/",
        "/^home-manager$/",
        "/^nix-darwin$/"
      ],
      "schedule": [
        "after 2pm and before 4pm on Saturday"
      ]
    },
    {
      "description": "Group unstable NixOS ecosystem inputs",
      "matchManagers": [
        "nix"
      ],
      "groupName": "nix-unstable-ecosystem",
      "matchPackageNames": [
        "/nixpkgs-unstable/",
        "/home-manager-unstable/"
      ],
      "schedule": [
        "after 2pm and before 4pm on Saturday"
      ]
    },
    {
      "description": "nixpkgs-qt updates on Saturday (staggered from main ecosystem)",
      "matchManagers": [
        "nix"
      ],
      "matchPackageNames": [
        "/nixpkgs-qt/"
      ],
      "schedule": [
        "after 4pm and before 6pm on Saturday"
      ]
    },
    {
      "description": "Ignore private Gitea inputs (handle separately)",
      "matchManagers": [
        "nix"
      ],
      "enabled": false,
      "matchPackageNames": [
        "/google-cookie-retrieval/"
      ]
    },
    {
      "description": "Gastown is under active development - check for updates daily",
      "matchManagers": [
        "nix"
      ],
      "matchPackageNames": [
        "/gastown/"
      ],
      "schedule": [
        "before 6am every day"
      ],
      "automerge": false
    },
    {
      "description": "Beads is under active development - check for updates daily",
      "matchManagers": [
        "nix"
      ],
      "matchPackageNames": [
        "/beads/"
      ],
      "schedule": [
        "before 6am every day"
      ],
      "automerge": false
    }
  ]
}
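Before merging edits to this file, the config can be checked with Renovate's own validator (a sketch; it assumes a Node.js toolchain with npx, and that the installed Renovate release accepts a file argument):

    # Download the validator on first use and check renovate.json
    npx --yes --package renovate -- renovate-config-validator renovate.json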
@@ -8,11 +8,12 @@
   environment.systemPackages = with pkgs; [
     git
     glances
-    ghostty.terminfo # So tmux works when SSH'ing from ghostty
     pciutils
     tree
     usbutils
     vim
+  ] ++ lib.optionals pkgs.stdenv.isLinux [
+    ghostty.terminfo # So tmux works when SSH'ing from ghostty
   ];
 
   nix = {
@@ -22,7 +23,13 @@
       max-jobs = "auto";
       trusted-users = [ "johno" ];
+      substituters = [
+        "https://nix-cache.johnogle.info"
+      ];
+      trusted-public-keys = [
+        "nix-cache.johnogle.info-1:IC5x8BxnrqkU9XqhMdDnZLtSg9Y3rBJVXhve5DJ92J0="
+      ];
+      fallback = true;
+      connect-timeout = 5;
     };
 
     gc = {
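Since fallback = true means a build silently falls back to building from source when the cache is unreachable, it is worth confirming the substituter responds from each host. A minimal check, assuming a reasonably recent Nix (on older releases the first subcommand is spelled nix store ping):

    # Confirm the cache answers and reports its store details
    nix store info --store https://nix-cache.johnogle.info

    # Watch a small build to see whether paths actually come from the cache
    nix build nixpkgs#hello --substituters https://nix-cache.johnogle.info -v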
@@ -14,6 +14,7 @@ with lib;
     ./nfs-mounts
     ./nvidia
     ./printing
+    ./rclone-mount
     ./remote-build
     ./spotifyd
     ./users
149
roles/rclone-mount/default.nix
Normal file
@@ -0,0 +1,149 @@
{ config, lib, pkgs, ... }:

with lib;

let
  cfg = config.roles.rclone-mount;

  # Generate a systemd service for a single mount
  mkMountService = name: mountCfg: {
    description = "rclone mount for ${name}";
    after = [ "network-online.target" ];
    wants = [ "network-online.target" ];
    wantedBy = [ "multi-user.target" ];

    # Wait for parent mount points (e.g., ZFS pools) to be available
    unitConfig = mkIf (mountCfg.requiresMountsFor != []) {
      RequiresMountsFor = mountCfg.requiresMountsFor;
    };

    serviceConfig = {
      Type = "notify";
      ExecStartPre = "${pkgs.coreutils}/bin/mkdir -p ${mountCfg.mountPoint}";
      ExecStart = concatStringsSep " " ([
        "${pkgs.rclone}/bin/rclone mount"
        ":webdav:${mountCfg.remotePath}"
        "${mountCfg.mountPoint}"
        "--webdav-url=${mountCfg.webdavUrl}"
        "--webdav-vendor=${mountCfg.webdavVendor}"
        "--webdav-user=${mountCfg.username}"
        "--allow-other"
        "--vfs-cache-mode=${mountCfg.vfsCacheMode}"
        "--dir-cache-time=${mountCfg.dirCacheTime}"
        "--poll-interval=${mountCfg.pollInterval}"
        "--log-level=${mountCfg.logLevel}"
      ] ++ mountCfg.extraArgs);
      ExecStop = "${pkgs.fuse}/bin/fusermount -uz ${mountCfg.mountPoint}";
      Restart = "on-failure";
      RestartSec = "10s";
      EnvironmentFile = mountCfg.environmentFile;
    };
  };
in
{
  options.roles.rclone-mount = {
    enable = mkEnableOption "rclone WebDAV mounts";

    mounts = mkOption {
      type = types.attrsOf (types.submodule {
        options = {
          webdavUrl = mkOption {
            type = types.str;
            description = "WebDAV server URL (e.g., https://webdav.torbox.app)";
          };

          webdavVendor = mkOption {
            type = types.enum [ "other" "nextcloud" "owncloud" "sharepoint" "sharepoint-ntlm" "fastmail" ];
            default = "other";
            description = "WebDAV server vendor for optimizations";
          };

          username = mkOption {
            type = types.str;
            description = "WebDAV username (often an email address)";
          };

          environmentFile = mkOption {
            type = types.path;
            description = ''
              Path to an environment file containing RCLONE_WEBDAV_PASS.
              The password should be obscured using: rclone obscure <password>
              File format: RCLONE_WEBDAV_PASS=<obscured_password>
            '';
          };

          mountPoint = mkOption {
            type = types.str;
            description = "Local mount point path";
          };

          remotePath = mkOption {
            type = types.str;
            default = "/";
            description = "Remote path on the WebDAV server to mount";
          };

          vfsCacheMode = mkOption {
            type = types.enum [ "off" "minimal" "writes" "full" ];
            default = "full";
            description = ''
              VFS cache mode. For streaming media, 'full' is recommended.
              - off: no caching (direct reads/writes)
              - minimal: cache open files only
              - writes: cache writes and open files
              - full: full caching of all files
            '';
          };

          dirCacheTime = mkOption {
            type = types.str;
            default = "5m";
            description = "Time to cache directory entries";
          };

          pollInterval = mkOption {
            type = types.str;
            default = "1m";
            description = "Poll interval for remote changes";
          };

          logLevel = mkOption {
            type = types.enum [ "DEBUG" "INFO" "NOTICE" "ERROR" ];
            default = "INFO";
            description = "rclone log level";
          };

          extraArgs = mkOption {
            type = types.listOf types.str;
            default = [];
            description = "Extra arguments to pass to rclone mount";
          };

          requiresMountsFor = mkOption {
            type = types.listOf types.str;
            default = [];
            description = ''
              List of mount points that must be available before this service starts.
              Use this when the mount point's parent is on a ZFS pool or another
              filesystem that may not be mounted at boot time.
              Example: [ "/media" ] to wait for the media ZFS pool to mount.
            '';
          };
        };
      });
      default = {};
      description = "Attribute set of rclone WebDAV mounts to configure";
    };
  };

  config = mkIf cfg.enable {
    # Ensure FUSE is available
    environment.systemPackages = [ pkgs.rclone pkgs.fuse ];
    programs.fuse.userAllowOther = true;

    # Create a systemd service for each mount
    systemd.services = mapAttrs' (name: mountCfg:
      nameValuePair "rclone-mount-${name}" (mkMountService name mountCfg)
    ) cfg.mounts;
  };
}
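Given the rclone-mount-${name} naming above, a mount declared as mounts.torbox ends up as the unit rclone-mount-torbox. A few management commands under that assumption (torbox is only an example attribute name, and the mount point below is hypothetical):

    # Inspect the generated unit and its logs
    systemctl status rclone-mount-torbox
    journalctl -u rclone-mount-torbox

    # Restart after rotating the password in the EnvironmentFile
    sudo systemctl restart rclone-mount-torbox

    # ExecStop already performs a lazy unmount; a stuck mount can be
    # cleared the same way by hand
    sudo fusermount -uz /mnt/torbox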
@@ -35,12 +35,12 @@
 #  a) Configure builders in configuration.nix:
 #     roles.remote-build.builders = [
 #       {
-#         hostName = "zix790prors";
+#         hostName = "zix790prors.oglehome";
 #         maxJobs = 16;     # Number of parallel build jobs
 #         speedFactor = 3;  # Higher = prefer this builder
 #       }
 #       {
-#         hostName = "john-endesktop";
+#         hostName = "john-endesktop.oglehome";
 #         maxJobs = 1;      # Conservative for busy machines
 #         speedFactor = 1;
 #       }
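Because remote builds are performed by root on the client, the fully qualified hostnames above are only useful if root's SSH setup can resolve and reach them. A quick check, using the first hostname from the comment (a sketch, not part of this change):

    # Verify root can reach a builder non-interactively
    sudo ssh zix790prors.oglehome -- echo ok

    # Ask Nix to talk to the remote store the way the build hook will
    sudo nix store info --store ssh://zix790prors.oglehome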