Compare commits


1 Commit

4a450a216b feat(emacs): Add prebuilt Doom option using nix-doom-emacs-unstraightened
Implement pre-built Doom Emacs packages for the live USB image, eliminating
the need to run `doom sync` after first boot.

Changes:
- Add nix-doom-emacs-unstraightened flake input
- Add homeModule to all three module sets (nixos, unstable, darwin)
- Add `prebuiltDoom` option to emacs role (default: false)
- Enable prebuiltDoom for live-usb configuration
- Pin custom packages in packages.el for deterministic builds:
  - claude-code-ide, gptel-tool-library, beads

When prebuiltDoom=true, all Doom packages are compiled at nix build time
using emacs-overlay. The doom configuration is stored in the nix store
(read-only), and no `doom sync` is required at runtime.

This is ideal for:
- Live USB images
- Immutable/reproducible systems
- Offline deployments

Closes: nixos-configs-1wd
Committed 2026-01-10 10:44:03 -08:00
20 changed files with 106 additions and 508 deletions
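The emacs role module that implements the new option does not appear in the diff below; only the live-usb configuration that enables it does. A minimal sketch of how the option is likely consumed, assuming the option path `home.roles.emacs.prebuiltDoom` (following the repo's `home.roles.*` pattern); the real module may differ:

```nix
# Hypothetical host snippet (not part of this commit's diff).
# Assumes the role exposes home.roles.emacs.prebuiltDoom as described
# in the commit message.
{
  home.roles.emacs = {
    enable = true;
    # Compile all Doom packages at nix build time via
    # nix-doom-emacs-unstraightened; the Doom config ends up read-only
    # in the nix store and no `doom sync` runs at runtime.
    prebuiltDoom = true;
  };
}
```

With the default `prebuiltDoom = false`, the role presumably keeps the existing `doom sync` workflow.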

.beads/sync_base.jsonl (Normal file, 0 lines changed)

.gitignore (vendored, 1 line changed)

@@ -1,3 +1,2 @@
result
thoughts
.beads

scripts/bootstrap.sh → bootstrap.sh (Normal file → Executable file, 4 lines changed)

@@ -1,7 +1,6 @@
#!/usr/bin/env bash
# bootstrap.sh
# Usage: nix run .#bootstrap -- <hostname>
# Or: sudo ./scripts/bootstrap.sh <hostname>
# Usage: sudo ./bootstrap.sh <hostname>
set -euo pipefail
NEW_HOSTNAME="${1:?missing hostname}"
@@ -9,3 +8,4 @@ FLAKE_URI="git+https://git.johnogle.info/johno/nixos-configs.git#${NEW_HOSTNAME}
export NIX_CONFIG="experimental-features = nix-command flakes"
nixos-rebuild switch --flake "$FLAKE_URI"

build-liveusb.sh (Executable file, 19 lines changed)

@@ -0,0 +1,19 @@
#!/usr/bin/env bash
# Build Live USB ISO from flake configuration
# Creates an uncompressed ISO suitable for Ventoy and other USB boot tools
set -e
echo "Building Live USB ISO..."
nix build .#nixosConfigurations.live-usb.config.system.build.isoImage --show-trace
if [ -f "./result/iso/"*.iso ]; then
iso_file=$(ls ./result/iso/*.iso)
echo "✅ Build complete!"
echo "📁 ISO location: $iso_file"
echo "💾 Ready for Ventoy or dd to USB"
else
echo "❌ Build failed - no ISO file found"
exit 1
fi

View File

@@ -230,11 +230,7 @@
system = "x86_64-linux";
modules = nixosModules ++ [
./machines/john-endesktop/configuration.nix
inputs.home-manager.nixosModules.home-manager
{
home-manager.users.johno = import ./home/home-server.nix;
home-manager.extraSpecialArgs = { inherit system; };
}
# Minimal server - no home-manager needed
];
};
@@ -275,16 +271,6 @@
export PATH="${pkgs.lib.makeBinPath commonDeps}:$PATH"
${builtins.readFile ./scripts/upgrade.sh}
'';
bootstrap = pkgs.writeShellScriptBin "bootstrap" ''
export PATH="${pkgs.lib.makeBinPath commonDeps}:$PATH"
${builtins.readFile ./scripts/bootstrap.sh}
'';
build-liveusb = pkgs.writeShellScriptBin "build-liveusb" ''
export PATH="${pkgs.lib.makeBinPath commonDeps}:$PATH"
${builtins.readFile ./scripts/build-liveusb.sh}
'';
in {
update-doomemacs = {
type = "app";
@@ -302,14 +288,6 @@
type = "app";
program = "${upgrade}/bin/upgrade";
};
bootstrap = {
type = "app";
program = "${bootstrap}/bin/bootstrap";
};
build-liveusb = {
type = "app";
program = "${build-liveusb}/bin/build-liveusb";
};
}
);
};

View File

@@ -23,7 +23,6 @@
kubectl.enable = true;
tmux.enable = true;
plasma-manager.enable = true;
starship.enable = true;
};
targets.genericLinux.enable = true;

View File

@@ -23,7 +23,6 @@
plasma-manager.enable = true;
emacs.enable = true;
i3_sway.enable = true;
starship.enable = true;
# Launcher wrappers for excluded/optional packages
launchers = {

View File

@@ -21,7 +21,6 @@
prebuiltDoom = true;
};
i3_sway.enable = true;
starship.enable = true;
# development.enable = false; # Not needed for live USB
# communication.enable = false; # Not needed for live USB
# office.enable = false; # Not needed for live USB

View File

@@ -20,7 +20,6 @@
plasma-manager.enable = true;
emacs.enable = true;
i3_sway.enable = true;
starship.enable = true;
# office.enable = false; # Not needed for media center
# sync.enable = false; # Shared machine, no personal file sync
};

View File

@@ -1,26 +0,0 @@
{ pkgs, globalInputs, system, ... }:
{
# Home Manager configuration for servers (minimal with development tools)
home.username = "johno";
home.homeDirectory = "/home/johno";
home.stateVersion = "24.05";
# Minimal roles for server with development capability
home.roles = {
base.enable = true;
development.enable = true;
emacs.enable = true;
starship.enable = true;
tmux.enable = true;
};
targets.genericLinux.enable = true;
home.sessionVariables = {};
home.sessionPath = [];
imports = [
./roles
./roles/base-linux
];
}

View File

@@ -19,6 +19,5 @@
./sync
./tmux
./emacs
./starship
];
}

View File

@@ -85,25 +85,11 @@ in
fi
done
# Copy local skills from this repo (with retry for race conditions with running Claude)
for file in ${./skills}/*.md; do
if [ -f "$file" ]; then
filename=$(basename "$file" .md)
dest="$HOME/.claude/commands/''${filename}.md"
# Remove existing file first, then copy with retry on failure
rm -f "$dest" 2>/dev/null || true
if ! cp "$file" "$dest" 2>/dev/null; then
sleep 0.5
cp "$file" "$dest" || echo "Warning: Failed to copy $filename.md to commands"
fi
fi
done
$DRY_RUN_CMD echo "Claude Code humanlayer commands and agents installed successfully${
if cfg.allowArbitraryClaudeCodeModelSelection
then " (model specifications preserved)"
else " (model selection removed)"
} + local skills"
}"
'';
# Set up beads Claude Code integration (hooks for SessionStart/PreCompact)

View File

@@ -1,205 +0,0 @@
---
description: Orchestrate parallel bead processing with worktrees, PRs, and reviews
---
# Parallel Beads Workflow
This skill orchestrates parallel bead processing using subagents. Each bead gets its own worktree, implementation, PR, and review.
## Phase 1: Selection
1. **Get ready beads**: Run `bd ready` to list all beads with no blockers
2. **Present selection**: Use `AskUserQuestion` with `multiSelect: true` to let the user choose which beads to work on
- Include bead ID and title for each option
- Allow selection of multiple beads
Example:
```
AskUserQuestion with:
- question: "Which beads do you want to work on in parallel?"
- multiSelect: true
- options from bd ready output
```
## Phase 2: Parallel Implementation
For each selected bead, launch a subagent using the Task tool. All subagents should be launched in parallel (single message with multiple Task tool calls).
### Subagent Instructions Template
Each implementation subagent should receive these instructions:
```
Work on bead [BEAD_ID]: [BEAD_TITLE]
1. **Create worktree**:
- Branch name: `bead/[BEAD_ID]`
- Worktree path: `~/wt/[REPO_NAME]/[BEAD_ID]`
- Command: `git worktree add -b bead/[BEAD_ID] ~/wt/[REPO_NAME]/[BEAD_ID]`
2. **Review the bead requirements**:
- Run `bd show [BEAD_ID]` to understand the acceptance criteria
- Note any external issue references (GitHub issues, Linear tickets, etc.)
3. **Implement the changes**:
- Work in the worktree directory
- Complete all acceptance criteria listed in the bead
- Run any relevant tests or checks
4. **Commit and push**:
- Stage all changes: `git add -A`
- Create a descriptive commit message
- Push the branch: `git push -u origin bead/[BEAD_ID]`
5. **Create a PR**:
- Detect hosting provider from origin URL: `git remote get-url origin`
- If URL contains `github.com`, use `gh`; otherwise use `tea` (Gitea/Forgejo)
- PR title: "[BEAD_ID] [BEAD_TITLE]"
- PR body must include:
- Reference to bead ID: "Implements bead: [BEAD_ID]"
- Any external issue references from the bead (e.g., "Closes #123")
- Summary of changes
- For GitHub (`gh`):
```bash
gh pr create --title "[BEAD_ID] [BEAD_TITLE]" --body "$(cat <<'EOF'
## Summary
[Brief description of changes]
## Bead Reference
Implements bead: [BEAD_ID]
## External Issues
[Any linked issues from the bead]
## Changes
- [List of changes made]
EOF
)"
```
- For Gitea (`tea`):
```bash
tea pr create --head bead/[BEAD_ID] --base main \
--title "[BEAD_ID] [BEAD_TITLE]" \
--description "## Summary
[Brief description of changes]
## Bead Reference
Implements bead: [BEAD_ID]
## External Issues
[Any linked issues from the bead]
## Changes
- [List of changes made]"
```
6. **Update bead status**:
- Mark the bead as "in_review": `bd update [BEAD_ID] --status=in_review`
- Add the PR URL to the bead notes: `bd update [BEAD_ID] --notes="$(bd show [BEAD_ID] --json | jq -r '.notes')
PR: [PR_URL]"`
7. **Report results**:
- Return: PR URL, bead ID, success/failure status
- If blocked or unable to complete, explain what's blocking progress
```
### Launching Subagents
Use `subagent_type: "general-purpose"` for implementation subagents. Launch all selected beads' subagents in a single message for parallel execution:
```
<Task calls for each selected bead - all in one message>
```
Collect results from all subagents before proceeding.
## Phase 3: Parallel Review
After all implementation subagents complete, launch review subagents for each PR.
### Review Subagent Instructions Template
```
Review PR for bead [BEAD_ID]
1. **Detect hosting provider**: Run `git remote get-url origin` - if it contains `github.com` use `gh`, otherwise use `tea`
2. **Read the PR**:
- For GitHub: `gh pr view [PR_NUMBER] --json title,body,additions,deletions,files`
- For Gitea: `tea pr view [PR_NUMBER]`
- View the diff: `git diff main...bead/[BEAD_ID]`
3. **Review against acceptance criteria**:
- Run `bd show [BEAD_ID]` to get the acceptance criteria
- Verify each criterion is addressed
4. **Leave review comments**:
- For GitHub: `gh pr review [PR_NUMBER] --comment --body "[COMMENTS]"`
- For Gitea: `tea pr review [PR_NUMBER] --comment "[COMMENTS]"`
- Include:
- Acceptance criteria checklist (which are met, which might be missing)
- Code quality observations
- Suggestions for improvement
5. **Return summary**:
- Overall assessment (ready to merge / needs changes)
- Key findings
```
Launch all review subagents in parallel.
## Phase 4: Cleanup and Summary
After reviews complete:
1. **Clean up worktrees**:
```bash
git worktree remove ~/wt/[REPO_NAME]/[BEAD_ID] --force
```
Do this for each bead's worktree.
2. **Provide final summary**:
Present a table or list with:
- Bead ID
- PR URL
- Status (success / failed / blocked)
- Review summary
- Any failures or blockers encountered
Example output:
```
## Parallel Beads Summary
| Bead | PR | Bead Status | Review |
|------|-----|-------------|--------|
| beads-abc | #123 | in_review | Approved |
| beads-xyz | #124 | in_review | Needs changes |
| beads-123 | - | open (failed) | Blocked by missing dependency |
### Failures/Blockers
- beads-123: Could not complete because [reason]
### Next Steps
- Review PRs that need changes
- Address blockers for failed beads
- Run `/reconcile_beads` after PRs are merged to close beads
```
## Error Handling
- **Subagent failures**: If a subagent fails or times out, note it in the summary but continue with other beads
- **PR creation failures**: Report the error but continue with reviews of successful PRs
- **Worktree conflicts**: If a worktree already exists, ask the user if they want to remove it or skip that bead
## Resource Limits
- Consider limiting concurrent subagents to 3-5 to avoid overwhelming system resources
- If user selects more beads than the limit, process them in batches
## Notes
- This workflow integrates with the beads system (`bd` commands)
- Worktrees are created in `~/wt/[REPO_NAME]/` by convention
- Each bead gets its own isolated branch and worktree
- PRs automatically reference the bead ID for traceability

View File

@@ -1,88 +0,0 @@
---
description: Reconcile beads with merged PRs and close completed beads
---
# Reconcile Beads Workflow
This skill reconciles beads that are in `in_review` status with their corresponding PRs. If a PR has been merged, the bead is closed.
## Prerequisites
- Custom status `in_review` must be configured: `bd config set status.custom "in_review"`
- Beads in `in_review` status should have a PR URL in their notes
## Workflow
### Step 1: Find beads in review
```bash
bd list --status=in_review
```
### Step 2: For each bead, check PR status
1. **Get the PR URL from bead notes**:
```bash
bd show [BEAD_ID] --json | jq -r '.[0].notes'
```
Note: `bd show --json` returns an array, so use `.[0]` to access the first element.
Extract the PR URL (look for lines starting with "PR:" or containing pull request URLs).
Extract the PR number: `echo "$NOTES" | grep -oP '/pulls/\K\d+'`
2. **Detect hosting provider**:
- Run `git remote get-url origin`
- If URL contains `github.com`, use `gh`; otherwise use `tea` (Gitea/Forgejo)
3. **Check PR status**:
- For GitHub:
```bash
gh pr view [PR_NUMBER] --json state,merged
```
- For Gitea:
```bash
tea pr list --state=closed
```
Look for the PR number in the INDEX column with STATE "merged".
Note: `tea pr view [PR_NUMBER]` lists all PRs, not a specific one. Use `tea pr list --state=closed` and look for your PR number in the results.
### Step 3: Close merged beads
If the PR is merged:
```bash
bd close [BEAD_ID] --reason="PR merged: [PR_URL]"
```
### Step 4: Report summary
Present results:
```
## Beads Reconciliation Summary
### Closed (PR Merged)
| Bead | PR | Title |
|------|-----|-------|
| beads-abc | #123 | Feature X |
| beads-xyz | #456 | Bug fix Y |
### Still in Review
| Bead | PR | Status | Title |
|------|-----|--------|-------|
| beads-def | #789 | Open | Feature Z |
### Issues Found
- beads-ghi: No PR URL found in notes
- beads-jkl: PR #999 not found (may have been deleted)
```
## Error Handling
- **Missing PR URL**: Skip the bead and report it
- **PR not found**: Report the error but continue with other beads
- **API errors**: Report and continue
## Notes
- This skill complements `/parallel_beads` which sets beads to `in_review` status
- Run this skill periodically or after merging PRs to keep beads in sync
- Beads with closed (but not merged) PRs are not automatically closed - they may need rework

View File

@@ -1,72 +0,0 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.home.roles.starship;
in
{
options.home.roles.starship = {
enable = mkEnableOption "starship cross-shell prompt";
};
config = mkIf cfg.enable {
programs.starship = {
enable = true;
enableBashIntegration = true;
enableZshIntegration = true;
settings = {
add_newline = true;
character = {
success_symbol = "[>](bold green)";
error_symbol = "[x](bold red)";
vimcmd_symbol = "[<](bold green)";
};
directory = {
truncation_length = 4;
truncate_to_repo = true;
};
git_branch = {
symbol = "";
format = "[$symbol$branch(:$remote_branch)]($style) ";
};
git_status = {
format = "([$all_status$ahead_behind]($style) )";
};
nix_shell = {
symbol = "";
format = "[$symbol$state( \\($name\\))]($style) ";
};
cmd_duration = {
min_time = 2000;
format = "[$duration]($style) ";
};
# Disable modules that are noisy or rarely needed
package.disabled = true;
nodejs.disabled = true;
python.disabled = true;
ruby.disabled = true;
java.disabled = true;
golang.disabled = true;
rust.disabled = true;
php.disabled = true;
lua.disabled = true;
perl.disabled = true;
terraform.disabled = true;
kubernetes.disabled = true;
docker_context.disabled = true;
aws.disabled = true;
gcloud.disabled = true;
azure.disabled = true;
};
};
};
}

View File

@@ -170,7 +170,6 @@ This document outlines the plan to migrate the john-endesktop server from Arch L
```bash
blkid /dev/nvme0n1p5
# Note the UUID for updating hardware-configuration.nix
/dev/nvme0n1p5: LABEL="nixos" UUID="5f4ad025-bfab-4aed-a933-6638348059e5" UUID_SUB="4734d820-7b8a-4b7f-853a-026021c1d204" BLOCK_SIZE="4096" TYPE="btrfs" PARTLABEL="data" PARTUUID="9ea025df-cdb7-48fd-b5d4-37cd5d8588eb"
```
8. **Copy your NixOS configuration to the server**
@@ -389,11 +388,11 @@ After successful migration and 24-48 hours of stable operation:
Pre-migration:
- [x] nvme0n1p5 removal from media pool complete
- [x] Recent backup verified (< 24 hours)
- [x] Maintenance window scheduled
- [x] NixOS ISO downloaded
- [x] Bootable USB created
- [x] NixOS config builds successfully
- [ ] Recent backup verified (< 24 hours)
- [ ] Maintenance window scheduled
- [ ] NixOS ISO downloaded
- [ ] Bootable USB created
- [ ] NixOS config builds successfully
During migration:
- [ ] ZFS pools exported

View File

@@ -1,35 +0,0 @@
# Common configuration shared between NixOS and Darwin
{ lib, pkgs, ... }:
{
config = {
time.timeZone = "America/Los_Angeles";
environment.systemPackages = with pkgs; [
git
glances
pciutils
tree
usbutils
vim
];
nix = {
package = pkgs.nix;
settings = {
experimental-features = [ "nix-command" "flakes" ];
max-jobs = "auto";
trusted-users = [ "johno" ];
substituters = [
];
};
gc = {
automatic = true;
options = "--delete-older-than 10d";
};
};
nixpkgs.config.allowUnfree = true;
};
}

View File

@@ -7,10 +7,6 @@ let
setEnvironmentPath = "${config.system.build.setEnvironment}";
in
{
imports = [
./common.nix
];
config = {
# Salt manages /etc/bashrc, /etc/zshrc, /etc/zshenv
# nix-darwin writes to .local variants for nix-specific configuration
@@ -47,6 +43,8 @@ in
fi
'';
time.timeZone = "America/Los_Angeles";
# System preferences
system.defaults = {
# Custom keyboard shortcuts
@@ -81,5 +79,42 @@ in
};
};
};
environment.systemPackages = with pkgs; [
git
glances
pciutils
tree
usbutils
vim
];
nix = {
package = pkgs.nix;
# distributedBuilds = true;
# buildMachines = [{
# hostName = "z790prors.oglehome";
# system = "x86_64-linux";
# protocol = "ssh-ng";
# sshUser = "johno";
# sshKey = "/root/.ssh/id_ed25519";
# maxJobs = 3;
# speedFactor = 2;
# }];
settings = {
experimental-features = [ "nix-command" "flakes" ];
max-jobs = "auto";
trusted-users = [ "johno" ];
substituters = [
];
};
gc = {
automatic = true;
options = "--delete-older-than 10d";
};
};
nixpkgs.config.allowUnfree = true;
};
}
}

View File

@@ -4,7 +4,6 @@ with lib;
{
imports = [
./common.nix
./audio
./bluetooth
./btrfs
@@ -32,6 +31,7 @@ with lib;
LC_TELEPHONE = "en_US.UTF-8";
LC_TIME = "en_US.UTF-8";
};
time.timeZone = "America/Los_Angeles";
services.xserver.xkb = {
layout = "us";
@@ -49,7 +49,42 @@ with lib;
# Enable the OpenSSH daemon.
services.openssh.enable = true;
# NixOS-specific gc option (not available on Darwin)
nix.gc.randomizedDelaySec = "14m";
environment.systemPackages = with pkgs; [
git
glances
pciutils
tree
usbutils
vim
];
nix = {
package = pkgs.nix;
# distributedBuilds = true;
# buildMachines = [{
# hostName = "z790prors.oglehome";
# system = "x86_64-linux";
# protocol = "ssh-ng";
# sshUser = "johno";
# sshKey = "/root/.ssh/id_ed25519";
# maxJobs = 3;
# speedFactor = 2;
# }];
settings = {
experimental-features = [ "nix-command" "flakes" ];
max-jobs = "auto";
trusted-users = [ "johno" ];
substituters = [
];
};
gc = {
automatic = true;
randomizedDelaySec = "14m";
options = "--delete-older-than 10d";
};
};
nixpkgs.config.allowUnfree = true;
};
}

View File

@@ -1,22 +0,0 @@
#!/usr/bin/env bash
# Build Live USB ISO from flake configuration
# Creates an uncompressed ISO suitable for Ventoy and other USB boot tools
# Usage: nix run .#build-liveusb
# Or: ./scripts/build-liveusb.sh
set -euo pipefail
REPO_ROOT="${REPO_ROOT:-$(git rev-parse --show-toplevel 2>/dev/null || pwd)}"
echo "Building Live USB ISO..."
nix build "${REPO_ROOT}#nixosConfigurations.live-usb.config.system.build.isoImage" --show-trace
if ls "${REPO_ROOT}/result/iso/"*.iso 1> /dev/null 2>&1; then
iso_file=$(ls "${REPO_ROOT}/result/iso/"*.iso)
echo "Build complete!"
echo "ISO location: $iso_file"
echo "Ready for Ventoy or dd to USB"
else
echo "Build failed - no ISO file found"
exit 1
fi