Merge branch 'steveyegge:main' into main

This commit is contained in:
Jordan Hubbard
2025-12-26 17:22:14 -04:00
committed by GitHub
73 changed files with 5037 additions and 2358 deletions

View File

@@ -2,6 +2,7 @@ package main
import (
"context"
"encoding/json"
"fmt"
"os"
"strings"
@@ -18,8 +19,23 @@ import (
// cookCmd compiles a formula JSON into a proto bead.
var cookCmd = &cobra.Command{
Use: "cook <formula-file>",
Short: "Compile a formula into a proto bead",
Long: `Cook transforms a .formula.json file into a proto bead.
Short: "Compile a formula into a proto (ephemeral by default)",
Long: `Cook transforms a .formula.json file into a proto.
By default, cook outputs the resolved formula as JSON to stdout for
ephemeral use. The output can be inspected, piped, or saved to a file.
Two cooking modes are available (gt-8tmz.23):
COMPILE-TIME (default, --mode=compile):
Produces a proto with {{variable}} placeholders intact.
Use for: modeling, estimation, contractor handoff, planning.
Variables are NOT substituted - the output shows the template structure.
RUNTIME (--mode=runtime or when --var flags provided):
Produces a fully-resolved proto with variables substituted.
Use for: final validation before pour, seeing exact output.
Requires all variables to have values (via --var or defaults).
Formulas are high-level workflow templates that support:
- Variable definitions with defaults and validation
@@ -27,16 +43,26 @@ Formulas are high-level workflow templates that support:
- Composition rules for bonding formulas together
- Inheritance via extends
The cook command parses the formula, resolves inheritance, and
creates a proto bead in the database that can be poured or spawned.
The --persist flag enables the legacy behavior of writing the proto
to the database. This is useful when you want to reuse the same
proto multiple times without re-cooking.
For most workflows, prefer ephemeral protos: pour and wisp commands
accept formula names directly and cook inline (bd-rciw).
Examples:
bd cook mol-feature.formula.json
bd cook .beads/formulas/mol-release.formula.json --force
bd cook mol-patrol.formula.json --search-path .beads/formulas
bd cook mol-feature.formula.json # Compile-time: keep {{vars}}
bd cook mol-feature --var name=auth # Runtime: substitute vars
bd cook mol-feature --mode=runtime --var name=auth # Explicit runtime mode
bd cook mol-feature --dry-run # Preview steps
bd cook mol-release.formula.json --persist # Write to database
bd cook mol-release.formula.json --persist --force # Replace existing
Output:
Creates a proto bead with:
Output (default):
JSON representation of the resolved formula with all steps.
Output (--persist):
Creates a proto bead in the database with:
- ID matching the formula name (e.g., mol-feature)
- The "template" label for proto identification
- Child issues for each step
@@ -55,25 +81,49 @@ type cookResult struct {
}
func runCook(cmd *cobra.Command, args []string) {
CheckReadonly("cook")
ctx := rootCtx
// Cook requires direct store access for creating protos
if store == nil {
if daemonClient != nil {
fmt.Fprintf(os.Stderr, "Error: cook requires direct database access\n")
fmt.Fprintf(os.Stderr, "Hint: use --no-daemon flag: bd --no-daemon cook %s ...\n", args[0])
} else {
fmt.Fprintf(os.Stderr, "Error: no database connection\n")
}
os.Exit(1)
}
dryRun, _ := cmd.Flags().GetBool("dry-run")
persist, _ := cmd.Flags().GetBool("persist")
force, _ := cmd.Flags().GetBool("force")
searchPaths, _ := cmd.Flags().GetStringSlice("search-path")
prefix, _ := cmd.Flags().GetString("prefix")
varFlags, _ := cmd.Flags().GetStringSlice("var")
mode, _ := cmd.Flags().GetString("mode")
// Parse variables (gt-8tmz.23)
inputVars := make(map[string]string)
for _, v := range varFlags {
parts := strings.SplitN(v, "=", 2)
if len(parts) != 2 {
fmt.Fprintf(os.Stderr, "Error: invalid variable format '%s', expected 'key=value'\n", v)
os.Exit(1)
}
inputVars[parts[0]] = parts[1]
}
// Determine cooking mode (gt-8tmz.23)
// Runtime mode is triggered by: explicit --mode=runtime OR providing --var flags
runtimeMode := mode == "runtime" || len(inputVars) > 0
if mode != "" && mode != "compile" && mode != "runtime" {
fmt.Fprintf(os.Stderr, "Error: invalid mode '%s', must be 'compile' or 'runtime'\n", mode)
os.Exit(1)
}
// Only need store access if persisting
if persist {
CheckReadonly("cook --persist")
if store == nil {
if daemonClient != nil {
fmt.Fprintf(os.Stderr, "Error: cook --persist requires direct database access\n")
fmt.Fprintf(os.Stderr, "Hint: use --no-daemon flag: bd --no-daemon cook %s --persist ...\n", args[0])
} else {
fmt.Fprintf(os.Stderr, "Error: no database connection\n")
}
os.Exit(1)
}
}
ctx := rootCtx
// Create parser with search paths
parser := formula.NewParser(searchPaths...)
@@ -107,6 +157,15 @@ func runCook(cmd *cobra.Command, args []string) {
resolved.Steps = formula.ApplyAdvice(resolved.Steps, resolved.Advice)
}
// Apply inline step expansions (gt-8tmz.35)
// This processes Step.Expand fields before compose.expand/map rules
inlineExpandedSteps, err := formula.ApplyInlineExpansions(resolved.Steps, parser)
if err != nil {
fmt.Fprintf(os.Stderr, "Error applying inline expansions: %v\n", err)
os.Exit(1)
}
resolved.Steps = inlineExpandedSteps
// Apply expansion operators (gt-8tmz.3)
if resolved.Compose != nil && (len(resolved.Compose.Expand) > 0 || len(resolved.Compose.Map) > 0) {
expandedSteps, err := formula.ApplyExpansions(resolved.Steps, resolved.Compose, parser)
@@ -141,21 +200,6 @@ func runCook(cmd *cobra.Command, args []string) {
protoID = prefix + resolved.Formula
}
// Check if proto already exists
existingProto, err := store.GetIssue(ctx, protoID)
if err == nil && existingProto != nil {
if !force {
fmt.Fprintf(os.Stderr, "Error: proto %s already exists\n", protoID)
fmt.Fprintf(os.Stderr, "Hint: use --force to replace it\n")
os.Exit(1)
}
// Delete existing proto and its children
if err := deleteProtoSubgraph(ctx, store, protoID); err != nil {
fmt.Fprintf(os.Stderr, "Error deleting existing proto: %v\n", err)
os.Exit(1)
}
}
// Extract variables used in the formula
vars := formula.ExtractVariables(resolved)
@@ -168,19 +212,48 @@ func runCook(cmd *cobra.Command, args []string) {
}
if dryRun {
fmt.Printf("\nDry run: would cook formula %s as proto %s\n\n", resolved.Formula, protoID)
fmt.Printf("Steps (%d):\n", len(resolved.Steps))
// Determine mode label for display
modeLabel := "compile-time"
if runtimeMode {
modeLabel = "runtime"
// Apply defaults for runtime mode display
for name, def := range resolved.Vars {
if _, provided := inputVars[name]; !provided && def.Default != "" {
inputVars[name] = def.Default
}
}
}
fmt.Printf("\nDry run: would cook formula %s as proto %s (%s mode)\n\n", resolved.Formula, protoID, modeLabel)
// In runtime mode, show substituted steps
if runtimeMode {
// Create a copy with substituted values for display
substituteFormulaVars(resolved, inputVars)
fmt.Printf("Steps (%d) [variables substituted]:\n", len(resolved.Steps))
} else {
fmt.Printf("Steps (%d) [{{variables}} shown as placeholders]:\n", len(resolved.Steps))
}
printFormulaSteps(resolved.Steps, " ")
if len(vars) > 0 {
fmt.Printf("\nVariables: %s\n", strings.Join(vars, ", "))
fmt.Printf("\nVariables used: %s\n", strings.Join(vars, ", "))
}
// Show variable values in runtime mode
if runtimeMode && len(inputVars) > 0 {
fmt.Printf("\nVariable values:\n")
for name, value := range inputVars {
fmt.Printf(" {{%s}} = %s\n", name, value)
}
}
if len(bondPoints) > 0 {
fmt.Printf("Bond points: %s\n", strings.Join(bondPoints, ", "))
}
// Show variable definitions
if len(resolved.Vars) > 0 {
// Show variable definitions (more useful in compile-time mode)
if !runtimeMode && len(resolved.Vars) > 0 {
fmt.Printf("\nVariable definitions:\n")
for name, def := range resolved.Vars {
attrs := []string{}
@@ -203,6 +276,54 @@ func runCook(cmd *cobra.Command, args []string) {
return
}
// Ephemeral mode (default): output resolved formula as JSON to stdout (bd-rciw)
if !persist {
// Runtime mode (gt-8tmz.23): substitute variables before output
if runtimeMode {
// Apply defaults from formula variable definitions
for name, def := range resolved.Vars {
if _, provided := inputVars[name]; !provided && def.Default != "" {
inputVars[name] = def.Default
}
}
// Check for missing required variables
var missingVars []string
for _, v := range vars {
if _, ok := inputVars[v]; !ok {
missingVars = append(missingVars, v)
}
}
if len(missingVars) > 0 {
fmt.Fprintf(os.Stderr, "Error: runtime mode requires all variables to have values\n")
fmt.Fprintf(os.Stderr, "Missing: %s\n", strings.Join(missingVars, ", "))
fmt.Fprintf(os.Stderr, "Provide with: --var %s=<value>\n", missingVars[0])
os.Exit(1)
}
// Substitute variables in the formula
substituteFormulaVars(resolved, inputVars)
}
outputJSON(resolved)
return
}
// Persist mode: create proto bead in database (legacy behavior)
// Check if proto already exists
existingProto, err := store.GetIssue(ctx, protoID)
if err == nil && existingProto != nil {
if !force {
fmt.Fprintf(os.Stderr, "Error: proto %s already exists\n", protoID)
fmt.Fprintf(os.Stderr, "Hint: use --force to replace it\n")
os.Exit(1)
}
// Delete existing proto and its children
if err := deleteProtoSubgraph(ctx, store, protoID); err != nil {
fmt.Fprintf(os.Stderr, "Error deleting existing proto: %v\n", err)
os.Exit(1)
}
}
// Create the proto bead from the formula
result, err := cookFormula(ctx, store, resolved, protoID)
if err != nil {
@@ -241,6 +362,294 @@ type cookFormulaResult struct {
Created int
}
// cookFormulaToSubgraph creates an in-memory TemplateSubgraph from a resolved formula.
// This is the ephemeral proto implementation - no database storage.
// The returned subgraph can be passed directly to cloneSubgraph for instantiation.
func cookFormulaToSubgraph(f *formula.Formula, protoID string) (*TemplateSubgraph, error) {
	// Root epic representing the proto itself.
	root := &types.Issue{
		ID:          protoID,
		Title:       f.Formula, // title preserves the original formula name
		Description: f.Description,
		Status:      types.StatusOpen,
		Priority:    2,
		IssueType:   types.TypeEpic,
		IsTemplate:  true,
		CreatedAt:   time.Now(),
		UpdatedAt:   time.Now(),
	}

	byID := map[string]*types.Issue{protoID: root}
	allIssues := []*types.Issue{root}
	var allDeps []*types.Dependency

	// Materialize one issue per step, rooted under the proto ID.
	collectStepsToSubgraph(f.Steps, protoID, byID, &allIssues, &allDeps)

	// Build the step-ID -> full issue-ID mapping over the whole tree,
	// then wire up the blocking/gate dependencies.
	idMapping := make(map[string]string)
	for _, s := range f.Steps {
		collectStepIDMappings(s, protoID, idMapping)
	}
	for _, s := range f.Steps {
		collectDependenciesToSubgraph(s, idMapping, &allDeps)
	}

	return &TemplateSubgraph{
		Root:         root,
		Issues:       allIssues,
		Dependencies: allDeps,
		IssueMap:     byID,
	}, nil
}
// collectStepsToSubgraph collects issues and dependencies for steps and their children.
// This is the in-memory version that doesn't create labels (since those require DB).
func collectStepsToSubgraph(steps []*formula.Step, parentID string, issueMap map[string]*types.Issue,
	issues *[]*types.Issue, deps *[]*types.Dependency) {
	for _, step := range steps {
		// Issue IDs are hierarchical: parent.step-id.
		childID := fmt.Sprintf("%s.%s", parentID, step.ID)

		// Resolve the issue type from the step's declared type; empty or
		// unrecognized values fall back to a plain task.
		kind := types.TypeTask
		switch step.Type {
		case "bug":
			kind = types.TypeBug
		case "feature":
			kind = types.TypeFeature
		case "epic":
			kind = types.TypeEpic
		case "chore":
			kind = types.TypeChore
		}
		// Any step with children is promoted to an epic.
		if len(step.Children) > 0 {
			kind = types.TypeEpic
		}

		prio := 2
		if step.Priority != nil {
			prio = *step.Priority
		}

		issue := &types.Issue{
			ID:             childID,
			Title:          step.Title, // {{variables}} stay intact until pour time
			Description:    step.Description,
			Status:         types.StatusOpen,
			Priority:       prio,
			IssueType:      kind,
			Assignee:       step.Assignee,
			IsTemplate:     true,
			CreatedAt:      time.Now(),
			UpdatedAt:      time.Now(),
			SourceFormula:  step.SourceFormula,  // Source tracing (gt-8tmz.18)
			SourceLocation: step.SourceLocation, // Source tracing (gt-8tmz.18)
		}

		// Labels live directly on the in-memory issue (no DB label rows).
		issue.Labels = append(issue.Labels, step.Labels...)
		// waits_for is modeled as a gate:<spec> label (bd-j4cr).
		if step.WaitsFor != "" {
			issue.Labels = append(issue.Labels, fmt.Sprintf("gate:%s", step.WaitsFor))
		}

		*issues = append(*issues, issue)
		issueMap[childID] = issue

		// Structural link back to the parent.
		*deps = append(*deps, &types.Dependency{
			IssueID:     childID,
			DependsOnID: parentID,
			Type:        types.DepParentChild,
		})

		// Descend into children with this issue as the new parent.
		if len(step.Children) > 0 {
			collectStepsToSubgraph(step.Children, childID, issueMap, issues, deps)
		}
	}
}
// collectStepIDMappings builds a map from step ID to full (hierarchical)
// issue ID by walking the step tree rooted at step under parentID.
func collectStepIDMappings(step *formula.Step, parentID string, mapping map[string]string) {
	fullID := parentID + "." + step.ID
	mapping[step.ID] = fullID
	for _, c := range step.Children {
		collectStepIDMappings(c, fullID, mapping)
	}
}
// collectDependenciesToSubgraph collects blocking dependencies from depends_on and needs fields.
func collectDependenciesToSubgraph(step *formula.Step, idMapping map[string]string, deps *[]*types.Dependency) {
	issueID := idMapping[step.ID]

	// depends_on and needs (bd-hr39) both translate to blocking edges;
	// references that don't resolve are skipped here and caught by validation.
	addBlocks := func(targets []string) {
		for _, target := range targets {
			full, ok := idMapping[target]
			if !ok {
				continue
			}
			*deps = append(*deps, &types.Dependency{
				IssueID:     issueID,
				DependsOnID: full,
				Type:        types.DepBlocks,
			})
		}
	}
	addBlocks(step.DependsOn)
	addBlocks(step.Needs)

	// waits_for (gt-8tmz.38): fanout gate dependency on a spawner step.
	if step.WaitsFor != "" {
		if spec := formula.ParseWaitsFor(step.WaitsFor); spec != nil {
			spawner := spec.SpawnerID
			if spawner == "" && len(step.Needs) > 0 {
				// No explicit spawner: infer it from the first need.
				spawner = step.Needs[0]
			}
			if spawner != "" {
				if spawnerIssueID, ok := idMapping[spawner]; ok {
					// Gate name travels as JSON metadata on the dependency.
					metaJSON, _ := json.Marshal(types.WaitsForMeta{Gate: spec.Gate})
					*deps = append(*deps, &types.Dependency{
						IssueID:     issueID,
						DependsOnID: spawnerIssueID,
						Type:        types.DepWaitsFor,
						Metadata:    string(metaJSON),
					})
				}
			}
		}
	}

	// Recurse into children.
	for _, child := range step.Children {
		collectDependenciesToSubgraph(child, idMapping, deps)
	}
}
// resolveAndCookFormula loads a formula by name, resolves it, applies all transformations,
// and returns an in-memory TemplateSubgraph ready for instantiation.
// This is the main entry point for ephemeral proto cooking.
func resolveAndCookFormula(formulaName string, searchPaths []string) (*TemplateSubgraph, error) {
	p := formula.NewParser(searchPaths...)

	loaded, err := p.LoadByName(formulaName)
	if err != nil {
		return nil, fmt.Errorf("loading formula %q: %w", formulaName, err)
	}

	res, err := p.Resolve(loaded)
	if err != nil {
		return nil, fmt.Errorf("resolving formula %q: %w", formulaName, err)
	}

	// Transformation pipeline, in fixed order:
	//   1. control flow - loops, branches, gates (gt-8tmz.4)
	//   2. the formula's own advice (gt-8tmz.2)
	//   3. inline step expansions (gt-8tmz.35)
	//   4. compose expand/map operators (gt-8tmz.3)
	//   5. compose aspects (gt-8tmz.5)
	steps, err := formula.ApplyControlFlow(res.Steps, res.Compose)
	if err != nil {
		return nil, fmt.Errorf("applying control flow to %q: %w", formulaName, err)
	}
	res.Steps = steps

	if len(res.Advice) > 0 {
		res.Steps = formula.ApplyAdvice(res.Steps, res.Advice)
	}

	steps, err = formula.ApplyInlineExpansions(res.Steps, p)
	if err != nil {
		return nil, fmt.Errorf("applying inline expansions to %q: %w", formulaName, err)
	}
	res.Steps = steps

	if res.Compose != nil && (len(res.Compose.Expand) > 0 || len(res.Compose.Map) > 0) {
		steps, err = formula.ApplyExpansions(res.Steps, res.Compose, p)
		if err != nil {
			return nil, fmt.Errorf("applying expansions to %q: %w", formulaName, err)
		}
		res.Steps = steps
	}

	if res.Compose != nil {
		for _, aspectName := range res.Compose.Aspects {
			aspect, err := p.LoadByName(aspectName)
			if err != nil {
				return nil, fmt.Errorf("loading aspect %q: %w", aspectName, err)
			}
			if aspect.Type != formula.TypeAspect {
				return nil, fmt.Errorf("%q is not an aspect formula (type=%s)", aspectName, aspect.Type)
			}
			if len(aspect.Advice) > 0 {
				res.Steps = formula.ApplyAdvice(res.Steps, aspect.Advice)
			}
		}
	}

	// Cook to an in-memory subgraph, carrying the variable definitions
	// along so defaults can be applied at pour time.
	return cookFormulaToSubgraphWithVars(res, res.Formula, res.Vars)
}
// cookFormulaToSubgraphWithVars creates an in-memory subgraph with variable info attached.
// Variable definitions are dereferenced to plain VarDef values so that
// default handling during pour doesn't need to chase pointers.
func cookFormulaToSubgraphWithVars(f *formula.Formula, protoID string, vars map[string]*formula.VarDef) (*TemplateSubgraph, error) {
	sg, err := cookFormulaToSubgraph(f, protoID)
	if err != nil {
		return nil, err
	}
	if vars == nil {
		return sg, nil
	}
	sg.VarDefs = make(map[string]formula.VarDef, len(vars))
	for name, def := range vars {
		if def != nil {
			sg.VarDefs[name] = *def
		}
	}
	return sg, nil
}
// cookFormula creates a proto bead from a resolved formula.
// protoID is the final ID for the proto (may include a prefix).
func cookFormula(ctx context.Context, s storage.Storage, f *formula.Formula, protoID string) (*cookFormulaResult, error) {
@@ -448,6 +857,36 @@ func collectDependencies(step *formula.Step, idMapping map[string]string, deps *
})
}
// Process waits_for field (gt-8tmz.38) - fanout gate dependency
if step.WaitsFor != "" {
waitsForSpec := formula.ParseWaitsFor(step.WaitsFor)
if waitsForSpec != nil {
// Determine spawner ID
spawnerStepID := waitsForSpec.SpawnerID
if spawnerStepID == "" && len(step.Needs) > 0 {
// Infer spawner from first need
spawnerStepID = step.Needs[0]
}
if spawnerStepID != "" {
if spawnerIssueID, ok := idMapping[spawnerStepID]; ok {
// Create WaitsFor dependency with metadata
meta := types.WaitsForMeta{
Gate: waitsForSpec.Gate,
}
metaJSON, _ := json.Marshal(meta)
*deps = append(*deps, &types.Dependency{
IssueID: issueID,
DependsOnID: spawnerIssueID,
Type: types.DepWaitsFor,
Metadata: string(metaJSON),
})
}
}
}
}
// Recursively handle children
for _, child := range step.Children {
collectDependencies(child, idMapping, deps)
@@ -524,11 +963,35 @@ func printFormulaSteps(steps []*formula.Step, indent string) {
}
}
// substituteFormulaVars substitutes {{variable}} placeholders in a formula (gt-8tmz.23).
// This is used in runtime mode to fully resolve the formula before output.
//
// Mutates f in place: the top-level description and every step in the tree
// are rewritten via substituteVariables. Placeholders with no matching key
// in vars are left intact (behavior exercised by TestSubstituteFormulaVars).
func substituteFormulaVars(f *formula.Formula, vars map[string]string) {
	// Substitute in top-level fields
	f.Description = substituteVariables(f.Description, vars)
	// Substitute in all steps recursively
	substituteStepVars(f.Steps, vars)
}
// substituteStepVars recursively substitutes variables in step titles and descriptions.
func substituteStepVars(steps []*formula.Step, vars map[string]string) {
	for _, s := range steps {
		s.Title = substituteVariables(s.Title, vars)
		s.Description = substituteVariables(s.Description, vars)
		// Recursing on an empty child slice is a no-op, so no guard needed.
		substituteStepVars(s.Children, vars)
	}
}
// init registers the cook command and its flags.
//
// NOTE(review): the merged diff rendering showed "force" registered twice
// (the pre-change and post-change lines); pflag panics on duplicate flag
// registration, so only the updated registration is kept here.
func init() {
	cookCmd.Flags().Bool("dry-run", false, "Preview what would be created")
	cookCmd.Flags().Bool("persist", false, "Persist proto to database (legacy behavior)")
	cookCmd.Flags().Bool("force", false, "Replace existing proto if it exists (requires --persist)")
	cookCmd.Flags().StringSlice("search-path", []string{}, "Additional paths to search for formula inheritance")
	cookCmd.Flags().String("prefix", "", "Prefix to prepend to proto ID (e.g., 'gt-' creates 'gt-mol-feature')")
	cookCmd.Flags().StringSlice("var", []string{}, "Variable substitution (key=value), enables runtime mode")
	cookCmd.Flags().String("mode", "", "Cooking mode: compile (keep placeholders) or runtime (substitute vars)")
	rootCmd.AddCommand(cookCmd)
}

189
cmd/bd/cook_test.go Normal file
View File

@@ -0,0 +1,189 @@
package main
import (
"testing"
"github.com/steveyegge/beads/internal/formula"
)
// =============================================================================
// Cook Tests (gt-8tmz.23: Compile-time vs Runtime Cooking)
// =============================================================================
// TestSubstituteFormulaVars tests variable substitution in formulas.
//
// Each case runs substituteFormulaVars on a fresh formula (mutated in place)
// and checks the resulting Description; when wantStepTitle is non-empty and
// the formula has steps, the first step's Title is checked as well.
// Placeholders without a matching key are expected to pass through unchanged.
func TestSubstituteFormulaVars(t *testing.T) {
	tests := []struct {
		name          string
		formula       *formula.Formula // input; mutated by substituteFormulaVars
		vars          map[string]string
		wantDesc      string
		wantStepTitle string // checked only when non-empty and Steps is non-empty
	}{
		{
			name: "substitute single variable in description",
			formula: &formula.Formula{
				Description: "Build {{feature}} feature",
				Steps:       []*formula.Step{},
			},
			vars:     map[string]string{"feature": "auth"},
			wantDesc: "Build auth feature",
		},
		{
			name: "substitute variable in step title",
			formula: &formula.Formula{
				Description: "Feature work",
				Steps: []*formula.Step{
					{Title: "Implement {{name}}"},
				},
			},
			vars:          map[string]string{"name": "login"},
			wantDesc:      "Feature work",
			wantStepTitle: "Implement login",
		},
		{
			name: "substitute multiple variables",
			formula: &formula.Formula{
				Description: "Release {{version}} on {{date}}",
				Steps: []*formula.Step{
					{Title: "Tag {{version}}"},
					{Title: "Deploy to {{env}}"},
				},
			},
			vars: map[string]string{
				"version": "1.0.0",
				"date":    "2024-01-15",
				"env":     "production",
			},
			wantDesc:      "Release 1.0.0 on 2024-01-15",
			wantStepTitle: "Tag 1.0.0",
		},
		{
			// Substitution must reach nested children, not just top-level steps.
			name: "nested children substitution",
			formula: &formula.Formula{
				Description: "Epic for {{project}}",
				Steps: []*formula.Step{
					{
						Title: "Phase 1: {{project}} design",
						Children: []*formula.Step{
							{Title: "Design {{component}}"},
						},
					},
				},
			},
			vars: map[string]string{
				"project":   "checkout",
				"component": "cart",
			},
			wantDesc:      "Epic for checkout",
			wantStepTitle: "Phase 1: checkout design",
		},
		{
			// Keys absent from vars leave their placeholders untouched.
			name: "unsubstituted variable left as-is",
			formula: &formula.Formula{
				Description: "Build {{feature}} with {{extra}}",
				Steps:       []*formula.Step{},
			},
			vars:     map[string]string{"feature": "auth"},
			wantDesc: "Build auth with {{extra}}", // {{extra}} unchanged
		},
		{
			// An empty vars map substitutes nothing.
			name: "empty vars map",
			formula: &formula.Formula{
				Description: "Keep {{placeholder}} intact",
				Steps:       []*formula.Step{},
			},
			vars:     map[string]string{},
			wantDesc: "Keep {{placeholder}} intact",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			substituteFormulaVars(tt.formula, tt.vars)
			if tt.formula.Description != tt.wantDesc {
				t.Errorf("Description = %q, want %q", tt.formula.Description, tt.wantDesc)
			}
			if tt.wantStepTitle != "" && len(tt.formula.Steps) > 0 {
				if tt.formula.Steps[0].Title != tt.wantStepTitle {
					t.Errorf("Steps[0].Title = %q, want %q", tt.formula.Steps[0].Title, tt.wantStepTitle)
				}
			}
		})
	}
}
// TestSubstituteStepVarsRecursive tests deep nesting works correctly
// by walking a four-level chain and checking every depth was substituted.
func TestSubstituteStepVarsRecursive(t *testing.T) {
	steps := []*formula.Step{
		{
			Title: "Root: {{name}}",
			Children: []*formula.Step{
				{
					Title: "Level 1: {{name}}",
					Children: []*formula.Step{
						{
							Title: "Level 2: {{name}}",
							Children: []*formula.Step{
								{Title: "Level 3: {{name}}"},
							},
						},
					},
				},
			},
		},
	}

	substituteStepVars(steps, map[string]string{"name": "test"})

	// Descend one level per iteration; each depth's first step must be resolved.
	expected := []struct {
		label string
		title string
	}{
		{"Root", "Root: test"},
		{"Level 1", "Level 1: test"},
		{"Level 2", "Level 2: test"},
		{"Level 3", "Level 3: test"},
	}
	current := steps
	for _, want := range expected {
		if current[0].Title != want.title {
			t.Errorf("%s title = %q, want %q", want.label, current[0].Title, want.title)
		}
		current = current[0].Children
	}
}
// TestCompileTimeVsRuntimeMode tests that compile-time preserves placeholders
// and runtime mode substitutes them
func TestCompileTimeVsRuntimeMode(t *testing.T) {
	// Both modes start from the same fixture shape.
	newFixture := func() *formula.Formula {
		return &formula.Formula{
			Description: "Feature: {{name}}",
			Steps: []*formula.Step{
				{Title: "Implement {{name}}"},
			},
		}
	}

	// Compile-time mode: substituteFormulaVars is never invoked, so the
	// placeholders must survive untouched.
	compileFormula := newFixture()
	if compileFormula.Description != "Feature: {{name}}" {
		t.Errorf("Compile-time: Description should preserve placeholder, got %q", compileFormula.Description)
	}

	// Runtime mode: substitution resolves every placeholder.
	runtimeFormula := newFixture()
	substituteFormulaVars(runtimeFormula, map[string]string{"name": "auth"})
	if runtimeFormula.Description != "Feature: auth" {
		t.Errorf("Runtime: Description = %q, want %q", runtimeFormula.Description, "Feature: auth")
	}
	if runtimeFormula.Steps[0].Title != "Implement auth" {
		t.Errorf("Runtime: Steps[0].Title = %q, want %q", runtimeFormula.Steps[0].Title, "Implement auth")
	}
}

View File

@@ -180,11 +180,14 @@ func ChildParentDependencies(path string) error {
}
defer db.Close()
// Find child→parent dependencies where issue_id starts with depends_on_id + "."
// Find child→parent BLOCKING dependencies where issue_id starts with depends_on_id + "."
// Only matches blocking types (blocks, conditional-blocks, waits-for) that cause deadlock.
// Excludes 'parent-child' type which is a legitimate structural hierarchy relationship.
query := `
SELECT d.issue_id, d.depends_on_id
SELECT d.issue_id, d.depends_on_id, d.type
FROM dependencies d
WHERE d.issue_id LIKE d.depends_on_id || '.%'
AND d.type IN ('blocks', 'conditional-blocks', 'waits-for')
`
rows, err := db.Query(query)
if err != nil {
@@ -195,12 +198,13 @@ func ChildParentDependencies(path string) error {
type badDep struct {
issueID string
dependsOnID string
depType string
}
var badDeps []badDep
for rows.Next() {
var d badDep
if err := rows.Scan(&d.issueID, &d.dependsOnID); err == nil {
if err := rows.Scan(&d.issueID, &d.dependsOnID, &d.depType); err == nil {
badDeps = append(badDeps, d)
}
}
@@ -210,10 +214,10 @@ func ChildParentDependencies(path string) error {
return nil
}
// Delete child→parent dependencies
// Delete child→parent blocking dependencies (preserving parent-child type)
for _, d := range badDeps {
_, err := db.Exec("DELETE FROM dependencies WHERE issue_id = ? AND depends_on_id = ?",
d.issueID, d.dependsOnID)
_, err := db.Exec("DELETE FROM dependencies WHERE issue_id = ? AND depends_on_id = ? AND type = ?",
d.issueID, d.dependsOnID, d.depType)
if err != nil {
fmt.Printf(" Warning: failed to remove %s→%s: %v\n", d.issueID, d.dependsOnID, err)
} else {

View File

@@ -138,3 +138,66 @@ func TestChildParentDependencies_FixesBadDeps(t *testing.T) {
t.Errorf("Expected 2 dirty issues (unique issue_ids from removed deps), got %d", dirtyCount)
}
}
// TestChildParentDependencies_PreservesParentChildType verifies that legitimate
// parent-child type dependencies are NOT removed (only blocking types are removed).
// Regression test for GitHub issue #750.
//
// Fix over previous version: openDB and QueryRow(...).Scan errors were
// ignored, so a failed reopen or query silently produced zero counts and
// could mask (or fake) a failure. All errors now fail the test explicitly.
func TestChildParentDependencies_PreservesParentChildType(t *testing.T) {
	// Set up a test database with both 'blocks' and 'parent-child' type deps.
	dir := t.TempDir()
	beadsDir := filepath.Join(dir, ".beads")
	if err := os.MkdirAll(beadsDir, 0755); err != nil {
		t.Fatal(err)
	}
	dbPath := filepath.Join(beadsDir, "beads.db")
	db, err := openDB(dbPath)
	if err != nil {
		t.Fatal(err)
	}
	// Schema with one 'blocks' (anti-pattern) and two 'parent-child' (legitimate) deps.
	_, err = db.Exec(`
CREATE TABLE issues (id TEXT PRIMARY KEY);
CREATE TABLE dependencies (issue_id TEXT, depends_on_id TEXT, type TEXT);
CREATE TABLE dirty_issues (issue_id TEXT PRIMARY KEY);
INSERT INTO issues (id) VALUES ('bd-abc'), ('bd-abc.1'), ('bd-abc.2');
INSERT INTO dependencies (issue_id, depends_on_id, type) VALUES
('bd-abc.1', 'bd-abc', 'parent-child'),
('bd-abc.2', 'bd-abc', 'parent-child'),
('bd-abc.1', 'bd-abc', 'blocks');
`)
	if err != nil {
		db.Close()
		t.Fatal(err)
	}
	db.Close()

	// Run the fix.
	if err := ChildParentDependencies(dir); err != nil {
		t.Fatalf("ChildParentDependencies failed: %v", err)
	}

	// Reopen and verify: only the 'blocks' row is gone, 'parent-child' preserved.
	db, err = openDB(dbPath)
	if err != nil {
		t.Fatal(err)
	}
	defer db.Close()

	// countRows surfaces Scan/query errors instead of silently yielding zero.
	countRows := func(query string) int {
		var n int
		if err := db.QueryRow(query).Scan(&n); err != nil {
			t.Fatalf("count query %q failed: %v", query, err)
		}
		return n
	}

	if got := countRows("SELECT COUNT(*) FROM dependencies WHERE type = 'blocks'"); got != 0 {
		t.Errorf("Expected 0 'blocks' dependencies after fix, got %d", got)
	}
	if got := countRows("SELECT COUNT(*) FROM dependencies WHERE type = 'parent-child'"); got != 2 {
		t.Errorf("Expected 2 'parent-child' dependencies preserved, got %d", got)
	}
	// Only the issue whose 'blocks' dep was removed should be marked dirty.
	if got := countRows("SELECT COUNT(*) FROM dirty_issues"); got != 1 {
		t.Errorf("Expected 1 dirty issue, got %d", got)
	}
}

View File

@@ -333,12 +333,14 @@ func CheckChildParentDependencies(path string) DoctorCheck {
}
defer db.Close()
// Query for child→parent dependencies where issue_id starts with depends_on_id + "."
// This uses SQLite's LIKE pattern matching
// Query for child→parent BLOCKING dependencies where issue_id starts with depends_on_id + "."
// Only matches blocking types (blocks, conditional-blocks, waits-for) that cause deadlock.
// Excludes 'parent-child' type which is a legitimate structural hierarchy relationship.
query := `
SELECT d.issue_id, d.depends_on_id
FROM dependencies d
WHERE d.issue_id LIKE d.depends_on_id || '.%'
AND d.type IN ('blocks', 'conditional-blocks', 'waits-for')
`
rows, err := db.Query(query)
if err != nil {

View File

@@ -1,12 +1,15 @@
package main
import (
"bytes"
"encoding/json"
"fmt"
"os"
"path/filepath"
"sort"
"strings"
"github.com/BurntSushi/toml"
"github.com/spf13/cobra"
"github.com/steveyegge/beads/internal/formula"
"github.com/steveyegge/beads/internal/ui"
@@ -363,7 +366,7 @@ func getFormulaSearchPaths() []string {
return paths
}
// scanFormulaDir scans a directory for formula files.
// scanFormulaDir scans a directory for formula files (both TOML and JSON).
func scanFormulaDir(dir string) ([]*formula.Formula, error) {
entries, err := os.ReadDir(dir)
if err != nil {
@@ -377,11 +380,13 @@ func scanFormulaDir(dir string) ([]*formula.Formula, error) {
if entry.IsDir() {
continue
}
if !strings.HasSuffix(entry.Name(), formula.FormulaExt) {
// Support both .formula.toml and .formula.json
name := entry.Name()
if !strings.HasSuffix(name, formula.FormulaExtTOML) && !strings.HasSuffix(name, formula.FormulaExtJSON) {
continue
}
path := filepath.Join(dir, entry.Name())
path := filepath.Join(dir, name)
f, err := parser.ParseFile(path)
if err != nil {
continue // Skip invalid formulas
@@ -471,10 +476,297 @@ func printFormulaStepsTree(steps []*formula.Step, indent string) {
}
}
// formulaConvertCmd converts JSON formulas to TOML.
// Implemented by runFormulaConvert; behavior is tuned by the convertAll,
// convertDelete, and convertStdout flags declared below.
var formulaConvertCmd = &cobra.Command{
	Use:   "convert <formula-name|path> [--all]",
	Short: "Convert formula from JSON to TOML",
	Long: `Convert formula files from JSON to TOML format.
TOML format provides better ergonomics:
- Multi-line strings without \n escaping
- Human-readable diffs
- Comments allowed
The convert command reads a .formula.json file and outputs .formula.toml.
The original JSON file is preserved (use --delete to remove it).
Examples:
bd formula convert shiny # Convert shiny.formula.json to .toml
bd formula convert ./my.formula.json # Convert specific file
bd formula convert --all # Convert all JSON formulas
bd formula convert shiny --delete # Convert and remove JSON file
bd formula convert shiny --stdout # Print TOML to stdout`,
	Run: runFormulaConvert,
}
var (
convertAll bool
convertDelete bool
convertStdout bool
)
// runFormulaConvert implements `bd formula convert`. It resolves the
// operand to a .formula.json path (explicit path or name lookup in the
// formula search paths), parses it, converts it to TOML, then either
// prints the TOML (--stdout) or writes a sibling .formula.toml file,
// optionally deleting the JSON source (--delete). Exits non-zero on
// any error; with --all it delegates to convertAllFormulas.
func runFormulaConvert(cmd *cobra.Command, args []string) {
if convertAll {
convertAllFormulas()
return
}
if len(args) == 0 {
fmt.Fprintf(os.Stderr, "Error: formula name or path required\n")
fmt.Fprintf(os.Stderr, "Usage: bd formula convert <name|path> [--all]\n")
os.Exit(1)
}
name := args[0]
// Determine the JSON file path
var jsonPath string
if strings.HasSuffix(name, formula.FormulaExtJSON) {
// Direct path provided
jsonPath = name
} else if strings.HasSuffix(name, formula.FormulaExtTOML) {
// Converting TOML->TOML is a user error; fail early.
fmt.Fprintf(os.Stderr, "Error: %s is already a TOML file\n", name)
os.Exit(1)
} else {
// Search for the formula in search paths
jsonPath = findFormulaJSON(name)
if jsonPath == "" {
fmt.Fprintf(os.Stderr, "Error: JSON formula %q not found\n", name)
fmt.Fprintf(os.Stderr, "\nSearch paths:\n")
for _, p := range getFormulaSearchPaths() {
fmt.Fprintf(os.Stderr, " %s\n", p)
}
os.Exit(1)
}
}
// Parse the JSON file (validates it before we attempt conversion)
parser := formula.NewParser()
f, err := parser.ParseFile(jsonPath)
if err != nil {
fmt.Fprintf(os.Stderr, "Error parsing %s: %v\n", jsonPath, err)
os.Exit(1)
}
// Convert to TOML
tomlData, err := formulaToTOML(f)
if err != nil {
fmt.Fprintf(os.Stderr, "Error converting to TOML: %v\n", err)
os.Exit(1)
}
if convertStdout {
// --stdout: print and stop; no file is written or deleted.
fmt.Print(string(tomlData))
return
}
// Determine output path: swap the extension, keep the directory.
tomlPath := strings.TrimSuffix(jsonPath, formula.FormulaExtJSON) + formula.FormulaExtTOML
// Write the TOML file
if err := os.WriteFile(tomlPath, tomlData, 0644); err != nil {
fmt.Fprintf(os.Stderr, "Error writing %s: %v\n", tomlPath, err)
os.Exit(1)
}
fmt.Printf("✓ Converted: %s\n", tomlPath)
if convertDelete {
// Deletion failure is non-fatal: the conversion already succeeded.
if err := os.Remove(jsonPath); err != nil {
fmt.Fprintf(os.Stderr, "Warning: could not delete %s: %v\n", jsonPath, err)
} else {
fmt.Printf("✓ Deleted: %s\n", jsonPath)
}
}
}
// convertAllFormulas bulk-converts every .formula.json file found in the
// formula search paths to .formula.toml. Files whose TOML counterpart
// already exists are skipped (never overwritten). With --delete, each
// successfully converted JSON source is removed. Per-file status lines
// are printed as it goes, followed by a summary with an error count.
func convertAllFormulas() {
converted := 0
errors := 0
for _, dir := range getFormulaSearchPaths() {
entries, err := os.ReadDir(dir)
if err != nil {
// Unreadable search path (typically nonexistent) - skip silently.
continue
}
parser := formula.NewParser(dir)
for _, entry := range entries {
if entry.IsDir() {
continue
}
if !strings.HasSuffix(entry.Name(), formula.FormulaExtJSON) {
continue
}
jsonPath := filepath.Join(dir, entry.Name())
tomlPath := strings.TrimSuffix(jsonPath, formula.FormulaExtJSON) + formula.FormulaExtTOML
// Skip if TOML already exists
if _, err := os.Stat(tomlPath); err == nil {
fmt.Printf("⏭ Skipped (TOML exists): %s\n", entry.Name())
continue
}
f, err := parser.ParseFile(jsonPath)
if err != nil {
// Count the failure but keep going - bulk mode is best-effort.
fmt.Fprintf(os.Stderr, "✗ Error parsing %s: %v\n", jsonPath, err)
errors++
continue
}
tomlData, err := formulaToTOML(f)
if err != nil {
fmt.Fprintf(os.Stderr, "✗ Error converting %s: %v\n", jsonPath, err)
errors++
continue
}
if err := os.WriteFile(tomlPath, tomlData, 0644); err != nil {
fmt.Fprintf(os.Stderr, "✗ Error writing %s: %v\n", tomlPath, err)
errors++
continue
}
fmt.Printf("✓ Converted: %s\n", tomlPath)
converted++
if convertDelete {
// Deletion failure is non-fatal; the TOML file was written.
if err := os.Remove(jsonPath); err != nil {
fmt.Fprintf(os.Stderr, "Warning: could not delete %s: %v\n", jsonPath, err)
}
}
}
}
fmt.Printf("\nConverted %d formulas", converted)
if errors > 0 {
fmt.Printf(" (%d errors)", errors)
}
fmt.Println()
}
// findFormulaJSON searches for a JSON formula file by name.
func findFormulaJSON(name string) string {
for _, dir := range getFormulaSearchPaths() {
path := filepath.Join(dir, name+formula.FormulaExtJSON)
if _, err := os.Stat(path); err == nil {
return path
}
}
return ""
}
// formulaToTOML converts a Formula to TOML bytes.
// It re-reads the original JSON from f.Source (the parsed Formula struct
// loses some ordering/formatting), normalizes float64 values on known
// integer fields, encodes the raw map to TOML, and finally rewrites
// description fields that contain escaped newlines into multi-line strings.
func formulaToTOML(f *formula.Formula) ([]byte, error) {
if f.Source == "" {
return nil, fmt.Errorf("formula has no source path")
}
// Re-read the original JSON to preserve its raw structure.
jsonData, err := os.ReadFile(f.Source)
if err != nil {
return nil, fmt.Errorf("reading source: %w", err)
}
raw := map[string]interface{}{}
if err := json.Unmarshal(jsonData, &raw); err != nil {
return nil, fmt.Errorf("parsing JSON: %w", err)
}
// JSON numbers arrive as float64; coerce known integer fields to int64.
fixIntegerFields(raw)
var out bytes.Buffer
enc := toml.NewEncoder(&out)
enc.Indent = ""
if err := enc.Encode(raw); err != nil {
return nil, fmt.Errorf("encoding TOML: %w", err)
}
// Improve readability: turn \n-escaped descriptions into """ strings.
return []byte(convertToMultiLineStrings(out.String())), nil
}
// convertToMultiLineStrings post-processes TOML to use multi-line strings
// where strings contain newlines. This improves readability for descriptions.
// Only lines of the form `description = "..."` containing the escape \n are
// rewritten; all other lines pass through unchanged.
func convertToMultiLineStrings(input string) string {
lines := strings.Split(input, "\n")
var result []string
for _, line := range lines {
// Check if this line has a string with escaped newlines
if strings.Contains(line, "\\n") {
// Find the key = "..." pattern
eqIdx := strings.Index(line, " = \"")
if eqIdx > 0 && strings.HasSuffix(line, "\"") {
key := strings.TrimSpace(line[:eqIdx])
// Only convert description fields
if key == "description" {
// Extract the value (without quotes)
value := line[eqIdx+4 : len(line)-1]
// Unescape \n only (escape-aware: a literal backslash
// encoded as \\n must NOT be turned into backslash+newline).
value = unescapeNewlines(value)
// Use multi-line string syntax
result = append(result, fmt.Sprintf("%s = \"\"\"\n%s\"\"\"", key, value))
continue
}
}
}
result = append(result, line)
}
return strings.Join(result, "\n")
}

// unescapeNewlines converts the TOML escape sequence \n into a real
// newline while leaving every other escape pair (\\, \", \t, ...)
// untouched. A naive ReplaceAll(value, "\\n", "\n") would also match the
// tail of an escaped backslash (\\ followed by n), corrupting the value.
func unescapeNewlines(s string) string {
var b strings.Builder
b.Grow(len(s))
for i := 0; i < len(s); i++ {
c := s[i]
if c == '\\' && i+1 < len(s) {
if s[i+1] == 'n' {
b.WriteByte('\n')
i++
continue
}
// Preserve any other escape pair verbatim.
b.WriteByte(c)
b.WriteByte(s[i+1])
i++
continue
}
b.WriteByte(c)
}
return b.String()
}
// fixIntegerFields recursively fixes float64 values that should be integers.
// JSON unmarshals all numbers as float64, but TOML needs proper int types.
// Only whole-number values under the known integer keys (version, priority,
// count, max) are converted; nested maps and arrays of maps are walked.
func fixIntegerFields(m map[string]interface{}) {
for key, raw := range m {
if num, isNum := raw.(float64); isNum {
switch key {
case "version", "priority", "count", "max":
// Only convert values that are exactly representable as int64.
if num == float64(int64(num)) {
m[key] = int64(num)
}
}
continue
}
if nested, isMap := raw.(map[string]interface{}); isMap {
fixIntegerFields(nested)
continue
}
if list, isList := raw.([]interface{}); isList {
for _, elem := range list {
if child, ok := elem.(map[string]interface{}); ok {
fixIntegerFields(child)
}
}
}
}
}
// init registers the formula subcommands and binds their flags.
func init() {
formulaListCmd.Flags().String("type", "", "Filter by type (workflow, expansion, aspect)")
// convert flags are bound to the package-level convert* variables.
formulaConvertCmd.Flags().BoolVar(&convertAll, "all", false, "Convert all JSON formulas")
formulaConvertCmd.Flags().BoolVar(&convertDelete, "delete", false, "Delete JSON file after conversion")
formulaConvertCmd.Flags().BoolVar(&convertStdout, "stdout", false, "Print TOML to stdout instead of file")
formulaCmd.AddCommand(formulaListCmd)
formulaCmd.AddCommand(formulaShowCmd)
formulaCmd.AddCommand(formulaConvertCmd)
rootCmd.AddCommand(formulaCmd)
}

View File

@@ -1,14 +1,17 @@
package main
import (
"context"
"encoding/json"
"fmt"
"os"
"os/exec"
"strings"
"time"
"github.com/spf13/cobra"
"github.com/steveyegge/beads/internal/rpc"
"github.com/steveyegge/beads/internal/storage"
"github.com/steveyegge/beads/internal/storage/sqlite"
"github.com/steveyegge/beads/internal/types"
"github.com/steveyegge/beads/internal/ui"
@@ -101,6 +104,16 @@ Examples:
}
}
// For timer gates, the await_id IS the duration - use it as timeout if not explicitly set
if awaitType == "timer" && timeout == 0 {
var err error
timeout, err = time.ParseDuration(awaitID)
if err != nil {
fmt.Fprintf(os.Stderr, "Error: invalid timer duration %q: %v\n", awaitID, err)
os.Exit(1)
}
}
// Generate title if not provided
if title == "" {
title = fmt.Sprintf("Gate: %s:%s", awaitType, awaitID)
@@ -434,6 +447,129 @@ var gateCloseCmd = &cobra.Command{
},
}
// gateApproveCmd implements `bd gate approve`: explicit human sign-off that
// closes a human-type gate (created with --await human:<prompt>) and notifies
// its waiters. It verifies the target is an open human gate before closing.
var gateApproveCmd = &cobra.Command{
Use: "approve <gate-id>",
Short: "Approve a human gate",
Long: `Approve a human gate, closing it and notifying waiters.
Human gates (created with --await human:<prompt>) require explicit approval
to close. This is the command that provides that approval.
Example:
bd gate create --await human:approve-deploy --notify gastown/witness
# ... later, when ready to approve ...
bd gate approve <gate-id>
bd gate approve <gate-id> --comment "Reviewed and approved by Steve"`,
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
CheckReadonly("gate approve")
ctx := rootCtx
comment, _ := cmd.Flags().GetString("comment")
var closedGate *types.Issue
var gateID string
// Try daemon first, fall back to direct store access
if daemonClient != nil {
// First get the gate to verify it's a human gate
showResp, err := daemonClient.GateShow(&rpc.GateShowArgs{ID: args[0]})
if err != nil {
FatalError("gate approve: %v", err)
}
var gate types.Issue
if err := json.Unmarshal(showResp.Data, &gate); err != nil {
FatalError("failed to parse gate: %v", err)
}
// Only human gates may be approved; other types close via 'gate eval'.
if gate.AwaitType != "human" {
fmt.Fprintf(os.Stderr, "Error: %s is not a human gate (type: %s:%s)\n", args[0], gate.AwaitType, gate.AwaitID)
os.Exit(1)
}
if gate.Status == types.StatusClosed {
fmt.Fprintf(os.Stderr, "Error: gate %s is already closed\n", args[0])
os.Exit(1)
}
// Close with approval reason (the optional --comment is appended)
reason := fmt.Sprintf("Human approval granted: %s", gate.AwaitID)
if comment != "" {
reason = fmt.Sprintf("Human approval granted: %s (%s)", gate.AwaitID, comment)
}
resp, err := daemonClient.GateClose(&rpc.GateCloseArgs{
ID: args[0],
Reason: reason,
})
if err != nil {
FatalError("gate approve: %v", err)
}
if err := json.Unmarshal(resp.Data, &closedGate); err != nil {
FatalError("failed to parse gate: %v", err)
}
gateID = closedGate.ID
} else if store != nil {
// Direct-store path: resolve partial IDs first, then validate the
// issue really is an open human gate before closing it.
var err error
gateID, err = utils.ResolvePartialID(ctx, store, args[0])
if err != nil {
fmt.Fprintf(os.Stderr, "Error: %v\n", err)
os.Exit(1)
}
// Get gate and verify it's a human gate
gate, err := store.GetIssue(ctx, gateID)
if err != nil {
fmt.Fprintf(os.Stderr, "Error: %v\n", err)
os.Exit(1)
}
if gate == nil {
fmt.Fprintf(os.Stderr, "Error: gate %s not found\n", gateID)
os.Exit(1)
}
if gate.IssueType != types.TypeGate {
fmt.Fprintf(os.Stderr, "Error: %s is not a gate (type: %s)\n", gateID, gate.IssueType)
os.Exit(1)
}
if gate.AwaitType != "human" {
fmt.Fprintf(os.Stderr, "Error: %s is not a human gate (type: %s:%s)\n", gateID, gate.AwaitType, gate.AwaitID)
os.Exit(1)
}
if gate.Status == types.StatusClosed {
fmt.Fprintf(os.Stderr, "Error: gate %s is already closed\n", gateID)
os.Exit(1)
}
// Close with approval reason
reason := fmt.Sprintf("Human approval granted: %s", gate.AwaitID)
if comment != "" {
reason = fmt.Sprintf("Human approval granted: %s (%s)", gate.AwaitID, comment)
}
if err := store.CloseIssue(ctx, gateID, reason, actor); err != nil {
fmt.Fprintf(os.Stderr, "Error closing gate: %v\n", err)
os.Exit(1)
}
markDirtyAndScheduleFlush()
// Re-fetch for display; best-effort, error deliberately ignored.
closedGate, _ = store.GetIssue(ctx, gateID)
} else {
fmt.Fprintf(os.Stderr, "Error: no database connection\n")
os.Exit(1)
}
if jsonOutput {
outputJSON(closedGate)
return
}
fmt.Printf("%s Approved gate: %s\n", ui.RenderPass("✓"), gateID)
if closedGate != nil && closedGate.CloseReason != "" {
fmt.Printf(" %s\n", closedGate.CloseReason)
}
if closedGate != nil && len(closedGate.Waiters) > 0 {
fmt.Printf(" Waiters notified: %s\n", strings.Join(closedGate.Waiters, ", "))
}
},
}
var gateWaitCmd = &cobra.Command{
Use: "wait <gate-id>",
Short: "Add a waiter to an existing gate",
@@ -564,7 +700,314 @@ var gateWaitCmd = &cobra.Command{
},
}
// gateEvalCmd implements `bd gate eval`: a single polling pass over all open
// gates that closes any whose await condition is now satisfied. Human gates
// are reported (they need `bd gate approve`); unsupported types are skipped.
var gateEvalCmd = &cobra.Command{
Use: "eval",
Short: "Evaluate pending gates and close elapsed ones",
Long: `Evaluate all open gates and close those whose conditions are met.
Supported gate types:
- timer gates: closed when elapsed time exceeds timeout
- gh:run gates: closed when GitHub Actions run completes (requires gh CLI)
- gh:pr gates: closed when PR is merged/closed (requires gh CLI)
This command is idempotent and safe to run repeatedly.`,
Run: func(cmd *cobra.Command, args []string) {
CheckReadonly("gate eval")
ctx := rootCtx
dryRun, _ := cmd.Flags().GetBool("dry-run")
var gates []*types.Issue
// Get all open gates (daemon RPC when available, else direct store query)
if daemonClient != nil {
resp, err := daemonClient.GateList(&rpc.GateListArgs{All: false})
if err != nil {
FatalError("gate eval: %v", err)
}
if err := json.Unmarshal(resp.Data, &gates); err != nil {
FatalError("failed to parse gates: %v", err)
}
} else if store != nil {
gateType := types.TypeGate
openStatus := types.StatusOpen
filter := types.IssueFilter{
IssueType: &gateType,
Status: &openStatus,
}
var err error
gates, err = store.SearchIssues(ctx, "", filter)
if err != nil {
FatalError("listing gates: %v", err)
}
} else {
FatalError("no database connection")
}
if len(gates) == 0 {
if !jsonOutput {
fmt.Println("No open gates to evaluate")
}
return
}
// Bucket results for the summary: closed, unsupported, and gates that
// are waiting on conditions this pass cannot satisfy.
var closed []string
var skipped []string
var awaitingHuman []string
var awaitingMail []string
now := time.Now()
for _, gate := range gates {
var shouldClose bool
var reason string
// Dispatch on the gate's await type.
switch gate.AwaitType {
case "timer":
shouldClose, reason = evalTimerGate(gate, now)
case "gh:run":
shouldClose, reason = evalGHRunGate(gate)
case "gh:pr":
shouldClose, reason = evalGHPRGate(gate)
case "human":
// Human gates require explicit approval via 'bd gate approve'
awaitingHuman = append(awaitingHuman, gate.ID)
continue
case "mail":
// Mail gates check for messages matching the pattern
if store != nil {
shouldClose, reason = evalMailGate(ctx, store, gate)
} else {
// Daemon mode - can't evaluate mail gates without store access
awaitingMail = append(awaitingMail, gate.ID)
continue
}
default:
// Unsupported gate type - skip
skipped = append(skipped, gate.ID)
continue
}
if !shouldClose {
continue
}
// Gate condition met - close it (or just report it under --dry-run)
if dryRun {
fmt.Printf("Would close gate %s (%s)\n", gate.ID, reason)
closed = append(closed, gate.ID)
continue
}
if daemonClient != nil {
_, err := daemonClient.GateClose(&rpc.GateCloseArgs{
ID: gate.ID,
Reason: reason,
})
if err != nil {
// One failed close shouldn't abort the whole pass.
fmt.Fprintf(os.Stderr, "Warning: failed to close gate %s: %v\n", gate.ID, err)
continue
}
} else if store != nil {
if err := store.CloseIssue(ctx, gate.ID, reason, actor); err != nil {
fmt.Fprintf(os.Stderr, "Warning: failed to close gate %s: %v\n", gate.ID, err)
continue
}
markDirtyAndScheduleFlush()
}
closed = append(closed, gate.ID)
}
if jsonOutput {
outputJSON(map[string]interface{}{
"evaluated": len(gates),
"closed": closed,
"awaiting_human": awaitingHuman,
"awaiting_mail": awaitingMail,
"skipped": skipped,
})
return
}
// Human-readable summary.
if len(closed) == 0 {
fmt.Printf("Evaluated %d gates, none ready to close\n", len(gates))
} else {
action := "Closed"
if dryRun {
action = "Would close"
}
fmt.Printf("%s %s %d gate(s)\n", ui.RenderPass("✓"), action, len(closed))
for _, id := range closed {
fmt.Printf(" %s\n", id)
}
}
if len(awaitingHuman) > 0 {
fmt.Printf("Awaiting human approval: %s\n", strings.Join(awaitingHuman, ", "))
fmt.Printf(" Use 'bd gate approve <id>' to approve\n")
}
if len(awaitingMail) > 0 {
fmt.Printf("Awaiting mail: %s\n", strings.Join(awaitingMail, ", "))
}
if len(skipped) > 0 {
fmt.Printf("Skipped %d unsupported gate(s): %s\n", len(skipped), strings.Join(skipped, ", "))
}
},
}
// evalTimerGate reports whether a timer gate's duration has elapsed,
// measured from the gate's creation time. Gates without a positive
// timeout never fire. The returned string is the close reason.
func evalTimerGate(gate *types.Issue, now time.Time) (bool, string) {
timeout := gate.Timeout
if timeout <= 0 {
// No timeout configured - nothing to evaluate.
return false, ""
}
if now.Sub(gate.CreatedAt) < timeout {
// Still counting down.
return false, ""
}
return true, fmt.Sprintf("Timer elapsed (%v)", timeout)
}
// ghRunStatus represents the JSON output of `gh run view --json`
type ghRunStatus struct {
Status string `json:"status"` // queued, in_progress, completed
Conclusion string `json:"conclusion"` // success, failure, cancelled, skipped, etc.
}
// evalGHRunGate reports whether a GitHub Actions run has finished, by
// shelling out to `gh run view <run_id> --json status,conclusion`.
// Any failure (gh missing, network error, bad run ID, bad JSON) leaves
// the gate open rather than closing it spuriously.
func evalGHRunGate(gate *types.Issue) (bool, string) {
runID := gate.AwaitID
if runID == "" {
return false, ""
}
out, err := exec.Command("gh", "run", "view", runID, "--json", "status,conclusion").Output()
if err != nil {
// Treat any gh CLI failure as "not ready yet".
return false, ""
}
var st ghRunStatus
if json.Unmarshal(out, &st) != nil {
return false, ""
}
if st.Status != "completed" {
// queued or in_progress - keep waiting.
return false, ""
}
// Completed: include the conclusion in the close reason when present.
if st.Conclusion == "" {
return true, fmt.Sprintf("GitHub Actions run %s completed", runID)
}
return true, fmt.Sprintf("GitHub Actions run %s: %s", runID, st.Conclusion)
}
// ghPRStatus represents the JSON output of `gh pr view --json`
type ghPRStatus struct {
State string `json:"state"` // OPEN, CLOSED, MERGED
MergedAt string `json:"mergedAt"` // non-empty if merged
}
// evalGHPRGate reports whether a GitHub PR is no longer open, by shelling
// out to `gh pr view <pr_number> --json state,mergedAt`. "MERGED" and
// "CLOSED" both close the gate (with distinct reasons); any gh CLI or
// JSON failure leaves the gate open.
func evalGHPRGate(gate *types.Issue) (bool, string) {
prNumber := gate.AwaitID
if prNumber == "" {
return false, ""
}
out, err := exec.Command("gh", "pr", "view", prNumber, "--json", "state,mergedAt").Output()
if err != nil {
// Treat any gh CLI failure as "not ready yet".
return false, ""
}
var st ghPRStatus
if json.Unmarshal(out, &st) != nil {
return false, ""
}
if st.State == "MERGED" {
return true, fmt.Sprintf("PR #%s merged", prNumber)
}
if st.State == "CLOSED" {
return true, fmt.Sprintf("PR #%s closed without merge", prNumber)
}
// Still OPEN.
return false, ""
}
// evalMailGate reports whether an open message matching the gate's pattern
// exists. The pattern (await_id) is matched as a case-insensitive substring
// of message subjects; when the gate has waiters, only messages whose
// Assignee (recipient) is one of them count. Search errors leave the gate open.
func evalMailGate(ctx context.Context, store storage.Storage, gate *types.Issue) (bool, string) {
if gate.AwaitID == "" {
return false, ""
}
// Fetch all open messages.
msgType := types.TypeMessage
openStatus := types.StatusOpen
messages, err := store.SearchIssues(ctx, "", types.IssueFilter{
IssueType: &msgType,
Status: &openStatus,
})
if err != nil {
return false, ""
}
// Case-insensitive substring match against the subject.
want := strings.ToLower(gate.AwaitID)
// Waiter set for O(1) recipient checks (empty set = any recipient).
recipients := make(map[string]bool, len(gate.Waiters))
for _, w := range gate.Waiters {
recipients[w] = true
}
for _, msg := range messages {
if !strings.Contains(strings.ToLower(msg.Title), want) {
continue
}
if len(recipients) > 0 && !recipients[msg.Assignee] {
continue
}
return true, fmt.Sprintf("Mail received: %s", msg.Title)
}
return false, ""
}
func init() {
// Gate eval flags
gateEvalCmd.Flags().Bool("dry-run", false, "Show what would be closed without actually closing")
gateEvalCmd.Flags().Bool("json", false, "Output JSON format")
// Gate create flags
gateCreateCmd.Flags().String("await", "", "Await spec: gh:run:<id>, gh:pr:<id>, timer:<duration>, human:<prompt>, mail:<pattern> (required)")
gateCreateCmd.Flags().String("timeout", "", "Timeout duration (e.g., 30m, 1h)")
@@ -583,6 +1026,10 @@ func init() {
gateCloseCmd.Flags().StringP("reason", "r", "", "Reason for closing")
gateCloseCmd.Flags().Bool("json", false, "Output JSON format")
// Gate approve flags
gateApproveCmd.Flags().String("comment", "", "Optional approval comment")
gateApproveCmd.Flags().Bool("json", false, "Output JSON format")
// Gate wait flags
gateWaitCmd.Flags().StringSlice("notify", nil, "Mail addresses to add as waiters (repeatable, required)")
gateWaitCmd.Flags().Bool("json", false, "Output JSON format")
@@ -592,7 +1039,9 @@ func init() {
gateCmd.AddCommand(gateShowCmd)
gateCmd.AddCommand(gateListCmd)
gateCmd.AddCommand(gateCloseCmd)
gateCmd.AddCommand(gateApproveCmd)
gateCmd.AddCommand(gateWaitCmd)
gateCmd.AddCommand(gateEvalCmd)
// Add gate command to root
rootCmd.AddCommand(gateCmd)

View File

@@ -290,8 +290,15 @@ type VersionChange struct {
var versionChanges = []VersionChange{
{
Version: "0.37.0",
Date: "2025-12-25",
Date: "2025-12-26",
Changes: []string{
"NEW: bd gate create/show/list/close/wait (bd-udsi) - Async coordination primitives for agent workflows",
"NEW: bd gate eval (gt-twjr5.2) - Evaluate timer gates and GitHub gates (gh:run, gh:pr, mail)",
"NEW: bd gate approve (gt-twjr5.4) - Human gate approval command",
"NEW: bd close --suggest-next (GH#679) - Show newly unblocked issues after close",
"NEW: bd ready/blocked --parent (GH#743) - Scope by epic or parent bead",
"NEW: TOML support for formulas (gt-xmyha) - .formula.toml files alongside JSON",
"NEW: Fork repo auto-detection (GH#742) - Offer to configure .git/info/exclude",
"NEW: Control flow operators (gt-8tmz.4) - loop and gate operators for formula composition",
"NEW: Aspect composition (gt-8tmz.5) - Cross-cutting concerns via aspects field in formulas",
"NEW: Runtime expansion (gt-8tmz.8) - on_complete and for-each dynamic step generation",
@@ -304,6 +311,9 @@ var versionChanges = []VersionChange{
"CHANGED: Formula format YAML→JSON - Formulas now use .formula.json extension",
"CHANGED: bd mol run removed - Orchestration moved to gt commands",
"CHANGED: Wisp architecture simplified (bd-bkul) - Single DB with Wisp=true flag",
"FIX: Gate await fields preserved during upsert (bd-gr4q) - Multirepo sync fix",
"FIX: Tombstones retain closed_at timestamp - Preserves close time in soft deletes",
"FIX: Git detection caching (bd-7di) - Eliminates worktree slowness",
"FIX: installed_plugins.json v2 format (GH#741) - bd doctor handles new Claude Code format",
"FIX: git.IsWorktree() hang on Windows (GH#727) - bd init no longer hangs outside git repos",
"FIX: Skill files deleted by bd sync (GH#738) - .claude/ files now preserved",
@@ -312,6 +322,8 @@ var versionChanges = []VersionChange{
"FIX: Aspect self-matching recursion (gt-8tmz.16) - Prevents infinite loops",
"FIX: Map expansion nested matching (gt-8tmz.33) - Correctly matches child steps",
"FIX: Content-level merge for divergence (bd-kpy) - Better conflict resolution",
"FIX: Windows MCP graceful fallback (GH#387) - Daemon mode on Windows",
"FIX: Windows npm postinstall file locking (GH#670) - Install reliability",
},
},
{

View File

@@ -426,6 +426,24 @@ With --stealth: configures per-repository git settings for invisible beads usage
fmt.Fprintf(os.Stderr, "Warning: failed to close database: %v\n", err)
}
// Fork detection: offer to configure .git/info/exclude (GH#742)
setupExclude, _ := cmd.Flags().GetBool("setup-exclude")
if setupExclude {
// Manual flag - always configure
if err := setupForkExclude(!quiet); err != nil {
fmt.Fprintf(os.Stderr, "Warning: failed to configure git exclude: %v\n", err)
}
} else if !stealth && isGitRepo() {
// Auto-detect fork and prompt (skip if stealth - it handles exclude already)
if isFork, upstreamURL := detectForkSetup(); isFork {
if promptForkExclude(upstreamURL, quiet) {
if err := setupForkExclude(!quiet); err != nil {
fmt.Fprintf(os.Stderr, "Warning: failed to configure git exclude: %v\n", err)
}
}
}
}
// Check if we're in a git repo and hooks aren't installed
// Install by default unless --skip-hooks is passed
if !skipHooks && isGitRepo() && !hooksInstalled() {
@@ -491,6 +509,7 @@ func init() {
initCmd.Flags().Bool("contributor", false, "Run OSS contributor setup wizard")
initCmd.Flags().Bool("team", false, "Run team workflow setup wizard")
initCmd.Flags().Bool("stealth", false, "Enable stealth mode: global gitattributes and gitignore, no local repo tracking")
initCmd.Flags().Bool("setup-exclude", false, "Configure .git/info/exclude to keep beads files local (for forks)")
initCmd.Flags().Bool("skip-hooks", false, "Skip git hooks installation")
initCmd.Flags().Bool("skip-merge-driver", false, "Skip git merge driver setup")
initCmd.Flags().Bool("force", false, "Force re-initialization even if JSONL already has issues (may cause data loss)")
@@ -1463,6 +1482,103 @@ func setupGitExclude(verbose bool) error {
return nil
}
// setupForkExclude configures .git/info/exclude for fork workflows (GH#742).
// Adds beads files and Claude artifacts to keep PRs to upstream clean.
// This is separate from stealth mode - fork protection is specifically about
// preventing beads/Claude files from appearing in upstream PRs.
// Idempotent: patterns already present (as exact lines) are not re-added.
func setupForkExclude(verbose bool) error {
gitDir, err := exec.Command("git", "rev-parse", "--git-dir").Output()
if err != nil {
// Wrap the underlying error so callers can see why git failed
// (not installed vs. genuinely outside a repository).
return fmt.Errorf("not a git repository: %w", err)
}
gitDirPath := strings.TrimSpace(string(gitDir))
excludePath := filepath.Join(gitDirPath, "info", "exclude")
// Ensure info directory exists
if err := os.MkdirAll(filepath.Join(gitDirPath, "info"), 0755); err != nil {
return fmt.Errorf("failed to create git info directory: %w", err)
}
// Read existing content (missing file is fine - treated as empty)
var existingContent string
// #nosec G304 - git config path
if content, err := os.ReadFile(excludePath); err == nil {
existingContent = string(content)
}
// Patterns to add for fork protection
patterns := []string{".beads/", "**/RECOVERY*.md", "**/SESSION*.md"}
var toAdd []string
for _, p := range patterns {
// Check for exact line match (pattern alone on a line)
// This avoids false positives like ".beads/issues.jsonl" matching ".beads/"
if !containsExactPattern(existingContent, p) {
toAdd = append(toAdd, p)
}
}
if len(toAdd) == 0 {
if verbose {
fmt.Printf("%s Git exclude already configured\n", ui.RenderPass("✓"))
}
return nil
}
// Append patterns under a labelled section, preserving existing content.
newContent := existingContent
if !strings.HasSuffix(newContent, "\n") && len(newContent) > 0 {
newContent += "\n"
}
newContent += "\n# Beads fork protection (bd init)\n"
for _, p := range toAdd {
newContent += p + "\n"
}
// #nosec G306 - config file needs 0644
if err := os.WriteFile(excludePath, []byte(newContent), 0644); err != nil {
return fmt.Errorf("failed to write git exclude: %w", err)
}
if verbose {
fmt.Printf("\n%s Added to .git/info/exclude:\n", ui.RenderPass("✓"))
for _, p := range toAdd {
fmt.Printf(" %s\n", p)
}
fmt.Println("\nNote: .git/info/exclude is local-only and won't affect upstream.")
}
return nil
}
// containsExactPattern reports whether content has a line that, after
// trimming surrounding whitespace, equals pattern exactly. Exact-line
// matching avoids false positives like ".beads/issues.jsonl" matching
// a ".beads/" pattern.
func containsExactPattern(content, pattern string) bool {
remaining := content
for {
line, rest, more := strings.Cut(remaining, "\n")
if strings.TrimSpace(line) == pattern {
return true
}
if !more {
return false
}
remaining = rest
}
}
// promptForkExclude asks whether to configure .git/info/exclude for a fork
// workflow (GH#742). Returns true when the user accepts (empty answer
// defaults to yes); always returns false in quiet mode without prompting.
func promptForkExclude(upstreamURL string, quiet bool) bool {
if quiet {
// Never block on stdin in quiet mode.
return false
}
fmt.Printf("\n%s Detected fork (upstream: %s)\n\n", ui.RenderAccent("▶"), upstreamURL)
fmt.Println("Would you like to configure .git/info/exclude to keep beads files local?")
fmt.Println("This prevents beads from appearing in PRs to upstream.")
fmt.Print("\n[Y/n]: ")
answer, _ := bufio.NewReader(os.Stdin).ReadString('\n')
switch strings.TrimSpace(strings.ToLower(answer)) {
case "", "y", "yes":
// Default to yes.
return true
}
return false
}
// setupGlobalGitIgnore configures global gitignore to ignore beads and claude files for a specific project
// DEPRECATED: This function uses absolute paths which don't work in gitignore (GitHub #704).
// Use setupGitExclude instead for new code.

View File

@@ -5,7 +5,6 @@ import (
"fmt"
"os"
"strings"
"time"
"github.com/spf13/cobra"
"github.com/steveyegge/beads/internal/formula"
@@ -208,38 +207,27 @@ func runMolBond(cmd *cobra.Command, args []string) {
}
// Resolve both operands - can be issue IDs or formula names
// Formula names are cooked inline to ephemeral protos (gt-8tmz.25)
issueA, cookedA, err := resolveOrCookFormula(ctx, store, args[0], actor)
// Formula names are cooked inline to in-memory subgraphs (gt-4v1eo)
subgraphA, cookedA, err := resolveOrCookToSubgraph(ctx, store, args[0])
if err != nil {
fmt.Fprintf(os.Stderr, "Error: %v\n", err)
os.Exit(1)
}
issueB, cookedB, err := resolveOrCookFormula(ctx, store, args[1], actor)
subgraphB, cookedB, err := resolveOrCookToSubgraph(ctx, store, args[1])
if err != nil {
// Clean up first cooked formula if second one fails
if cookedA {
_ = deleteProtoSubgraph(ctx, store, issueA.ID)
}
fmt.Fprintf(os.Stderr, "Error: %v\n", err)
os.Exit(1)
}
// Track cooked formulas for cleanup (ephemeral protos deleted after use)
cleanupCooked := func() {
if cookedA {
_ = deleteProtoSubgraph(ctx, store, issueA.ID)
}
if cookedB {
_ = deleteProtoSubgraph(ctx, store, issueB.ID)
}
}
// No cleanup needed - in-memory subgraphs don't pollute the DB
issueA := subgraphA.Root
issueB := subgraphB.Root
idA := issueA.ID
idB := issueB.ID
// Determine operand types
aIsProto := isProto(issueA)
bIsProto := isProto(issueB)
aIsProto := issueA.IsTemplate || cookedA
bIsProto := issueB.IsTemplate || cookedB
// Dispatch based on operand types
// All operations use the main store; wisp flag determines ephemeral vs persistent
@@ -247,17 +235,27 @@ func runMolBond(cmd *cobra.Command, args []string) {
switch {
case aIsProto && bIsProto:
// Compound protos are templates - always persistent
// Note: Proto+proto bonding from formulas is a DB operation, not in-memory
result, err = bondProtoProto(ctx, store, issueA, issueB, bondType, customTitle, actor)
case aIsProto && !bIsProto:
result, err = bondProtoMol(ctx, store, issueA, issueB, bondType, vars, childRef, actor, wisp, pour)
// Pass subgraph directly if cooked from formula
if cookedA {
result, err = bondProtoMolWithSubgraph(ctx, store, subgraphA, issueA, issueB, bondType, vars, childRef, actor, wisp, pour)
} else {
result, err = bondProtoMol(ctx, store, issueA, issueB, bondType, vars, childRef, actor, wisp, pour)
}
case !aIsProto && bIsProto:
result, err = bondMolProto(ctx, store, issueA, issueB, bondType, vars, childRef, actor, wisp, pour)
// Pass subgraph directly if cooked from formula
if cookedB {
result, err = bondProtoMolWithSubgraph(ctx, store, subgraphB, issueB, issueA, bondType, vars, childRef, actor, wisp, pour)
} else {
result, err = bondMolProto(ctx, store, issueA, issueB, bondType, vars, childRef, actor, wisp, pour)
}
default:
result, err = bondMolMol(ctx, store, issueA, issueB, bondType, actor)
}
if err != nil {
cleanupCooked()
fmt.Fprintf(os.Stderr, "Error bonding: %v\n", err)
os.Exit(1)
}
@@ -265,10 +263,6 @@ func runMolBond(cmd *cobra.Command, args []string) {
// Schedule auto-flush - wisps are in main DB now, but JSONL export skips them
markDirtyAndScheduleFlush()
// Clean up ephemeral protos after successful bond
// These were only needed to get the proto structure; the spawned issues persist
cleanupCooked()
if jsonOutput {
outputJSON(result)
return
@@ -284,9 +278,6 @@ func runMolBond(cmd *cobra.Command, args []string) {
} else if pour {
fmt.Printf(" Phase: liquid (persistent, Wisp=false)\n")
}
if cookedA || cookedB {
fmt.Printf(" Ephemeral protos cleaned up after use.\n")
}
}
// isProto checks if an issue is a proto (has the template label)
@@ -394,11 +385,21 @@ func bondProtoProto(ctx context.Context, s storage.Storage, protoA, protoB *type
// bondProtoMol bonds a proto to an existing molecule by spawning the proto.
// If childRef is provided, generates custom IDs like "parent.childref" (dynamic bonding).
// protoSubgraph can be nil if proto is from DB (will be loaded), or pre-loaded for formulas.
func bondProtoMol(ctx context.Context, s storage.Storage, proto, mol *types.Issue, bondType string, vars map[string]string, childRef string, actorName string, wispFlag, pourFlag bool) (*BondResult, error) {
// Load proto subgraph
subgraph, err := loadTemplateSubgraph(ctx, s, proto.ID)
if err != nil {
return nil, fmt.Errorf("loading proto: %w", err)
return bondProtoMolWithSubgraph(ctx, s, nil, proto, mol, bondType, vars, childRef, actorName, wispFlag, pourFlag)
}
// bondProtoMolWithSubgraph is the internal implementation that accepts a pre-loaded subgraph.
func bondProtoMolWithSubgraph(ctx context.Context, s storage.Storage, protoSubgraph *TemplateSubgraph, proto, mol *types.Issue, bondType string, vars map[string]string, childRef string, actorName string, wispFlag, pourFlag bool) (*BondResult, error) {
// Use provided subgraph or load from DB
subgraph := protoSubgraph
if subgraph == nil {
var err error
subgraph, err = loadTemplateSubgraph(ctx, s, proto.ID)
if err != nil {
return nil, fmt.Errorf("loading proto: %w", err)
}
}
// Check for missing variables
@@ -564,18 +565,31 @@ func resolveOrDescribe(ctx context.Context, s storage.Storage, operand string) (
return nil, f.Formula, nil
}
// resolveOrCookFormula tries to resolve an operand as an issue ID.
// If not found and it looks like a formula name, cooks the formula inline.
// Returns the issue, whether it was cooked (ephemeral proto), and any error.
// resolveOrCookToSubgraph tries to resolve an operand as an issue ID or formula.
// If it's an issue, loads the subgraph from DB. If it's a formula, cooks inline to subgraph.
// Returns the subgraph, whether it was cooked from formula, and any error.
//
// This implements gt-8tmz.25: formula names are cooked inline as ephemeral protos.
func resolveOrCookFormula(ctx context.Context, s storage.Storage, operand string, actorName string) (*types.Issue, bool, error) {
// This implements gt-4v1eo: formulas are cooked to in-memory subgraphs (no DB storage).
func resolveOrCookToSubgraph(ctx context.Context, s storage.Storage, operand string) (*TemplateSubgraph, bool, error) {
// First, try to resolve as an existing issue
id, err := utils.ResolvePartialID(ctx, s, operand)
if err == nil {
issue, err := s.GetIssue(ctx, id)
if err == nil {
return issue, false, nil
// Check if it's a proto (template)
if isProto(issue) {
subgraph, err := loadTemplateSubgraph(ctx, s, id)
if err != nil {
return nil, false, fmt.Errorf("loading proto subgraph '%s': %w", id, err)
}
return subgraph, false, nil
}
// It's a molecule, not a proto - wrap it as a single-issue subgraph
return &TemplateSubgraph{
Root: issue,
Issues: []*types.Issue{issue},
IssueMap: map[string]*types.Issue{issue.ID: issue},
}, false, nil
}
}
@@ -584,72 +598,13 @@ func resolveOrCookFormula(ctx context.Context, s storage.Storage, operand string
return nil, false, fmt.Errorf("'%s' not found (not an issue ID or formula name)", operand)
}
// Try to load and cook the formula
parser := formula.NewParser()
f, err := parser.LoadByName(operand)
// Try to cook formula inline to in-memory subgraph (gt-4v1eo)
subgraph, err := resolveAndCookFormula(operand, nil)
if err != nil {
return nil, false, fmt.Errorf("'%s' not found as issue or formula: %w", operand, err)
}
// Resolve formula (inheritance, etc)
resolved, err := parser.Resolve(f)
if err != nil {
return nil, false, fmt.Errorf("resolving formula '%s': %w", operand, err)
}
// Apply control flow operators (gt-8tmz.4)
controlFlowSteps, err := formula.ApplyControlFlow(resolved.Steps, resolved.Compose)
if err != nil {
return nil, false, fmt.Errorf("applying control flow to '%s': %w", operand, err)
}
resolved.Steps = controlFlowSteps
// Apply advice transformations (gt-8tmz.2)
if len(resolved.Advice) > 0 {
resolved.Steps = formula.ApplyAdvice(resolved.Steps, resolved.Advice)
}
// Apply expansion operators (gt-8tmz.3)
if resolved.Compose != nil && (len(resolved.Compose.Expand) > 0 || len(resolved.Compose.Map) > 0) {
expandedSteps, err := formula.ApplyExpansions(resolved.Steps, resolved.Compose, parser)
if err != nil {
return nil, false, fmt.Errorf("applying expansions to '%s': %w", operand, err)
}
resolved.Steps = expandedSteps
}
// Apply aspects (gt-8tmz.5)
if resolved.Compose != nil && len(resolved.Compose.Aspects) > 0 {
for _, aspectName := range resolved.Compose.Aspects {
aspectFormula, err := parser.LoadByName(aspectName)
if err != nil {
return nil, false, fmt.Errorf("loading aspect '%s': %w", aspectName, err)
}
if aspectFormula.Type != formula.TypeAspect {
return nil, false, fmt.Errorf("'%s' is not an aspect formula (type=%s)", aspectName, aspectFormula.Type)
}
if len(aspectFormula.Advice) > 0 {
resolved.Steps = formula.ApplyAdvice(resolved.Steps, aspectFormula.Advice)
}
}
}
// Cook the formula to create an ephemeral proto
// Use a unique ID to avoid collision with existing protos
// Format: _ephemeral-<formula>-<timestamp> (underscore prefix marks it as ephemeral)
protoID := fmt.Sprintf("_ephemeral-%s-%d", resolved.Formula, time.Now().UnixNano())
result, err := cookFormula(ctx, s, resolved, protoID)
if err != nil {
return nil, false, fmt.Errorf("cooking formula '%s': %w", operand, err)
}
// Load the cooked proto
issue, err := s.GetIssue(ctx, result.ProtoID)
if err != nil {
return nil, false, fmt.Errorf("loading cooked proto '%s': %w", result.ProtoID, err)
}
return issue, true, nil
return subgraph, true, nil
}
// looksLikeFormulaName checks if an operand looks like a formula name.

View File

@@ -1,84 +1,134 @@
package main
import (
"encoding/json"
"fmt"
"os"
"sort"
"strings"
"github.com/spf13/cobra"
"github.com/steveyegge/beads/internal/rpc"
"github.com/steveyegge/beads/internal/types"
"github.com/steveyegge/beads/internal/ui"
)
// CatalogEntry represents a formula in the catalog.
type CatalogEntry struct {
Name string `json:"name"`
Type string `json:"type"`
Description string `json:"description"`
Source string `json:"source"`
Steps int `json:"steps"`
Vars []string `json:"vars,omitempty"`
}
var molCatalogCmd = &cobra.Command{
Use: "catalog",
Aliases: []string{"list", "ls"},
Short: "List available molecules",
Run: func(cmd *cobra.Command, args []string) {
ctx := rootCtx
var molecules []*types.Issue
Short: "List available molecule formulas",
Long: `List formulas available for bd pour / bd wisp create.
if daemonClient != nil {
resp, err := daemonClient.List(&rpc.ListArgs{})
Formulas are ephemeral proto definitions stored as .formula.json files.
They are cooked inline when pouring, never stored as database beads.
Search paths (in priority order):
1. .beads/formulas/ (project-level)
2. ~/.beads/formulas/ (user-level)
3. ~/gt/.beads/formulas/ (Gas Town level)`,
Run: func(cmd *cobra.Command, args []string) {
typeFilter, _ := cmd.Flags().GetString("type")
// Get all search paths and scan for formulas
searchPaths := getFormulaSearchPaths()
seen := make(map[string]bool)
var entries []CatalogEntry
for _, dir := range searchPaths {
formulas, err := scanFormulaDir(dir)
if err != nil {
fmt.Fprintf(os.Stderr, "Error loading molecules: %v\n", err)
os.Exit(1)
continue // Skip inaccessible directories
}
var allIssues []*types.Issue
if err := json.Unmarshal(resp.Data, &allIssues); err == nil {
for _, issue := range allIssues {
for _, label := range issue.Labels {
if label == MoleculeLabel {
molecules = append(molecules, issue)
break
}
}
for _, f := range formulas {
if seen[f.Formula] {
continue // Skip shadowed formulas
}
seen[f.Formula] = true
// Apply type filter
if typeFilter != "" && string(f.Type) != typeFilter {
continue
}
// Extract variable names
var varNames []string
for name := range f.Vars {
varNames = append(varNames, name)
}
sort.Strings(varNames)
entries = append(entries, CatalogEntry{
Name: f.Formula,
Type: string(f.Type),
Description: truncateDescription(f.Description, 60),
Source: f.Source,
Steps: countSteps(f.Steps),
Vars: varNames,
})
}
} else if store != nil {
var err error
molecules, err = store.GetIssuesByLabel(ctx, MoleculeLabel)
if err != nil {
fmt.Fprintf(os.Stderr, "Error loading molecules: %v\n", err)
os.Exit(1)
}
} else {
fmt.Fprintf(os.Stderr, "Error: no database connection\n")
os.Exit(1)
}
// Sort by name
sort.Slice(entries, func(i, j int) bool {
return entries[i].Name < entries[j].Name
})
if jsonOutput {
outputJSON(molecules)
outputJSON(entries)
return
}
if len(molecules) == 0 {
fmt.Println("No protos available.")
fmt.Println("\nTo create a proto:")
fmt.Println(" 1. Create an epic with child issues")
fmt.Println(" 2. Add the 'template' label: bd label add <epic-id> template")
fmt.Println(" 3. Use {{variable}} placeholders in titles/descriptions")
fmt.Println("\nTo instantiate a molecule from a proto:")
fmt.Println(" bd pour <id> --var key=value # persistent mol")
fmt.Println(" bd wisp create <id> --var key=value # ephemeral wisp")
if len(entries) == 0 {
fmt.Println("No formulas found.")
fmt.Println("\nTo create a formula, write a .formula.json file:")
fmt.Println(" .beads/formulas/my-workflow.formula.json")
fmt.Println("\nOr distill from existing work:")
fmt.Println(" bd mol distill <epic-id> my-workflow")
fmt.Println("\nTo instantiate from formula:")
fmt.Println(" bd pour <formula-name> --var key=value # persistent mol")
fmt.Println(" bd wisp create <formula-name> --var key=value # ephemeral wisp")
return
}
fmt.Printf("%s\n", ui.RenderPass("Protos (for bd pour / bd wisp create):"))
for _, mol := range molecules {
vars := extractVariables(mol.Title + " " + mol.Description)
varStr := ""
if len(vars) > 0 {
varStr = fmt.Sprintf(" (vars: %s)", strings.Join(vars, ", "))
fmt.Printf("%s\n\n", ui.RenderPass("Formulas (for bd pour / bd wisp create):"))
// Group by type for display
byType := make(map[string][]CatalogEntry)
for _, e := range entries {
byType[e.Type] = append(byType[e.Type], e)
}
// Print workflow types first (most common for pour/wisp)
typeOrder := []string{"workflow", "expansion", "aspect"}
for _, t := range typeOrder {
typeEntries := byType[t]
if len(typeEntries) == 0 {
continue
}
fmt.Printf(" %s: %s%s\n", ui.RenderAccent(mol.ID), mol.Title, varStr)
typeIcon := getTypeIcon(t)
fmt.Printf("%s %s:\n", typeIcon, strings.Title(t))
for _, e := range typeEntries {
varInfo := ""
if len(e.Vars) > 0 {
varInfo = fmt.Sprintf(" (vars: %s)", strings.Join(e.Vars, ", "))
}
fmt.Printf(" %s: %s%s\n", ui.RenderAccent(e.Name), e.Description, varInfo)
}
fmt.Println()
}
fmt.Println()
},
}
func init() {
molCatalogCmd.Flags().String("type", "", "Filter by formula type (workflow, expansion, aspect)")
molCmd.AddCommand(molCatalogCmd)
}

View File

@@ -1,28 +1,29 @@
package main
import (
"context"
"encoding/json"
"fmt"
"os"
"path/filepath"
"regexp"
"strings"
"github.com/spf13/cobra"
"github.com/steveyegge/beads/internal/storage"
"github.com/steveyegge/beads/internal/types"
"github.com/steveyegge/beads/internal/formula"
"github.com/steveyegge/beads/internal/ui"
"github.com/steveyegge/beads/internal/utils"
)
var molDistillCmd = &cobra.Command{
Use: "distill <epic-id>",
Short: "Extract a reusable proto from an existing epic",
Long: `Distill a molecule by extracting a reusable proto from an existing epic.
Use: "distill <epic-id> [formula-name]",
Short: "Extract a formula from an existing epic",
Long: `Distill a molecule by extracting a reusable formula from an existing epic.
This is the reverse of spawn: instead of proto → molecule, it's molecule → proto.
This is the reverse of pour: instead of formula → molecule, it's molecule → formula.
The distill command:
1. Loads the existing epic and all its children
2. Clones the structure as a new proto (adds "template" label)
2. Converts the structure to a .formula.json file
3. Replaces concrete values with {{variable}} placeholders (via --var flags)
Use cases:
@@ -34,19 +35,23 @@ Variable syntax (both work - we detect which side is the concrete value):
--var branch=feature-auth Spawn-style: variable=value (recommended)
--var feature-auth=branch Substitution-style: value=variable
Output locations (first writable wins):
1. .beads/formulas/ (project-level, default)
2. ~/.beads/formulas/ (user-level, if project not writable)
Examples:
bd mol distill bd-o5xe --as "Release Workflow"
bd mol distill bd-abc --var feature_name=auth-refactor --var version=1.0.0`,
Args: cobra.ExactArgs(1),
bd mol distill bd-o5xe my-workflow
bd mol distill bd-abc release-workflow --var feature_name=auth-refactor`,
Args: cobra.RangeArgs(1, 2),
Run: runMolDistill,
}
// DistillResult holds the result of a distill operation
type DistillResult struct {
ProtoID string `json:"proto_id"`
IDMapping map[string]string `json:"id_mapping"` // old ID -> new ID
Created int `json:"created"` // number of issues created
Variables []string `json:"variables"` // variables introduced
FormulaName string `json:"formula_name"`
FormulaPath string `json:"formula_path"`
Steps int `json:"steps"` // number of steps in formula
Variables []string `json:"variables"` // variables introduced
}
// collectSubgraphText gathers all searchable text from a molecule subgraph
@@ -95,11 +100,9 @@ func parseDistillVar(varFlag, searchableText string) (string, string, error) {
// runMolDistill implements the distill command
func runMolDistill(cmd *cobra.Command, args []string) {
CheckReadonly("mol distill")
ctx := rootCtx
// mol distill requires direct store access
// mol distill requires direct store access for reading the epic
if store == nil {
if daemonClient != nil {
fmt.Fprintf(os.Stderr, "Error: mol distill requires direct database access\n")
@@ -110,9 +113,9 @@ func runMolDistill(cmd *cobra.Command, args []string) {
os.Exit(1)
}
customTitle, _ := cmd.Flags().GetString("as")
varFlags, _ := cmd.Flags().GetStringSlice("var")
dryRun, _ := cmd.Flags().GetBool("dry-run")
outputDir, _ := cmd.Flags().GetString("output")
// Resolve epic ID
epicID, err := utils.ResolvePartialID(ctx, store, args[0])
@@ -121,15 +124,23 @@ func runMolDistill(cmd *cobra.Command, args []string) {
os.Exit(1)
}
// Load the epic subgraph (needed for smart var detection)
// Load the epic subgraph
subgraph, err := loadTemplateSubgraph(ctx, store, epicID)
if err != nil {
fmt.Fprintf(os.Stderr, "Error loading epic: %v\n", err)
os.Exit(1)
}
// Determine formula name
formulaName := ""
if len(args) > 1 {
formulaName = args[1]
} else {
// Derive from epic title
formulaName = sanitizeFormulaName(subgraph.Root.Title)
}
// Parse variable substitutions with smart detection
// Accepts both spawn-style (variable=value) and substitution-style (value=variable)
replacements := make(map[string]string)
if len(varFlags) > 0 {
searchableText := collectSubgraphText(subgraph)
@@ -143,76 +154,127 @@ func runMolDistill(cmd *cobra.Command, args []string) {
}
}
if dryRun {
fmt.Printf("\nDry run: would distill %d issues from %s into a proto\n\n", len(subgraph.Issues), epicID)
fmt.Printf("Source: %s\n", subgraph.Root.Title)
if customTitle != "" {
fmt.Printf("Proto title: %s\n", customTitle)
// Convert to formula
f := subgraphToFormula(subgraph, formulaName, replacements)
// Determine output path
outputPath := ""
if outputDir != "" {
outputPath = filepath.Join(outputDir, formulaName+formula.FormulaExt)
} else {
// Find first writable formula directory
outputPath = findWritableFormulaDir(formulaName)
if outputPath == "" {
fmt.Fprintf(os.Stderr, "Error: no writable formula directory found\n")
fmt.Fprintf(os.Stderr, "Try: mkdir -p .beads/formulas\n")
os.Exit(1)
}
}
if dryRun {
fmt.Printf("\nDry run: would distill %d steps from %s into formula\n\n", countSteps(f.Steps), epicID)
fmt.Printf("Formula: %s\n", formulaName)
fmt.Printf("Output: %s\n", outputPath)
if len(replacements) > 0 {
fmt.Printf("\nVariable substitutions:\n")
fmt.Printf("\nVariables:\n")
for value, varName := range replacements {
fmt.Printf(" \"%s\" → {{%s}}\n", value, varName)
fmt.Printf(" %s: \"%s\" → {{%s}}\n", varName, value, varName)
}
}
fmt.Printf("\nStructure:\n")
for _, issue := range subgraph.Issues {
title := issue.Title
for value, varName := range replacements {
title = strings.ReplaceAll(title, value, "{{"+varName+"}}")
}
prefix := " "
if issue.ID == subgraph.Root.ID {
prefix = "→ "
}
fmt.Printf("%s%s\n", prefix, title)
}
printFormulaStepsTree(f.Steps, "")
return
}
// Distill the molecule into a proto
result, err := distillMolecule(ctx, store, subgraph, customTitle, replacements, actor)
if err != nil {
fmt.Fprintf(os.Stderr, "Error distilling molecule: %v\n", err)
// Ensure output directory exists
dir := filepath.Dir(outputPath)
if err := os.MkdirAll(dir, 0755); err != nil {
fmt.Fprintf(os.Stderr, "Error creating directory %s: %v\n", dir, err)
os.Exit(1)
}
// Schedule auto-flush
markDirtyAndScheduleFlush()
// Write formula
data, err := json.MarshalIndent(f, "", " ")
if err != nil {
fmt.Fprintf(os.Stderr, "Error encoding formula: %v\n", err)
os.Exit(1)
}
// #nosec G306 -- Formula files are not sensitive
if err := os.WriteFile(outputPath, data, 0644); err != nil {
fmt.Fprintf(os.Stderr, "Error writing formula: %v\n", err)
os.Exit(1)
}
result := &DistillResult{
FormulaName: formulaName,
FormulaPath: outputPath,
Steps: countSteps(f.Steps),
Variables: getVarNames(replacements),
}
if jsonOutput {
outputJSON(result)
return
}
fmt.Printf("%s Distilled proto: created %d issues\n", ui.RenderPass("✓"), result.Created)
fmt.Printf(" Proto ID: %s\n", result.ProtoID)
fmt.Printf("%s Distilled formula: %d steps\n", ui.RenderPass("✓"), result.Steps)
fmt.Printf(" Formula: %s\n", result.FormulaName)
fmt.Printf(" Path: %s\n", result.FormulaPath)
if len(result.Variables) > 0 {
fmt.Printf(" Variables: %s\n", strings.Join(result.Variables, ", "))
}
fmt.Printf("\nTo instantiate this proto:\n")
fmt.Printf(" bd pour %s", result.ProtoID[:8])
fmt.Printf("\nTo instantiate:\n")
fmt.Printf(" bd pour %s", result.FormulaName)
for _, v := range result.Variables {
fmt.Printf(" --var %s=<value>", v)
}
fmt.Println()
}
// distillMolecule creates a new proto from an existing epic
func distillMolecule(ctx context.Context, s storage.Storage, subgraph *MoleculeSubgraph, customTitle string, replacements map[string]string, actorName string) (*DistillResult, error) {
if s == nil {
return nil, fmt.Errorf("no database connection")
// sanitizeFormulaName converts a title to a valid formula name.
//
// The title is lowercased, every maximal run of characters outside
// [a-z0-9] (including hyphen runs) collapses to a single '-', and leading
// and trailing separators are dropped. An empty result yields "untitled".
// Implemented as a single pass instead of per-call regexp compilation.
func sanitizeFormulaName(title string) string {
	var b strings.Builder
	pendingSep := false
	for _, r := range strings.ToLower(title) {
		alnum := (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9')
		if !alnum {
			// Remember that a separator is due, but emit it only when
			// another allowed character follows (trims leading/trailing
			// hyphens and collapses runs for free).
			pendingSep = true
			continue
		}
		if pendingSep && b.Len() > 0 {
			b.WriteByte('-')
		}
		pendingSep = false
		b.WriteRune(r)
	}
	name := b.String()
	if name == "" {
		name = "untitled"
	}
	return name
}
// Build the reverse mapping for tracking variables introduced
var variables []string
// findWritableFormulaDir returns the full output path (directory joined with
// formulaName plus the formula extension) inside the first formula search
// directory that can be created and written to, or "" when none is writable.
//
// Writability is probed with a uniquely named temp file rather than a fixed
// ".write-test" name, so concurrent invocations cannot collide and a
// pre-existing user file can never be clobbered by the probe.
func findWritableFormulaDir(formulaName string) string {
	for _, dir := range getFormulaSearchPaths() {
		// Create the directory if it doesn't exist; skip it on failure.
		if err := os.MkdirAll(dir, 0755); err != nil {
			continue
		}
		probe, err := os.CreateTemp(dir, ".write-test-*")
		if err != nil {
			continue // Not writable; try the next search path.
		}
		probePath := probe.Name()
		_ = probe.Close()
		_ = os.Remove(probePath)
		return filepath.Join(dir, formulaName+formula.FormulaExt)
	}
	return ""
}
// getVarNames extracts the variable names (map values) from a
// concrete-value → variable-name replacements map.
//
// Returns nil for an empty or nil map, preserving nil-slice semantics for
// callers that JSON-encode the result. Order is unspecified because map
// iteration order is random.
func getVarNames(replacements map[string]string) []string {
	var names []string
	for _, varName := range replacements {
		names = append(names, varName)
	}
	return names
}
// Generate new IDs and create mapping
idMapping := make(map[string]string)
// subgraphToFormula converts a molecule subgraph to a formula
func subgraphToFormula(subgraph *TemplateSubgraph, name string, replacements map[string]string) *formula.Formula {
// Helper to apply replacements
applyReplacements := func(text string) string {
result := text
@@ -222,87 +284,88 @@ func distillMolecule(ctx context.Context, s storage.Storage, subgraph *MoleculeS
return result
}
// Use transaction for atomicity
err := s.RunInTransaction(ctx, func(tx storage.Transaction) error {
// First pass: create all issues with new IDs
for _, oldIssue := range subgraph.Issues {
// Determine title
title := applyReplacements(oldIssue.Title)
if oldIssue.ID == subgraph.Root.ID && customTitle != "" {
title = customTitle
}
// Add template label to all issues
labels := append([]string{}, oldIssue.Labels...)
hasTemplateLabel := false
for _, l := range labels {
if l == MoleculeLabel {
hasTemplateLabel = true
break
}
}
if !hasTemplateLabel {
labels = append(labels, MoleculeLabel)
}
newIssue := &types.Issue{
Title: title,
Description: applyReplacements(oldIssue.Description),
Design: applyReplacements(oldIssue.Design),
AcceptanceCriteria: applyReplacements(oldIssue.AcceptanceCriteria),
Notes: applyReplacements(oldIssue.Notes),
Status: types.StatusOpen, // Protos start fresh
Priority: oldIssue.Priority,
IssueType: oldIssue.IssueType,
Labels: labels,
EstimatedMinutes: oldIssue.EstimatedMinutes,
IDPrefix: "proto", // bd-hobo: distinct prefix for protos
}
if err := tx.CreateIssue(ctx, newIssue, actorName); err != nil {
return fmt.Errorf("failed to create proto issue from %s: %w", oldIssue.ID, err)
}
idMapping[oldIssue.ID] = newIssue.ID
// Build ID mapping for step references
idToStepID := make(map[string]string)
for _, issue := range subgraph.Issues {
// Create a sanitized step ID from the issue ID
stepID := sanitizeFormulaName(issue.Title)
if stepID == "" {
stepID = issue.ID
}
// Second pass: recreate dependencies with new IDs
for _, dep := range subgraph.Dependencies {
newFromID, ok1 := idMapping[dep.IssueID]
newToID, ok2 := idMapping[dep.DependsOnID]
if !ok1 || !ok2 {
continue // Skip if either end is outside the subgraph
}
newDep := &types.Dependency{
IssueID: newFromID,
DependsOnID: newToID,
Type: dep.Type,
}
if err := tx.AddDependency(ctx, newDep, actorName); err != nil {
return fmt.Errorf("failed to create dependency: %w", err)
}
}
return nil
})
if err != nil {
return nil, err
idToStepID[issue.ID] = stepID
}
return &DistillResult{
ProtoID: idMapping[subgraph.Root.ID],
IDMapping: idMapping,
Created: len(subgraph.Issues),
Variables: variables,
}, nil
// Build dependency map (issue ID -> list of depends-on IDs)
depsByIssue := make(map[string][]string)
for _, dep := range subgraph.Dependencies {
depsByIssue[dep.IssueID] = append(depsByIssue[dep.IssueID], dep.DependsOnID)
}
// Convert issues to steps
var steps []*formula.Step
for _, issue := range subgraph.Issues {
if issue.ID == subgraph.Root.ID {
continue // Root becomes the formula itself
}
step := &formula.Step{
ID: idToStepID[issue.ID],
Title: applyReplacements(issue.Title),
Description: applyReplacements(issue.Description),
Type: string(issue.IssueType),
}
// Copy priority if set
if issue.Priority > 0 {
p := issue.Priority
step.Priority = &p
}
// Copy labels (excluding internal ones)
for _, label := range issue.Labels {
if label != MoleculeLabel && !strings.HasPrefix(label, "mol:") {
step.Labels = append(step.Labels, label)
}
}
// Convert dependencies to depends_on (skip root)
if deps, ok := depsByIssue[issue.ID]; ok {
for _, depID := range deps {
if depID == subgraph.Root.ID {
continue // Skip dependency on root (becomes formula itself)
}
if stepID, ok := idToStepID[depID]; ok {
step.DependsOn = append(step.DependsOn, stepID)
}
}
}
steps = append(steps, step)
}
// Build variable definitions
vars := make(map[string]*formula.VarDef)
for _, varName := range replacements {
vars[varName] = &formula.VarDef{
Description: fmt.Sprintf("Value for %s", varName),
Required: true,
}
}
return &formula.Formula{
Formula: name,
Description: applyReplacements(subgraph.Root.Description),
Version: 1,
Type: formula.TypeWorkflow,
Vars: vars,
Steps: steps,
}
}
// init registers the distill command and its flags.
// Note: each flag name is registered exactly once — pflag panics with
// "flag redefined" on duplicate registration.
func init() {
	molDistillCmd.Flags().String("as", "", "Custom title for the new proto")
	molDistillCmd.Flags().StringSlice("var", []string{}, "Replace value with {{variable}} placeholder (variable=value)")
	molDistillCmd.Flags().Bool("dry-run", false, "Preview what would be created")
	molDistillCmd.Flags().String("output", "", "Output directory for formula file")
	molCmd.AddCommand(molDistillCmd)
}

View File

@@ -136,7 +136,7 @@ func findStaleMolecules(ctx context.Context, s storage.Storage, blockingOnly, un
}
// Get blocked issues to find what each stale molecule is blocking
blockedIssues, err := s.GetBlockedIssues(ctx)
blockedIssues, err := s.GetBlockedIssues(ctx, types.WorkFilter{})
if err != nil {
return nil, fmt.Errorf("querying blocked issues: %w", err)
}

View File

@@ -44,7 +44,7 @@ func runPour(cmd *cobra.Command, args []string) {
ctx := rootCtx
// Pour requires direct store access for subgraph loading and cloning
// Pour requires direct store access for cloning
if store == nil {
if daemonClient != nil {
fmt.Fprintf(os.Stderr, "Error: pour requires direct database access\n")
@@ -72,36 +72,56 @@ func runPour(cmd *cobra.Command, args []string) {
vars[parts[0]] = parts[1]
}
// Resolve proto ID
protoID, err := utils.ResolvePartialID(ctx, store, args[0])
if err != nil {
fmt.Fprintf(os.Stderr, "Error resolving proto ID %s: %v\n", args[0], err)
os.Exit(1)
// Try to load as formula first (ephemeral proto - gt-4v1eo)
// If that fails, fall back to loading from DB (legacy proto beads)
var subgraph *TemplateSubgraph
var protoID string
isFormula := false
// Try to cook formula inline (gt-4v1eo: ephemeral protos)
// This works for any valid formula name, not just "mol-" prefixed ones
sg, err := resolveAndCookFormula(args[0], nil)
if err == nil {
subgraph = sg
protoID = sg.Root.ID
isFormula = true
}
// Verify it's a proto
protoIssue, err := store.GetIssue(ctx, protoID)
if err != nil {
fmt.Fprintf(os.Stderr, "Error loading proto %s: %v\n", protoID, err)
os.Exit(1)
}
if !isProto(protoIssue) {
fmt.Fprintf(os.Stderr, "Error: %s is not a proto (missing '%s' label)\n", protoID, MoleculeLabel)
os.Exit(1)
if subgraph == nil {
// Try to load as existing proto bead (legacy path)
resolvedID, err := utils.ResolvePartialID(ctx, store, args[0])
if err != nil {
fmt.Fprintf(os.Stderr, "Error: %s not found as formula or proto ID\n", args[0])
os.Exit(1)
}
protoID = resolvedID
// Verify it's a proto
protoIssue, err := store.GetIssue(ctx, protoID)
if err != nil {
fmt.Fprintf(os.Stderr, "Error loading proto %s: %v\n", protoID, err)
os.Exit(1)
}
if !isProto(protoIssue) {
fmt.Fprintf(os.Stderr, "Error: %s is not a proto (missing '%s' label)\n", protoID, MoleculeLabel)
os.Exit(1)
}
// Load the proto subgraph from DB
subgraph, err = loadTemplateSubgraph(ctx, store, protoID)
if err != nil {
fmt.Fprintf(os.Stderr, "Error loading proto: %v\n", err)
os.Exit(1)
}
}
// Load the proto subgraph
subgraph, err := loadTemplateSubgraph(ctx, store, protoID)
if err != nil {
fmt.Fprintf(os.Stderr, "Error loading proto: %v\n", err)
os.Exit(1)
}
_ = isFormula // For future use (e.g., logging)
// Resolve and load attached protos
type attachmentInfo struct {
id string
issue *types.Issue
subgraph *MoleculeSubgraph
subgraph *TemplateSubgraph
}
var attachments []attachmentInfo
for _, attachArg := range attachFlags {
@@ -131,10 +151,13 @@ func runPour(cmd *cobra.Command, args []string) {
})
}
// Check for missing variables
requiredVars := extractAllVariables(subgraph)
// Apply variable defaults from formula (gt-4v1eo)
vars = applyVariableDefaults(vars, subgraph)
// Check for missing required variables (those without defaults)
requiredVars := extractRequiredVariables(subgraph)
for _, attach := range attachments {
attachVars := extractAllVariables(attach.subgraph)
attachVars := extractRequiredVariables(attach.subgraph)
for _, v := range attachVars {
found := false
for _, rv := range requiredVars {

View File

@@ -39,6 +39,7 @@ This is useful for agents executing molecules to see which steps can run next.`,
labels, _ := cmd.Flags().GetStringSlice("label")
labelsAny, _ := cmd.Flags().GetStringSlice("label-any")
issueType, _ := cmd.Flags().GetString("type")
parentID, _ := cmd.Flags().GetString("parent")
// Use global jsonOutput set by PersistentPreRun (respects config.yaml + env vars)
// Normalize labels: trim, dedupe, remove empty
@@ -69,6 +70,9 @@ This is useful for agents executing molecules to see which steps can run next.`,
if assignee != "" && !unassigned {
filter.Assignee = &assignee
}
if parentID != "" {
filter.ParentID = &parentID
}
// Validate sort policy
if !filter.SortPolicy.IsValid() {
fmt.Fprintf(os.Stderr, "Error: invalid sort policy '%s'. Valid values: hybrid, priority, oldest\n", sortPolicy)
@@ -84,6 +88,7 @@ This is useful for agents executing molecules to see which steps can run next.`,
SortPolicy: sortPolicy,
Labels: labels,
LabelsAny: labelsAny,
ParentID: parentID,
}
if cmd.Flags().Changed("priority") {
priority, _ := cmd.Flags().GetInt("priority")
@@ -229,12 +234,17 @@ var blockedCmd = &cobra.Command{
var err error
store, err = sqlite.New(ctx, dbPath)
if err != nil {
fmt.Fprintf(os.Stderr, "Error: failed to open database: %v\n", err)
os.Exit(1)
fmt.Fprintf(os.Stderr, "Error: failed to open database: %v\n", err)
os.Exit(1)
}
defer func() { _ = store.Close() }()
}
blocked, err := store.GetBlockedIssues(ctx)
}
parentID, _ := cmd.Flags().GetString("parent")
var blockedFilter types.WorkFilter
if parentID != "" {
blockedFilter.ParentID = &parentID
}
blocked, err := store.GetBlockedIssues(ctx, blockedFilter)
if err != nil {
fmt.Fprintf(os.Stderr, "Error: %v\n", err)
os.Exit(1)
@@ -410,6 +420,8 @@ func init() {
readyCmd.Flags().StringSlice("label-any", []string{}, "Filter by labels (OR: must have AT LEAST ONE). Can combine with --label")
readyCmd.Flags().StringP("type", "t", "", "Filter by issue type (task, bug, feature, epic, merge-request)")
readyCmd.Flags().String("mol", "", "Filter to steps within a specific molecule")
readyCmd.Flags().String("parent", "", "Filter to descendants of this bead/epic")
rootCmd.AddCommand(readyCmd)
blockedCmd.Flags().String("parent", "", "Filter to descendants of this bead/epic")
rootCmd.AddCommand(blockedCmd)
}

View File

@@ -204,7 +204,7 @@ func TestRelateCommand(t *testing.T) {
}
// Issue1 should NOT be blocked (relates-to doesn't block)
blocked, err := s.GetBlockedIssues(ctx)
blocked, err := s.GetBlockedIssues(ctx, types.WorkFilter{})
if err != nil {
t.Fatalf("GetBlockedIssues failed: %v", err)
}

View File

@@ -925,6 +925,7 @@ var closeCmd = &cobra.Command{
force, _ := cmd.Flags().GetBool("force")
continueFlag, _ := cmd.Flags().GetBool("continue")
noAuto, _ := cmd.Flags().GetBool("no-auto")
suggestNext, _ := cmd.Flags().GetBool("suggest-next")
ctx := rootCtx
@@ -933,6 +934,11 @@ var closeCmd = &cobra.Command{
FatalErrorRespectJSON("--continue only works when closing a single issue")
}
// --suggest-next only works with a single issue
if suggestNext && len(args) > 1 {
FatalErrorRespectJSON("--suggest-next only works when closing a single issue")
}
// Resolve partial IDs first
var resolvedIDs []string
if daemonClient != nil {
@@ -974,8 +980,9 @@ var closeCmd = &cobra.Command{
}
closeArgs := &rpc.CloseArgs{
ID: id,
Reason: reason,
ID: id,
Reason: reason,
SuggestNext: suggestNext,
}
resp, err := daemonClient.CloseIssue(closeArgs)
if err != nil {
@@ -983,18 +990,44 @@ var closeCmd = &cobra.Command{
continue
}
var issue types.Issue
if err := json.Unmarshal(resp.Data, &issue); err == nil {
// Run close hook (bd-kwro.8)
if hookRunner != nil {
hookRunner.Run(hooks.EventClose, &issue)
// Handle response based on whether SuggestNext was requested (GH#679)
if suggestNext {
var result rpc.CloseResult
if err := json.Unmarshal(resp.Data, &result); err == nil {
if result.Closed != nil {
// Run close hook (bd-kwro.8)
if hookRunner != nil {
hookRunner.Run(hooks.EventClose, result.Closed)
}
if jsonOutput {
closedIssues = append(closedIssues, result.Closed)
}
}
if !jsonOutput {
fmt.Printf("%s Closed %s: %s\n", ui.RenderPass("✓"), id, reason)
// Display newly unblocked issues (GH#679)
if len(result.Unblocked) > 0 {
fmt.Printf("\nNewly unblocked:\n")
for _, issue := range result.Unblocked {
fmt.Printf(" • %s %q (P%d)\n", issue.ID, issue.Title, issue.Priority)
}
}
}
}
if jsonOutput {
closedIssues = append(closedIssues, &issue)
} else {
var issue types.Issue
if err := json.Unmarshal(resp.Data, &issue); err == nil {
// Run close hook (bd-kwro.8)
if hookRunner != nil {
hookRunner.Run(hooks.EventClose, &issue)
}
if jsonOutput {
closedIssues = append(closedIssues, &issue)
}
}
if !jsonOutput {
fmt.Printf("%s Closed %s: %s\n", ui.RenderPass("✓"), id, reason)
}
}
if !jsonOutput {
fmt.Printf("%s Closed %s: %s\n", ui.RenderPass("✓"), id, reason)
}
}
@@ -1045,6 +1078,24 @@ var closeCmd = &cobra.Command{
}
}
// Handle --suggest-next flag in direct mode (GH#679)
if suggestNext && len(resolvedIDs) == 1 && closedCount > 0 {
unblocked, err := store.GetNewlyUnblockedByClose(ctx, resolvedIDs[0])
if err == nil && len(unblocked) > 0 {
if jsonOutput {
outputJSON(map[string]interface{}{
"closed": closedIssues,
"unblocked": unblocked,
})
return
}
fmt.Printf("\nNewly unblocked:\n")
for _, issue := range unblocked {
fmt.Printf(" • %s %q (P%d)\n", issue.ID, issue.Title, issue.Priority)
}
}
}
// Schedule auto-flush if any issues were closed
if len(args) > 0 {
markDirtyAndScheduleFlush()
@@ -1354,5 +1405,6 @@ func init() {
closeCmd.Flags().BoolP("force", "f", false, "Force close pinned issues")
closeCmd.Flags().Bool("continue", false, "Auto-advance to next step in molecule")
closeCmd.Flags().Bool("no-auto", false, "With --continue, show next step but don't claim it")
closeCmd.Flags().Bool("suggest-next", false, "Show newly unblocked issues after closing (GH#679)")
rootCmd.AddCommand(closeCmd)
}

View File

@@ -10,6 +10,7 @@ import (
"time"
"github.com/spf13/cobra"
"github.com/steveyegge/beads/internal/formula"
"github.com/steveyegge/beads/internal/rpc"
"github.com/steveyegge/beads/internal/storage"
"github.com/steveyegge/beads/internal/types"
@@ -25,10 +26,11 @@ var variablePattern = regexp.MustCompile(`\{\{([a-zA-Z_][a-zA-Z0-9_]*)\}\}`)
// TemplateSubgraph holds a template epic and all its descendants
type TemplateSubgraph struct {
Root *types.Issue // The template epic
Issues []*types.Issue // All issues in the subgraph (including root)
Dependencies []*types.Dependency // All dependencies within the subgraph
IssueMap map[string]*types.Issue // ID -> Issue for quick lookup
Root *types.Issue // The template epic
Issues []*types.Issue // All issues in the subgraph (including root)
Dependencies []*types.Dependency // All dependencies within the subgraph
IssueMap map[string]*types.Issue // ID -> Issue for quick lookup
VarDefs map[string]formula.VarDef // Variable definitions from formula (for defaults)
}
// InstantiateResult holds the result of template instantiation
@@ -787,6 +789,57 @@ func extractAllVariables(subgraph *TemplateSubgraph) []string {
return extractVariables(allText)
}
// extractRequiredVariables returns only the variables that have no default
// value and therefore must be supplied by the caller.
//
// If VarDefs is populated (the subgraph came from a cooked formula), a
// variable is required when it is either absent from VarDefs or defined
// without a Default. Note that def.Required is NOT consulted here: an empty
// Default alone is what makes a variable required.
// If VarDefs is unavailable, every extracted variable is assumed required.
func extractRequiredVariables(subgraph *TemplateSubgraph) []string {
	allVars := extractAllVariables(subgraph)

	// len() of a nil map is 0, so one emptiness check covers both
	// "no formula metadata" and "formula with no variable definitions".
	if len(subgraph.VarDefs) == 0 {
		return allVars
	}

	var required []string
	for _, v := range allVars {
		def, exists := subgraph.VarDefs[v]
		// Required when undefined in VarDefs, or defined without a default.
		if !exists || def.Default == "" {
			required = append(required, v)
		}
	}
	return required
}
// applyVariableDefaults merges formula default values with provided variables.
// It returns a new map with defaults filled in for any missing variables.
// When the subgraph carries no variable definitions, the input map is
// returned unchanged (and unaliased copies are not made).
func applyVariableDefaults(vars map[string]string, subgraph *TemplateSubgraph) map[string]string {
	defs := subgraph.VarDefs
	if defs == nil {
		return vars
	}

	merged := make(map[string]string, len(vars))
	for name, value := range vars {
		merged[name] = value
	}

	// Fill in a default only where the caller supplied nothing and the
	// definition actually carries a non-empty default value.
	for name, def := range defs {
		if def.Default == "" {
			continue
		}
		if _, supplied := merged[name]; !supplied {
			merged[name] = def.Default
		}
	}
	return merged
}
// substituteVariables replaces {{variable}} with values
func substituteVariables(text string, vars map[string]string) string {
return variablePattern.ReplaceAllStringFunc(text, func(match string) string {

View File

@@ -1,6 +1,6 @@
#!/bin/sh
# bd-shim v1
# bd-hooks-version: 0.36.0
# bd-hooks-version: 0.37.0
#
# bd (beads) post-checkout hook - thin shim
#

View File

@@ -1,6 +1,6 @@
#!/bin/sh
# bd-shim v1
# bd-hooks-version: 0.36.0
# bd-hooks-version: 0.37.0
#
# bd (beads) post-merge hook - thin shim
#

View File

@@ -1,6 +1,6 @@
#!/bin/sh
# bd-shim v1
# bd-hooks-version: 0.36.0
# bd-hooks-version: 0.37.0
#
# bd (beads) pre-commit hook - thin shim
#

View File

@@ -1,6 +1,6 @@
#!/bin/sh
# bd-shim v1
# bd-hooks-version: 0.36.0
# bd-hooks-version: 0.37.0
#
# bd (beads) pre-push hook - thin shim
#

View File

@@ -14,7 +14,7 @@ import (
var (
// Version is the current version of bd (overridden by ldflags at build time)
Version = "0.36.0"
Version = "0.37.0"
// Build can be set via ldflags at compile time
Build = "dev"
// Commit and branch the git revision the binary was built from (optional ldflag)

View File

@@ -128,61 +128,79 @@ func runWispCreate(cmd *cobra.Command, args []string) {
vars[parts[0]] = parts[1]
}
// Resolve proto ID
protoID := args[0]
// Try to resolve partial ID if it doesn't look like a full ID
if !strings.HasPrefix(protoID, "bd-") && !strings.HasPrefix(protoID, "gt-") && !strings.HasPrefix(protoID, "mol-") {
// Might be a partial ID, try to resolve
if resolved, err := resolvePartialIDDirect(ctx, protoID); err == nil {
protoID = resolved
}
// Try to load as formula first (ephemeral proto - gt-4v1eo)
// If that fails, fall back to loading from DB (legacy proto beads)
var subgraph *TemplateSubgraph
var protoID string
// Try to cook formula inline (gt-4v1eo: ephemeral protos)
// This works for any valid formula name, not just "mol-" prefixed ones
sg, err := resolveAndCookFormula(args[0], nil)
if err == nil {
subgraph = sg
protoID = sg.Root.ID
}
// Check if it's a named molecule (mol-xxx) - look up in catalog
if strings.HasPrefix(protoID, "mol-") {
// Find the proto by name
issues, err := store.SearchIssues(ctx, "", types.IssueFilter{
Labels: []string{MoleculeLabel},
})
if err != nil {
fmt.Fprintf(os.Stderr, "Error searching for proto: %v\n", err)
os.Exit(1)
}
found := false
for _, issue := range issues {
if strings.Contains(issue.Title, protoID) || issue.ID == protoID {
protoID = issue.ID
found = true
break
if subgraph == nil {
// Resolve proto ID (legacy path)
protoID = args[0]
// Try to resolve partial ID if it doesn't look like a full ID
if !strings.HasPrefix(protoID, "bd-") && !strings.HasPrefix(protoID, "gt-") && !strings.HasPrefix(protoID, "mol-") {
// Might be a partial ID, try to resolve
if resolved, err := resolvePartialIDDirect(ctx, protoID); err == nil {
protoID = resolved
}
}
if !found {
fmt.Fprintf(os.Stderr, "Error: proto '%s' not found in catalog\n", args[0])
fmt.Fprintf(os.Stderr, "Hint: run 'bd mol catalog' to see available protos\n")
// Check if it's a named molecule (mol-xxx) - look up in catalog
if strings.HasPrefix(protoID, "mol-") {
// Find the proto by name
issues, err := store.SearchIssues(ctx, "", types.IssueFilter{
Labels: []string{MoleculeLabel},
})
if err != nil {
fmt.Fprintf(os.Stderr, "Error searching for proto: %v\n", err)
os.Exit(1)
}
found := false
for _, issue := range issues {
if strings.Contains(issue.Title, protoID) || issue.ID == protoID {
protoID = issue.ID
found = true
break
}
}
if !found {
fmt.Fprintf(os.Stderr, "Error: '%s' not found as formula or proto\n", args[0])
fmt.Fprintf(os.Stderr, "Hint: run 'bd formula list' to see available formulas\n")
os.Exit(1)
}
}
// Load the proto
protoIssue, err := store.GetIssue(ctx, protoID)
if err != nil {
fmt.Fprintf(os.Stderr, "Error loading proto %s: %v\n", protoID, err)
os.Exit(1)
}
if !isProtoIssue(protoIssue) {
fmt.Fprintf(os.Stderr, "Error: %s is not a proto (missing '%s' label)\n", protoID, MoleculeLabel)
os.Exit(1)
}
// Load the proto subgraph from DB
subgraph, err = loadTemplateSubgraph(ctx, store, protoID)
if err != nil {
fmt.Fprintf(os.Stderr, "Error loading proto: %v\n", err)
os.Exit(1)
}
}
// Load the proto
protoIssue, err := store.GetIssue(ctx, protoID)
if err != nil {
fmt.Fprintf(os.Stderr, "Error loading proto %s: %v\n", protoID, err)
os.Exit(1)
}
if !isProtoIssue(protoIssue) {
fmt.Fprintf(os.Stderr, "Error: %s is not a proto (missing '%s' label)\n", protoID, MoleculeLabel)
os.Exit(1)
}
// Apply variable defaults from formula (gt-4v1eo)
vars = applyVariableDefaults(vars, subgraph)
// Load the proto subgraph
subgraph, err := loadTemplateSubgraph(ctx, store, protoID)
if err != nil {
fmt.Fprintf(os.Stderr, "Error loading proto: %v\n", err)
os.Exit(1)
}
// Check for missing variables
requiredVars := extractAllVariables(subgraph)
// Check for missing required variables (those without defaults)
requiredVars := extractRequiredVariables(subgraph)
var missingVars []string
for _, v := range requiredVars {
if _, ok := vars[v]; !ok {