Merge branch 'main' into win-defender-mitigation

# Conflicts:
#	.beads/issues.jsonl
#	README.md
This commit is contained in:
Matt Wilkie
2025-10-14 14:04:20 -07:00
committed by maphew
82 changed files with 11477 additions and 3205 deletions

View File

@@ -35,6 +35,9 @@ var depAddCmd = &cobra.Command{
os.Exit(1)
}
// Schedule auto-flush
markDirtyAndScheduleFlush()
if jsonOutput {
outputJSON(map[string]interface{}{
"status": "added",
@@ -62,6 +65,9 @@ var depRemoveCmd = &cobra.Command{
os.Exit(1)
}
// Schedule auto-flush
markDirtyAndScheduleFlush()
if jsonOutput {
outputJSON(map[string]interface{}{
"status": "removed",

View File

@@ -129,11 +129,26 @@ Output to stdout by default, or use -o flag for file output.`,
// Write JSONL
encoder := json.NewEncoder(out)
exportedIDs := make([]string, 0, len(issues))
for _, issue := range issues {
if err := encoder.Encode(issue); err != nil {
fmt.Fprintf(os.Stderr, "Error encoding issue %s: %v\n", issue.ID, err)
os.Exit(1)
}
exportedIDs = append(exportedIDs, issue.ID)
}
// Only clear dirty issues and auto-flush state if exporting to the default JSONL path
// This prevents clearing dirty flags when exporting to custom paths (e.g., bd export -o backup.jsonl)
if output == "" || output == findJSONLPath() {
// Clear only the issues that were actually exported (fixes bd-52 race condition)
if err := store.ClearDirtyIssuesByID(ctx, exportedIDs); err != nil {
fmt.Fprintf(os.Stderr, "Warning: failed to clear dirty issues: %v\n", err)
}
// Clear auto-flush state since we just manually exported
// This cancels any pending auto-flush timer and marks DB as clean
clearAutoFlushState()
}
// If writing to file, atomically replace the target file

View File

@@ -222,6 +222,13 @@ Behavior:
updates["estimated_minutes"] = nil
}
}
if _, ok := rawData["external_ref"]; ok {
if issue.ExternalRef != nil {
updates["external_ref"] = *issue.ExternalRef
} else {
updates["external_ref"] = nil
}
}
if err := store.UpdateIssue(ctx, issue.ID, updates, "import"); err != nil {
fmt.Fprintf(os.Stderr, "Error updating issue %s: %v\n", issue.ID, err)
@@ -238,7 +245,16 @@ Behavior:
}
}
// Phase 5: Process dependencies
// Phase 5: Sync ID counters after importing issues with explicit IDs
// This prevents ID collisions with subsequently auto-generated issues
// CRITICAL: If this fails, subsequent auto-generated IDs WILL collide with imported issues
if err := sqliteStore.SyncAllCounters(ctx); err != nil {
fmt.Fprintf(os.Stderr, "Error: failed to sync ID counters: %v\n", err)
fmt.Fprintf(os.Stderr, "Cannot proceed - auto-generated IDs would collide with imported issues.\n")
os.Exit(1)
}
// Phase 6: Process dependencies
// Do this after all issues are created to handle forward references
var depsCreated, depsSkipped int
for _, issue := range allIssues {
@@ -287,6 +303,9 @@ Behavior:
}
}
// Schedule auto-flush after import completes
markDirtyAndScheduleFlush()
// Print summary
fmt.Fprintf(os.Stderr, "Import complete: %d created, %d updated", created, updated)
if skipped > 0 {

View File

@@ -968,3 +968,79 @@ func TestImportWithDependenciesInJSONL(t *testing.T) {
t.Errorf("Dependency target = %s, want bd-1", deps[0].DependsOnID)
}
}
// TestImportCounterSyncAfterHighID verifies that after importing an issue with
// an explicit high ID (bd-100), SyncAllCounters advances the ID counter so the
// next auto-generated issue receives bd-101 instead of colliding with the
// low-numbered auto-generated sequence.
func TestImportCounterSyncAfterHighID(t *testing.T) {
	tmpDir, err := os.MkdirTemp("", "bd-collision-test-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	defer func() {
		if err := os.RemoveAll(tmpDir); err != nil {
			t.Logf("Warning: cleanup failed: %v", err)
		}
	}()
	// Step 1: Fresh store with the "bd" issue prefix.
	dbPath := filepath.Join(tmpDir, "test.db")
	testStore, err := sqlite.New(dbPath)
	if err != nil {
		t.Fatalf("Failed to create storage: %v", err)
	}
	defer func() {
		if err := testStore.Close(); err != nil {
			t.Logf("Warning: failed to close store: %v", err)
		}
	}()
	ctx := context.Background()
	if err := testStore.SetConfig(ctx, "issue_prefix", "bd"); err != nil {
		t.Fatalf("Failed to set issue prefix: %v", err)
	}
	// Step 2: Create three issues with counter-assigned IDs.
	for i := 0; i < 3; i++ {
		issue := &types.Issue{
			Title:     fmt.Sprintf("Auto issue %d", i+1),
			Status:    types.StatusOpen,
			Priority:  1,
			IssueType: types.TypeTask,
		}
		if err := testStore.CreateIssue(ctx, issue, "test"); err != nil {
			t.Fatalf("Failed to create auto issue %d: %v", i+1, err)
		}
	}
	// Step 3: Import an issue carrying an explicit high ID (simulates
	// `bd import` of externally-authored JSONL).
	highIDIssue := &types.Issue{
		ID:        "bd-100",
		Title:     "High ID issue",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	if err := testStore.CreateIssue(ctx, highIDIssue, "import"); err != nil {
		t.Fatalf("Failed to import high ID issue: %v", err)
	}
	// Step 4: Sync counters after import (mimics import command behavior)
	if err := testStore.SyncAllCounters(ctx); err != nil {
		t.Fatalf("Failed to sync counters: %v", err)
	}
	// Step 5: Create another auto-generated issue
	// This should get bd-101 (counter should have synced to 100), not bd-4
	newIssue := &types.Issue{
		Title:     "New issue after import",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
	}
	if err := testStore.CreateIssue(ctx, newIssue, "test"); err != nil {
		t.Fatalf("Failed to create new issue: %v", err)
	}
	if newIssue.ID != "bd-101" {
		t.Errorf("Expected new issue to get ID bd-101, got %s", newIssue.ID)
	}
}

View File

@@ -1,14 +1,22 @@
package main
import (
"bufio"
"context"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"fmt"
"os"
"path/filepath"
"sort"
"strings"
"sync"
"time"
"github.com/fatih/color"
"github.com/spf13/cobra"
"github.com/steveyegge/beads"
"github.com/steveyegge/beads/internal/storage"
"github.com/steveyegge/beads/internal/storage/sqlite"
"github.com/steveyegge/beads/internal/types"
@@ -19,6 +27,20 @@ var (
actor string
store storage.Storage
jsonOutput bool
// Auto-flush state
autoFlushEnabled = true // Can be disabled with --no-auto-flush
isDirty = false
flushMutex sync.Mutex
flushTimer *time.Timer
flushDebounce = 5 * time.Second
storeMutex sync.Mutex // Protects store access from background goroutine
storeActive = false // Tracks if store is available
flushFailureCount = 0 // Consecutive flush failures
lastFlushError error // Last flush error for debugging
// Auto-import state
autoImportEnabled = true // Can be disabled with --no-auto-import
)
var rootCmd = &cobra.Command{
@@ -31,17 +53,19 @@ var rootCmd = &cobra.Command{
return
}
// Set auto-flush based on flag (invert no-auto-flush)
autoFlushEnabled = !noAutoFlush
// Set auto-import based on flag (invert no-auto-import)
autoImportEnabled = !noAutoImport
// Initialize storage
if dbPath == "" {
// Try to find database in order:
// 1. $BEADS_DB environment variable
// 2. .beads/*.db in current directory or ancestors
// 3. ~/.beads/default.db
if envDB := os.Getenv("BEADS_DB"); envDB != "" {
dbPath = envDB
} else if foundDB := findDatabase(); foundDB != "" {
// Use public API to find database (same logic as extensions)
if foundDB := beads.FindDatabasePath(); foundDB != "" {
dbPath = foundDB
} else {
// Fallback to default location (will be created by init command)
home, _ := os.UserHomeDir()
dbPath = filepath.Join(home, ".beads", "default.db")
}
@@ -54,52 +78,59 @@ var rootCmd = &cobra.Command{
os.Exit(1)
}
// Set actor from env or default
// Mark store as active for flush goroutine safety
storeMutex.Lock()
storeActive = true
storeMutex.Unlock()
// Set actor from flag, env, or default
// Priority: --actor flag > BD_ACTOR env > USER env > "unknown"
if actor == "" {
actor = os.Getenv("USER")
if actor == "" {
if bdActor := os.Getenv("BD_ACTOR"); bdActor != "" {
actor = bdActor
} else if user := os.Getenv("USER"); user != "" {
actor = user
} else {
actor = "unknown"
}
}
// Auto-import if JSONL is newer than DB (e.g., after git pull)
// Skip for import command itself to avoid recursion
if cmd.Name() != "import" && autoImportEnabled {
autoImportIfNewer()
}
},
PersistentPostRun: func(cmd *cobra.Command, args []string) {
// Flush any pending changes before closing
flushMutex.Lock()
needsFlush := isDirty && autoFlushEnabled
if needsFlush {
// Cancel timer and flush immediately
if flushTimer != nil {
flushTimer.Stop()
flushTimer = nil
}
// Don't clear isDirty here - let flushToJSONL do it
}
flushMutex.Unlock()
if needsFlush {
// Call the shared flush function (no code duplication)
flushToJSONL()
}
// Signal that store is closing (prevents background flush from accessing closed store)
storeMutex.Lock()
storeActive = false
storeMutex.Unlock()
if store != nil {
_ = store.Close()
}
},
}
// findDatabase walks upward from the current working directory toward the
// filesystem root and returns the first *.db file found inside a .beads/
// directory, or the empty string when no such file exists (or the working
// directory cannot be determined).
func findDatabase() string {
	cwd, err := os.Getwd()
	if err != nil {
		return ""
	}
	for cur := cwd; ; {
		// Probe <cur>/.beads for database files.
		candidate := filepath.Join(cur, ".beads")
		if st, statErr := os.Stat(candidate); statErr == nil && st.IsDir() {
			if dbFiles, globErr := filepath.Glob(filepath.Join(candidate, "*.db")); globErr == nil && len(dbFiles) > 0 {
				// First match wins.
				return dbFiles[0]
			}
		}
		// Step up one level; filepath.Dir is a fixed point at the root.
		next := filepath.Dir(cur)
		if next == cur {
			return ""
		}
		cur = next
	}
}
// outputJSON outputs data as pretty-printed JSON
func outputJSON(v interface{}) {
encoder := json.NewEncoder(os.Stdout)
@@ -110,17 +141,526 @@ func outputJSON(v interface{}) {
}
}
// findJSONLPath returns the JSONL file path associated with the current
// database, using the public API for path discovery (same logic as
// extensions). It additionally makes a best-effort attempt to create the
// database's directory, which matters for brand-new databases; if creation
// fails, the discovered path is still returned so that the subsequent write
// surfaces a clearer error to the user.
//
// (The original had two identical `return jsonlPath` branches around the
// MkdirAll error check; they are collapsed here with the error explicitly,
// deliberately ignored.)
func findJSONLPath() string {
	jsonlPath := beads.FindJSONLPath(dbPath)
	// Best-effort: error intentionally ignored (see doc comment above).
	_ = os.MkdirAll(filepath.Dir(dbPath), 0755)
	return jsonlPath
}
// autoImportIfNewer checks if JSONL content changed (via hash) and imports if so.
// Fixes bd-84: Hash-based comparison is git-proof (mtime comparison fails after git pull).
//
// This runs before every command (except `import` itself), so all failures
// are soft: on any read/parse/metadata error the import is silently skipped,
// with diagnostics emitted only when BD_DEBUG is set.
func autoImportIfNewer() {
	// Find JSONL path
	jsonlPath := findJSONLPath()
	// Read JSONL file
	jsonlData, err := os.ReadFile(jsonlPath)
	if err != nil {
		// JSONL doesn't exist or can't be accessed, skip import
		if os.Getenv("BD_DEBUG") != "" {
			fmt.Fprintf(os.Stderr, "Debug: auto-import skipped, JSONL not found: %v\n", err)
		}
		return
	}
	// Compute current JSONL hash
	hasher := sha256.New()
	hasher.Write(jsonlData)
	currentHash := hex.EncodeToString(hasher.Sum(nil))
	// Get last import hash from DB metadata
	ctx := context.Background()
	lastHash, err := store.GetMetadata(ctx, "last_import_hash")
	if err != nil {
		// Metadata not supported or error reading - this shouldn't happen
		// since we added metadata table, but be defensive
		if os.Getenv("BD_DEBUG") != "" {
			fmt.Fprintf(os.Stderr, "Debug: auto-import skipped, metadata error: %v\n", err)
		}
		return
	}
	// Compare hashes
	if currentHash == lastHash {
		// Content unchanged, skip import
		if os.Getenv("BD_DEBUG") != "" {
			fmt.Fprintf(os.Stderr, "Debug: auto-import skipped, JSONL unchanged (hash match)\n")
		}
		return
	}
	if os.Getenv("BD_DEBUG") != "" {
		fmt.Fprintf(os.Stderr, "Debug: auto-import triggered (hash changed)\n")
	}
	// Content changed - perform silent import
	// NOTE(review): bufio.Scanner's default buffer caps a single line at
	// 64KiB; an issue record larger than that would abort the whole import
	// via scanner.Err() below — confirm records stay under this limit.
	scanner := bufio.NewScanner(strings.NewReader(string(jsonlData)))
	var allIssues []*types.Issue
	for scanner.Scan() {
		line := scanner.Text()
		if line == "" {
			continue
		}
		var issue types.Issue
		if err := json.Unmarshal([]byte(line), &issue); err != nil {
			// Parse error, skip this import
			if os.Getenv("BD_DEBUG") != "" {
				fmt.Fprintf(os.Stderr, "Debug: auto-import skipped, parse error: %v\n", err)
			}
			return
		}
		allIssues = append(allIssues, &issue)
	}
	if err := scanner.Err(); err != nil {
		return
	}
	// Import issues (create new, update existing)
	for _, issue := range allIssues {
		existing, err := store.GetIssue(ctx, issue.ID)
		if err != nil {
			// Lookup failed for this record; skip it (best-effort import).
			continue
		}
		if existing != nil {
			// Update existing issue
			updates := make(map[string]interface{})
			updates["title"] = issue.Title
			updates["description"] = issue.Description
			updates["design"] = issue.Design
			updates["acceptance_criteria"] = issue.AcceptanceCriteria
			updates["notes"] = issue.Notes
			updates["status"] = issue.Status
			updates["priority"] = issue.Priority
			updates["issue_type"] = issue.IssueType
			updates["assignee"] = issue.Assignee
			if issue.EstimatedMinutes != nil {
				updates["estimated_minutes"] = *issue.EstimatedMinutes
			}
			if issue.ExternalRef != nil {
				updates["external_ref"] = *issue.ExternalRef
			}
			// Errors deliberately discarded: auto-import is best-effort.
			_ = store.UpdateIssue(ctx, issue.ID, updates, "auto-import")
		} else {
			// Create new issue
			_ = store.CreateIssue(ctx, issue, "auto-import")
		}
	}
	// Import dependencies
	for _, issue := range allIssues {
		if len(issue.Dependencies) == 0 {
			continue
		}
		// Get existing dependencies
		existingDeps, err := store.GetDependencyRecords(ctx, issue.ID)
		if err != nil {
			continue
		}
		// Add missing dependencies
		for _, dep := range issue.Dependencies {
			exists := false
			for _, existing := range existingDeps {
				if existing.DependsOnID == dep.DependsOnID && existing.Type == dep.Type {
					exists = true
					break
				}
			}
			if !exists {
				_ = store.AddDependency(ctx, dep, "auto-import")
			}
		}
	}
	// Store new hash after successful import
	// NOTE(review): the hash is recorded even when individual creates/updates
	// above failed silently, so those records will not be retried until the
	// JSONL changes again — confirm this trade-off is intended.
	_ = store.SetMetadata(ctx, "last_import_hash", currentHash)
}
// markDirtyAndScheduleFlush records that the database has unexported changes
// and (re)starts the debounced background flush. Each call restarts the
// debounce window, so a burst of CRUD operations produces a single flush
// flushDebounce after the last one. No-op when auto-flush is disabled.
func markDirtyAndScheduleFlush() {
	if !autoFlushEnabled {
		return
	}
	flushMutex.Lock()
	defer flushMutex.Unlock()
	isDirty = true
	// Drop any pending timer so the debounce window restarts from now.
	if t := flushTimer; t != nil {
		t.Stop()
	}
	flushTimer = time.AfterFunc(flushDebounce, flushToJSONL)
}
// clearAutoFlushState resets all auto-flush bookkeeping after a manual
// export: it cancels any pending debounce timer, marks the database clean,
// and zeroes the consecutive-failure tracking (the manual export succeeded).
func clearAutoFlushState() {
	flushMutex.Lock()
	defer flushMutex.Unlock()
	// Cancel the scheduled flush, if one is pending.
	if t := flushTimer; t != nil {
		t.Stop()
		flushTimer = nil
	}
	// Database contents now match the JSONL on disk.
	isDirty = false
	// Failure tracking starts over.
	flushFailureCount = 0
	lastFlushError = nil
}
// flushToJSONL exports dirty issues to JSONL using incremental updates:
// only issues flagged dirty in the DB are re-fetched; all other records are
// carried over from the existing JSONL file, and the result is written
// atomically (temp file + rename). Runs both from the debounce timer
// goroutine and synchronously from PersistentPostRun, so shared state is
// guarded by flushMutex/storeMutex throughout.
func flushToJSONL() {
	// Check if store is still active (not closed)
	// NOTE(review): storeActive is re-checked below, but the store could
	// still be closed between that check and the store calls further down —
	// presumably PersistentPostRun's synchronous final flush makes this
	// window safe; confirm.
	storeMutex.Lock()
	if !storeActive {
		storeMutex.Unlock()
		return
	}
	storeMutex.Unlock()
	flushMutex.Lock()
	if !isDirty {
		flushMutex.Unlock()
		return
	}
	// Clear the flag up front so a concurrent caller doesn't double-flush.
	// NOTE(review): if this flush then fails, isDirty stays false; recovery
	// relies on the per-issue dirty flags in the DB (cleared only on success
	// below) plus a later markDirtyAndScheduleFlush — verify nothing is
	// stranded on failure.
	isDirty = false
	flushMutex.Unlock()
	jsonlPath := findJSONLPath()
	// Double-check store is still active before accessing
	storeMutex.Lock()
	if !storeActive {
		storeMutex.Unlock()
		return
	}
	storeMutex.Unlock()
	// Helper to record failure
	recordFailure := func(err error) {
		flushMutex.Lock()
		flushFailureCount++
		lastFlushError = err
		failCount := flushFailureCount
		flushMutex.Unlock()
		// Always show the immediate warning
		fmt.Fprintf(os.Stderr, "Warning: auto-flush failed: %v\n", err)
		// Show prominent warning after 3+ consecutive failures
		if failCount >= 3 {
			red := color.New(color.FgRed, color.Bold).SprintFunc()
			fmt.Fprintf(os.Stderr, "\n%s\n", red("⚠️ CRITICAL: Auto-flush has failed "+fmt.Sprint(failCount)+" times consecutively!"))
			fmt.Fprintf(os.Stderr, "%s\n", red("⚠️ Your JSONL file may be out of sync with the database."))
			fmt.Fprintf(os.Stderr, "%s\n\n", red("⚠️ Run 'bd export -o .beads/issues.jsonl' manually to fix."))
		}
	}
	// Helper to record success
	recordSuccess := func() {
		flushMutex.Lock()
		flushFailureCount = 0
		lastFlushError = nil
		flushMutex.Unlock()
	}
	ctx := context.Background()
	// Get dirty issue IDs (bd-39: incremental export optimization)
	dirtyIDs, err := store.GetDirtyIssues(ctx)
	if err != nil {
		recordFailure(fmt.Errorf("failed to get dirty issues: %w", err))
		return
	}
	// No dirty issues? Nothing to do!
	if len(dirtyIDs) == 0 {
		recordSuccess()
		return
	}
	// Read existing JSONL into a map (keyed by issue ID) so clean records
	// survive the rewrite unchanged. A missing file is fine: start empty.
	issueMap := make(map[string]*types.Issue)
	if existingFile, err := os.Open(jsonlPath); err == nil {
		scanner := bufio.NewScanner(existingFile)
		lineNum := 0
		for scanner.Scan() {
			lineNum++
			line := scanner.Text()
			if line == "" {
				continue
			}
			var issue types.Issue
			if err := json.Unmarshal([]byte(line), &issue); err == nil {
				issueMap[issue.ID] = &issue
			} else {
				// Warn about malformed JSONL lines
				fmt.Fprintf(os.Stderr, "Warning: skipping malformed JSONL line %d: %v\n", lineNum, err)
			}
		}
		existingFile.Close()
	}
	// Fetch only dirty issues from DB
	for _, issueID := range dirtyIDs {
		issue, err := store.GetIssue(ctx, issueID)
		if err != nil {
			recordFailure(fmt.Errorf("failed to get issue %s: %w", issueID, err))
			return
		}
		if issue == nil {
			// Issue was deleted, remove from map
			delete(issueMap, issueID)
			continue
		}
		// Get dependencies for this issue
		deps, err := store.GetDependencyRecords(ctx, issueID)
		if err != nil {
			recordFailure(fmt.Errorf("failed to get dependencies for %s: %w", issueID, err))
			return
		}
		issue.Dependencies = deps
		// Update map
		issueMap[issueID] = issue
	}
	// Convert map to sorted slice (deterministic file order by ID)
	issues := make([]*types.Issue, 0, len(issueMap))
	for _, issue := range issueMap {
		issues = append(issues, issue)
	}
	sort.Slice(issues, func(i, j int) bool {
		return issues[i].ID < issues[j].ID
	})
	// Write to temp file first, then rename (atomic)
	tempPath := jsonlPath + ".tmp"
	f, err := os.Create(tempPath)
	if err != nil {
		recordFailure(fmt.Errorf("failed to create temp file: %w", err))
		return
	}
	encoder := json.NewEncoder(f)
	for _, issue := range issues {
		if err := encoder.Encode(issue); err != nil {
			f.Close()
			os.Remove(tempPath)
			recordFailure(fmt.Errorf("failed to encode issue %s: %w", issue.ID, err))
			return
		}
	}
	if err := f.Close(); err != nil {
		os.Remove(tempPath)
		recordFailure(fmt.Errorf("failed to close temp file: %w", err))
		return
	}
	// Atomic rename
	if err := os.Rename(tempPath, jsonlPath); err != nil {
		os.Remove(tempPath)
		recordFailure(fmt.Errorf("failed to rename file: %w", err))
		return
	}
	// Clear only the dirty issues that were actually exported (fixes bd-52 race condition)
	if err := store.ClearDirtyIssuesByID(ctx, dirtyIDs); err != nil {
		// Don't fail the whole flush for this, but warn
		fmt.Fprintf(os.Stderr, "Warning: failed to clear dirty issues: %v\n", err)
	}
	// Store hash of exported JSONL (fixes bd-84: enables hash-based auto-import),
	// so the next startup's auto-import sees the file as unchanged.
	jsonlData, err := os.ReadFile(jsonlPath)
	if err == nil {
		hasher := sha256.New()
		hasher.Write(jsonlData)
		exportedHash := hex.EncodeToString(hasher.Sum(nil))
		_ = store.SetMetadata(ctx, "last_import_hash", exportedHash)
	}
	// Success!
	recordSuccess()
}
// CLI flags controlling background sync behavior (registered in init below).
var (
	noAutoFlush  bool // --no-auto-flush: disable automatic JSONL sync after CRUD operations
	noAutoImport bool // --no-auto-import: disable automatic JSONL import when newer than DB
)
// init registers the persistent (global) flags on the root command.
func init() {
	rootCmd.PersistentFlags().StringVar(&dbPath, "db", "", "Database path (default: auto-discover .beads/*.db or ~/.beads/default.db)")
	// NOTE: --actor must be registered exactly once — the original contained
	// two StringVar calls for "actor" on the same FlagSet, which panics with
	// "flag redefined" at startup. Only the updated registration (with the
	// $BD_ACTOR help text) is kept.
	rootCmd.PersistentFlags().StringVar(&actor, "actor", "", "Actor name for audit trail (default: $BD_ACTOR or $USER)")
	rootCmd.PersistentFlags().BoolVar(&jsonOutput, "json", false, "Output in JSON format")
	rootCmd.PersistentFlags().BoolVar(&noAutoFlush, "no-auto-flush", false, "Disable automatic JSONL sync after CRUD operations")
	rootCmd.PersistentFlags().BoolVar(&noAutoImport, "no-auto-import", false, "Disable automatic JSONL import when newer than DB")
}
// createIssuesFromMarkdown parses a markdown file and creates multiple issues,
// including any labels and dependencies declared in each template. Fatal
// problems (unparseable file, no issues found) exit the process; per-issue
// creation failures are collected and reported at the end without aborting
// the rest of the batch. A single auto-flush is scheduled for the whole batch.
//
// The path parameter is named mdPath (not "filepath" as before) so it no
// longer shadows the path/filepath package inside this function. cmd is
// currently unused but kept for signature parity with cobra handlers.
func createIssuesFromMarkdown(cmd *cobra.Command, mdPath string) {
	// Parse markdown file into issue templates.
	templates, err := parseMarkdownFile(mdPath)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error parsing markdown file: %v\n", err)
		os.Exit(1)
	}
	if len(templates) == 0 {
		fmt.Fprintf(os.Stderr, "No issues found in markdown file\n")
		os.Exit(1)
	}
	ctx := context.Background()
	createdIssues := []*types.Issue{}
	failedIssues := []string{}
	// Create each issue
	for _, template := range templates {
		issue := &types.Issue{
			Title:              template.Title,
			Description:        template.Description,
			Design:             template.Design,
			AcceptanceCriteria: template.AcceptanceCriteria,
			Status:             types.StatusOpen,
			Priority:           template.Priority,
			IssueType:          template.IssueType,
			Assignee:           template.Assignee,
		}
		if err := store.CreateIssue(ctx, issue, actor); err != nil {
			fmt.Fprintf(os.Stderr, "Error creating issue '%s': %v\n", template.Title, err)
			failedIssues = append(failedIssues, template.Title)
			continue
		}
		// Add labels (failures are warnings, not fatal)
		for _, label := range template.Labels {
			if err := store.AddLabel(ctx, issue.ID, label, actor); err != nil {
				fmt.Fprintf(os.Stderr, "Warning: failed to add label %s to %s: %v\n", label, issue.ID, err)
			}
		}
		// Add dependencies; the spec format mirrors the --deps flag.
		for _, depSpec := range template.Dependencies {
			depSpec = strings.TrimSpace(depSpec)
			if depSpec == "" {
				continue
			}
			var depType types.DependencyType
			var dependsOnID string
			// Parse format: "type:id" or just "id" (defaults to "blocks")
			if strings.Contains(depSpec, ":") {
				parts := strings.SplitN(depSpec, ":", 2)
				if len(parts) != 2 {
					fmt.Fprintf(os.Stderr, "Warning: invalid dependency format '%s' for %s\n", depSpec, issue.ID)
					continue
				}
				depType = types.DependencyType(strings.TrimSpace(parts[0]))
				dependsOnID = strings.TrimSpace(parts[1])
			} else {
				depType = types.DepBlocks
				dependsOnID = depSpec
			}
			if !depType.IsValid() {
				fmt.Fprintf(os.Stderr, "Warning: invalid dependency type '%s' for %s\n", depType, issue.ID)
				continue
			}
			dep := &types.Dependency{
				IssueID:     issue.ID,
				DependsOnID: dependsOnID,
				Type:        depType,
			}
			if err := store.AddDependency(ctx, dep, actor); err != nil {
				fmt.Fprintf(os.Stderr, "Warning: failed to add dependency %s -> %s: %v\n", issue.ID, dependsOnID, err)
			}
		}
		createdIssues = append(createdIssues, issue)
	}
	// Schedule auto-flush (once for the batch, only if anything was created)
	if len(createdIssues) > 0 {
		markDirtyAndScheduleFlush()
	}
	// Report failures if any
	if len(failedIssues) > 0 {
		red := color.New(color.FgRed).SprintFunc()
		fmt.Fprintf(os.Stderr, "\n%s Failed to create %d issues:\n", red("✗"), len(failedIssues))
		for _, title := range failedIssues {
			fmt.Fprintf(os.Stderr, " - %s\n", title)
		}
	}
	if jsonOutput {
		outputJSON(createdIssues)
	} else {
		green := color.New(color.FgGreen).SprintFunc()
		fmt.Printf("%s Created %d issues from %s:\n", green("✓"), len(createdIssues), mdPath)
		for _, issue := range createdIssues {
			fmt.Printf(" %s: %s [P%d, %s]\n", issue.ID, issue.Title, issue.Priority, issue.IssueType)
		}
	}
}
var createCmd = &cobra.Command{
Use: "create [title]",
Short: "Create a new issue",
Args: cobra.MinimumNArgs(1),
Short: "Create a new issue (or multiple issues from markdown file)",
Args: cobra.MinimumNArgs(0), // Changed to allow no args when using -f
Run: func(cmd *cobra.Command, args []string) {
file, _ := cmd.Flags().GetString("file")
// If file flag is provided, parse markdown and create multiple issues
if file != "" {
if len(args) > 0 {
fmt.Fprintf(os.Stderr, "Error: cannot specify both title and --file flag\n")
os.Exit(1)
}
createIssuesFromMarkdown(cmd, file)
return
}
// Original single-issue creation logic
if len(args) == 0 {
fmt.Fprintf(os.Stderr, "Error: title required (or use --file to create from markdown)\n")
os.Exit(1)
}
title := args[0]
description, _ := cmd.Flags().GetString("description")
design, _ := cmd.Flags().GetString("design")
@@ -129,8 +669,32 @@ var createCmd = &cobra.Command{
issueType, _ := cmd.Flags().GetString("type")
assignee, _ := cmd.Flags().GetString("assignee")
labels, _ := cmd.Flags().GetStringSlice("labels")
explicitID, _ := cmd.Flags().GetString("id")
externalRef, _ := cmd.Flags().GetString("external-ref")
deps, _ := cmd.Flags().GetStringSlice("deps")
// Validate explicit ID format if provided (prefix-number)
if explicitID != "" {
// Check format: must contain hyphen and have numeric suffix
parts := strings.Split(explicitID, "-")
if len(parts) != 2 {
fmt.Fprintf(os.Stderr, "Error: invalid ID format '%s' (expected format: prefix-number, e.g., 'bd-42')\n", explicitID)
os.Exit(1)
}
// Validate numeric suffix
if _, err := fmt.Sscanf(parts[1], "%d", new(int)); err != nil {
fmt.Fprintf(os.Stderr, "Error: invalid ID format '%s' (numeric suffix required, e.g., 'bd-42')\n", explicitID)
os.Exit(1)
}
}
var externalRefPtr *string
if externalRef != "" {
externalRefPtr = &externalRef
}
issue := &types.Issue{
ID: explicitID, // Set explicit ID if provided (empty string if not)
Title: title,
Description: description,
Design: design,
@@ -139,6 +703,7 @@ var createCmd = &cobra.Command{
Priority: priority,
IssueType: types.IssueType(issueType),
Assignee: assignee,
ExternalRef: externalRefPtr,
}
ctx := context.Background()
@@ -154,6 +719,52 @@ var createCmd = &cobra.Command{
}
}
// Add dependencies if specified (format: type:id or just id for default "blocks" type)
for _, depSpec := range deps {
// Skip empty specs (e.g., from trailing commas)
depSpec = strings.TrimSpace(depSpec)
if depSpec == "" {
continue
}
var depType types.DependencyType
var dependsOnID string
// Parse format: "type:id" or just "id" (defaults to "blocks")
if strings.Contains(depSpec, ":") {
parts := strings.SplitN(depSpec, ":", 2)
if len(parts) != 2 {
fmt.Fprintf(os.Stderr, "Warning: invalid dependency format '%s', expected 'type:id' or 'id'\n", depSpec)
continue
}
depType = types.DependencyType(strings.TrimSpace(parts[0]))
dependsOnID = strings.TrimSpace(parts[1])
} else {
// Default to "blocks" if no type specified
depType = types.DepBlocks
dependsOnID = depSpec
}
// Validate dependency type
if !depType.IsValid() {
fmt.Fprintf(os.Stderr, "Warning: invalid dependency type '%s' (valid: blocks, related, parent-child, discovered-from)\n", depType)
continue
}
// Add the dependency
dep := &types.Dependency{
IssueID: issue.ID,
DependsOnID: dependsOnID,
Type: depType,
}
if err := store.AddDependency(ctx, dep, actor); err != nil {
fmt.Fprintf(os.Stderr, "Warning: failed to add dependency %s -> %s: %v\n", issue.ID, dependsOnID, err)
}
}
// Schedule auto-flush
markDirtyAndScheduleFlush()
if jsonOutput {
outputJSON(issue)
} else {
@@ -167,6 +778,7 @@ var createCmd = &cobra.Command{
}
func init() {
createCmd.Flags().StringP("file", "f", "", "Create multiple issues from markdown file")
createCmd.Flags().StringP("description", "d", "", "Issue description")
createCmd.Flags().String("design", "", "Design notes")
createCmd.Flags().String("acceptance", "", "Acceptance criteria")
@@ -174,6 +786,9 @@ func init() {
createCmd.Flags().StringP("type", "t", "task", "Issue type (bug|feature|task|epic|chore)")
createCmd.Flags().StringP("assignee", "a", "", "Assignee")
createCmd.Flags().StringSliceP("labels", "l", []string{}, "Labels (comma-separated)")
createCmd.Flags().String("id", "", "Explicit issue ID (e.g., 'bd-42' for partitioning)")
createCmd.Flags().String("external-ref", "", "External reference (e.g., 'gh-9', 'jira-ABC')")
createCmd.Flags().StringSlice("deps", []string{}, "Dependencies in format 'type:id' or 'id' (e.g., 'discovered-from:bd-20,blocks:bd-15' or 'bd-20')")
rootCmd.AddCommand(createCmd)
}
@@ -311,7 +926,7 @@ var listCmd = &cobra.Command{
fmt.Printf("\nFound %d issues:\n\n", len(issues))
for _, issue := range issues {
fmt.Printf("%s [P%d] %s\n", issue.ID, issue.Priority, issue.Status)
fmt.Printf("%s [P%d] [%s] %s\n", issue.ID, issue.Priority, issue.IssueType, issue.Status)
fmt.Printf(" %s\n", issue.Title)
if issue.Assignee != "" {
fmt.Printf(" Assignee: %s\n", issue.Assignee)
@@ -365,6 +980,10 @@ var updateCmd = &cobra.Command{
acceptanceCriteria, _ := cmd.Flags().GetString("acceptance-criteria")
updates["acceptance_criteria"] = acceptanceCriteria
}
if cmd.Flags().Changed("external-ref") {
externalRef, _ := cmd.Flags().GetString("external-ref")
updates["external_ref"] = externalRef
}
if len(updates) == 0 {
fmt.Println("No updates specified")
@@ -377,6 +996,9 @@ var updateCmd = &cobra.Command{
os.Exit(1)
}
// Schedule auto-flush
markDirtyAndScheduleFlush()
if jsonOutput {
// Fetch updated issue and output
issue, _ := store.GetIssue(ctx, args[0])
@@ -396,6 +1018,7 @@ func init() {
updateCmd.Flags().String("design", "", "Design notes")
updateCmd.Flags().String("notes", "", "Additional notes")
updateCmd.Flags().String("acceptance-criteria", "", "Acceptance criteria")
updateCmd.Flags().String("external-ref", "", "External reference (e.g., 'gh-9', 'jira-ABC')")
rootCmd.AddCommand(updateCmd)
}
@@ -426,6 +1049,12 @@ var closeCmd = &cobra.Command{
fmt.Printf("%s Closed %s: %s\n", green("✓"), id, reason)
}
}
// Schedule auto-flush if any issues were closed
if len(args) > 0 {
markDirtyAndScheduleFlush()
}
if jsonOutput && len(closedIssues) > 0 {
outputJSON(closedIssues)
}

822
cmd/bd/main_test.go Normal file
View File

@@ -0,0 +1,822 @@
package main
import (
"bufio"
"context"
"encoding/json"
"os"
"path/filepath"
"sync"
"testing"
"time"
"github.com/steveyegge/beads/internal/storage/sqlite"
"github.com/steveyegge/beads/internal/types"
)
// TestAutoFlushDirtyMarking tests that markDirtyAndScheduleFlush() correctly marks DB as dirty
func TestAutoFlushDirtyMarking(t *testing.T) {
// Reset auto-flush state
autoFlushEnabled = true
isDirty = false
if flushTimer != nil {
flushTimer.Stop()
flushTimer = nil
}
// Call markDirtyAndScheduleFlush
markDirtyAndScheduleFlush()
// Verify dirty flag is set
flushMutex.Lock()
dirty := isDirty
hasTimer := flushTimer != nil
flushMutex.Unlock()
if !dirty {
t.Error("Expected isDirty to be true after markDirtyAndScheduleFlush()")
}
if !hasTimer {
t.Error("Expected flushTimer to be set after markDirtyAndScheduleFlush()")
}
// Clean up
flushMutex.Lock()
if flushTimer != nil {
flushTimer.Stop()
flushTimer = nil
}
isDirty = false
flushMutex.Unlock()
}
// TestAutoFlushDisabled tests that --no-auto-flush flag disables the feature
func TestAutoFlushDisabled(t *testing.T) {
// Disable auto-flush
autoFlushEnabled = false
isDirty = false
if flushTimer != nil {
flushTimer.Stop()
flushTimer = nil
}
// Call markDirtyAndScheduleFlush
markDirtyAndScheduleFlush()
// Verify dirty flag is NOT set
flushMutex.Lock()
dirty := isDirty
hasTimer := flushTimer != nil
flushMutex.Unlock()
if dirty {
t.Error("Expected isDirty to remain false when autoFlushEnabled=false")
}
if hasTimer {
t.Error("Expected flushTimer to remain nil when autoFlushEnabled=false")
}
// Re-enable for other tests
autoFlushEnabled = true
}
// TestAutoFlushDebounce tests that rapid operations result in a single flush
func TestAutoFlushDebounce(t *testing.T) {
// Create temp directory for test database
tmpDir, err := os.MkdirTemp("", "bd-test-autoflush-*")
if err != nil {
t.Fatalf("Failed to create temp dir: %v", err)
}
defer func() {
if err := os.RemoveAll(tmpDir); err != nil {
t.Logf("Warning: cleanup failed: %v", err)
}
}()
dbPath = filepath.Join(tmpDir, "test.db")
jsonlPath := filepath.Join(tmpDir, "issues.jsonl")
// Create store
testStore, err := sqlite.New(dbPath)
if err != nil {
t.Fatalf("Failed to create storage: %v", err)
}
defer testStore.Close()
store = testStore
storeMutex.Lock()
storeActive = true
storeMutex.Unlock()
// Set short debounce for testing (100ms)
originalDebounce := flushDebounce
flushDebounce = 100 * time.Millisecond
defer func() { flushDebounce = originalDebounce }()
// Reset auto-flush state
autoFlushEnabled = true
isDirty = false
if flushTimer != nil {
flushTimer.Stop()
flushTimer = nil
}
ctx := context.Background()
// Create initial issue to have something in the DB
issue := &types.Issue{
ID: "test-1",
Title: "Test issue",
Status: types.StatusOpen,
Priority: 1,
IssueType: types.TypeTask,
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
}
if err := testStore.CreateIssue(ctx, issue, "test"); err != nil {
t.Fatalf("Failed to create issue: %v", err)
}
// Simulate rapid CRUD operations
for i := 0; i < 5; i++ {
markDirtyAndScheduleFlush()
time.Sleep(10 * time.Millisecond) // Small delay between marks (< debounce)
}
// Wait for debounce to complete
time.Sleep(200 * time.Millisecond)
// Check that JSONL file was created (flush happened)
if _, err := os.Stat(jsonlPath); os.IsNotExist(err) {
t.Error("Expected JSONL file to be created after debounce period")
}
// Verify only one flush occurred by checking file content
// (should have exactly 1 issue)
f, err := os.Open(jsonlPath)
if err != nil {
t.Fatalf("Failed to open JSONL file: %v", err)
}
defer f.Close()
scanner := bufio.NewScanner(f)
lineCount := 0
for scanner.Scan() {
lineCount++
}
if lineCount != 1 {
t.Errorf("Expected 1 issue in JSONL, got %d (debounce may have failed)", lineCount)
}
// Clean up
storeMutex.Lock()
storeActive = false
storeMutex.Unlock()
}
// TestAutoFlushClearState verifies that clearAutoFlushState() resets all
// auto-flush bookkeeping: the dirty flag, the pending debounce timer, the
// failure counter, and the last recorded flush error.
func TestAutoFlushClearState(t *testing.T) {
	// Arrange: simulate a pending flush — dirty state plus a live timer.
	autoFlushEnabled = true
	isDirty = true
	flushTimer = time.AfterFunc(5*time.Second, func() {})

	// Act: clear all auto-flush state.
	clearAutoFlushState()

	// Assert: snapshot every field under the lock, then check each one.
	flushMutex.Lock()
	gotDirty := isDirty
	gotTimer := flushTimer
	gotFailures := flushFailureCount
	gotErr := lastFlushError
	flushMutex.Unlock()

	if gotDirty {
		t.Error("Expected isDirty to be false after clearAutoFlushState()")
	}
	if gotTimer != nil {
		t.Error("Expected flushTimer to be nil after clearAutoFlushState()")
	}
	if gotFailures != 0 {
		t.Errorf("Expected flushFailureCount to be 0, got %d", gotFailures)
	}
	if gotErr != nil {
		t.Errorf("Expected lastFlushError to be nil, got %v", gotErr)
	}
}
// TestAutoFlushOnExit tests that flush happens on program exit.
// It replays the PersistentPostRun exit sequence by hand: consume the dirty
// flag, cancel the pending debounce timer, then perform one final synchronous
// export of all issues to JSONL via an atomic temp-file-and-rename write.
func TestAutoFlushOnExit(t *testing.T) {
	// Create temp directory for test database
	tmpDir, err := os.MkdirTemp("", "bd-test-exit-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	defer func() {
		if err := os.RemoveAll(tmpDir); err != nil {
			t.Logf("Warning: cleanup failed: %v", err)
		}
	}()
	// NOTE(review): dbPath and store are package-level globals, so this test
	// must not run in parallel with other tests that touch them.
	dbPath = filepath.Join(tmpDir, "test.db")
	jsonlPath := filepath.Join(tmpDir, "issues.jsonl")
	// Create store
	testStore, err := sqlite.New(dbPath)
	if err != nil {
		t.Fatalf("Failed to create storage: %v", err)
	}
	store = testStore
	storeMutex.Lock()
	storeActive = true
	storeMutex.Unlock()
	// Reset auto-flush state
	autoFlushEnabled = true
	isDirty = false
	if flushTimer != nil {
		flushTimer.Stop()
		flushTimer = nil
	}
	ctx := context.Background()
	// Create test issue
	issue := &types.Issue{
		ID:        "test-exit-1",
		Title:     "Exit test issue",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	if err := testStore.CreateIssue(ctx, issue, "test"); err != nil {
		t.Fatalf("Failed to create issue: %v", err)
	}
	// Mark dirty (simulating CRUD operation)
	markDirtyAndScheduleFlush()
	// Simulate PersistentPostRun (exit behavior)
	storeMutex.Lock()
	storeActive = false
	storeMutex.Unlock()
	// Consume the dirty flag and cancel any pending debounce timer under the
	// lock, exactly as the real exit hook does.
	flushMutex.Lock()
	needsFlush := isDirty && autoFlushEnabled
	if needsFlush {
		if flushTimer != nil {
			flushTimer.Stop()
			flushTimer = nil
		}
		isDirty = false
	}
	flushMutex.Unlock()
	if needsFlush {
		// Manually perform flush logic (simulating PersistentPostRun)
		storeMutex.Lock()
		storeActive = true // Temporarily re-enable for this test
		storeMutex.Unlock()
		issues, err := testStore.SearchIssues(ctx, "", types.IssueFilter{})
		if err == nil {
			allDeps, _ := testStore.GetAllDependencyRecords(ctx)
			for _, iss := range issues {
				iss.Dependencies = allDeps[iss.ID]
			}
			// Write to a temp file first, then rename: mirrors the
			// atomic-replace behavior of the real flush path.
			tempPath := jsonlPath + ".tmp"
			f, err := os.Create(tempPath)
			if err == nil {
				encoder := json.NewEncoder(f)
				for _, iss := range issues {
					encoder.Encode(iss)
				}
				f.Close()
				os.Rename(tempPath, jsonlPath)
			}
		}
		storeMutex.Lock()
		storeActive = false
		storeMutex.Unlock()
	}
	testStore.Close()
	// Verify JSONL file was created
	if _, err := os.Stat(jsonlPath); os.IsNotExist(err) {
		t.Error("Expected JSONL file to be created on exit")
	}
	// Verify content: scan line-delimited JSON for the issue we created.
	f, err := os.Open(jsonlPath)
	if err != nil {
		t.Fatalf("Failed to open JSONL file: %v", err)
	}
	defer f.Close()
	scanner := bufio.NewScanner(f)
	found := false
	for scanner.Scan() {
		var exported types.Issue
		if err := json.Unmarshal(scanner.Bytes(), &exported); err != nil {
			t.Fatalf("Failed to parse JSONL: %v", err)
		}
		if exported.ID == "test-exit-1" {
			found = true
			break
		}
	}
	if !found {
		t.Error("Expected to find test-exit-1 in JSONL after exit flush")
	}
}
// TestAutoFlushConcurrency hammers markDirtyAndScheduleFlush from many
// goroutines at once to make sure the shared flush state stays race-free
// (run with -race for the full signal).
func TestAutoFlushConcurrency(t *testing.T) {
	// Start from a clean auto-flush state.
	autoFlushEnabled = true
	isDirty = false
	if flushTimer != nil {
		flushTimer.Stop()
		flushTimer = nil
	}

	// 10 workers, 100 marks each, all concurrent.
	const workers = 10
	const callsPerWorker = 100
	var wg sync.WaitGroup
	wg.Add(workers)
	for i := 0; i < workers; i++ {
		go func() {
			defer wg.Done()
			for j := 0; j < callsPerWorker; j++ {
				markDirtyAndScheduleFlush()
			}
		}()
	}
	wg.Wait()

	// No panic so far; the state must now be dirty with a pending timer.
	flushMutex.Lock()
	gotDirty := isDirty
	gotTimer := flushTimer != nil
	flushMutex.Unlock()

	if !gotDirty {
		t.Error("Expected isDirty to be true after concurrent marks")
	}
	if !gotTimer {
		t.Error("Expected flushTimer to be set after concurrent marks")
	}

	// Leave the globals clean for subsequent tests.
	flushMutex.Lock()
	if flushTimer != nil {
		flushTimer.Stop()
		flushTimer = nil
	}
	isDirty = false
	flushMutex.Unlock()
}
// TestAutoFlushStoreInactive tests that flush doesn't run when store is inactive:
// with storeActive=false, flushToJSONL must return early and write nothing,
// even though the dirty flag is set.
func TestAutoFlushStoreInactive(t *testing.T) {
	// Create temp directory for test database
	tmpDir, err := os.MkdirTemp("", "bd-test-inactive-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	defer func() {
		if err := os.RemoveAll(tmpDir); err != nil {
			t.Logf("Warning: cleanup failed: %v", err)
		}
	}()
	// NOTE(review): mutates the package-level dbPath/store globals.
	dbPath = filepath.Join(tmpDir, "test.db")
	jsonlPath := filepath.Join(tmpDir, "issues.jsonl")
	// Create store
	testStore, err := sqlite.New(dbPath)
	if err != nil {
		t.Fatalf("Failed to create storage: %v", err)
	}
	store = testStore
	// Set store as INACTIVE (simulating closed store)
	storeMutex.Lock()
	storeActive = false
	storeMutex.Unlock()
	// Reset auto-flush state: dirty, so only the inactive store can stop the flush.
	autoFlushEnabled = true
	flushMutex.Lock()
	isDirty = true
	flushMutex.Unlock()
	// Call flushToJSONL (should return early due to inactive store)
	flushToJSONL()
	// Verify JSONL was NOT created (flush was skipped)
	if _, err := os.Stat(jsonlPath); !os.IsNotExist(err) {
		t.Error("Expected JSONL file to NOT be created when store is inactive")
	}
	testStore.Close()
}
// TestAutoFlushJSONLContent tests that flushed JSONL has correct content:
// every issue in the store appears as one JSON line with matching fields.
func TestAutoFlushJSONLContent(t *testing.T) {
	// Create temp directory for test database
	tmpDir, err := os.MkdirTemp("", "bd-test-content-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	defer func() {
		if err := os.RemoveAll(tmpDir); err != nil {
			t.Logf("Warning: cleanup failed: %v", err)
		}
	}()
	// NOTE(review): mutates the package-level dbPath/store globals.
	dbPath = filepath.Join(tmpDir, "test.db")
	jsonlPath := filepath.Join(tmpDir, "issues.jsonl")
	// Create store
	testStore, err := sqlite.New(dbPath)
	if err != nil {
		t.Fatalf("Failed to create storage: %v", err)
	}
	defer testStore.Close()
	store = testStore
	storeMutex.Lock()
	storeActive = true
	storeMutex.Unlock()
	ctx := context.Background()
	// Create multiple test issues with differing status/priority/type so the
	// round-trip check below is meaningful.
	issues := []*types.Issue{
		{
			ID:        "test-content-1",
			Title:     "First issue",
			Status:    types.StatusOpen,
			Priority:  1,
			IssueType: types.TypeTask,
			CreatedAt: time.Now(),
			UpdatedAt: time.Now(),
		},
		{
			ID:        "test-content-2",
			Title:     "Second issue",
			Status:    types.StatusInProgress,
			Priority:  2,
			IssueType: types.TypeBug,
			CreatedAt: time.Now(),
			UpdatedAt: time.Now(),
		},
	}
	for _, issue := range issues {
		if err := testStore.CreateIssue(ctx, issue, "test"); err != nil {
			t.Fatalf("Failed to create issue: %v", err)
		}
	}
	// Mark dirty and flush immediately (no debounce timer involved).
	flushMutex.Lock()
	isDirty = true
	flushMutex.Unlock()
	flushToJSONL()
	// Verify JSONL file exists
	if _, err := os.Stat(jsonlPath); os.IsNotExist(err) {
		t.Fatal("Expected JSONL file to be created")
	}
	// Read and verify content: one JSON object per line, keyed by ID.
	f, err := os.Open(jsonlPath)
	if err != nil {
		t.Fatalf("Failed to open JSONL file: %v", err)
	}
	defer f.Close()
	scanner := bufio.NewScanner(f)
	foundIssues := make(map[string]*types.Issue)
	for scanner.Scan() {
		var issue types.Issue
		if err := json.Unmarshal(scanner.Bytes(), &issue); err != nil {
			t.Fatalf("Failed to parse JSONL: %v", err)
		}
		foundIssues[issue.ID] = &issue
	}
	// Verify all issues are present
	if len(foundIssues) != 2 {
		t.Errorf("Expected 2 issues in JSONL, got %d", len(foundIssues))
	}
	// Verify content field by field against the originals.
	for _, original := range issues {
		found, ok := foundIssues[original.ID]
		if !ok {
			t.Errorf("Issue %s not found in JSONL", original.ID)
			continue
		}
		if found.Title != original.Title {
			t.Errorf("Issue %s: Title = %s, want %s", original.ID, found.Title, original.Title)
		}
		if found.Status != original.Status {
			t.Errorf("Issue %s: Status = %s, want %s", original.ID, found.Status, original.Status)
		}
	}
	// Clean up
	storeMutex.Lock()
	storeActive = false
	storeMutex.Unlock()
}
// TestAutoFlushErrorHandling tests error scenarios in flush operations:
// a flush into an unwritable directory must record the failure in
// flushFailureCount and lastFlushError rather than panicking or silently
// succeeding.
func TestAutoFlushErrorHandling(t *testing.T) {
	// Create temp directory for test database
	tmpDir, err := os.MkdirTemp("", "bd-test-error-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	defer func() {
		if err := os.RemoveAll(tmpDir); err != nil {
			t.Logf("Warning: cleanup failed: %v", err)
		}
	}()
	dbPath = filepath.Join(tmpDir, "test.db")
	// Create store
	testStore, err := sqlite.New(dbPath)
	if err != nil {
		t.Fatalf("Failed to create storage: %v", err)
	}
	defer testStore.Close()
	store = testStore
	storeMutex.Lock()
	storeActive = true
	storeMutex.Unlock()
	ctx := context.Background()
	// Create test issue
	issue := &types.Issue{
		ID:        "test-error-1",
		Title:     "Error test issue",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	if err := testStore.CreateIssue(ctx, issue, "test"); err != nil {
		t.Fatalf("Failed to create issue: %v", err)
	}
	// Create a read-only directory to force flush failure.
	// NOTE(review): when the test runs as root, 0555 does not block writes,
	// so this case would unexpectedly succeed — confirm CI is non-root.
	readOnlyDir := filepath.Join(tmpDir, "readonly")
	if err := os.MkdirAll(readOnlyDir, 0555); err != nil {
		t.Fatalf("Failed to create read-only dir: %v", err)
	}
	defer os.Chmod(readOnlyDir, 0755) // Restore permissions for cleanup
	// Set dbPath to point to read-only directory
	originalDBPath := dbPath
	dbPath = filepath.Join(readOnlyDir, "test.db")
	// Reset failure counter so the post-conditions below are deterministic.
	flushMutex.Lock()
	flushFailureCount = 0
	lastFlushError = nil
	isDirty = true
	flushMutex.Unlock()
	// Attempt flush (should fail)
	flushToJSONL()
	// Verify failure was recorded
	flushMutex.Lock()
	failCount := flushFailureCount
	hasError := lastFlushError != nil
	flushMutex.Unlock()
	if failCount != 1 {
		t.Errorf("Expected flushFailureCount to be 1, got %d", failCount)
	}
	if !hasError {
		t.Error("Expected lastFlushError to be set after flush failure")
	}
	// Restore dbPath
	dbPath = originalDBPath
	// Clean up
	storeMutex.Lock()
	storeActive = false
	storeMutex.Unlock()
}
// TestAutoImportIfNewer tests that auto-import triggers when JSONL is newer
// than the DB: after a simulated "git pull" leaves a fresher issues.jsonl on
// disk, autoImportIfNewer must pull its new issues into the store.
func TestAutoImportIfNewer(t *testing.T) {
	// Create temp directory for test database
	tmpDir, err := os.MkdirTemp("", "bd-test-autoimport-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	defer func() {
		if err := os.RemoveAll(tmpDir); err != nil {
			t.Logf("Warning: cleanup failed: %v", err)
		}
	}()
	// NOTE(review): mutates the package-level dbPath/store globals.
	dbPath = filepath.Join(tmpDir, "test.db")
	jsonlPath := filepath.Join(tmpDir, "issues.jsonl")
	// Create store
	testStore, err := sqlite.New(dbPath)
	if err != nil {
		t.Fatalf("Failed to create storage: %v", err)
	}
	defer testStore.Close()
	store = testStore
	storeMutex.Lock()
	storeActive = true
	storeMutex.Unlock()
	ctx := context.Background()
	// Create an initial issue in the database
	dbIssue := &types.Issue{
		ID:        "test-autoimport-1",
		Title:     "Original DB issue",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	if err := testStore.CreateIssue(ctx, dbIssue, "test"); err != nil {
		t.Fatalf("Failed to create issue: %v", err)
	}
	// Wait a moment to ensure different timestamps
	time.Sleep(100 * time.Millisecond)
	// Create a JSONL file with different content (simulating a git pull).
	// It contains the existing issue plus one the DB has not seen.
	jsonlIssue := &types.Issue{
		ID:        "test-autoimport-2",
		Title:     "New JSONL issue",
		Status:    types.StatusInProgress,
		Priority:  2,
		IssueType: types.TypeBug,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	f, err := os.Create(jsonlPath)
	if err != nil {
		t.Fatalf("Failed to create JSONL file: %v", err)
	}
	encoder := json.NewEncoder(f)
	if err := encoder.Encode(dbIssue); err != nil {
		t.Fatalf("Failed to encode first issue: %v", err)
	}
	if err := encoder.Encode(jsonlIssue); err != nil {
		t.Fatalf("Failed to encode second issue: %v", err)
	}
	f.Close()
	// Touch the JSONL file to make it newer than DB, so the mtime comparison
	// inside autoImportIfNewer cannot be defeated by clock granularity.
	futureTime := time.Now().Add(1 * time.Second)
	if err := os.Chtimes(jsonlPath, futureTime, futureTime); err != nil {
		t.Fatalf("Failed to update JSONL timestamp: %v", err)
	}
	// Call autoImportIfNewer
	autoImportIfNewer()
	// Verify that the new issue from JSONL was imported
	imported, err := testStore.GetIssue(ctx, "test-autoimport-2")
	if err != nil {
		t.Fatalf("Failed to get imported issue: %v", err)
	}
	if imported == nil {
		t.Error("Expected issue test-autoimport-2 to be imported from JSONL")
	} else {
		if imported.Title != "New JSONL issue" {
			t.Errorf("Expected title 'New JSONL issue', got '%s'", imported.Title)
		}
	}
	// Clean up
	storeMutex.Lock()
	storeActive = false
	storeMutex.Unlock()
}
// TestAutoImportDisabled tests that --no-auto-import flag disables auto-import.
// NOTE(review): this test replicates the caller-side gating
// (`if autoImportEnabled { ... }`) rather than calling autoImportIfNewer
// unconditionally, so it never verifies that the function itself honors the
// flag — confirm whether autoImportIfNewer checks autoImportEnabled internally.
func TestAutoImportDisabled(t *testing.T) {
	// Create temp directory for test database
	tmpDir, err := os.MkdirTemp("", "bd-test-noimport-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	defer func() {
		if err := os.RemoveAll(tmpDir); err != nil {
			t.Logf("Warning: cleanup failed: %v", err)
		}
	}()
	// NOTE(review): mutates the package-level dbPath/store globals.
	dbPath = filepath.Join(tmpDir, "test.db")
	jsonlPath := filepath.Join(tmpDir, "issues.jsonl")
	// Create store
	testStore, err := sqlite.New(dbPath)
	if err != nil {
		t.Fatalf("Failed to create storage: %v", err)
	}
	defer testStore.Close()
	store = testStore
	storeMutex.Lock()
	storeActive = true
	storeMutex.Unlock()
	ctx := context.Background()
	// Create a JSONL file with an issue that must NOT reach the store.
	jsonlIssue := &types.Issue{
		ID:        "test-noimport-1",
		Title:     "Should not import",
		Status:    types.StatusOpen,
		Priority:  1,
		IssueType: types.TypeTask,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
	f, err := os.Create(jsonlPath)
	if err != nil {
		t.Fatalf("Failed to create JSONL file: %v", err)
	}
	encoder := json.NewEncoder(f)
	if err := encoder.Encode(jsonlIssue); err != nil {
		t.Fatalf("Failed to encode issue: %v", err)
	}
	f.Close()
	// Make JSONL newer than DB
	futureTime := time.Now().Add(1 * time.Second)
	if err := os.Chtimes(jsonlPath, futureTime, futureTime); err != nil {
		t.Fatalf("Failed to update JSONL timestamp: %v", err)
	}
	// Disable auto-import (this would normally be set via --no-auto-import flag)
	oldAutoImport := autoImportEnabled
	autoImportEnabled = false
	defer func() { autoImportEnabled = oldAutoImport }()
	// Call autoImportIfNewer (should do nothing)
	if autoImportEnabled {
		autoImportIfNewer()
	}
	// Verify that the issue was NOT imported
	imported, err := testStore.GetIssue(ctx, "test-noimport-1")
	if err != nil {
		t.Fatalf("Failed to check for issue: %v", err)
	}
	if imported != nil {
		t.Error("Expected issue test-noimport-1 to NOT be imported when auto-import is disabled")
	}
	// Clean up
	storeMutex.Lock()
	storeActive = false
	storeMutex.Unlock()
}

289
cmd/bd/markdown.go Normal file
View File

@@ -0,0 +1,289 @@
// Package main provides the bd command-line interface.
// This file implements markdown file parsing for bulk issue creation from structured markdown documents.
package main
import (
"bufio"
"fmt"
"os"
"path/filepath"
"regexp"
"strings"
"github.com/steveyegge/beads/internal/types"
)
var (
	// h2Regex matches markdown H2 headers (## Title) for issue titles.
	// Anchored with ^/$ so only full header lines match; the single capture
	// group holds the header text. Note the required whitespace after "##"
	// prevents "###" lines from matching. Compiled once at package init for
	// performance.
	h2Regex = regexp.MustCompile(`^##\s+(.+)$`)

	// h3Regex matches markdown H3 headers (### Section) for issue sections.
	// The capture group holds the section name. Compiled once at package
	// init for performance.
	h3Regex = regexp.MustCompile(`^###\s+(.+)$`)
)
// IssueTemplate represents a parsed issue from markdown.
// Fields are populated from H3 sections beneath the issue's H2 title header
// (see parseMarkdownFile for the expected document format).
type IssueTemplate struct {
	Title              string          // text of the H2 header line
	Description        string          // free text after the title, or the "Description" section
	Design             string          // "Design" section
	AcceptanceCriteria string          // "Acceptance Criteria" (or "Acceptance") section
	Priority           int             // "Priority" section, valid range 0-4; defaults to 2
	IssueType          types.IssueType // "Type" section; defaults to "task"
	Assignee           string          // "Assignee" section
	Labels             []string        // "Labels" section, comma/whitespace separated
	Dependencies       []string        // "Dependencies"/"Deps" section; entries may carry a type prefix such as "blocks:bd-10"
}
// parsePriority extracts and validates a priority value from content.
// Returns the parsed priority (0-4) or -1 if the content does not begin
// with an integer in that range.
func parsePriority(content string) int {
	var priority int
	_, err := fmt.Sscanf(content, "%d", &priority)
	if err != nil || priority < 0 || priority > 4 {
		return -1 // not an integer, or out of the 0-4 range
	}
	return priority
}
// parseIssueType extracts and validates an issue type from content.
// Recognized types are bug, feature, task, epic, and chore; anything else
// produces a stderr warning (mentioning issueTitle for context) and falls
// back to the default "task".
func parseIssueType(content, issueTitle string) types.IssueType {
	candidate := types.IssueType(strings.TrimSpace(content))
	switch candidate {
	case types.TypeBug, types.TypeFeature, types.TypeTask, types.TypeEpic, types.TypeChore:
		return candidate
	}
	// Unknown type: warn but continue with the default.
	fmt.Fprintf(os.Stderr, "Warning: invalid issue type '%s' in '%s', using default 'task'\n",
		candidate, issueTitle)
	return types.TypeTask
}
// parseStringList extracts a list of strings from content, splitting on
// commas or any whitespace (spaces, tabs, newlines, carriage returns).
// Returns nil when content holds no items.
// This is a generic helper used by parseLabels and parseDependencies.
//
// Fix: the previous implementation split only on ',', ' ', and '\n', so
// tab- or CR-separated items were glued together despite the documented
// "comma or whitespace" contract. strings.Fields splits on all Unicode
// whitespace in one pass.
func parseStringList(content string) []string {
	// Turn commas into spaces so a single strings.Fields pass handles every
	// separator; fields are therefore already trimmed.
	fields := strings.Fields(strings.ReplaceAll(content, ",", " "))
	if len(fields) == 0 {
		return nil // preserve the original's nil result for empty input
	}
	return fields
}
// parseLabels extracts labels from content, splitting by comma or whitespace.
// Thin wrapper over parseStringList, kept as a named function so call sites
// read clearly and label-specific rules have an obvious home later.
func parseLabels(content string) []string {
	return parseStringList(content)
}
// parseDependencies extracts dependencies from content, splitting by comma or
// whitespace. Entries may carry a dependency-type prefix (e.g. "blocks:bd-10");
// this helper does not interpret the prefix, it only tokenizes.
func parseDependencies(content string) []string {
	return parseStringList(content)
}
// processIssueSection applies one parsed markdown section to the issue
// template. Section names are matched case-insensitively; blank content or
// an unrecognized section name leaves the template untouched.
func processIssueSection(issue *IssueTemplate, section, content string) {
	trimmed := strings.TrimSpace(content)
	if trimmed == "" {
		return
	}
	switch strings.ToLower(section) {
	case "priority":
		// Only overwrite the default when the value parses as a valid 0-4.
		if p := parsePriority(trimmed); p != -1 {
			issue.Priority = p
		}
	case "type":
		issue.IssueType = parseIssueType(trimmed, issue.Title)
	case "description":
		issue.Description = trimmed
	case "design":
		issue.Design = trimmed
	case "acceptance criteria", "acceptance":
		issue.AcceptanceCriteria = trimmed
	case "assignee":
		issue.Assignee = trimmed
	case "labels":
		issue.Labels = parseLabels(trimmed)
	case "dependencies", "deps":
		issue.Dependencies = parseDependencies(trimmed)
	}
}
// validateMarkdownPath validates and cleans a markdown file path to prevent
// security issues. It rejects directory traversal, non-markdown extensions,
// and paths that do not resolve to an existing regular file, returning the
// cleaned path on success.
func validateMarkdownPath(path string) (string, error) {
	cleaned := filepath.Clean(path)

	// Reject any path that still contains a parent-directory component.
	if strings.Contains(cleaned, "..") {
		return "", fmt.Errorf("invalid file path: directory traversal not allowed")
	}

	// Accept only markdown extensions (case-insensitive).
	switch strings.ToLower(filepath.Ext(cleaned)) {
	case ".md", ".markdown":
		// ok
	default:
		return "", fmt.Errorf("invalid file type: only .md and .markdown files are supported")
	}

	// The path must exist and refer to a file, not a directory.
	info, err := os.Stat(cleaned)
	if err != nil {
		return "", fmt.Errorf("cannot access file: %w", err)
	}
	if info.IsDir() {
		return "", fmt.Errorf("path is a directory, not a file")
	}
	return cleaned, nil
}
// parseMarkdownFile parses a markdown file and extracts issue templates.
// Expected format:
// ## Issue Title
// Description text...
//
// ### Priority
// 2
//
// ### Type
// feature
//
// ### Description
// Detailed description...
//
// ### Design
// Design notes...
//
// ### Acceptance Criteria
// - Criterion 1
// - Criterion 2
//
// ### Assignee
// username
//
// ### Labels
// label1, label2
//
// ### Dependencies
// bd-10, bd-20
//
// Each H2 header starts a new issue; H3 headers open sections within the
// current issue. Free text between the H2 title and the first H3 becomes the
// issue's description (an explicit "### Description" section overrides it).
// Returns an error if the path is invalid, the file cannot be read, or no
// issues are found.
func parseMarkdownFile(path string) ([]*IssueTemplate, error) {
	// Validate and clean the file path (rejects traversal and non-markdown files).
	cleanPath, err := validateMarkdownPath(path)
	if err != nil {
		return nil, err
	}
	// #nosec G304 -- Path is validated by validateMarkdownPath which prevents traversal
	file, err := os.Open(cleanPath)
	if err != nil {
		return nil, fmt.Errorf("failed to open file: %w", err)
	}
	defer func() {
		_ = file.Close() // Close errors on read-only operations are not actionable
	}()

	var issues []*IssueTemplate
	var currentIssue *IssueTemplate
	var currentSection string
	var sectionContent strings.Builder

	scanner := bufio.NewScanner(file)
	// Increase buffer size so a single long line in a large markdown file
	// doesn't abort the scan with bufio.ErrTooLong.
	const maxScannerBuffer = 1024 * 1024 // 1MB
	buf := make([]byte, maxScannerBuffer)
	scanner.Buffer(buf, maxScannerBuffer)

	// finalizeSection flushes the accumulated section body into the current
	// issue and resets the accumulator. Safe to call when nothing is pending.
	finalizeSection := func() {
		if currentIssue == nil || currentSection == "" {
			return
		}
		content := sectionContent.String()
		processIssueSection(currentIssue, currentSection, content)
		sectionContent.Reset()
	}

	for scanner.Scan() {
		line := scanner.Text()

		// H2 header: close out the previous section/issue and start a new issue.
		if matches := h2Regex.FindStringSubmatch(line); matches != nil {
			finalizeSection()
			if currentIssue != nil {
				issues = append(issues, currentIssue)
			}
			currentIssue = &IssueTemplate{
				Title:     strings.TrimSpace(matches[1]),
				Priority:  2,      // Default priority
				IssueType: "task", // Default type
			}
			currentSection = ""
			continue
		}

		// H3 header: close out the previous section and start a new one.
		if matches := h3Regex.FindStringSubmatch(line); matches != nil {
			finalizeSection()
			currentSection = strings.TrimSpace(matches[1])
			continue
		}

		// Regular content line.
		if currentIssue != nil && currentSection != "" {
			// Inside a section: accumulate, joining lines with newlines.
			if sectionContent.Len() > 0 {
				sectionContent.WriteString("\n")
			}
			sectionContent.WriteString(line)
		} else if currentIssue != nil && currentSection == "" {
			// Between the title and the first section: non-empty lines form
			// the issue's default description. Fix: accumulate every such
			// line joined with newlines — the previous guard
			// (Description == "") kept only the first line and made the
			// newline-join branch unreachable. A later "### Description"
			// section still overrides this value.
			if line != "" {
				if currentIssue.Description != "" {
					currentIssue.Description += "\n"
				}
				currentIssue.Description += line
			}
		}
	}

	// Finalize last section and issue
	finalizeSection()
	if currentIssue != nil {
		issues = append(issues, currentIssue)
	}
	if err := scanner.Err(); err != nil {
		return nil, fmt.Errorf("error reading file: %w", err)
	}
	// A file without any H2 headers yields no issues — treat as an error so
	// callers don't silently create nothing.
	if len(issues) == 0 {
		return nil, fmt.Errorf("no issues found in markdown file (expected ## Issue Title format)")
	}
	return issues, nil
}

238
cmd/bd/markdown_test.go Normal file
View File

@@ -0,0 +1,238 @@
package main
import (
"os"
"path/filepath"
"testing"
)
// TestParseMarkdownFile exercises the markdown-to-IssueTemplate parser with
// table-driven cases: a single issue, multiple issues in one file, an issue
// using every supported section, typed dependencies, and default values.
func TestParseMarkdownFile(t *testing.T) {
	tests := []struct {
		name     string
		content  string // raw markdown written to a temp file
		expected []*IssueTemplate
		wantErr  bool
	}{
		{
			name: "simple issue",
			content: `## Fix authentication bug
This is a critical bug in the auth system.
### Priority
1
### Type
bug
`,
			expected: []*IssueTemplate{
				{
					Title:       "Fix authentication bug",
					Description: "This is a critical bug in the auth system.",
					Priority:    1,
					IssueType:   "bug",
				},
			},
		},
		{
			name: "multiple issues",
			content: `## First Issue
Description for first issue.
### Priority
0
### Type
feature
## Second Issue
Description for second issue.
### Priority
2
### Type
task
`,
			expected: []*IssueTemplate{
				{
					Title:       "First Issue",
					Description: "Description for first issue.",
					Priority:    0,
					IssueType:   "feature",
				},
				{
					Title:       "Second Issue",
					Description: "Description for second issue.",
					Priority:    2,
					IssueType:   "task",
				},
			},
		},
		{
			// The explicit "### Description" section overrides the free text
			// that follows the title ("Initial description text.").
			name: "issue with all fields",
			content: `## Comprehensive Issue
Initial description text.
### Priority
1
### Type
feature
### Description
Detailed description here.
Multi-line support.
### Design
Design notes go here.
### Acceptance Criteria
- Must do this
- Must do that
### Assignee
alice
### Labels
backend, urgent
### Dependencies
bd-10, bd-20
`,
			expected: []*IssueTemplate{
				{
					Title:              "Comprehensive Issue",
					Description:        "Detailed description here.\nMulti-line support.",
					Design:             "Design notes go here.",
					AcceptanceCriteria: "- Must do this\n- Must do that",
					Priority:           1,
					IssueType:          "feature",
					Assignee:           "alice",
					Labels:             []string{"backend", "urgent"},
					Dependencies:       []string{"bd-10", "bd-20"},
				},
			},
		},
		{
			// Dependency entries may carry a type prefix; the parser must
			// keep the prefix intact.
			name: "dependencies with types",
			content: `## Issue with typed dependencies
### Priority
2
### Type
task
### Dependencies
blocks:bd-10, discovered-from:bd-20
`,
			expected: []*IssueTemplate{
				{
					Title:        "Issue with typed dependencies",
					Priority:     2,
					IssueType:    "task",
					Dependencies: []string{"blocks:bd-10", "discovered-from:bd-20"},
				},
			},
		},
		{
			name: "default values",
			content: `## Minimal Issue
Just a title and description.
`,
			expected: []*IssueTemplate{
				{
					Title:       "Minimal Issue",
					Description: "Just a title and description.",
					Priority:    2,      // default
					IssueType:   "task", // default
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Create temp file (t.TempDir is cleaned up automatically)
			tmpDir := t.TempDir()
			tmpFile := filepath.Join(tmpDir, "test.md")
			if err := os.WriteFile(tmpFile, []byte(tt.content), 0600); err != nil {
				t.Fatalf("Failed to create test file: %v", err)
			}
			// Parse file
			got, err := parseMarkdownFile(tmpFile)
			if (err != nil) != tt.wantErr {
				t.Errorf("parseMarkdownFile() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if len(got) != len(tt.expected) {
				t.Errorf("parseMarkdownFile() got %d issues, want %d", len(got), len(tt.expected))
				return
			}
			// Compare each issue, field by field.
			for i, gotIssue := range got {
				wantIssue := tt.expected[i]
				if gotIssue.Title != wantIssue.Title {
					t.Errorf("Issue %d: Title = %q, want %q", i, gotIssue.Title, wantIssue.Title)
				}
				if gotIssue.Description != wantIssue.Description {
					t.Errorf("Issue %d: Description = %q, want %q", i, gotIssue.Description, wantIssue.Description)
				}
				if gotIssue.Priority != wantIssue.Priority {
					t.Errorf("Issue %d: Priority = %d, want %d", i, gotIssue.Priority, wantIssue.Priority)
				}
				if gotIssue.IssueType != wantIssue.IssueType {
					t.Errorf("Issue %d: IssueType = %q, want %q", i, gotIssue.IssueType, wantIssue.IssueType)
				}
				if gotIssue.Design != wantIssue.Design {
					t.Errorf("Issue %d: Design = %q, want %q", i, gotIssue.Design, wantIssue.Design)
				}
				if gotIssue.AcceptanceCriteria != wantIssue.AcceptanceCriteria {
					t.Errorf("Issue %d: AcceptanceCriteria = %q, want %q", i, gotIssue.AcceptanceCriteria, wantIssue.AcceptanceCriteria)
				}
				if gotIssue.Assignee != wantIssue.Assignee {
					t.Errorf("Issue %d: Assignee = %q, want %q", i, gotIssue.Assignee, wantIssue.Assignee)
				}
				// Compare slices (nil and empty are considered equal)
				if !stringSlicesEqual(gotIssue.Labels, wantIssue.Labels) {
					t.Errorf("Issue %d: Labels = %v, want %v", i, gotIssue.Labels, wantIssue.Labels)
				}
				if !stringSlicesEqual(gotIssue.Dependencies, wantIssue.Dependencies) {
					t.Errorf("Issue %d: Dependencies = %v, want %v", i, gotIssue.Dependencies, wantIssue.Dependencies)
				}
			}
		})
	}
}
// TestParseMarkdownFile_FileNotFound verifies that parsing a path that does
// not exist surfaces an error instead of succeeding silently.
func TestParseMarkdownFile_FileNotFound(t *testing.T) {
	if _, err := parseMarkdownFile("/nonexistent/file.md"); err == nil {
		t.Error("Expected error for non-existent file, got nil")
	}
}
// stringSlicesEqual reports whether a and b contain the same elements in the
// same order. A nil slice and an empty slice are considered equal (both have
// length 0). The previous explicit len==0 early-return was redundant: the
// range loop over an empty slice already falls through to true.
func stringSlicesEqual(a, b []string) bool {
	if len(a) != len(b) {
		return false
	}
	for i := range a {
		if a[i] != b[i] {
			return false
		}
	}
	return true
}

View File

@@ -82,7 +82,16 @@ var quickstartCmd = &cobra.Command{
fmt.Printf(" Applications can extend bd's SQLite database:\n")
fmt.Printf(" • Add your own tables (e.g., %s)\n", cyan("myapp_executions"))
fmt.Printf(" • Join with %s table for powerful queries\n", cyan("issues"))
fmt.Printf(" • See %s for integration patterns\n\n", cyan("EXTENDING.md"))
fmt.Printf(" • See database extension docs for integration patterns:\n")
fmt.Printf(" %s\n\n", cyan("https://github.com/steveyegge/beads/blob/main/EXTENDING.md"))
fmt.Printf("%s\n", bold("GIT WORKFLOW (AUTO-SYNC)"))
fmt.Printf(" bd automatically keeps git in sync:\n")
fmt.Printf(" • %s Export to JSONL after CRUD operations (5s debounce)\n", green("✓"))
fmt.Printf(" • %s Import from JSONL when newer than DB (after %s)\n", green("✓"), cyan("git pull"))
fmt.Printf(" • %s Works seamlessly across machines and team members\n", green("✓"))
fmt.Printf(" • No manual export/import needed!\n")
fmt.Printf(" Disable with: %s or %s\n\n", cyan("--no-auto-flush"), cyan("--no-auto-import"))
fmt.Printf("%s\n", green("Ready to start!"))
fmt.Printf("Run %s to create your first issue.\n\n", cyan("bd create \"My first issue\""))

View File

@@ -8,7 +8,7 @@ import (
const (
// Version is the current version of bd
Version = "0.9.0"
Version = "0.9.2"
// Build can be set via ldflags at compile time
Build = "dev"
)