refactor: Split large cmd/bd files to meet 800-line limit (bd-xtf5)

Split 6 files exceeding 800 lines by extracting cohesive function groups:

- show.go (1592→578): extracted show_thread.go, close.go, edit.go, update.go
- doctor.go (1295→690): extracted doctor_fix.go, doctor_health.go, doctor_pollution.go
- sync.go (1201→749): extracted sync_git.go
- compact.go (1199→775): extracted compact_tombstone.go, compact_rpc.go
- linear.go (1190→641): extracted linear_sync.go, linear_conflict.go
- main.go (1148→800): extracted main_help.go, main_errors.go, main_daemon.go

All files are now under the 800-line acceptance criterion.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Steve Yegge
2025-12-28 18:04:52 -08:00
parent 7ea7aee411
commit 6c14fd2225
22 changed files with 4251 additions and 4087 deletions

View File

@@ -639,552 +639,3 @@ func getLinearHashLength(ctx context.Context) int {
return value
}
// detectLinearConflicts finds issues that have been modified both locally and in Linear
// since the last sync. This is a more expensive operation as it fetches individual
// issue timestamps from Linear.
func detectLinearConflicts(ctx context.Context) ([]linear.Conflict, error) {
	rawLastSync, _ := store.GetConfig(ctx, "linear.last_sync")
	if rawLastSync == "" {
		// Never synced: nothing can be in conflict.
		return nil, nil
	}
	syncedAt, err := time.Parse(time.RFC3339, rawLastSync)
	if err != nil {
		return nil, fmt.Errorf("invalid last_sync timestamp: %w", err)
	}
	mapping := loadLinearMappingConfig(ctx)
	client, err := getLinearClient(ctx)
	if err != nil {
		return nil, fmt.Errorf("failed to create Linear client: %w", err)
	}
	// Get all local issues with Linear external refs
	localIssues, err := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		return nil, err
	}
	var found []linear.Conflict
	for _, local := range localIssues {
		if local.ExternalRef == nil || !linear.IsLinearExternalRef(*local.ExternalRef) {
			continue
		}
		if !local.UpdatedAt.After(syncedAt) {
			continue // untouched locally since the last sync
		}
		identifier := linear.ExtractLinearIdentifier(*local.ExternalRef)
		if identifier == "" {
			continue
		}
		remote, err := client.FetchIssueByIdentifier(ctx, identifier)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Warning: failed to fetch Linear issue %s for conflict check: %v\n",
				identifier, err)
			continue
		}
		if remote == nil {
			continue
		}
		remoteUpdated, err := time.Parse(time.RFC3339, remote.UpdatedAt)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Warning: failed to parse Linear UpdatedAt for %s: %v\n",
				identifier, err)
			continue
		}
		if !remoteUpdated.After(syncedAt) {
			continue // Linear side untouched since the last sync
		}
		// Both sides moved; if the normalized content hashes agree there is
		// nothing to resolve.
		localHash := linear.NormalizeIssueForLinearHash(local).ComputeContentHash()
		remoteHash := linear.IssueToBeads(remote, mapping).Issue.(*types.Issue).ComputeContentHash()
		if localHash == remoteHash {
			continue
		}
		found = append(found, linear.Conflict{
			IssueID:           local.ID,
			LocalUpdated:      local.UpdatedAt,
			LinearUpdated:     remoteUpdated,
			LinearExternalRef: *local.ExternalRef,
			LinearIdentifier:  identifier,
			LinearInternalID:  remote.ID,
		})
	}
	return found, nil
}
// reimportLinearConflicts re-imports conflicting issues from Linear (Linear wins).
// For each conflict, fetches the current state from Linear and updates the local copy.
func reimportLinearConflicts(ctx context.Context, conflicts []linear.Conflict) error {
	if len(conflicts) == 0 {
		return nil
	}
	client, err := getLinearClient(ctx)
	if err != nil {
		return fmt.Errorf("failed to create Linear client: %w", err)
	}
	mapping := loadLinearMappingConfig(ctx)
	var resolved, failed int
	for _, c := range conflicts {
		remote, fetchErr := client.FetchIssueByIdentifier(ctx, c.LinearIdentifier)
		if fetchErr != nil {
			fmt.Fprintf(os.Stderr, " Warning: failed to fetch %s for resolution: %v\n",
				c.LinearIdentifier, fetchErr)
			failed++
			continue
		}
		if remote == nil {
			fmt.Fprintf(os.Stderr, " Warning: Linear issue %s not found, skipping\n",
				c.LinearIdentifier)
			failed++
			continue
		}
		// Overwrite the local copy with the fields derived from Linear.
		updates := linear.BuildLinearToLocalUpdates(remote, mapping)
		if updateErr := store.UpdateIssue(ctx, c.IssueID, updates, actor); updateErr != nil {
			fmt.Fprintf(os.Stderr, " Warning: failed to update local issue %s: %v\n",
				c.IssueID, updateErr)
			failed++
			continue
		}
		fmt.Printf(" Resolved: %s <- %s (Linear wins)\n", c.IssueID, c.LinearIdentifier)
		resolved++
	}
	if failed > 0 {
		return fmt.Errorf("%d conflict(s) failed to resolve", failed)
	}
	fmt.Printf(" Resolved %d conflict(s) by keeping Linear version\n", resolved)
	return nil
}
// resolveLinearConflictsByTimestamp resolves conflicts by keeping the newer version.
// For each conflict, compares local and Linear UpdatedAt timestamps.
// If Linear is newer, re-imports from Linear. If local is newer, push will overwrite.
func resolveLinearConflictsByTimestamp(ctx context.Context, conflicts []linear.Conflict) error {
	if len(conflicts) == 0 {
		return nil
	}
	// Partition by which side carries the most recent edit.
	var newerInLinear, newerLocally []linear.Conflict
	for _, c := range conflicts {
		if c.LinearUpdated.After(c.LocalUpdated) {
			newerInLinear = append(newerInLinear, c)
		} else {
			newerLocally = append(newerLocally, c)
		}
	}
	if n := len(newerInLinear); n > 0 {
		fmt.Printf(" %d conflict(s): Linear is newer, will re-import\n", n)
	}
	if n := len(newerLocally); n > 0 {
		fmt.Printf(" %d conflict(s): Local is newer, will push to Linear\n", n)
	}
	if len(newerInLinear) > 0 {
		if err := reimportLinearConflicts(ctx, newerInLinear); err != nil {
			return fmt.Errorf("failed to re-import Linear-wins conflicts: %w", err)
		}
	}
	// Local-wins conflicts need no action here; the subsequent push overwrites
	// the Linear side.
	for _, c := range newerLocally {
		fmt.Printf(" Resolved: %s -> %s (local wins, will push)\n",
			c.IssueID, c.LinearIdentifier)
	}
	return nil
}
// doPullFromLinear imports issues from Linear using the GraphQL API.
// Supports incremental sync by checking linear.last_sync config and only fetching
// issues updated since that timestamp.
//
//   - dryRun: report what would be imported without creating dependencies
//     (import itself is delegated to importIssuesCore with DryRun set).
//   - state: workflow-state filter passed through to the Linear fetch calls.
//   - skipLinearIDs: Linear identifiers to exclude from the import; excluded
//     issues count toward stats.Skipped and any dependency touching one is dropped.
//
// The returned *linear.PullStats is non-nil even when an error is returned.
func doPullFromLinear(ctx context.Context, dryRun bool, state string, skipLinearIDs map[string]bool) (*linear.PullStats, error) {
	stats := &linear.PullStats{}
	client, err := getLinearClient(ctx)
	if err != nil {
		return stats, fmt.Errorf("failed to create Linear client: %w", err)
	}
	var linearIssues []linear.Issue
	// Choose incremental vs. full sync based on the stored last-sync timestamp.
	lastSyncStr, _ := store.GetConfig(ctx, "linear.last_sync")
	if lastSyncStr != "" {
		// NOTE(review): err is shadowed inside this branch; the outer err is not
		// read again before reassignment, so the shadowing is harmless — confirm.
		lastSync, err := time.Parse(time.RFC3339, lastSyncStr)
		if err != nil {
			// Corrupt timestamp: degrade to a full sync instead of failing.
			fmt.Fprintf(os.Stderr, "Warning: invalid linear.last_sync timestamp, doing full sync\n")
			linearIssues, err = client.FetchIssues(ctx, state)
			if err != nil {
				return stats, fmt.Errorf("failed to fetch issues from Linear: %w", err)
			}
		} else {
			stats.Incremental = true
			stats.SyncedSince = lastSyncStr
			linearIssues, err = client.FetchIssuesSince(ctx, state, lastSync)
			if err != nil {
				return stats, fmt.Errorf("failed to fetch issues from Linear (incremental): %w", err)
			}
			if !dryRun {
				fmt.Printf(" Incremental sync since %s\n", lastSync.Format("2006-01-02 15:04:05"))
			}
		}
	} else {
		// No previous sync recorded: fetch everything.
		linearIssues, err = client.FetchIssues(ctx, state)
		if err != nil {
			return stats, fmt.Errorf("failed to fetch issues from Linear: %w", err)
		}
		if !dryRun {
			fmt.Println(" Full sync (no previous sync timestamp)")
		}
	}
	mappingConfig := loadLinearMappingConfig(ctx)
	idMode := getLinearIDMode(ctx)
	hashLength := getLinearHashLength(ctx)
	// Convert each Linear issue to a beads issue, collecting dependency edges
	// (keyed by Linear identifier) to materialize in a second pass after import.
	var beadsIssues []*types.Issue
	var allDeps []linear.DependencyInfo
	linearIDToBeadsID := make(map[string]string)
	for i := range linearIssues {
		conversion := linear.IssueToBeads(&linearIssues[i], mappingConfig)
		beadsIssues = append(beadsIssues, conversion.Issue.(*types.Issue))
		allDeps = append(allDeps, conversion.Dependencies...)
	}
	if len(beadsIssues) == 0 {
		fmt.Println(" No issues to import")
		return stats, nil
	}
	// Drop issues the caller asked to skip, plus any dependency touching them.
	if len(skipLinearIDs) > 0 {
		var filteredIssues []*types.Issue
		skipped := 0
		for _, issue := range beadsIssues {
			if issue.ExternalRef == nil {
				filteredIssues = append(filteredIssues, issue)
				continue
			}
			linearID := linear.ExtractLinearIdentifier(*issue.ExternalRef)
			if linearID != "" && skipLinearIDs[linearID] {
				skipped++
				continue
			}
			filteredIssues = append(filteredIssues, issue)
		}
		if skipped > 0 {
			stats.Skipped += skipped
		}
		beadsIssues = filteredIssues
		if len(allDeps) > 0 {
			var filteredDeps []linear.DependencyInfo
			for _, dep := range allDeps {
				if skipLinearIDs[dep.FromLinearID] || skipLinearIDs[dep.ToLinearID] {
					continue
				}
				filteredDeps = append(filteredDeps, dep)
			}
			allDeps = filteredDeps
		}
	}
	prefix, err := store.GetConfig(ctx, "issue_prefix")
	if err != nil || prefix == "" {
		prefix = "bd" // default issue-ID prefix when none is configured
	}
	if idMode == "hash" {
		// Hash mode: generate deterministic IDs while avoiding collisions with
		// every existing ID, tombstones included.
		existingIssues, err := store.SearchIssues(ctx, "", types.IssueFilter{IncludeTombstones: true})
		if err != nil {
			return stats, fmt.Errorf("failed to fetch existing issues for ID collision avoidance: %w", err)
		}
		usedIDs := make(map[string]bool, len(existingIssues))
		for _, issue := range existingIssues {
			if issue.ID != "" {
				usedIDs[issue.ID] = true
			}
		}
		idOpts := linear.IDGenerationOptions{
			BaseLength: hashLength,
			MaxLength:  8,
			UsedIDs:    usedIDs,
		}
		if err := linear.GenerateIssueIDs(beadsIssues, prefix, "linear-import", idOpts); err != nil {
			return stats, fmt.Errorf("failed to generate issue IDs: %w", err)
		}
	} else if idMode != "db" {
		return stats, fmt.Errorf("unsupported linear.id_mode %q (expected \"hash\" or \"db\")", idMode)
	}
	opts := ImportOptions{
		DryRun:     dryRun,
		SkipUpdate: false,
	}
	result, err := importIssuesCore(ctx, dbPath, store, beadsIssues, opts)
	if err != nil {
		return stats, fmt.Errorf("import failed: %w", err)
	}
	stats.Created = result.Created
	stats.Updated = result.Updated
	stats.Skipped = result.Skipped
	if dryRun {
		// Dry run ends here; dependency wiring is not simulated.
		if stats.Incremental {
			fmt.Printf(" Would import %d issues from Linear (incremental since %s)\n",
				len(linearIssues), stats.SyncedSince)
		} else {
			fmt.Printf(" Would import %d issues from Linear (full sync)\n", len(linearIssues))
		}
		return stats, nil
	}
	// Second pass: re-read all issues so newly created beads IDs can be mapped
	// back to Linear identifiers for dependency creation.
	allBeadsIssues, err := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		// Import succeeded; dependency wiring is best-effort, so warn and return.
		fmt.Fprintf(os.Stderr, "Warning: failed to fetch issues for dependency mapping: %v\n", err)
		return stats, nil
	}
	for _, issue := range allBeadsIssues {
		if issue.ExternalRef != nil && linear.IsLinearExternalRef(*issue.ExternalRef) {
			linearID := linear.ExtractLinearIdentifier(*issue.ExternalRef)
			if linearID != "" {
				linearIDToBeadsID[linearID] = issue.ID
			}
		}
	}
	depsCreated := 0
	for _, dep := range allDeps {
		fromID, fromOK := linearIDToBeadsID[dep.FromLinearID]
		toID, toOK := linearIDToBeadsID[dep.ToLinearID]
		if !fromOK || !toOK {
			continue // an endpoint wasn't imported; drop the edge silently
		}
		dependency := &types.Dependency{
			IssueID:     fromID,
			DependsOnID: toID,
			Type:        types.DependencyType(dep.Type),
			CreatedAt:   time.Now(),
		}
		err := store.AddDependency(ctx, dependency, actor)
		if err != nil {
			// Re-running a sync recreates edges; duplicate errors are expected
			// and suppressed (matched by error-string substring).
			if !strings.Contains(err.Error(), "already exists") &&
				!strings.Contains(err.Error(), "duplicate") {
				fmt.Fprintf(os.Stderr, "Warning: failed to create dependency %s -> %s (%s): %v\n",
					fromID, toID, dep.Type, err)
			}
		} else {
			depsCreated++
		}
	}
	if depsCreated > 0 {
		fmt.Printf(" Created %d dependencies from Linear relations\n", depsCreated)
	}
	return stats, nil
}
// doPushToLinear exports issues to Linear using the GraphQL API.
//
//   - dryRun: count what would be created/updated without calling mutations
//     (the update path still fetches from Linear to decide what would change).
//   - createOnly: only create issues lacking an external ref; skip all updates.
//   - updateRefs: after creating, write the new Linear URL back into the local
//     issue's external_ref.
//   - forceUpdateIDs: local issue IDs pushed even when timestamp/content checks
//     would otherwise skip them.
//   - skipUpdateIDs: local issue IDs excluded from the update pass.
//
// The returned *linear.PushStats is non-nil even when an error is returned.
func doPushToLinear(ctx context.Context, dryRun bool, createOnly bool, updateRefs bool, forceUpdateIDs map[string]bool, skipUpdateIDs map[string]bool) (*linear.PushStats, error) {
	stats := &linear.PushStats{}
	client, err := getLinearClient(ctx)
	if err != nil {
		return stats, fmt.Errorf("failed to create Linear client: %w", err)
	}
	allIssues, err := store.SearchIssues(ctx, "", types.IssueFilter{})
	if err != nil {
		return stats, fmt.Errorf("failed to get local issues: %w", err)
	}
	// Partition: issues with a Linear ref are update candidates; issues with no
	// ref at all get created. Issues with a non-Linear ref are left alone.
	var toCreate []*types.Issue
	var toUpdate []*types.Issue
	for _, issue := range allIssues {
		if issue.IsTombstone() {
			continue
		}
		if issue.ExternalRef != nil && linear.IsLinearExternalRef(*issue.ExternalRef) {
			if !createOnly {
				toUpdate = append(toUpdate, issue)
			}
		} else if issue.ExternalRef == nil {
			toCreate = append(toCreate, issue)
		}
	}
	// State cache is only needed (and only safe to dereference) when real
	// mutations will run; dry-run paths `continue` before touching it.
	var stateCache *linear.StateCache
	if !dryRun && (len(toCreate) > 0 || (!createOnly && len(toUpdate) > 0)) {
		stateCache, err = linear.BuildStateCache(ctx, client)
		if err != nil {
			return stats, fmt.Errorf("failed to fetch team states: %w", err)
		}
	}
	mappingConfig := loadLinearMappingConfig(ctx)
	for _, issue := range toCreate {
		if dryRun {
			stats.Created++
			continue
		}
		linearPriority := linear.PriorityToLinear(issue.Priority, mappingConfig)
		stateID := stateCache.FindStateForBeadsStatus(issue.Status)
		description := linear.BuildLinearDescription(issue)
		linearIssue, err := client.CreateIssue(ctx, issue.Title, description, linearPriority, stateID, nil)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Warning: failed to create issue '%s' in Linear: %v\n", issue.Title, err)
			stats.Errors++
			continue
		}
		stats.Created++
		fmt.Printf(" Created: %s -> %s\n", issue.ID, linearIssue.Identifier)
		// Optionally link the local issue to its new Linear counterpart.
		if updateRefs && linearIssue.URL != "" {
			externalRef := linearIssue.URL
			if canonical, ok := linear.CanonicalizeLinearExternalRef(externalRef); ok {
				externalRef = canonical
			}
			updates := map[string]interface{}{
				"external_ref": externalRef,
			}
			if err := store.UpdateIssue(ctx, issue.ID, updates, actor); err != nil {
				fmt.Fprintf(os.Stderr, "Warning: failed to update external_ref for %s: %v\n", issue.ID, err)
				stats.Errors++
			}
		}
	}
	if len(toUpdate) > 0 && !createOnly {
		for _, issue := range toUpdate {
			if skipUpdateIDs != nil && skipUpdateIDs[issue.ID] {
				stats.Skipped++
				continue
			}
			linearIdentifier := linear.ExtractLinearIdentifier(*issue.ExternalRef)
			if linearIdentifier == "" {
				fmt.Fprintf(os.Stderr, "Warning: could not extract Linear identifier from %s: %s\n",
					issue.ID, *issue.ExternalRef)
				stats.Errors++
				continue
			}
			linearIssue, err := client.FetchIssueByIdentifier(ctx, linearIdentifier)
			if err != nil {
				fmt.Fprintf(os.Stderr, "Warning: failed to fetch Linear issue %s: %v\n",
					linearIdentifier, err)
				stats.Errors++
				continue
			}
			if linearIssue == nil {
				fmt.Fprintf(os.Stderr, "Warning: Linear issue %s not found (may have been deleted)\n",
					linearIdentifier)
				stats.Skipped++
				continue
			}
			linearUpdatedAt, err := time.Parse(time.RFC3339, linearIssue.UpdatedAt)
			if err != nil {
				fmt.Fprintf(os.Stderr, "Warning: failed to parse Linear UpdatedAt for %s: %v\n",
					linearIdentifier, err)
				stats.Errors++
				continue
			}
			// Unless forced, skip when Linear is at least as new as the local copy,
			// or when the normalized content hashes are identical (no real change).
			forcedUpdate := forceUpdateIDs != nil && forceUpdateIDs[issue.ID]
			if !forcedUpdate && !issue.UpdatedAt.After(linearUpdatedAt) {
				stats.Skipped++
				continue
			}
			if !forcedUpdate {
				localComparable := linear.NormalizeIssueForLinearHash(issue)
				linearComparable := linear.IssueToBeads(linearIssue, mappingConfig).Issue.(*types.Issue)
				if localComparable.ComputeContentHash() == linearComparable.ComputeContentHash() {
					stats.Skipped++
					continue
				}
			}
			if dryRun {
				stats.Updated++
				continue
			}
			description := linear.BuildLinearDescription(issue)
			updatePayload := map[string]interface{}{
				"title":       issue.Title,
				"description": description,
			}
			// Only include priority/state when they map to something meaningful.
			linearPriority := linear.PriorityToLinear(issue.Priority, mappingConfig)
			if linearPriority > 0 {
				updatePayload["priority"] = linearPriority
			}
			stateID := stateCache.FindStateForBeadsStatus(issue.Status)
			if stateID != "" {
				updatePayload["stateId"] = stateID
			}
			updatedLinearIssue, err := client.UpdateIssue(ctx, linearIssue.ID, updatePayload)
			if err != nil {
				fmt.Fprintf(os.Stderr, "Warning: failed to update Linear issue %s: %v\n",
					linearIdentifier, err)
				stats.Errors++
				continue
			}
			stats.Updated++
			fmt.Printf(" Updated: %s -> %s\n", issue.ID, updatedLinearIssue.Identifier)
		}
	}
	if dryRun {
		fmt.Printf(" Would create %d issues in Linear\n", stats.Created)
		if !createOnly {
			fmt.Printf(" Would update %d issues in Linear\n", stats.Updated)
		}
	}
	return stats, nil
}