Configure oh-my-openagent (omo) plugin for multi-agent orchestration using ollama-cloud and local llama-swap providers. Primary model is ollama-cloud/glm-5.1 with fallback chains. Add runtime fallback, background task concurrency limits, and disable incompatible agents (hephaestus, multimodal-looker).
24 lines · 576 B · JSON
{
  "$schema": "https://opencode.ai/config.json",
  "plugin": ["oh-my-openagent"],
  "provider": {
    "llama-local": {
      "name": "Llama.cpp (zix790prors RTX 4070 Ti)",
      "npm": "@ai-sdk/openai-compatible",
      "options": {
        "baseURL": "http://zix790prors.oglehome:8080/v1"
      },
      "models": {
        "Qwen3.6-35B-A3B": {
          "name": "Qwen3.6-35B-A3B (UD-Q8_K_XL)",
          "reasoning": true,
          "tool_call": true,
          "limit": {
            "context": 131072,
            "output": 32768
          }
        }
      }
    }
  }
}