{
"$schema": "https://opencode.ai/config.json",
"plugin": [
"opencode-wakatime",
"opencode-websearch-cited",
"opencode-pty",
"oh-my-opencode"
],
"agent": {
"build": {
"description": "Build agent for implementation",
"model": "litellm-provider/cerebras_gpt-oss-120b",
"permission": {
"bash": "ask",
"write": "ask"
},
"temperature": 0.7
},
"plan": {
"description": "Planning agent with write and bash capabilities",
"model": "litellm-provider/cerebras_gpt-oss-120b",
"permission": {
"bash": "ask",
"write": "ask"
},
"temperature": 0.7
}
},
"experimental": {
"chatMaxRetries": 3
},
"provider": {
"litellm-provider": {
"models": {
"cerebras_gpt-oss-120b": {
"id": "cerebras/gpt-oss-120b",
"limit": {
"context": 128000,
"output": 32768
},
"name": "cerebras/gpt-oss-120b"
},
"cerebras_llama3.1-8b": {
"id": "cerebras/llama3.1-8b",
"limit": {
"context": 128000,
"output": 32768
},
"name": "cerebras/llama3.1-8b"
}
},
"name": "LiteLLM Provider",
"npm": "@ai-sdk/openai-compatible",
"options": {
"apiKey": "{env:LITELLM_API_KEY}",
"baseURL": "https://ins0mn1a-llm-router-v1.hf.space"
}
}
}
}