z90486091 committed on
Commit
05c76c0
·
1 Parent(s): 9c5c2b2
Files changed (3) hide show
  1. Dockerfile +24 -5
  2. entrypoint.sh +5 -8
  3. opencode.json +60 -0
Dockerfile CHANGED
@@ -1,11 +1,30 @@
1
- FROM ghcr.io/anomalyco/opencode:latest
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2
 
3
- ENV OPENCODE_SERVER_USERNAME=opencode
4
- EXPOSE 8860
5
 
6
- # Copy the entrypoint script and make it executable
7
  COPY entrypoint.sh /entrypoint.sh
 
8
  RUN chmod +x /entrypoint.sh
9
-
 
 
 
10
  ENTRYPOINT ["/entrypoint.sh"]
11
  CMD ["web", "--port", "8860", "--hostname", "0.0.0.0"]
 
1
+ FROM debian:12-slim
2
+
3
+ # Set HOME environment variable
4
+ ENV HOME=/root
5
+
6
+ # Install runtime dependencies
7
+ RUN apt-get update && apt-get install -y \
8
+ curl \
9
+ bash \
10
+ git \
11
+ ca-certificates \
12
+ && rm -rf /var/lib/apt/lists/*
13
+
14
+ # Install OpenCode using the official installer
15
+ RUN curl -fsSL https://opencode.ai/install | bash
16
+
17
 
18
+ # Add OpenCode to PATH
19
+ ENV PATH="$HOME/.opencode/bin:$PATH"
20
 
21
+ # Copy your configuration files
22
  COPY entrypoint.sh /entrypoint.sh
23
+ COPY opencode.json /opencode.json
24
  RUN chmod +x /entrypoint.sh
25
+
26
+ ENV OPENCODE_SERVER_USERNAME=opencode
27
+ EXPOSE 8860
28
+
29
  ENTRYPOINT ["/entrypoint.sh"]
30
  CMD ["web", "--port", "8860", "--hostname", "0.0.0.0"]
entrypoint.sh CHANGED
@@ -4,14 +4,11 @@ if [ -n "$HF_OPENCODE_PASSWORD" ]; then
4
  export OPENCODE_SERVER_PASSWORD="$HF_OPENCODE_PASSWORD"
5
  fi
6
 
7
- # Set custom configuration with actual API key substitution
8
- if [ -n "$OC_CONFIG_JSON" ] && [ -n "$LITELLM_API_KEY" ]; then
9
- # Replace {env:LITELLM_API_KEY} with actual value
10
- CONFIG_CONTENT=$(echo "$OC_CONFIG_JSON" | sed "s/{env:LITELLM_API_KEY}/$LITELLM_API_KEY/g")
11
- export OPENCODE_CONFIG_CONTENT="$CONFIG_CONTENT"
12
- elif [ -n "$OC_CONFIG_JSON" ]; then
13
- export OPENCODE_CONFIG_CONTENT="$OC_CONFIG_JSON"
14
- fi
15
 
16
  # Execute opencode with passed arguments
17
  exec opencode "$@"
 
4
  export OPENCODE_SERVER_PASSWORD="$HF_OPENCODE_PASSWORD"
5
  fi
6
 
7
+ # Priority 1: Use local opencode.json file in root directory
8
+ if [ -f "/opencode.json" ]; then
9
+ sed "s/{env:LITELLM_API_KEY}/$LITELLM_API_KEY/g" /opencode.json > /tmp/opencode-processed.json
10
+ export OPENCODE_CONFIG="/tmp/opencode-processed.json"
11
+ fi
 
 
 
12
 
13
  # Execute opencode with passed arguments
14
  exec opencode "$@"
opencode.json ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "$schema": "https://opencode.ai/config.json",
3
+ "plugin": [
4
+ "opencode-wakatime",
5
+ "opencode-websearch-cited",
6
+ "opencode-pty",
7
+ "oh-my-opencode"
8
+ ],
9
+ "agent": {
10
+ "build": {
11
+ "description": "Build agent for implementation",
12
+ "model": "litellm-provider/cerebras_gpt-oss-120b",
13
+ "permission": {
14
+ "bash": "ask",
15
+ "write": "ask"
16
+ },
17
+ "temperature": 0.7
18
+ },
19
+ "plan": {
20
+ "description": "Planning agent with write and bash capabilities",
21
+ "model": "litellm-provider/cerebras_gpt-oss-120b",
22
+ "permission": {
23
+ "bash": "ask",
24
+ "write": "ask"
25
+ },
26
+ "temperature": 0.7
27
+ }
28
+ },
29
+ "experimental": {
30
+ "chatMaxRetries": 3
31
+ },
32
+ "provider": {
33
+ "litellm-provider": {
34
+ "models": {
35
+ "cerebras_gpt-oss-120b": {
36
+ "id": "cerebras/gpt-oss-120b",
37
+ "limit": {
38
+ "context": 128000,
39
+ "output": 32768
40
+ },
41
+ "name": "cerebras/gpt-oss-120b"
42
+ },
43
+ "cerebras_llama3.1-8b": {
44
+ "id": "cerebras/llama3.1-8b",
45
+ "limit": {
46
+ "context": 128000,
47
+ "output": 32768
48
+ },
49
+ "name": "cerebras/llama3.1-8b"
50
+ }
51
+ },
52
+ "name": "LiteLLM Provider",
53
+ "npm": "@ai-sdk/openai-compatible",
54
+ "options": {
55
+ "apiKey": "{env:LITELLM_API_KEY}",
56
+ "baseURL": "https://ins0mn1a-llm-router-v1.hf.space"
57
+ }
58
+ }
59
+ }
60
+ }