Dmitry Beresnev committed on
Commit
974e338
·
1 Parent(s): bbb7286

Add and configure NemoClaw, IronClaw, and complete claw stack configs

Browse files

Integrates all active claw projects into the OpenClaw control center:

- NemoClaw (NVIDIA OpenShell) — added full app.py integration (constants, port 18793, config loader, service status, restart button, UI link, config editor, diagnostics) and nemoclaw.json reflecting OpenShell's
sandbox/policy/inference/provider architecture
- IronClaw — added IRONCLAW_CONFIG_PATH, config loader, session state, and config editor; ironclaw.json documents env-based setup (LLM_BACKEND=openai_compatible, PostgreSQL, ironclaw onboard)
- NanoBot — configured nanobot.json per docs (custom OpenAI-compatible provider, deepseek-chat, gateway port 18790, Mochat channel, DuckDuckGo search)
- NanoClaw — nanoclaw.json documents env-based config (ANTHROPIC_BASE_URL, ANTHROPIC_AUTH_TOKEN, gateway port 18889)
- NullClaw — configured nullclaw.json (custom provider, sqlite memory, port 3000, supervised autonomy, native runtime)
- PicoClaw — configured picoclaw.json (model_list format, port 18792, DuckDuckGo)
- ZeroClaw — configured zeroclaw.json (custom provider, port 42617, supervised autonomy, no tunnel)
- README — added repo links for all seven projects

Files changed (9) hide show
  1. README.md +9 -0
  2. app.py +31 -0
  3. ironclaw.json +15 -0
  4. nanobot.json +30 -1
  5. nanoclaw.json +10 -1
  6. nemoclaw.json +29 -0
  7. nullclaw.json +56 -1
  8. picoclaw.json +42 -1
  9. zeroclaw.json +22 -1
README.md CHANGED
@@ -11,6 +11,15 @@ short_description: AGI Assistant
11
 
12
  This Space hosts the OpenClaw trading bot (paper-only). The LLM runs in a separate Space that you already have; this repo only contains the bot-side architecture and configs.
13
 
 
 
 
 
 
 
 
 
 
14
  **Hugging Face Space Build Notes**
15
  - Keep `README.md`, `Dockerfile`, and `app.py` at the repository root used by the Space.
16
  - For Docker Spaces, the filename must be exactly `Dockerfile` (capital `D`).
 
11
 
12
  This Space hosts the OpenClaw trading bot (paper-only). The LLM runs in a separate Space that you already have; this repo only contains the bot-side architecture and configs.
13
 
14
+ **NanoBot**: [https://github.com/HKUDS/nanobot](https://github.com/HKUDS/nanobot)
15
+ **NanoClaw**: [https://github.com/qwibitai/nanoclaw](https://github.com/qwibitai/nanoclaw)
16
+ **NullClaw**: [https://github.com/nullclaw/nullclaw](https://github.com/nullclaw/nullclaw)
17
+ **PicoClaw**: [https://github.com/sipeed/picoclaw](https://github.com/sipeed/picoclaw)
18
+ **ZeroClaw**: [https://github.com/zeroclaw-labs/zeroclaw](https://github.com/zeroclaw-labs/zeroclaw)
19
+ **memU**: [https://github.com/NevaMind-AI/memU](https://github.com/NevaMind-AI/memU)
20
+ **IronClaw**: [https://github.com/nearai/ironclaw](https://github.com/nearai/ironclaw)
21
+ **NemoClaw (OpenShell)**: [https://github.com/NVIDIA/OpenShell](https://github.com/NVIDIA/OpenShell)
22
+
23
  **Hugging Face Space Build Notes**
24
  - Keep `README.md`, `Dockerfile`, and `app.py` at the repository root used by the Space.
25
  - For Docker Spaces, the filename must be exactly `Dockerfile` (capital `D`).
app.py CHANGED
@@ -104,6 +104,7 @@ NEMOCLAW_PROXY_LOCAL_URL = os.getenv(
104
  IRONCLAW_LOG_PATH = Path(os.getenv("IRONCLAW_LOG_PATH", "/tmp/ironclaw.log"))
105
  IRONCLAW_ERR_LOG_PATH = Path(os.getenv("IRONCLAW_ERR_LOG_PATH", "/tmp/ironclaw.err.log"))
106
  IRONCLAW_ENABLED = os.getenv("IRONCLAW_ENABLED", "1") == "1"
 
107
  STREAMLIT_AUTH_ENABLED = os.getenv("STREAMLIT_AUTH_ENABLED", "1") == "1"
108
  STREAMLIT_AUTH_USERNAME = os.getenv("STREAMLIT_AUTH_USERNAME", "").strip()
109
  STREAMLIT_AUTH_PASSWORD = os.getenv("STREAMLIT_AUTH_PASSWORD", "").strip()
@@ -129,6 +130,7 @@ def init_state() -> None:
129
  st.session_state.setdefault("zeroclaw_config_text", load_zeroclaw_config_text())
130
  st.session_state.setdefault("nullclaw_config_text", load_nullclaw_config_text())
131
  st.session_state.setdefault("nemoclaw_config_text", load_nemoclaw_config_text())
 
132
  st.session_state.setdefault("auto_started", False)
133
  st.session_state.setdefault("auto_start_attempted", False)
134
  st.session_state.setdefault("backtest_result", None)
@@ -230,6 +232,19 @@ def load_nemoclaw_config_json() -> dict:
230
  return {}
231
 
232
 
 
 
 
 
 
 
 
 
 
 
 
 
 
233
  def gateway_process() -> subprocess.Popen | None:
234
  proc = st.session_state.get("gateway_process")
235
  if proc is None:
@@ -1215,6 +1230,22 @@ with cfg_b:
1215
  except json.JSONDecodeError as exc:
1216
  st.error(f"Invalid JSON: {exc}")
1217
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1218
  st.divider()
1219
 
1220
  test_col, logs_col = st.columns([2, 3])
 
104
  IRONCLAW_LOG_PATH = Path(os.getenv("IRONCLAW_LOG_PATH", "/tmp/ironclaw.log"))
105
  IRONCLAW_ERR_LOG_PATH = Path(os.getenv("IRONCLAW_ERR_LOG_PATH", "/tmp/ironclaw.err.log"))
106
  IRONCLAW_ENABLED = os.getenv("IRONCLAW_ENABLED", "1") == "1"
107
+ IRONCLAW_CONFIG_PATH = Path(os.getenv("IRONCLAW_CONFIG_PATH", "ironclaw.json"))
108
  STREAMLIT_AUTH_ENABLED = os.getenv("STREAMLIT_AUTH_ENABLED", "1") == "1"
109
  STREAMLIT_AUTH_USERNAME = os.getenv("STREAMLIT_AUTH_USERNAME", "").strip()
110
  STREAMLIT_AUTH_PASSWORD = os.getenv("STREAMLIT_AUTH_PASSWORD", "").strip()
 
130
  st.session_state.setdefault("zeroclaw_config_text", load_zeroclaw_config_text())
131
  st.session_state.setdefault("nullclaw_config_text", load_nullclaw_config_text())
132
  st.session_state.setdefault("nemoclaw_config_text", load_nemoclaw_config_text())
133
+ st.session_state.setdefault("ironclaw_config_text", load_ironclaw_config_text())
134
  st.session_state.setdefault("auto_started", False)
135
  st.session_state.setdefault("auto_start_attempted", False)
136
  st.session_state.setdefault("backtest_result", None)
 
232
  return {}
233
 
234
 
235
+ def load_ironclaw_config_text() -> str:
236
+ if IRONCLAW_CONFIG_PATH.exists():
237
+ return IRONCLAW_CONFIG_PATH.read_text(encoding="utf-8")
238
+ return "{}"
239
+
240
+
241
+ def load_ironclaw_config_json() -> dict:
242
+ try:
243
+ return json.loads(load_ironclaw_config_text())
244
+ except json.JSONDecodeError:
245
+ return {}
246
+
247
+
248
  def gateway_process() -> subprocess.Popen | None:
249
  proc = st.session_state.get("gateway_process")
250
  if proc is None:
 
1230
  except json.JSONDecodeError as exc:
1231
  st.error(f"Invalid JSON: {exc}")
1232
 
1233
+ ironclaw_text = st.text_area(
1234
+ "ironclaw.json",
1235
+ value=st.session_state.get("ironclaw_config_text", load_ironclaw_config_text()),
1236
+ height=220,
1237
+ )
1238
+ st.session_state["ironclaw_config_text"] = ironclaw_text
1239
+ if st.button("Save IronClaw Config", use_container_width=True):
1240
+ try:
1241
+ parsed = json.loads(ironclaw_text)
1242
+ IRONCLAW_CONFIG_PATH.write_text(
1243
+ json.dumps(parsed, indent=2) + "\n", encoding="utf-8"
1244
+ )
1245
+ st.success(f"Saved {IRONCLAW_CONFIG_PATH}.")
1246
+ except json.JSONDecodeError as exc:
1247
+ st.error(f"Invalid JSON: {exc}")
1248
+
1249
  st.divider()
1250
 
1251
  test_col, logs_col = st.columns([2, 3])
ironclaw.json ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "note": "IronClaw is configured via environment variables written to ~/.ironclaw/.env by `ironclaw onboard`. No JSON config file is used.",
3
+ "env": {
4
+ "DATABASE_URL": "postgresql://postgres:postgres@localhost:5432/ironclaw",
5
+ "LLM_BACKEND": "openai_compatible",
6
+ "LLM_BASE_URL": "https://researchengineering-agi.hf.space/v1",
7
+ "LLM_API_KEY": "${LLM_SPACE_API_KEY}",
8
+ "LLM_MODEL": "deepseek-chat"
9
+ },
10
+ "setup": [
11
+ "createdb ironclaw",
12
+ "psql ironclaw -c 'CREATE EXTENSION IF NOT EXISTS vector;'",
13
+ "ironclaw onboard"
14
+ ]
15
+ }
nanobot.json CHANGED
@@ -1 +1,30 @@
1
- {}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "providers": {
3
+ "custom": {
4
+ "apiBase": "https://researchengineering-agi.hf.space/v1"
5
+ }
6
+ },
7
+ "agents": {
8
+ "defaults": {
9
+ "model": "deepseek-chat",
10
+ "provider": "custom",
11
+ "workspace": "/app/vault"
12
+ }
13
+ },
14
+ "gateway": {
15
+ "port": 18790
16
+ },
17
+ "channels": {
18
+ "mochat": {
19
+ "enabled": true,
20
+ "allowFrom": ["*"]
21
+ }
22
+ },
23
+ "tools": {
24
+ "web": {
25
+ "search": {
26
+ "provider": "duckduckgo"
27
+ }
28
+ }
29
+ }
30
+ }
nanoclaw.json CHANGED
@@ -1 +1,10 @@
1
- {}
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "note": "NanoClaw is configured via env vars and code, not config files.",
3
+ "env": {
4
+ "ANTHROPIC_BASE_URL": "https://researchengineering-agi.hf.space",
5
+ "ANTHROPIC_AUTH_TOKEN": "${OPENCLAW_GATEWAY_TOKEN}"
6
+ },
7
+ "gateway": {
8
+ "port": 18889
9
+ }
10
+ }
nemoclaw.json ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "note": "NemoClaw runs OpenClaw inside an NVIDIA OpenShell sandbox. OpenShell is the governance runtime β€” it provides sandboxed execution, policy-enforced egress, and a privacy router for LLM calls. Install: curl -LsSf https://raw.githubusercontent.com/NVIDIA/OpenShell/main/install.sh | sh",
3
+ "sandbox": {
4
+ "from": "openclaw",
5
+ "remote": null,
6
+ "gpu": false
7
+ },
8
+ "policy": {
9
+ "deny_by_default": true,
10
+ "hot_reload": true,
11
+ "layers": ["filesystem", "network", "process", "inference"]
12
+ },
13
+ "inference": {
14
+ "provider": "custom",
15
+ "model": "deepseek-chat",
16
+ "base_url": "https://researchengineering-agi.hf.space/v1",
17
+ "api_key": "${LLM_SPACE_API_KEY}"
18
+ },
19
+ "providers": {
20
+ "openclaw": {
21
+ "type": "openclaw",
22
+ "token": "${OPENCLAW_GATEWAY_TOKEN}"
23
+ }
24
+ },
25
+ "gateway": {
26
+ "port": 18793,
27
+ "auto_create": true
28
+ }
29
+ }
nullclaw.json CHANGED
@@ -1 +1,56 @@
1
- {}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "models": {
3
+ "providers": {
4
+ "custom": {
5
+ "base_url": "https://researchengineering-agi.hf.space/v1",
6
+ "api_key": "${LLM_SPACE_API_KEY}"
7
+ }
8
+ }
9
+ },
10
+
11
+ "agents": {
12
+ "defaults": {
13
+ "model": { "primary": "custom/deepseek-chat" },
14
+ "workspace": "/app/vault"
15
+ }
16
+ },
17
+
18
+ "gateway": {
19
+ "port": 3000,
20
+ "require_pairing": true,
21
+ "allow_public_bind": false
22
+ },
23
+
24
+ "memory": {
25
+ "backend": "sqlite",
26
+ "auto_save": true,
27
+ "embedding_provider": "noop",
28
+ "vector_weight": 0.7,
29
+ "keyword_weight": 0.3,
30
+ "hygiene_enabled": true
31
+ },
32
+
33
+ "http_request": {
34
+ "search_provider": "duckduckgo"
35
+ },
36
+
37
+ "autonomy": {
38
+ "level": "supervised",
39
+ "workspace_only": true,
40
+ "max_actions_per_hour": 20
41
+ },
42
+
43
+ "runtime": {
44
+ "kind": "native"
45
+ },
46
+
47
+ "tunnel": { "provider": "none" },
48
+ "secrets": { "encrypt": true },
49
+ "identity": { "format": "openclaw" },
50
+
51
+ "security": {
52
+ "sandbox": { "backend": "auto" },
53
+ "resources": { "max_memory_mb": 512, "max_cpu_percent": 80 },
54
+ "audit": { "enabled": true, "retention_days": 90 }
55
+ }
56
+ }
picoclaw.json CHANGED
@@ -1 +1,42 @@
1
- {}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "agents": {
3
+ "defaults": {
4
+ "workspace": "/app/vault",
5
+ "model_name": "deepseek-chat",
6
+ "max_tokens": 8192,
7
+ "temperature": 0.7,
8
+ "max_tool_iterations": 20
9
+ }
10
+ },
11
+ "model_list": [
12
+ {
13
+ "model_name": "deepseek-chat",
14
+ "model": "openai/deepseek-chat",
15
+ "api_key": "${LLM_SPACE_API_KEY}",
16
+ "base_url": "https://researchengineering-agi.hf.space/v1",
17
+ "request_timeout": 120
18
+ }
19
+ ],
20
+ "gateway": {
21
+ "host": "127.0.0.1",
22
+ "port": 18792
23
+ },
24
+ "tools": {
25
+ "web": {
26
+ "duckduckgo": {
27
+ "enabled": true,
28
+ "max_results": 5
29
+ },
30
+ "brave": {
31
+ "enabled": false,
32
+ "api_key": "YOUR_BRAVE_API_KEY",
33
+ "max_results": 5
34
+ },
35
+ "tavily": {
36
+ "enabled": false,
37
+ "api_key": "YOUR_TAVILY_API_KEY",
38
+ "max_results": 5
39
+ }
40
+ }
41
+ }
42
+ }
zeroclaw.json CHANGED
@@ -1 +1,22 @@
1
- {}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "default_provider": "custom",
3
+ "default_model": "deepseek-chat",
4
+ "api_key": "${LLM_SPACE_API_KEY}",
5
+ "base_url": "https://researchengineering-agi.hf.space/v1",
6
+ "workspace": "/app/vault",
7
+
8
+ "gateway": {
9
+ "host": "127.0.0.1",
10
+ "port": 42617
11
+ },
12
+
13
+ "autonomy": {
14
+ "level": "supervised",
15
+ "workspace_only": true,
16
+ "max_actions_per_hour": 20
17
+ },
18
+
19
+ "tunnel": {
20
+ "kind": "none"
21
+ }
22
+ }