Spaces:
Running
Running
Update app
Browse files- .env.example +15 -0
.env.example
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
OPENAI_API_KEY=
|
| 2 |
+
MODEL_NAME="gpt-realtime"
|
| 3 |
+
|
| 4 |
+
# Local vision model (only used with --local-vision CLI flag)
|
| 5 |
+
# By default, vision is handled by gpt-realtime when the camera tool is used
|
| 6 |
+
LOCAL_VISION_MODEL=HuggingFaceTB/SmolVLM2-2.2B-Instruct
|
| 7 |
+
|
| 8 |
+
# Cache directory for the local VLM (only used with --local-vision CLI flag)
|
| 9 |
+
HF_HOME=./cache
|
| 10 |
+
|
| 11 |
+
# Hugging Face token for accessing datasets/models
|
| 12 |
+
HF_TOKEN=
|
| 13 |
+
|
| 14 |
+
# Select a specific profile with custom instructions and tools; place it in profiles/<myprofile>/__init__.py
|
| 15 |
+
REACHY_MINI_CUSTOM_PROFILE="example"
|