Spaces:
Sleeping
Update utils.py
Browse files
utils.py
CHANGED
|
@@ -15,7 +15,7 @@ def load_config():
|
|
| 15 |
"openrouter_chat_model": "meta-llama/llama-4-scout:free",
|
| 16 |
# Removed openrouter_embedding_model as we'll use HF
|
| 17 |
"huggingface_api_token": os.getenv("HUGGINGFACEHUB_API_TOKEN"), # Load HF token
|
| 18 |
-
"huggingface_embedding_model": "sentence-transformers/all-MiniLM-L6-v2" # Define HF model
|
| 19 |
"use_local_embeddings": True, # Flag to indicate we're using local embeddings
|
| 20 |
"embedding_cache_dir": "/tmp/hf_models" # Cache directory for local models
|
| 21 |
}
|
|
|
|
| 15 |
"openrouter_chat_model": "meta-llama/llama-4-scout:free",
|
| 16 |
# Removed openrouter_embedding_model as we'll use HF
|
| 17 |
"huggingface_api_token": os.getenv("HUGGINGFACEHUB_API_TOKEN"), # Load HF token
|
| 18 |
+
"huggingface_embedding_model": "sentence-transformers/all-MiniLM-L6-v2", # Define HF model
|
| 19 |
"use_local_embeddings": True, # Flag to indicate we're using local embeddings
|
| 20 |
"embedding_cache_dir": "/tmp/hf_models" # Cache directory for local models
|
| 21 |
}
|