Spaces:
Sleeping
Sleeping
MaenGit committed on
Commit ·
bbf4c9f
1
Parent(s): d3bb9ec
update to llama:3.2
Browse files- Dockerfile +1 -1
- main.py +1 -1
Dockerfile
CHANGED
|
@@ -42,4 +42,4 @@ USER 1000
|
|
| 42 |
EXPOSE 7860
|
| 43 |
|
| 44 |
# Startup command
|
| 45 |
-
CMD sh -c "ollama serve & sleep 5 && ollama pull llama3.2
|
|
|
|
| 42 |
EXPOSE 7860
|
| 43 |
|
| 44 |
# Startup command
|
| 45 |
+
CMD sh -c "ollama serve & sleep 5 && ollama pull llama3.2 && uvicorn main:app --host 0.0.0.0 --port 7860 --timeout-keep-alive 65"
|
main.py
CHANGED
|
@@ -79,7 +79,7 @@ async def get_full_voice_and_text(payload, voice, rate):
|
|
| 79 |
@app.post("/stream-voice")
|
| 80 |
async def voice_engine(data: ChatRequest):
|
| 81 |
payload = {
|
| 82 |
-
"model": "llama3.2
|
| 83 |
"messages": data.messages,
|
| 84 |
"stream": False, # Set to False for non-streaming
|
| 85 |
"options": {
|
|
|
|
| 79 |
@app.post("/stream-voice")
|
| 80 |
async def voice_engine(data: ChatRequest):
|
| 81 |
payload = {
|
| 82 |
+
"model": "llama3.2",
|
| 83 |
"messages": data.messages,
|
| 84 |
"stream": False, # Set to False for non-streaming
|
| 85 |
"options": {
|