naimulislam committed on
Commit
8792b2d
·
verified ·
1 Parent(s): 07db6ae

Update entrypoint.sh

Browse files
Files changed (1) hide show
  1. entrypoint.sh +6 -35
entrypoint.sh CHANGED
@@ -1,39 +1,10 @@
1
  #!/bin/bash
2
- ollama serve &
3
- streamlit run app.py --server.port 8501 --server.address 0.0.0.0 &
4
-
5
- sleep 5
6
-
7
- # Fixed Python Proxy
8
- python3 -c '
9
- import uvicorn
10
- from fastapi import FastAPI, Request
11
- from fastapi.responses import StreamingResponse
12
- import requests
13
 
14
- app = FastAPI()
15
-
16
- def proxy_request(url, request_body, method, headers):
17
- # Fix: drop the Host header before forwarding (filtered into a new dict)
18
- filtered_headers = {k: v for k, v in headers.items() if k.lower() != "host"}
19
- resp = requests.request(
20
- method=method,
21
- url=url,
22
- data=request_body,
23
- headers=filtered_headers,
24
- stream=True
25
- )
26
- return StreamingResponse(resp.iter_content(65536), status_code=resp.status_code)
27
-
28
- @app.api_route("/api/{path:path}", methods=["GET", "POST"])
29
- async def proxy_ollama(request: Request, path: str):
30
- body = await request.body()
31
- return proxy_request(f"http://localhost:11434/api/{path}", body, request.method, request.headers)
32
 
33
- @app.api_route("/{path:path}", methods=["GET", "POST"])
34
- async def proxy_streamlit(request: Request, path: str):
35
- body = await request.body()
36
- return proxy_request(f"http://localhost:8501/{path}", body, request.method, request.headers)
37
 
38
- uvicorn.run(app, host="0.0.0.0", port=7860)
39
- '
 
1
#!/bin/bash
# Container entrypoint: launch the two app services in the background,
# then run nginx in the foreground as the container's main process.

# 1. Start Ollama (listens on its default port, 11434).
ollama serve &

# 2. Start Streamlit headless on the internal port 8501; nginx fronts it.
streamlit run /app.py --server.port 8501 --server.address 0.0.0.0 --server.headless true &

# 3. Run Nginx in the foreground to keep the container alive.
#    `exec` replaces this shell with nginx so nginx becomes PID 1 and
#    receives SIGTERM directly from the container runtime — without it,
#    bash stays PID 1, swallows the signal, and shutdown is not graceful.
exec nginx -g "daemon off;"