Donmill committed on
Commit
068c2e4
·
verified ·
1 Parent(s): 81b29ed
Files changed (1) hide show
  1. app.py +3 -7
app.py CHANGED
@@ -1,12 +1,10 @@
1
  import os
2
  from fastapi import FastAPI, Request
3
- from fastapi.responses import FileResponse, JSONResponse
4
  from fastapi.staticfiles import StaticFiles
5
  import httpx
6
 
7
  app = FastAPI()
8
-
9
- # Serve static files (your 3D app)
10
  app.mount("/", StaticFiles(directory="static", html=True), name="static")
11
 
12
  HF_TOKEN = os.environ.get("HF_TOKEN")
@@ -23,7 +21,6 @@ async def ask(request: Request):
23
  "messages": [{"role": "user", "content": question}],
24
  "max_tokens": 256
25
  }
26
- # Use a chat model (change as needed)
27
  async with httpx.AsyncClient() as client:
28
  resp = await client.post(
29
  "https://api-inference.huggingface.co/models/meta-llama/Llama-3.1-8B-Instruct",
@@ -33,12 +30,11 @@ async def ask(request: Request):
33
  )
34
  if resp.status_code == 200:
35
  data = resp.json()
36
- # Extract the answer depending on the model's response format
37
  try:
38
  answer = data["choices"][0]["message"]["content"]
39
  except Exception:
40
  answer = str(data)
41
  return {"answer": answer}
42
  else:
43
- return JSONResponse({"error": "Failed to get AI response"}, status_code
44
- =500)
 
1
  import os
2
  from fastapi import FastAPI, Request
3
+ from fastapi.responses import JSONResponse
4
  from fastapi.staticfiles import StaticFiles
5
  import httpx
6
 
7
  app = FastAPI()
 
 
8
  app.mount("/", StaticFiles(directory="static", html=True), name="static")
9
 
10
  HF_TOKEN = os.environ.get("HF_TOKEN")
 
21
  "messages": [{"role": "user", "content": question}],
22
  "max_tokens": 256
23
  }
 
24
  async with httpx.AsyncClient() as client:
25
  resp = await client.post(
26
  "https://api-inference.huggingface.co/models/meta-llama/Llama-3.1-8B-Instruct",
 
30
  )
31
  if resp.status_code == 200:
32
  data = resp.json()
 
33
  try:
34
  answer = data["choices"][0]["message"]["content"]
35
  except Exception:
36
  answer = str(data)
37
  return {"answer": answer}
38
  else:
39
+ return JSONResponse({"error": "Failed to get AI response"}, status_code=
40
+ 500)