v0idalism committed on
Commit
1b2b044
·
verified ·
1 Parent(s): ebf19c8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +20 -13
app.py CHANGED
@@ -1,9 +1,10 @@
 
1
  from fastapi import FastAPI, Request
2
  from fastapi.middleware.cors import CORSMiddleware
3
- import requests
4
 
5
- # Hugging Face Space API (replace with your model space URL if different)
6
- HF_SPACE_API = "https://v0idalism-blacklight-text.hf.space/run/predict"
7
 
8
  # System prompt
9
  SYSTEM_PROMPT = (
@@ -11,9 +12,10 @@ SYSTEM_PROMPT = (
11
  "Respond in a brutalist, minimal, and precise style."
12
  )
13
 
 
14
  app = FastAPI()
15
 
16
- # Allow frontend calls
17
  app.add_middleware(
18
  CORSMiddleware,
19
  allow_origins=["*"],
@@ -23,7 +25,7 @@ app.add_middleware(
23
  )
24
 
25
@app.get("/")
def health_check():
    """Liveness probe: confirms the BLACKLIGHT API process is serving."""
    payload = {"status": "ok", "message": "BLACKLIGHT API running"}
    return payload
28
 
29
  @app.post("/predict")
@@ -34,26 +36,31 @@ async def predict(request: Request):
34
  if not user_prompt:
35
  return {"error": "No prompt provided"}
36
 
37
- # Merge with system prompt
38
  full_prompt = f"{SYSTEM_PROMPT}\n\nUser: {user_prompt}"
39
 
40
  try:
41
- response = requests.post(
 
42
  HF_SPACE_API,
43
  json={"data": [full_prompt]},
44
  timeout=60
45
  )
46
- response.raise_for_status()
47
- result = response.json()
48
  except Exception as e:
49
- return {"error": f"Failed to reach HF Space: {str(e)}"}
50
 
51
- # Extract first string result
52
  try:
53
  output_text = result["data"][0]
54
  if isinstance(output_text, list):
55
  output_text = output_text[0]
56
- except (KeyError, IndexError, TypeError):
57
  return {"error": "Unexpected response format", "raw": result}
58
 
59
- return {"reply": output_text}
 
 
 
 
 
1
+ import requests
2
  from fastapi import FastAPI, Request
3
  from fastapi.middleware.cors import CORSMiddleware
4
+ import uvicorn
5
 
6
+ # Hugging Face Space API to call
7
+ HF_SPACE_API = "https://v0idalism-blacklight-image.hf.space/run/predict"
8
 
9
  # System prompt
10
  SYSTEM_PROMPT = (
 
12
  "Respond in a brutalist, minimal, and precise style."
13
  )
14
 
15
+ # Create app
16
  app = FastAPI()
17
 
18
+ # Allow all origins (for your site frontend)
19
  app.add_middleware(
20
  CORSMiddleware,
21
  allow_origins=["*"],
 
25
  )
26
 
27
@app.get("/")
def root():
    """Root endpoint: simple health check reporting the API is up."""
    response_body = dict(status="ok", message="BLACKLIGHT API running")
    return response_body
30
 
31
  @app.post("/predict")
 
36
  if not user_prompt:
37
  return {"error": "No prompt provided"}
38
 
39
+ # Merge system + user prompt
40
  full_prompt = f"{SYSTEM_PROMPT}\n\nUser: {user_prompt}"
41
 
42
  try:
43
+ # Call Hugging Face Space without tokens
44
+ resp = requests.post(
45
  HF_SPACE_API,
46
  json={"data": [full_prompt]},
47
  timeout=60
48
  )
49
+ resp.raise_for_status()
50
+ result = resp.json()
51
  except Exception as e:
52
+ return {"error": f"Failed to contact HF Space: {str(e)}"}
53
 
54
+ # Extract output
55
  try:
56
  output_text = result["data"][0]
57
  if isinstance(output_text, list):
58
  output_text = output_text[0]
59
+ except Exception:
60
  return {"error": "Unexpected response format", "raw": result}
61
 
62
+ return {"reply": output_text}
63
+
# Script entry point: Hugging Face Spaces route traffic to port 7860, so the
# server must bind 0.0.0.0:7860 when this module is executed directly.
if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=7860)