OppaAI committed on
Commit
444e2a5
·
verified ·
1 Parent(s): 86e4fb5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -4
app.py CHANGED
@@ -6,20 +6,32 @@ import io
6
  import requests
7
  import os
8
 
 
9
  HF_TOKEN = os.environ.get("HF_CV_ROBOT_TOKEN")
10
  MODEL = "Qwen/Qwen3-VL-32B-Instruct"
11
 
 
 
 
 
 
 
12
  # -------------------------------
13
- # 主處理函數
14
  # -------------------------------
15
  def process(payload: dict):
16
  try:
 
 
 
17
  robot_id = payload.get("robot_id", "unknown")
18
  image_b64 = payload["image_b64"]
19
 
20
- # Base64 解碼成圖片,用 PIL 開啟
21
  img_bytes = base64.b64decode(image_b64)
22
- img = Image.open(io.BytesIO(img_bytes)).convert("RGB")
 
 
23
 
24
  # Router API payload
25
  headers = {"Authorization": f"Bearer {HF_TOKEN}"}
@@ -44,9 +56,16 @@ def process(payload: dict):
44
  )
45
 
46
  if resp.status_code != 200:
 
 
47
  return {"error": f"VLM API error: {resp.status_code}, {resp.text}"}
48
 
49
- vlm_text = resp.json()["choices"][0]["message"]["content"][0]["text"]
 
 
 
 
 
50
 
51
  return {
52
  "received": True,
@@ -55,6 +74,8 @@ def process(payload: dict):
55
  }
56
 
57
  except Exception as e:
 
 
58
  return {"error": str(e)}
59
 
60
  # -------------------------------
@@ -69,3 +90,4 @@ demo = gr.Interface(
69
 
70
  if __name__ == "__main__":
71
  demo.launch(mcp_server=True)
 
 
6
  import requests
7
  import os
8
 
9
+ # Get token from environment variable
10
  HF_TOKEN = os.environ.get("HF_CV_ROBOT_TOKEN")
11
  MODEL = "Qwen/Qwen3-VL-32B-Instruct"
12
 
13
+ # Check if the token is available when the script starts
14
+ if not HF_TOKEN:
15
+ print("ERROR: HF_CV_ROBOT_TOKEN environment variable not set.")
16
+ # In a real app, you might want to stop execution or handle this more gracefully
17
+ # For a Gradio app in a Space, it might just fail upon the first request.
18
+
19
  # -------------------------------
20
+ # 主處理函數 (Main Processing Function)
21
  # -------------------------------
22
  def process(payload: dict):
23
  try:
24
+ if not HF_TOKEN:
25
+ return {"error": "Hugging Face token is missing. Please check Space secrets."}
26
+
27
  robot_id = payload.get("robot_id", "unknown")
28
  image_b64 = payload["image_b64"]
29
 
30
+ # Base64 解碼成圖片,用 PIL 開啟 (Decode base64 to image, open with PIL)
31
  img_bytes = base64.b64decode(image_b64)
32
+ # We don't actually use the PIL image object in the rest of the code,
33
+ # so this part is technically unnecessary for the API call, but harmless.
34
+ # img = Image.open(io.BytesIO(img_bytes)).convert("RGB")
35
 
36
  # Router API payload
37
  headers = {"Authorization": f"Bearer {HF_TOKEN}"}
 
56
  )
57
 
58
  if resp.status_code != 200:
59
+ # Added more detail to error logging
60
+ print(f"VLM API error: {resp.status_code}, {resp.text}")
61
  return {"error": f"VLM API error: {resp.status_code}, {resp.text}"}
62
 
63
+ # Check if the expected response structure exists before accessing it
64
+ try:
65
+ vlm_text = resp.json()["choices"][0]["message"]["content"][0]["text"]
66
+ except (KeyError, IndexError, json.JSONDecodeError) as e:
67
+ return {"error": f"Failed to parse VLM response: {e}, Response text: {resp.text}"}
68
+
69
 
70
  return {
71
  "received": True,
 
74
  }
75
 
76
  except Exception as e:
77
+ # Added logging for general exceptions
78
+ print(f"An unexpected error occurred: {e}")
79
  return {"error": str(e)}
80
 
81
  # -------------------------------
 
90
 
91
  if __name__ == "__main__":
92
  demo.launch(mcp_server=True)
93
+