OppaAI committed on
Commit
c071669
·
verified ·
1 Parent(s): dd985a0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +28 -11
app.py CHANGED
@@ -1,48 +1,62 @@
1
  import gradio as gr
2
  import json
3
  import base64
 
4
  import requests
5
  import os
6
 
7
- HF_TOKEN = os.environ.get("HF_CV_ROBOT_TOKEN") # HF Tokens
8
- MODEL = "Qwen/Qwen2.5-VL-7B-Instruct" # VLM model
 
9
 
10
  if not HF_TOKEN:
11
- print("ERROR: HF_CV_ROBOT_TOKEN is not set!")
12
 
 
 
 
13
  def process(payload: dict):
14
  try:
 
 
 
15
  robot_id = payload.get("robot_id", "unknown")
16
  image_b64 = payload["image_b64"]
17
 
18
- headers = {"Authorization": f"Bearer {HF_TOKEN}"}
 
19
 
20
- # data format
 
 
 
 
 
21
  data = {
22
  "model": MODEL,
23
  "messages": [
24
  {
25
  "role": "user",
26
  "content": [
27
- {"type": "text", "text": "Describe this image in detail."},
28
- {"type": "image_file", "image_file": {"b64": image_b64}}
29
  ]
30
  }
31
  ]
32
  }
33
 
34
-
35
  resp = requests.post(
36
  "https://router.huggingface.co/v1/chat/completions",
37
- headers=headers,
38
- json=data,
 
39
  timeout=60
40
  )
41
 
42
  if resp.status_code != 200:
 
43
  return {"error": f"VLM API error: {resp.status_code}, {resp.text}"}
44
 
45
- # get image description as response
46
  try:
47
  vlm_text = resp.json()["choices"][0]["message"]["content"][0]["text"]
48
  except (KeyError, IndexError, json.JSONDecodeError) as e:
@@ -55,9 +69,12 @@ def process(payload: dict):
55
  }
56
 
57
  except Exception as e:
 
58
  return {"error": str(e)}
59
 
 
60
  # Gradio MCP Server
 
61
  demo = gr.Interface(
62
  fn=process,
63
  inputs=gr.JSON(label="Input Payload (Dict format)"),
 
1
  import gradio as gr
2
  import json
3
  import base64
4
+ from io import BytesIO
5
  import requests
6
  import os
7
 
8
+ # HF token & model
9
+ HF_TOKEN = os.environ.get("HF_CV_ROBOT_TOKEN")
10
+ MODEL = "Qwen/Qwen2.5-VL-7B-Instruct" # 確認這個模型在 HF 支援列表裡
11
 
12
  if not HF_TOKEN:
13
+ print("ERROR: HF_CV_ROBOT_TOKEN environment variable not set.")
14
 
15
+ # -------------------------------
16
+ # 主處理函數 (Main Processing Function)
17
+ # -------------------------------
18
  def process(payload: dict):
19
  try:
20
+ if not HF_TOKEN:
21
+ return {"error": "Hugging Face token is missing. Please check Space secrets."}
22
+
23
  robot_id = payload.get("robot_id", "unknown")
24
  image_b64 = payload["image_b64"]
25
 
26
+ # Base64 -> bytes
27
+ img_bytes = base64.b64decode(image_b64)
28
 
29
+ # Base64 bytes 當作 file-like 上傳
30
+ files = {
31
+ "file": ("image.jpg", BytesIO(img_bytes), "image/jpeg")
32
+ }
33
+
34
+ # JSON payload 只放文字訊息
35
  data = {
36
  "model": MODEL,
37
  "messages": [
38
  {
39
  "role": "user",
40
  "content": [
41
+ {"type": "text", "text": "Describe this image in detail."}
 
42
  ]
43
  }
44
  ]
45
  }
46
 
 
47
  resp = requests.post(
48
  "https://router.huggingface.co/v1/chat/completions",
49
+ headers={"Authorization": f"Bearer {HF_TOKEN}"},
50
+ data={"payload": json.dumps(data)},
51
+ files=files,
52
  timeout=60
53
  )
54
 
55
  if resp.status_code != 200:
56
+ print(f"VLM API error: {resp.status_code}, {resp.text}")
57
  return {"error": f"VLM API error: {resp.status_code}, {resp.text}"}
58
 
59
+ # 解析返回文字
60
  try:
61
  vlm_text = resp.json()["choices"][0]["message"]["content"][0]["text"]
62
  except (KeyError, IndexError, json.JSONDecodeError) as e:
 
69
  }
70
 
71
  except Exception as e:
72
+ print(f"An unexpected error occurred: {e}")
73
  return {"error": str(e)}
74
 
75
+ # -------------------------------
76
  # Gradio MCP Server
77
+ # -------------------------------
78
  demo = gr.Interface(
79
  fn=process,
80
  inputs=gr.JSON(label="Input Payload (Dict format)"),