OppaAI committed on
Commit
1a205a3
·
verified ·
1 Parent(s): d117752

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -12
app.py CHANGED
@@ -10,22 +10,21 @@ MODEL = "Qwen/Qwen2.5-VL-7B-Instruct"
10
  def process(payload: dict):
11
  try:
12
  if not HF_TOKEN:
13
- return {"error": "Missing token"}
14
 
15
  robot_id = payload.get("robot_id", "unknown")
16
  image_b64 = payload["image_b64"]
17
 
 
 
 
18
  data = {
19
  "model": MODEL,
20
  "messages": [
21
  {
22
  "role": "user",
23
  "content": [
24
- {"type": "text", "text": "Describe this image in detail."},
25
- {
26
- "type": "image_data",
27
- "image_data": {"b64": image_b64}
28
- }
29
  ]
30
  }
31
  ]
@@ -42,23 +41,23 @@ def process(payload: dict):
42
  )
43
 
44
  if resp.status_code != 200:
45
- return {"error": f"VLM API error: {resp.status_code}, {resp.text}"}
46
 
47
  try:
48
- content = resp.json()["choices"][0]["message"]["content"]
49
- first = content[0]["text"]
50
- except:
51
- return {"error": f"Bad response: {resp.text}"}
52
 
53
  return {
54
  "received": True,
55
  "robot_id": robot_id,
56
- "vllm_analysis": first
57
  }
58
 
59
  except Exception as e:
60
  return {"error": str(e)}
61
 
 
62
  demo = gr.Interface(
63
  fn=process,
64
  inputs=gr.JSON(label="Input Payload"),
 
10
  def process(payload: dict):
11
  try:
12
  if not HF_TOKEN:
13
+ return {"error": "Missing HF token"}
14
 
15
  robot_id = payload.get("robot_id", "unknown")
16
  image_b64 = payload["image_b64"]
17
 
18
+ # 這個是 HF 官方需要的格式
19
+ markdown_image = f"![](data:image/jpeg;base64,{image_b64})\nDescribe this image in detail."
20
+
21
  data = {
22
  "model": MODEL,
23
  "messages": [
24
  {
25
  "role": "user",
26
  "content": [
27
+ {"type": "text", "text": markdown_image}
 
 
 
 
28
  ]
29
  }
30
  ]
 
41
  )
42
 
43
  if resp.status_code != 200:
44
+ return {"error": f"HF VLM error: {resp.status_code}, {resp.text}"}
45
 
46
  try:
47
+ vlm_text = resp.json()["choices"][0]["message"]["content"][0]["text"]
48
+ except Exception as e:
49
+ return {"error": f"Bad response: {e}, text: {resp.text}"}
 
50
 
51
  return {
52
  "received": True,
53
  "robot_id": robot_id,
54
+ "vllm_analysis": vlm_text
55
  }
56
 
57
  except Exception as e:
58
  return {"error": str(e)}
59
 
60
+
61
  demo = gr.Interface(
62
  fn=process,
63
  inputs=gr.JSON(label="Input Payload"),