mbrq13 commited on
Commit
284e8fb
1 Parent(s): a93d23f

Add pneumonia detection app with Grad-CAM

Browse files
Files changed (1) hide show
  1. app.py +72 -32
app.py CHANGED
@@ -86,7 +86,7 @@ def image_to_base64(image):
86
  return f"data:image/jpeg;base64,{img_base64}"
87
 
88
  def query_medgemma(message, history, image=None):
89
- """Query MedGemma endpoint with text and optional image"""
90
 
91
  # Your endpoint URL
92
  endpoint_url = "https://t911ok4t5x994zcu.us-east-1.aws.endpoints.huggingface.cloud"
@@ -97,50 +97,90 @@ def query_medgemma(message, history, image=None):
97
  "Content-Type": "application/json"
98
  }
99
 
100
- # Prepare the message content
101
- content = []
102
-
103
- # Add image if provided
104
  if image is not None:
 
105
  image_base64 = image_to_base64(image)
106
- content.append({
107
- "type": "image_url",
108
- "image_url": {"url": image_base64}
109
- })
110
-
111
- # Add text message
112
- content.append({
113
- "type": "text",
114
- "text": message
115
- })
116
-
117
- # Prepare payload
118
- payload = {
119
- "model": "tgi",
120
- "messages": [
121
- {
122
- "role": "user",
123
- "content": content
124
  }
125
- ],
126
- "max_tokens": 500,
127
- "temperature": 0.7
128
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
129
 
130
  try:
 
131
  response = requests.post(endpoint_url, headers=headers, json=payload, timeout=30)
132
 
133
  if response.status_code == 200:
134
  result = response.json()
135
- if "choices" in result and len(result["choices"]) > 0:
136
- return result["choices"][0]["message"]["content"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
137
  else:
138
- return "Lo siento, no pude obtener una respuesta del modelo."
 
 
 
 
 
139
  else:
140
- return f"Error del endpoint: {response.status_code}. El modelo puede estar escalado a cero - intenta de nuevo en unos segundos."
141
 
142
  except requests.exceptions.Timeout:
143
- return "Timeout: El modelo está despertando, intenta de nuevo en unos segundos."
144
  except Exception as e:
145
  return f"Error de conexión: {str(e)}"
146
 
 
86
  return f"data:image/jpeg;base64,{img_base64}"
87
 
88
  def query_medgemma(message, history, image=None):
89
+ """Query MedGemma endpoint with proper multimodal format"""
90
 
91
  # Your endpoint URL
92
  endpoint_url = "https://t911ok4t5x994zcu.us-east-1.aws.endpoints.huggingface.cloud"
 
97
  "Content-Type": "application/json"
98
  }
99
 
100
+ # Prepare payload based on whether we have an image or not
 
 
 
101
  if image is not None:
102
+ # Multimodal format: Send image as base64 in the content
103
  image_base64 = image_to_base64(image)
104
+
105
+ # TGI multimodal format for MedGemma
106
+ payload = {
107
+ "inputs": {
108
+ "text": message,
109
+ "image": image_base64
110
+ },
111
+ "parameters": {
112
+ "max_new_tokens": 500,
113
+ "temperature": 0.7,
114
+ "do_sample": True,
115
+ "return_full_text": False
 
 
 
 
 
 
116
  }
117
+ }
118
+
119
+ # Alternative format if the above doesn't work
120
+ payload_alt = {
121
+ "inputs": message,
122
+ "image": image_base64,
123
+ "parameters": {
124
+ "max_new_tokens": 500,
125
+ "temperature": 0.7,
126
+ "do_sample": True,
127
+ "return_full_text": False
128
+ }
129
+ }
130
+
131
+ else:
132
+ # Text-only format
133
+ payload = {
134
+ "inputs": message,
135
+ "parameters": {
136
+ "max_new_tokens": 500,
137
+ "temperature": 0.7,
138
+ "do_sample": True,
139
+ "return_full_text": False,
140
+ "stop": ["<|im_end|>", "</s>"]
141
+ }
142
+ }
143
+ payload_alt = None
144
 
145
  try:
146
+ # Try primary format
147
  response = requests.post(endpoint_url, headers=headers, json=payload, timeout=30)
148
 
149
  if response.status_code == 200:
150
  result = response.json()
151
+
152
+ # Handle different TGI response formats
153
+ if isinstance(result, list) and len(result) > 0:
154
+ return result[0].get("generated_text", "Error en el formato de respuesta")
155
+ elif "generated_text" in result:
156
+ return result["generated_text"]
157
+ else:
158
+ return f"Formato de respuesta inesperado: {str(result)[:200]}"
159
+
160
+ elif response.status_code == 422 and payload_alt is not None:
161
+ # Try alternative format for multimodal
162
+ response = requests.post(endpoint_url, headers=headers, json=payload_alt, timeout=30)
163
+
164
+ if response.status_code == 200:
165
+ result = response.json()
166
+ if isinstance(result, list) and len(result) > 0:
167
+ return result[0].get("generated_text", "Error en el formato de respuesta")
168
+ elif "generated_text" in result:
169
+ return result["generated_text"]
170
+ else:
171
+ return f"Formato de respuesta inesperado: {str(result)[:200]}"
172
  else:
173
+ return f"Error 422 en ambos formatos. Detalles: {response.text[:300]}"
174
+
175
+ elif response.status_code == 503:
176
+ return "El modelo está escalado a cero. Intenta de nuevo en unos segundos mientras se activa."
177
+ elif response.status_code == 422:
178
+ return f"Error de formato en la petición. Detalles: {response.text[:300]}"
179
  else:
180
+ return f"Error del endpoint: {response.status_code}. Detalles: {response.text[:200]}"
181
 
182
  except requests.exceptions.Timeout:
183
+ return "Timeout: El modelo está procesando, intenta de nuevo en unos segundos."
184
  except Exception as e:
185
  return f"Error de conexión: {str(e)}"
186