Manik Sheokand committed on
Commit
21f5009
·
1 Parent(s): 85551d6
Files changed (1) hide show
  1. app.py +66 -9
app.py CHANGED
@@ -140,6 +140,67 @@ def generate_answer(image, question, temperature=0.7, top_p=0.95, max_tokens=256
140
  trimmed = [o[len(i):] for i, o in zip(inputs["input_ids"], out)]
141
  return processor.batch_decode(trimmed, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
142
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
143
  # ---- UI ----
144
  with gr.Blocks(title="DermalCare - Pet & Skincare Assistant") as demo:
145
  gr.Markdown("# DermalCare - Your AI Assistant for Pet Care and Skincare")
@@ -158,14 +219,10 @@ with gr.Blocks(title="DermalCare - Pet & Skincare Assistant") as demo:
158
  with gr.Column():
159
  answer = gr.Textbox(lines=12, label="Assistant", interactive=False)
160
 
161
- ask.click(generate_answer, inputs=[img_in, txt_in, temp, topp, max_tok], outputs=answer)
162
-
163
- with gr.Tab("Product Recommender"):
164
- need = gr.Textbox(label="Describe your need (中文或English)", lines=3,
165
- value="敏感肌 想找溫和溫和拔乾的保濕潔面,預算 < TWD 400")
166
- budget = gr.Number(label="Budget (TWD, optional)", value=400, precision=0)
167
- btn = gr.Button("Recommend", variant="primary")
168
- out = gr.Textbox(label="Recommendations", lines=14)
169
- btn.click(fn=recommend_products, inputs=[need, budget], outputs=out, queue=True)
170
 
171
  demo.queue().launch(show_api=False, share=True)
 
140
  trimmed = [o[len(i):] for i, o in zip(inputs["input_ids"], out)]
141
  return processor.batch_decode(trimmed, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
142
 
143
# ---- PetCare answer + product suggestions (ONE output) ----
@spaces.GPU(duration=120)
def pet_answer_with_recs(image, question, temperature=0.7, top_p=0.95, max_tokens=256, budget_twd=None):
    """Answer a pet/skincare question and append grounded product suggestions.

    Pipeline:
      1) Get the normal PetBull answer (image + text) via ``generate_answer``.
      2) Vector-search product candidates for the user's question.
      3) Ask the same Qwen2.5-VL model (text-only) to decide whether any
         candidates are relevant; if so, a 'Suggested products' section with
         up to 3 items strictly from the candidate list is appended,
         otherwise the model outputs 'No relevant products.'.

    Args:
        image: image input forwarded unchanged to ``generate_answer``.
        question: the user's free-text question.
        temperature: sampling temperature for the base answer.
        top_p: nucleus-sampling cutoff for the base answer.
        max_tokens: max new tokens for the base answer.
        budget_twd: optional TWD budget passed to candidate formatting.

    Returns:
        One combined string: base answer, optional suggestions block, and
        fixed safety notes.
    """
    # Step 1: normal PetBull answer (image + text).
    base = generate_answer(image, question, temperature, top_p, max_tokens)

    # Step 2: retrieve product candidates; relevance is decided by the model
    # in step 3, so we fetch generously (k=8) here.
    cands = product_search(question, k=8)
    cand_block = format_candidates_for_llm(cands, budget_twd=budget_twd)

    # Step 3: build a small, text-only prompt for suggestions.
    # IMPORTANT: we reuse the same Qwen2.5-VL model in text mode.
    messages = [
        {
            "role": "user",
            "content": [
                {"type": "text", "text":
                    "You are DermalCare's assistant.\n"
                    "Task: Given a user message and a list of candidate skincare products, "
                    "return a section titled 'Suggested products' with up to 3 items ONLY IF they are relevant to the user's issue. "
                    "Each item must be strictly chosen from the candidate list; include one-line 'why it helps' and a short 'how to use'. "
                    "If none are relevant (e.g., purely veterinary emergencies, non-skin issues), output exactly: 'No relevant products.'"},
            ],
        },
        {
            "role": "user",
            "content": [
                {"type": "text", "text": f"User message:\n{question}"},
                {"type": "text", "text": f"Candidates (do not invent beyond this list):\n{cand_block}"},
            ],
        },
    ]

    # Prepare inputs on GPU (text-only: no images/videos).
    text_prompt = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    inputs = processor(
        text=[text_prompt], images=None, videos=None, padding=True, return_tensors="pt",
    )
    inputs = {k: (v.to("cuda") if hasattr(v, "to") else v) for k, v in inputs.items()}

    with torch.no_grad():
        out = model.generate(
            **inputs,
            max_new_tokens=200,
            temperature=0.2,  # keep precise/grounded
            top_p=0.95,
        )
    # Drop the echoed prompt tokens so only newly generated text is decoded.
    trimmed = [o[len(i):] for i, o in zip(inputs["input_ids"], out)]
    suggestions = processor.batch_decode(trimmed, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]

    # Final combined message.
    safety = (
        "Safety notes: For broken/infected skin, pregnancy/lactation, infants, "
        "or if symptoms worsen—seek a qualified dermatologist. Patch-test first."
    )
    # BUGFIX: the original embedded '\n' inside an f-string replacement field
    # ({'Suggested products:\n' + suggestions ...}), which is a SyntaxError on
    # Python < 3.12 (backslashes in f-string expressions require PEP 701).
    # Build the optional section outside the f-string instead.
    rec_section = ("Suggested products:\n" + suggestions) if suggestions.strip() != "" else ""
    return f"{base}\n\n{rec_section}\n\n{safety}"
204
  # ---- UI ----
205
  with gr.Blocks(title="DermalCare - Pet & Skincare Assistant") as demo:
206
  gr.Markdown("# DermalCare - Your AI Assistant for Pet Care and Skincare")
 
219
  with gr.Column():
220
  answer = gr.Textbox(lines=12, label="Assistant", interactive=False)
221
 
222
+ ask.click(
223
+ pet_answer_with_recs,
224
+ inputs=[img_in, txt_in, temp, topp, max_tok], # (budget optional: add at the end if you want)
225
+ outputs=answer
226
+ )
 
 
 
 
227
 
228
  demo.queue().launch(show_api=False, share=True)