Noor3 committed on
Commit
dccbddd
·
verified ·
1 Parent(s): 6f24db8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -40
app.py CHANGED
@@ -1,4 +1,4 @@
1
- import gradio as gr
2
  import torch
3
  from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
4
 
@@ -22,42 +22,3 @@ def generate_response(symptoms, max_length=512):
22
 
23
  generated_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
24
  return generated_text
25
-
26
- # Define Gradio interface
27
- with gr.Blocks(title="Medical Symptom Assistant") as demo:
28
- gr.Markdown("# Medical Symptom Assistant")
29
- gr.Markdown("Describe your symptoms below and get potential medical insights.")
30
-
31
- with gr.Row():
32
- with gr.Column():
33
- input_text = gr.Textbox(
34
- label="Describe your symptoms",
35
- placeholder="Example: I am feeling continuous sneezing, shivering and chills",
36
- lines=4
37
- )
38
-
39
- submit_btn = gr.Button("Get Medical Insight", variant="primary")
40
-
41
- with gr.Row():
42
- output_text = gr.Textbox(
43
- label="Medical Response",
44
- lines=8
45
- )
46
-
47
- # Set up the submission action
48
- submit_btn.click(
49
- fn=generate_response,
50
- inputs=input_text,
51
- outputs=output_text
52
- )
53
-
54
- gr.Markdown("""
55
- ### Important Note:
56
- This tool provides preliminary insights only and should not replace professional medical advice.
57
- Always consult with a healthcare provider for proper diagnosis and treatment.
58
-
59
- Model: shanover/medbot_godel_v3
60
- """)
61
-
62
- # Launch the app
63
- demo.launch()
 
1
+
2
  import torch
3
  from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
4
 
 
22
 
23
  generated_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
24
  return generated_text