Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -5,59 +5,66 @@ import time
|
|
| 5 |
|
| 6 |
GOOGLE_API_KEY = os.getenv('GOOGLE_API_KEY')
|
| 7 |
genai.configure(api_key=GOOGLE_API_KEY)
|
| 8 |
-
|
|
|
|
|
|
|
|
|
|
| 9 |
generation_config = genai.GenerationConfig(temperature=0)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 10 |
|
| 11 |
input_file = None
|
| 12 |
-
|
| 13 |
-
"All the medical tests in the given medical report must be specified in the summarized report. The response generated should be well-structured with health tips based on the report."
|
| 14 |
|
| 15 |
-
def
|
| 16 |
-
global
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 20 |
|
| 21 |
-
|
| 22 |
-
|
|
|
|
|
|
|
| 23 |
|
| 24 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 25 |
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
for char in response:
|
| 29 |
-
assistant_message += char
|
| 30 |
-
history[-1] = {"role": "assistant", "content": assistant_message}
|
| 31 |
yield history
|
| 32 |
-
time.sleep(0.1)
|
| 33 |
|
| 34 |
with gr.Blocks() as demo:
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
input_file = genai.upload_file(file)
|
| 38 |
-
|
| 39 |
-
def clear_file():
|
| 40 |
-
global input_file
|
| 41 |
-
input_file.delete()
|
| 42 |
-
|
| 43 |
-
input_file = None
|
| 44 |
|
| 45 |
-
|
|
|
|
|
|
|
| 46 |
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
chatbot = gr.Chatbot(type="messages", render_markdown=True, height=500)
|
| 50 |
-
message = gr.Textbox(label="Enter your prompt")
|
| 51 |
-
submit = gr.Button("Submit")
|
| 52 |
-
clear = gr.ClearButton([message, chatbot])
|
| 53 |
-
|
| 54 |
-
submit.click(greet, [message, chatbot], chatbot)
|
| 55 |
-
|
| 56 |
-
with gr.Column(scale=1):
|
| 57 |
-
file_upload = gr.File(label="Upload PDF File", file_types=['.pdf', '.txt'], )
|
| 58 |
-
|
| 59 |
-
file_upload.upload(upload_file, file_upload)
|
| 60 |
-
file_upload.clear(clear_file, None, [chatbot, message])
|
| 61 |
-
|
| 62 |
|
| 63 |
demo.launch()
|
|
|
|
| 5 |
|
| 6 |
# --- Gemini configuration -------------------------------------------------
GOOGLE_API_KEY = os.getenv('GOOGLE_API_KEY')
genai.configure(api_key=GOOGLE_API_KEY)

# System prompt steering the model toward medical-report summarization.
system_instruction = (
    "You are an expert in medical report analysis. Analyze the given medical report and generate a summarized report of the same."
    "All the medical tests in the given medical report must be specified in the summarized report. The response generated should be well-structured with health tips based on the report."
)

# Deterministic output (temperature 0) for consistent summaries.
generation_config = genai.GenerationConfig(temperature=0)

model = genai.GenerativeModel(
    'gemini-2.0-flash-exp',
    system_instruction=system_instruction,
    generation_config=generation_config,
)

# One multi-turn chat session shared by every request.
chat = model.start_chat()

# Handle to the most recently uploaded Gemini file (None when nothing is uploaded).
input_file = None
# Flag flipped to 1 by stop() to abort the character-streaming loop in bot().
stop_generation = 0
|
|
|
|
| 21 |
|
| 22 |
+
def stop():
    """Request cancellation of the in-progress reply stream.

    Sets the module-level ``stop_generation`` flag, which ``bot()``
    polls between characters while streaming its response.
    """
    global stop_generation
    stop_generation = 1
|
| 25 |
+
|
| 26 |
+
def clear():
    """Delete every file stored in the Gemini file service and forget the current upload.

    Wired to the chatbot's clear event so stale uploads don't linger
    server-side after the conversation is reset.
    """
    global input_file
    for stored in genai.list_files():
        stored.delete()
    input_file = None
|
| 32 |
+
|
| 33 |
+
def add_message(history, message):
    """Record the user's turn (attachments first, then text) in the chat history.

    Returns the updated history plus a locked MultimodalTextbox: input is
    disabled and the submit button swapped for a stop button while the
    model generates its reply.
    """
    history.extend({"role": "user", "content": {"path": path}} for path in message["files"])
    if message["text"] is not None:
        history.append({"role": "user", "content": message["text"]})

    locked_box = gr.MultimodalTextbox(value=None, interactive=False, submit_btn=False, stop_btn=True)
    return history, locked_box
|
| 40 |
+
|
| 41 |
+
def bot(history, message):
    """Send the user's message (and optional file) to Gemini and stream the reply.

    Appends an empty assistant turn to *history*, then yields the history
    after each appended character so Gradio renders a typewriter effect.
    Honors the global ``stop_generation`` flag set by ``stop()``.

    Args:
        history: Gradio "messages"-format chat history (list of role/content dicts).
        message: MultimodalTextbox value dict with "text" and "files" keys.

    Yields:
        The progressively updated history.
    """
    global input_file, stop_generation
    # BUG FIX: reset the stop flag at the start of each run. Previously the
    # flag was set by stop() and never cleared, so one press of Stop would
    # instantly abort every later generation as well.
    stop_generation = 0

    if message["files"]:
        # Upload the first attachment and include it alongside the text prompt.
        input_file = genai.upload_file(message["files"][0])
        response = chat.send_message([message["text"], input_file]).text
    else:
        response = chat.send_message(message["text"]).text

    history.append({"role": "assistant", "content": ""})
    for character in response:
        if stop_generation:
            break  # user pressed Stop mid-stream
        history[-1]["content"] += character
        time.sleep(0.05)  # pacing for the typewriter effect
        yield history

    # Ensure the final state is rendered even for an empty response or an
    # immediate stop (a generator that never yields would leave the UI stale).
    yield history
|
|
|
|
| 58 |
|
| 59 |
with gr.Blocks() as demo:
|
| 60 |
+
chatbot = gr.Chatbot(bubble_full_width=False, type="messages",height=550)
|
| 61 |
+
chat_input = gr.MultimodalTextbox(interactive=True, placeholder="Enter message or upload file...", show_label=False)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 62 |
|
| 63 |
+
chat_msg = chat_input.submit (add_message, [chatbot, chat_input], [chatbot, chat_input])
|
| 64 |
+
bot_msg = chat_msg.then(bot, [chatbot, chat_input], chatbot)
|
| 65 |
+
bot_msg.then(lambda: gr.MultimodalTextbox(interactive=True, submit_btn=True, stop_btn=False, value=None), None, chat_input)
|
| 66 |
|
| 67 |
+
chat_input.stop(stop)
|
| 68 |
+
chatbot.clear(clear)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 69 |
|
| 70 |
demo.launch()
|