File size: 3,068 Bytes
ee569c7
 
 
259d0bd
ee569c7
23d90b1
 
 
37198d8
23d90b1
 
8b11a37
23d90b1
 
 
 
ddf2041
23d90b1
3059833
23d90b1
 
3059833
23d90b1
ee569c7
23d90b1
 
 
ddf2041
23d90b1
 
 
ddf2041
3059833
23d90b1
 
 
aa8c03a
23d90b1
 
 
 
 
aa8c03a
23d90b1
 
 
a502bea
23d90b1
 
 
 
 
 
 
 
 
 
 
 
 
ddf2041
23d90b1
 
 
 
 
 
 
 
 
993e2e8
23d90b1
 
 
 
 
259d0bd
23d90b1
 
3059833
23d90b1
ee569c7
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
import os
import google.generativeai as genai
import gradio as gr
import time

# One-time setup guarded by gr.NO_RELOAD so it is not re-executed when the
# Gradio auto-reloader re-imports this module during development.
if gr.NO_RELOAD:
    # NOTE(review): assumes GOOGLE_API_KEY is present in the environment;
    # os.getenv returns None otherwise and configure() gets no key — confirm
    # deployment sets this variable.
    GOOGLE_API_KEY = os.getenv('GOOGLE_API_KEY')
    genai.configure(api_key=GOOGLE_API_KEY)

    # System prompt steering the model toward medical-report summarization
    # (two adjacent string literals joined by the line continuation below).
    system_instruction = "You are an expert in medical report analysis and a medical professional. Analyze the given medical report and generate a summarized report of the same."\
            "All the medical tests in the given medical report must be specified in the summarized report. The response generated should be well-structured with health tips based on the report."

    # temperature=0 for deterministic, repeatable summaries.
    generation_config = genai.GenerationConfig(temperature=0)
    model = genai.GenerativeModel('gemini-2.0-flash-exp', 
                                system_instruction=system_instruction,
                                generation_config=generation_config)

    # A single multi-turn chat session shared by every callback below.
    chat = model.start_chat()

    # Module-level state shared across Gradio callbacks:
    #   stop_generation — set by stop() to abort the streaming loop in bot()
    #   prompt / input_file — last submitted text and uploaded-file handle,
    #   written by add_message() and read by bot().
    stop_generation = None
    prompt = input_file = None

    theme = gr.themes.Default(text_size=gr.themes.sizes.text_sm,font=[gr.themes.GoogleFont("Inconsolata"), "Arial", "sans-serif"])

def stop():
    """Request that bot() abort its character-streaming loop.

    Flips the module-level ``stop_generation`` flag to a truthy value;
    bot() checks it before emitting each character.
    """
    global stop_generation
    stop_generation = True

def clear():
    """Delete every file previously uploaded to the Gemini Files API.

    Wired to the chatbot's clear event so server-side uploads do not
    accumulate across sessions.
    """
    for uploaded in genai.list_files():
        uploaded.delete()


def add_message(history, message):
    """Record a user turn (optional file + optional text) and lock the input.

    Stores the uploaded-file handle and prompt text in module-level state
    for bot() to consume, appends the turn to the chat history, and returns
    a disabled textbox (stop button shown) while generation runs.

    Args:
        history: Gradio "messages"-format chat history list (mutated in place).
        message: dict from gr.MultimodalTextbox with "files" and "text" keys.

    Returns:
        (history, replacement MultimodalTextbox) for Gradio to apply.
    """
    global prompt, input_file, stop_generation
    stop_generation = 0

    # file_count="single" on the textbox means at most one file arrives;
    # truthiness check replaces the brittle len(...) == 1 comparison.
    if message["files"]:
        input_file = genai.upload_file(message["files"][0])
        history.append({"role": "user", "content": {"path": message["files"][0]}})
    else:
        input_file = None

    if message["text"] is not None:
        prompt = message["text"]
        history.append({"role": "user", "content": message["text"]})
    else:
        # Reset so a stale prompt from a previous turn is never re-sent
        # alongside a newly uploaded file.
        prompt = None

    return history, gr.MultimodalTextbox(value=None, interactive=False, submit_btn=False, stop_btn=True)

def bot(history):
    """Stream the assistant's reply into the chat history, one char at a time.

    Reads the module-level prompt/input_file pair set by add_message(),
    obtains a reply (or a canned fallback when input is missing), then
    yields the growing history per character to animate typing. stop()
    aborts the stream via the stop_generation flag.
    """
    global prompt, input_file

    if prompt:
        # With a file attached, send [text, file]; otherwise just the text.
        payload = [prompt, input_file] if input_file is not None else prompt
        response = chat.send_message(payload).text
    else:
        # No usable text: fall back to a canned reply without calling the API.
        response = (
            "what can I do with your file?"
            if input_file is not None
            else "Please provide a message"
        )

    history.append({"role": "assistant", "content": ""})
    for ch in response:
        if stop_generation:
            break
        history[-1]["content"] += ch
        time.sleep(0.01)
        yield history

# UI layout and event wiring.
with gr.Blocks(theme=theme) as demo:
    chatbot = gr.Chatbot(bubble_full_width=False, type="messages",height=650,container=False)
    chat_input = gr.MultimodalTextbox(interactive=True, placeholder="Enter message or upload file...", show_label=False, file_count="single",container=False)

    # Submit pipeline (order matters — each .then waits on the previous step):
    # 1) record the user turn and disable the textbox,
    # 2) stream the bot reply into the chatbot,
    # 3) re-enable the textbox with submit button restored.
    chat_msg = chat_input.submit(add_message, [chatbot, chat_input], [chatbot, chat_input])
    bot_msg = chat_msg.then(bot, chatbot, chatbot)
    bot_msg.then(lambda: gr.MultimodalTextbox(interactive=True, submit_btn=True, stop_btn=False, value=None), None, chat_input)
    
    # Stop button aborts streaming; clearing the chat deletes uploaded files.
    chat_input.stop(stop)
    chatbot.clear(clear)   

demo.launch()