File size: 2,718 Bytes
53a6315
 
 
 
 
f49d5d2
53a6315
 
 
f49d5d2
 
 
53a6315
f49d5d2
53a6315
f49d5d2
 
 
 
 
 
 
 
53a6315
f49d5d2
 
7836133
f49d5d2
7836133
f49d5d2
 
 
 
 
 
 
 
 
 
 
53a6315
0e8cb0c
f49d5d2
53a6315
 
 
 
 
 
f49d5d2
 
 
 
53a6315
f49d5d2
7836133
53a6315
 
 
 
 
 
 
 
 
 
f49d5d2
 
 
53a6315
 
7836133
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
# TODO: Consider removing python-dotenv; I don't want to use .env for environment variables
import os
import gradio as gr
from data_service import DataService
from llm_service import LLMService
import gettext

class App:
  """Gradio UI for the G34 patient-assistance portal.

  Wires a patient dropdown to an LLM-backed summary view and a free-text
  query box. The Blocks layout is built once in __init__; call launch()
  to serve it.
  """

  def __init__(self, data_service: DataService, llm_service: LLMService):
    """Build the Gradio Blocks app.

    Args:
      data_service: provides the patient records shown in the dropdown.
      llm_service: produces patient summaries and answers to queries.
    """
    # Install the global _() translation function before any UI strings
    # are evaluated below.
    gettext.install("aibreakout", './locale')

    with gr.Blocks(theme=gr.themes.Soft(primary_hue=gr.themes.colors.blue, secondary_hue=gr.themes.colors.sky), title=_("Doctor Chat Assistant")) as gradio_app:
      gr.Markdown(
        _("Welcome to the G34 Patient Assistance Portal. Select the patient below to get started.")
      )

      def create_query_textbox(visible: bool) -> gr.Textbox:
        # Used both at construction time (hidden) and from the select
        # handler to reveal/enable the box once a patient is chosen —
        # returning a fresh component is how Gradio updates properties.
        return gr.Textbox(
          label=_("Query"),
          placeholder=_("What is your query?"),
          visible=visible,
          interactive=visible
        )

      # Dividing the UI into columns is weird, since the principal organizing item is the row...
      with gr.Row():
        patients = data_service.get_patients()
        # Dropdown choices are (label, value) pairs: show the first name,
        # submit the patient id. (Avoid `_` as the index name here — it is
        # the installed gettext function.)
        patient_dropdown = gr.Dropdown(
          choices=[(row["FIRST"], row["PATIENT_ID"]) for _idx, row in patients.sort_values(by="FIRST").iterrows()],
          multiselect=False,
          value=None,
          label=_("Patient")
        )
        query_textbox = create_query_textbox(False)

      with gr.Row():
        patient_summary = gr.Markdown()
        query_response = gr.Markdown()

      def patient_selected(patient: str) -> tuple[str, gr.Textbox]:
        # On selection: show the LLM summary and reveal the query box.
        return (
          llm_service.get_summary(patient),
          create_query_textbox(True)
        )
      patient_dropdown.select(
        patient_selected,
        inputs=[patient_dropdown],
        outputs=[patient_summary, query_textbox]
      )

      def ask_query(patient: str, query: str) -> str:
        return llm_service.answer_query(patient, query)

      query_textbox.submit(
        fn=ask_query,
        inputs=[patient_dropdown, query_textbox],
        # Was `outputs=(query_response)` — parentheses around a single
        # expression are not a tuple; use a list for consistency.
        outputs=[query_response]
      )

    self._gradio_app = gradio_app

  def launch(self):
    """Serve the app (public share link, debug logging enabled)."""
    self._gradio_app.launch(share=True, debug=True)

if __name__ == '__main__':
  print("Attempting to launch G34 final project as a Gradio App...")
  # Look the key up once; warn early (but still launch the UI) when it is
  # missing — LLM-backed features will fail without it.
  api_key = os.getenv("OPENAI_API_KEY")
  if not api_key:
    print("WARNING: OpenAI API Key was not found in environment variables.")
    print("Please add the OpenAI API key as an application secret.")

  # Both services are context managers; nesting ensures the LLM service is
  # torn down before the data service it depends on.
  with DataService().build() as data_service:
    with LLMService().with_key(api_key).with_data_service(data_service).build() as llm_service:
      app = App(data_service, llm_service)
      app.launch()