aadya1762 committed on
Commit
6aec7fd
·
1 Parent(s): 1dde7c7
Files changed (2) hide show
  1. app.py +5 -15
  2. gemmademo/_chat.py +8 -1
app.py CHANGED
@@ -6,21 +6,11 @@ def main():
6
  model_options = list(LlamaCppGemmaModel.AVAILABLE_MODELS.keys())
7
  task_options = ["Question Answering", "Text Generation", "Code Completion"]
8
 
9
- def update_chat(model_name, task_name):
10
- model = LlamaCppGemmaModel(name=model_name)
11
- model.load_model()
12
- prompt_manager = PromptManager(task=task_name)
13
- chat = GradioChat(model=model, prompt_manager=prompt_manager)
14
- chat.run()
15
-
16
- gr.Interface(
17
- fn=update_chat,
18
- inputs=[
19
- gr.Dropdown(choices=model_options, value="gemma-2b-it", label="Select Gemma Model"),
20
- gr.Dropdown(choices=task_options, value="Question Answering", label="Select Task"),
21
- ],
22
- outputs=[],
23
- ).launch()
24
 
25
  if __name__ == "__main__":
26
  main()
 
6
  model_options = list(LlamaCppGemmaModel.AVAILABLE_MODELS.keys())
7
  task_options = ["Question Answering", "Text Generation", "Code Completion"]
8
 
9
+ model = LlamaCppGemmaModel(name="gemma-2b-it")
10
+ model.load_model()
11
+ prompt_manager = PromptManager(task="Question Answering")
12
+ chat = GradioChat(model=model, prompt_manager=prompt_manager, model_options=model_options, task_options=task_options)
13
+ chat.run()
 
 
 
 
 
 
 
 
 
 
14
 
15
  if __name__ == "__main__":
16
  main()
gemmademo/_chat.py CHANGED
@@ -14,9 +14,11 @@ class GradioChat:
14
  - Formats user inputs before sending them to the model.
15
  """
16
 
17
- def __init__(self, model: LlamaCppGemmaModel, prompt_manager: PromptManager):
18
  self.model = model
19
  self.prompt_manager = prompt_manager
 
 
20
 
21
  def run(self):
22
  self._chat()
@@ -30,4 +32,9 @@ class GradioChat:
30
  chat_interface = gr.ChatInterface(
31
  chat_fn,
32
  textbox=gr.Textbox(placeholder="What is up?", container=False),
 
 
 
 
33
  )
 
 
14
  - Formats user inputs before sending them to the model.
15
  """
16
 
17
+ def __init__(self, model: LlamaCppGemmaModel, prompt_manager: PromptManager, model_options: list[str], task_options: list[str]):
18
  self.model = model
19
  self.prompt_manager = prompt_manager
20
+ self.model_options = model_options
21
+ self.task_options = task_options
22
 
23
  def run(self):
24
  self._chat()
 
32
  chat_interface = gr.ChatInterface(
33
  chat_fn,
34
  textbox=gr.Textbox(placeholder="What is up?", container=False),
35
+ additional_inputs=[
36
+ gr.Dropdown(choices=self.model_options, value="gemma-2b-it", label="Select Gemma Model"),
37
+ gr.Dropdown(choices=self.task_options, value="Question Answering", label="Select Task"),
38
+ ],
39
  )
40
+ chat_interface.launch()