Al-Alcoba-Inciarte committed on
Commit
3eb7675
·
verified ·
1 Parent(s): 221af1f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -30
app.py CHANGED
@@ -5,35 +5,10 @@ from haystack.components.generators import HuggingFaceTGIGenerator
5
  generator = HuggingFaceTGIGenerator("mistralai/Mixtral-8x7B-Instruct-v0.1")
6
  generator.warm_up()
7
 
8
- #def respond(prompt):
9
- # result = generator.run(prompt, generation_kwargs={"max_new_tokens": 350})
10
- # return result["replies"][0]
11
- #
12
- #iface = gr.Interface(fn=respond, inputs="text", outputs="text")
13
- #iface.launch()
14
 
15
- import gradio as gr
16
-
17
- with gr.Blocks() as demo:
18
- chatbot = gr.Chatbot()
19
- msg = gr.Textbox()
20
- clear = gr.Button("Clear")
21
-
22
- #llm_chain, llm = init_chain(model, tokenizer)
23
-
24
- def user(user_message, history):
25
- return "", history + [[user_message, None]]
26
-
27
- def bot(history):
28
- print("Question: ", history[-1][0])
29
- bot_message = generator.run(history[-1][0], generation_kwargs={"max_new_tokens": 350}) #llm_chain.run(question=history[-1][0])
30
- print("Response: ", bot_message)
31
- history[-1][1] = ""
32
- history[-1][1] += bot_message
33
- return history
34
-
35
- msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(bot, chatbot, chatbot)
36
- clear.click(lambda: None, None, chatbot, queue=False)
37
 
38
- demo.queue()
39
- demo.launch()
 
5
  generator = HuggingFaceTGIGenerator("mistralai/Mixtral-8x7B-Instruct-v0.1")
6
  generator.warm_up()
7
 
8
+ def respond(prompt):
9
+ result = generator.run(prompt, generation_kwargs={"max_new_tokens": 350})
10
+ return result["replies"][0]
 
 
 
11
 
12
+ iface = gr.Interface(fn=respond, inputs="text", outputs="text")
13
+ iface.launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14