krrisstiie committed on
Commit
fb0b429
·
verified ·
1 Parent(s): 5c51594

adding huggingface & transitioning to a generative AI model

Browse files
Files changed (1) hide show
  1. app.py +20 -5
app.py CHANGED
@@ -1,9 +1,24 @@
1
  import gradio as gr
2
- import random
 
 
3
 
4
  def respond (message, history):
5
- responses = ["Why are you asking me..", "I believe so", "My sources say no", "Too foggy to predict", "Stay delusional", "Uhh lwk ask yourself", "Of course not", "Girl yesss", "I guess", "No way", "Bro trust"]
6
- return random.choice(responses)
 
 
 
 
 
 
 
 
 
 
 
 
7
 
8
- chatbot = gr.ChatInterface(respond, type = 'messages', title = "Magic 8 Ball", description = "Ask me anything, I promise to tell the truth..", theme = "mgetz/Celeb_glitzy")
9
- chatbot.launch()
 
 
import gradio as gr
from huggingface_hub import InferenceClient

# Client for the hosted serverless inference endpoint of the Zephyr-7B chat
# model; created once at import time and reused by every call to respond().
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
def respond(message, history):
    """Generate one chat reply for *message* via the hosted Zephyr model.

    Parameters:
        message: the latest user message (str).
        history: prior turns as a list of {"role": ..., "content": ...}
            dicts, or None/empty on the first turn — this is the shape
            gr.ChatInterface supplies when type='messages'.

    Returns:
        The assistant's reply text with surrounding whitespace stripped.
    """
    # BUG FIX: the original wrote {"role:" "system", ...} — the colon was
    # inside the first string, so implicit string concatenation produced the
    # key "role:system" and the message carried no "role" key at all,
    # sending a malformed system message to the API.
    messages = [{"role": "system", "content": "You're a friendly chatbot"}]

    # history may be None or [] on the first turn; only extend when present.
    if history:
        messages.extend(history)

    messages.append({"role": "user", "content": message})

    response = client.chat_completion(
        messages,
        max_tokens=100,
        temperature=0.2,  # low temperature: keep replies focused/deterministic
    )

    # chat_completion returns a ChatCompletionOutput; its fields are also
    # reachable by item access, so this mirrors the OpenAI-style dict shape.
    return response["choices"][0]["message"]["content"].strip()
# Wire the generator function into a chat UI; type='messages' makes Gradio
# pass history in OpenAI-style role/content dicts, matching respond()'s
# expectations.
chatbot = gr.ChatInterface(respond, type='messages')

chatbot.launch()