Spaces:
Sleeping
Sleeping
# --- Imports ---------------------------------------------------------------
# Stdlib
import random

# Third-party
import gradio as gr
from gradio.themes.utils import colors
from huggingface_hub import InferenceClient

# --- Theme -----------------------------------------------------------------
# Custom warm/orange palette; only the primary hue of the default theme is
# overridden. Shades are listed from lightest (c50) to darkest (c950).
theme = gr.themes.Default(
    primary_hue=gr.themes.Color(
        c50="#fff8f0", c100="#ffedd5", c200="#fed7aa", c300="#ffe09e",
        c400="#c2814c", c500="#f97316", c600="#ea580c", c700="#c2410c",
        c800="#9a3412", c900="#7c2d12", c950="#611f00",
    )
)

# --- Model client ----------------------------------------------------------
# Serverless inference client for the hosted model.  # change the LLM
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
def respond(message, history):
    """Stream a chat reply for *message*, token by token.

    Parameters
    ----------
    message : str
        The latest user message.
    history : list[dict] | None
        Prior turns in OpenAI-style ``{"role": ..., "content": ...}`` form,
        as supplied by ``gr.ChatInterface(type="messages")``.

    Yields
    ------
    str
        The partial response so far (Gradio re-renders it on each yield).
    """
    # System prompt fixes the bot's persona.  # change personality
    messages = [{"role": "system", "content": "You are a chatbot who helps with mental health"}]
    if history:
        messages.extend(history)
    messages.append({"role": "user", "content": message})

    response = ""
    # NOTE: the loop variable must NOT be named `message` — the original
    # shadowed the user-message parameter here.
    for chunk in client.chat_completion(
        messages,
        max_tokens=100,  # change length
        stream=True,
    ):
        token = chunk.choices[0].delta.content
        # The final streamed chunk may carry content=None; skip it instead of
        # crashing on `str + None`.
        if token:
            response += token
            yield response
def random_message(message, history):
    """Return one Magic-8-Ball-style reply chosen uniformly at random.

    Both parameters exist only to satisfy the ``gr.ChatInterface`` callback
    signature; neither influences the reply.
    """
    replies = [
        "try again later",
        "isn't looking good for you",
        "perhaps",
        "maybe",
        "doubtful",
        "yes",
        "no",
        "without a doubt",
        "most likely",
        "signs point to yes",
        "it is certain",
        "my sources say no",
        "reply hazy, try again",
    ]
    return random.choice(replies)
# Wire the streaming responder into a chat UI with the custom theme.
chatbot = gr.ChatInterface(respond, type="messages", theme=theme)

# Standard script-entry guard so importing this module does not start a server.
if __name__ == "__main__":
    chatbot.launch(debug=True)