Spaces:
Sleeping
Sleeping
File size: 896 Bytes
96c0fed 5acce1c 88dbfe8 ce5bb83 526d3d2 96c0fed 9329502 1786fdc 8a00090 6bddfb8 9329502 53e9871 1786fdc e5f53e6 1a82b77 c5379e7 5b2dad7 c5379e7 526d3d2 1786fdc e5f53e6 ce5bb83 526d3d2 96c0fed |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 |
import gradio as gr
import random
from huggingface_hub import InferenceClient
client = InferenceClient("google/gemma-3-27b-it")
# NOTE: swap the repo id above to change which hosted LLM answers the chat.
def respond(message, history):
    """Chat handler for gr.ChatInterface (type="messages").

    Args:
        message: The user's latest message as a plain string.
        history: Prior turns as a list of {"role", "content"} dicts
            (Gradio "messages" format); may be empty on the first turn.

    Returns:
        The assistant's reply text, stripped of surrounding whitespace.
    """
    messages = [{"role": "system", "content": "You are a friendly chatbot"}]
    # extend() on an empty history is a no-op, so no guard is needed.
    messages.extend(history or [])
    messages.append({"role": "user", "content": message})
    # temperature=2 (the API maximum) made sampling near-random, which is the
    # likely cause of the incoherent/repetitive replies noted at the bottom of
    # this file; 0.7 with a standard top_p keeps replies varied but coherent.
    response = client.chat_completion(
        messages,
        max_tokens=400,
        temperature=0.7,
        top_p=0.95,
    )
    return response["choices"][0]["message"]["content"].strip()
def random_message(message, history):
    """Alternative chat handler that ignores its inputs and returns a canned reply.

    Args:
        message: The user's latest message (unused).
        history: Prior chat turns (unused).

    Returns:
        One of a fixed tuple of short replies, chosen uniformly at random.
    """
    choices = ('yes', 'no', 'i dont think so', 'be so for real', 'yep',
               'woooow', 'hmmmm', 'idk', 'idc', 'yaaay', 'why would i know')
    # BUG FIX: the original body had `theme = soft`, which raised NameError
    # (`soft` is undefined) on every call; it was dead code and is removed.
    return random.choice(choices)
# Wire the chat handler into a Gradio chat UI using the "messages" history
# format (list of {"role", "content"} dicts) and start the server.
chatbot = gr.ChatInterface(respond, type = "messages", title = "chatty")
chatbot.launch(debug=True)
# NOTE(review): author-observed issue — the bot repeats messages; likely
# caused by the extreme sampling settings (temperature=2) in respond().
|