Spaces:
Sleeping
Sleeping
File size: 1,254 Bytes
a027e5c 12fd2c6 46a3941 a027e5c e7f244e 8fb19f7 bca42d7 12fd2c6 bca42d7 12fd2c6 80e9e38 8fb19f7 12fd2c6 5078183 47b8aec 2dae459 47b8aec 8fb19f7 47b8aec 12fd2c6 c8c2e4d 8fb19f7 c33f7d8 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 |
import gradio as gr
from huggingface_hub import InferenceClient
import random
# Model served via the Hugging Face Inference API.
# Swap this ID to change the underlying LLM.
MODEL_ID = "google/gemma-3-27b-it"
client = InferenceClient(MODEL_ID)
def respond(message, history):
    """Stream a chat reply from the hosted model.

    Args:
        message: The latest user message (plain string from ChatInterface).
        history: Prior turns as a list of {"role", "content"} dicts
            (ChatInterface ``type="messages"`` format), or empty/None.

    Yields:
        The partial assistant response, growing as tokens arrive, so the
        UI renders a live "typing" effect.
    """
    # Change the content value to change the bot's personality.
    messages = [{"role": "system", "content": "be a happy and sassy single mom from boston who divorced her 3rd husband (thank god for that)"}]
    if history:
        # History is already in role/content dict form; splice it in whole.
        messages.extend(history)
    messages.append({"role": "user", "content": message})

    response = ""
    # Change max_tokens to a greater number for longer replies.
    for chunk in client.chat_completion(messages, max_tokens=10000, stream=True):
        token = chunk.choices[0].delta.content
        # delta.content can be None on some stream events (e.g. role-only
        # or final deltas); guard so `+=` never sees None.
        if token:
            response += token
        yield response
# Chat UI driven by the streaming `respond` generator; type="messages"
# keeps history as OpenAI-style role/content dicts.
chatbot = gr.ChatInterface(fn=respond, type="messages")
chatbot.launch(debug=True)  # debug=True surfaces tracebacks in the console
|