Spaces:
Sleeping
Sleeping
File size: 811 Bytes
ba509f6 a964423 ba509f6 0cf340b ba509f6 a964423 ba509f6 e7d6505 ba509f6 0fa7de9 ba509f6 d38180d ba509f6 524043e e7d6505 ba509f6 46774a7 ba509f6 4e4de63 ba509f6 086dca9 70f91ec a964423 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 |
from huggingface_hub import InferenceClient
import gradio as gr
import random
# Serverless Inference API client pinned to the Zephyr-7B chat model.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
def respond(message, history):
    """Generate a model reply to *message*, given the prior chat *history*.

    Args:
        message: The user's latest message as a plain string.
        history: List of prior turns as ``{"role": ..., "content": ...}``
            dicts (Gradio ``type="messages"`` format), or empty/None on the
            first turn.

    Returns:
        The assistant's reply text with surrounding whitespace stripped.
    """
    # Seed the conversation with a fixed persona, then replay the history
    # so the model sees the full dialogue context.
    messages = [
        {
            "role": "system",
            "content": "act like google, a know it all, nice and concise",
        }
    ]
    if history:
        messages.extend(history)
    messages.append({"role": "user", "content": message})
    # temperature and top_p control randomness of the sampled reply.
    response = client.chat_completion(
        messages, max_tokens=1000, temperature=1.3, top_p=0.5
    )
    return response["choices"][0]["message"]["content"].strip()
# Chat UI: type="messages" makes Gradio pass history as role/content dicts,
# which respond() forwards directly to the chat-completion API.
chatbot = gr.ChatInterface(respond, type="messages")
# temperature and top_p (set inside respond) control randomness
chatbot.launch()
|