Spaces:
Sleeping
Sleeping
File size: 1,390 Bytes
4e9efdd 5e159a7 011fb7a 4e9efdd 011fb7a a00b76e 5e159a7 919a8e3 e43508e a00b76e 011fb7a a00b76e 4e9efdd |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 |
import gradio as gr
import random
from huggingface_hub import InferenceClient
# IMPORT INFERENCE CLIENT
# import lines go at the top! Any libraries I need to import go up here ^^
# Inference client bound to the hosted microsoft/phi-4 chat model;
# respond() below sends each conversation turn through this client.
client = InferenceClient("microsoft/phi-4")
def respond(message, history):
    """Generate a chat reply from the hosted phi-4 model.

    Args:
        message: The user's newest message text.
        history: Prior turns as a list of ``{"role", "content"}`` dicts
            (Gradio "messages" format); may be None/empty on the first turn.

    Returns:
        The assistant's reply text with surrounding whitespace stripped.
    """
    system_turn = {"role": "system", "content": "You are a friendly chatbot."}
    user_turn = {"role": "user", "content": message}
    # Assemble the full conversation: system prompt, any prior turns, new message.
    conversation = [system_turn, *(history or []), user_turn]
    reply = client.chat_completion(
        conversation,
        max_tokens=100
    )
    return reply['choices'][0]['message']['content'].strip()
# CODE NEW RESPOND FUNCTION
#def magic_8_ball(message,history):
# return random.choice(['Maybe', "I don't think so honey", "Yea right", "Absolutely!", "I can see that for you.", "ijbol"])
# def yes_or_no(message, history):
# return random.choice(['Yes', 'No'])
# def echo(message, history):
# return message
print("Hello World!")

# Chat UI wired to respond(); type="messages" makes Gradio pass history
# as a list of {"role", "content"} dicts, which is what respond() expects.
chatbot = gr.ChatInterface(
    respond,
    type="messages",
)

# Start the Gradio server so users can interact, see their conversation
# history, and send new messages.
chatbot.launch()