# Hugging Face Space: minimal chatbot app (Gradio UI + hosted inference).
import gradio as gr  # provides the chatbot web interface
from huggingface_hub import InferenceClient

# Remote inference client for a pre-trained model, so nothing is trained locally.
# NOTE(review): presumably relies on an HF token from the environment for
# authenticated/hosted inference — confirm the Space's settings.
client = InferenceClient("microsoft/phi-4")
def respond_yes_no_randomly(message, history):
    """Generate a chat reply for the Gradio ChatInterface.

    Parameters
    ----------
    message : str
        The user's latest message.
    history : list[dict] | None
        Prior turns as OpenAI-style ``{'role': ..., 'content': ...}`` dicts,
        as supplied by ``gr.ChatInterface(type="messages")``.

    Returns
    -------
    str
        The model's reply with surrounding whitespace stripped; empty string
        if the model returned no content.
    """
    # System prompt establishes the assistant's persona for every turn.
    messages = [{'role': 'system', 'content': 'You are a friendly chatbot'}]
    if history:
        # Carry the whole conversation forward so replies stay in context.
        messages.extend(history)
    messages.append({'role': 'user', 'content': message})
    response = client.chat_completion(
        messages,
        max_tokens=100,  # caps reply length in tokens (not words); raise for longer answers
    )
    # Attribute access is the documented shape of ChatCompletionOutput.
    # Guard against a None content before stripping whitespace.
    content = response.choices[0].message.content
    return (content or "").strip()
# Wire the response function into a chat UI; ChatInterface manages the
# conversation history and the user-input box for us.
chatbot = gr.ChatInterface(
    respond_yes_no_randomly,
    type="messages",           # history arrives as role/content dicts
    title="friendly chatbot",  # fixed typo: was "friendlyy"
)
chatbot.launch()  # start the web server hosting the app