# NOTE(review): the lines below were Hugging Face Spaces page residue
# ("Spaces: / Sleeping / Sleeping") captured when this file was copied
# from the Space's web UI; kept only as a provenance note.
# Imports go at the top of the file, grouped stdlib / third-party.
import random

import gradio as gr
from huggingface_hub import InferenceClient

# Remote inference client for the hosted LLM that powers the chatbot.
# "microsoft/phi-4" is the model id on the Hugging Face Hub.
client = InferenceClient("microsoft/phi-4")
def respond(message, history):
    """Generate one assistant reply for *message*, given the chat *history*.

    Parameters
    ----------
    message : str
        The newest user message.
    history : list[dict] | None
        Prior turns as ``{'role': ..., 'content': ...}`` dicts
        (Gradio's ``type='messages'`` format). May be empty/None on
        the first turn.

    Returns
    -------
    str
        The model's reply text, stripped of surrounding whitespace.
    """
    # Seed the conversation with a system prompt, then replay any
    # earlier turns so the model sees the full context.
    messages = [{'role': 'system', 'content': 'You are a friendly chatbot.'}]
    if history:
        messages.extend(history)
    messages.append({'role': 'user', 'content': message})
    # max_tokens caps how long the reply may be; top_p controls
    # nucleus-sampling randomness.
    response = client.chat_completion(messages, max_tokens=500, top_p=0.8)
    return response['choices'][0]['message']['content'].strip()
# Earlier experiments, kept for reference (commented out):
#def yes_or_no(message,history):
#    return random.choice(['Yes','No'])
#def echo(message, history):
#    # always need two inputs
#    return message
#print("Hello, World")
# Wire the responder into a chat UI so the user can interact, see their
# conversation, and send new messages. type='messages' makes Gradio pass
# history as role/content dicts, matching what respond() expects.
chatbot = gr.ChatInterface(respond, type='messages')
chatbot.launch()