| import gradio as gr | |
| import random | |
| from huggingface_hub import InferenceClient | |
| # import lines go at the top! any libraries I need to import | |
# Inference client connected to the Qwen2.5-7B-Instruct-1M hosted chat model.
client = InferenceClient("Qwen/Qwen2.5-7B-Instruct-1M")
| #def echo(message, history): | |
| #return message | |
def respond(message, history):
    """Generate one chatbot reply for a Gradio ChatInterface.

    Parameters
    ----------
    message : str
        The user's newest message.
    history : list[dict] | None
        Prior turns in OpenAI "messages" format
        ({"role": ..., "content": ...}), as supplied by
        gr.ChatInterface(type="messages").

    Returns
    -------
    str
        The assistant's reply text, stripped of surrounding whitespace.
    """
    messages = [{"role": "system", "content": "You are a friendly chatbot."}]
    if history:
        # BUG FIX: original called messages.extent(...), which raises
        # AttributeError — list has no 'extent' method. extend() appends
        # all prior conversation turns so the model sees the full context.
        messages.extend(history)
    messages.append({"role": "user", "content": message})
    response = client.chat_completion(
        messages,
        max_tokens=100,  # caps the length of the generated reply
    )
    # chat_completion returns a ChatCompletionOutput; use the documented
    # attribute path to the generated text.
    return response.choices[0].message.content.strip()
| #def yes_or_no(message,history): | |
| #return random.choice(["absolutely NOT.","now why would you even ask that", "this is not what I meant when i said do it for the lore..","do it for the lore!", "+1000 aura if you do","yes PLEASE", "i mean you do you ig..","i mean yolo"]) | |
| #print("Hello world!") | |
# Wire the UI: ChatInterface renders the conversation history, collects
# new user messages, and forwards (message, history) pairs to respond().
# type="messages" keeps history in OpenAI-style role/content dicts.
chatbot = gr.ChatInterface(fn=respond, type="messages")

# Start the local web server and open the chat app.
chatbot.launch()