File size: 1,289 Bytes
6669583
fb04f70
eacba31
6669583
 
66036ee
eacba31
 
fb04f70
 
 
eacba31
 
 
 
db53eda
eacba31
 
 
 
 
 
 
 
db53eda
 
69edd3c
db53eda
eacba31
6669583
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
import gradio as gr
import random
from huggingface_hub import InferenceClient
# import lines go at the top! any libraries I need to import

client = InferenceClient("Qwen/Qwen2.5-7B-Instruct-1M")
# Create an InferenceClient connected to the Qwen2.5-7B-Instruct-1M generation model

#def echo(message, history):
    #return message

def respond(message, history):
    """Generate a chat reply for `message`, given the prior conversation.

    Parameters:
        message: the user's latest input string.
        history: list of prior {"role": ..., "content": ...} message dicts
            (may be empty or None on the first turn).

    Returns:
        The assistant's reply text, stripped of surrounding whitespace.
    """
    # Seed the conversation with a system prompt, then replay any history.
    messages = [{"role": "system", "content": "You are a friendly chatbot."}]
    if history:
        # BUG FIX: was `messages.extent(history)` — lists have no `extent`
        # method, so any turn with history raised AttributeError.
        messages.extend(history)

    messages.append({"role": "user", "content": message})
    response = client.chat_completion(
        messages,
        max_tokens=100,  # caps the length of the generated reply
    )

    # Documented attribute access on the ChatCompletionOutput return value.
    return response.choices[0].message.content.strip()

#def yes_or_no(message,history): 
    #return random.choice(["absolutely NOT.","now why would you even ask that", "this is not what I meant when i said do it for the lore..","do it for the lore!", "+1000 aura if you do","yes PLEASE", "i mean you do you ig..","i mean yolo"])

#print("Hello world!")
# Wire the chat UI: the user types a message, sees the running conversation,
# and each new message is answered by `respond`. `type="messages"` keeps the
# history as a list of openai-style role/content dicts.
chatbot = gr.ChatInterface(respond, type="messages")

# Start the local Gradio server and open the interface.
chatbot.launch()