|
|
import gradio as gr |
|
|
|
|
|
from huggingface_hub import InferenceClient |
|
|
|
|
|
# Shared serverless-inference client bound to the hosted phi-4 chat model.
# NOTE(review): uses the default (anonymous) token — rate limits may apply; confirm auth setup.
client = InferenceClient("microsoft/phi-4")
|
|
|
|
|
def respond(message, history):
    """Generate the chatbot's reply for one turn of the conversation.

    Parameters:
        message: The latest user message (str).
        history: Prior turns as a list of ``{"role": ..., "content": ...}``
            dicts (Gradio ``type="messages"`` format), or None/empty on the
            first turn.

    Returns:
        The assistant's reply text with surrounding whitespace stripped.
    """
    # Seed the conversation with the system prompt defining the bot's role.
    messages = [{"role": "system", "content": "You are a chatbot helping students make university options based on their current subjects and interests"}]

    # Replay earlier turns so the model has conversational context.
    if history:
        messages.extend(history)

    messages.append({"role": "user", "content": message})

    # BUG FIX: the keyword is `max_tokens`, not `max_token` — the original
    # raised a TypeError on every call.
    response = client.chat_completion(
        messages,
        max_tokens=100,
    )

    # chat_completion returns a ChatCompletionOutput dataclass; attribute
    # access is the documented way to reach the generated text.
    return response.choices[0].message.content.strip()
|
|
# Build a messages-format chat UI titled "CASSI" around the respond() handler.
chatbot = gr.ChatInterface(respond, type="messages", title="CASSI")


# Start the Gradio server (blocks until the app is shut down).
chatbot.launch()