import gradio as gr
from huggingface_hub import InferenceClient  # was `import huggingface_hub import InterferenceClient` — SyntaxError + typo

# NOTE(review): the original `import knowledgebase.txt` is not valid Python — a
# .txt file cannot be imported as a module. The knowledge base is never used in
# the visible code; if it is needed later, read it as text instead, e.g.:
#   KNOWLEDGE_BASE = open("knowledgebase.txt", encoding="utf-8").read()

# Remote inference client for the Gemma chat model (class name is
# `InferenceClient`, not `InterferenceClient`).
client = InferenceClient("google/gemma-3-27b-it")
|
|
def respond(message, history):
    """Stream a chat reply for *message*, yielding the partial text as it grows.

    Parameters:
        message: the user's latest input string.
        history: prior turns in OpenAI-style message dicts (may be None/empty).

    Yields:
        The accumulated assistant response after each streamed token, which is
        the incremental-update shape gr.ChatInterface expects from a generator.
    """
    # Fixed: original dict literal used `"content", "..."` (a comma instead of
    # a colon), which is a SyntaxError.
    messages = [{"role": "system", "content": "You're a supportive and helpful feminist"}]
    if history:
        messages.extend(history)

    messages.append({"role": "user", "content": message})

    response = ""
    # Renamed the loop variable: the original reused `message`, shadowing the
    # user-input parameter mid-function.
    for chunk in client.chat_completion(
        messages,
        max_tokens=150,
        stream=True,
    ):
        token = chunk.choices[0].delta.content
        # The final stream chunk's delta.content can be None; guard so the
        # concatenation never raises TypeError.
        if token:
            response += token
            yield response

    print(response)  # debug aid: log the complete reply once streaming ends
| |
| |
# Wire the streaming generator into a chat UI. `type="messages"` makes Gradio
# pass history as OpenAI-style role/content dicts, matching what respond builds.
chatbot = gr.ChatInterface(
    respond,
    type="messages",
)

# debug=True surfaces server tracebacks in the console while developing.
chatbot.launch(debug=True)
|
|
|
|