# --- Scraped Hugging Face Spaces page metadata (not code) -------------------
# Spaces: Sleeping / Sleeping
# File size: 1,578 Bytes
# Commits: cee914f 8214de5 04139b3 cee914f 04139b3 cee914f 8214de5 cee914f 8214de5 cee914f
# (line-number gutter from the web viewer removed: 1..49)
import streamlit as st
import requests
import os

# Hugging Face API key: prefer the HUGGINGFACE_API_KEY environment variable so
# the secret never has to live in source control; the original hard-coded
# placeholder is kept as a fallback for backward compatibility.
API_KEY = os.environ.get("HUGGINGFACE_API_KEY", "your_huggingface_api_key")
# Hosted inference endpoint for the conversational model.
MODEL_URL = "https://api-inference.huggingface.co/models/microsoft/DialoGPT-medium"
# Function to get chatbot response from Hugging Face API
def query_huggingface_api(message, timeout=30):
    """Send *message* to the DialoGPT inference endpoint and return the reply.

    Parameters
    ----------
    message : str
        The prompt (here, the whole joined conversation so far) to send.
    timeout : float, optional
        Seconds to wait for the HTTP request before giving up; prevents the
        Streamlit app from hanging forever on an unreachable endpoint.

    Returns
    -------
    str
        The model's generated text, or a human-readable error string when the
        request fails or the response has an unexpected shape.
    """
    headers = {"Authorization": f"Bearer {API_KEY}"}
    payload = {"inputs": message}
    try:
        response = requests.post(MODEL_URL, headers=headers, json=payload, timeout=timeout)
        # Surface HTTP-level failures (401 bad key, 503 model loading, ...)
        # instead of trying to parse an error page as a generation result.
        response.raise_for_status()
        response_json = response.json()
    except (requests.RequestException, ValueError):
        # Network error, timeout, HTTP error, or non-JSON body.
        return "Error: Unable to get response from the model."
    # Extract and return the chatbot response. A successful inference call
    # returns a list of dicts, each carrying a 'generated_text' field.
    if isinstance(response_json, list) and response_json and 'generated_text' in response_json[0]:
        return response_json[0]['generated_text']
    return "Error: Unable to get response from the model."
# Set up Streamlit UI
st.title("Learning Chatbot")
st.subheader("Ask me anything related to learning!")

# User input
user_message = st.text_input("You: ")

# Initialize the conversation history in session state so it survives reruns.
if "history" not in st.session_state:
    st.session_state.history = []
# Remember the last prompt we answered. Streamlit re-executes this whole
# script on every widget interaction while st.text_input keeps its value, so
# without this guard the same message would be re-appended to the history and
# re-sent to the model on every rerun.
if "last_user_message" not in st.session_state:
    st.session_state.last_user_message = None

if user_message and user_message != st.session_state.last_user_message:
    st.session_state.last_user_message = user_message
    # Append the user's message to the conversation history
    st.session_state.history.append(f"You: {user_message}")
    # Combine the history for context in the conversation
    conversation_history = " ".join(st.session_state.history)
    # Query the Hugging Face API to get the response
    bot_response = query_huggingface_api(conversation_history)
    # Append the bot's response to the history
    st.session_state.history.append(f"Bot: {bot_response}")

# Show conversation history
for message in st.session_state.history:
    st.write(message)