# Personalized Study Assistant Chatbot — Streamlit app
import os

import openai
import requests
import streamlit as st
from transformers import pipeline
# Configure the OpenAI API key.
# Read the key from the environment instead of hardcoding it in source —
# a key committed to the repo is leaked the moment the code is shared.
openai.api_key = os.environ.get("OPENAI_API_KEY", "")
# Ask OpenAI's chat model a single question and return the reply text.
def get_chat_response(query):
    """Send *query* to gpt-3.5-turbo and return the assistant's reply.

    The question is sent as a single user message with no prior
    conversation history attached.
    """
    completion = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",  # model selection; swap here to upgrade
        messages=[{"role": "user", "content": query}],
    )
    first_choice = completion["choices"][0]
    return first_choice["message"]["content"]
class StudyAssistantChatbot:
    """Wraps a local Hugging Face text-generation pipeline.

    Loading ``distilgpt2`` requires a deep-learning backend (PyTorch or
    TensorFlow). Construction reports any load failure to the Streamlit
    page and re-raises it as ``RuntimeError``.
    """

    def __init__(self):
        try:
            # distilgpt2 is small, so startup stays reasonably cheap.
            self.qa_pipeline = pipeline("text-generation", model="distilgpt2")
        except (ImportError, OSError, RuntimeError) as e:
            # pipeline() raises ImportError when no backend is installed
            # and OSError when the model weights cannot be loaded — the
            # original bare RuntimeError catch missed those cases.
            st.error(f"Error loading the model: {e}")
            st.error("Please make sure either TensorFlow or PyTorch is installed.")
            # Normalise to RuntimeError so existing callers that catch
            # RuntimeError keep working for every failure mode.
            raise RuntimeError("model initialisation failed") from e
# --- Streamlit page setup -------------------------------------------------
st.title("Personalized Study Assistant Chatbot")

# Build the chatbot; __init__ has already surfaced any load failure via
# st.error, so on RuntimeError we simply halt this script run.
try:
    chatbot = StudyAssistantChatbot()
except RuntimeError:
    st.stop()
# --- Main interaction -----------------------------------------------------
query = st.text_input("Ask your study-related question:")

if st.button("Get Tips and Resources"):
    if not query:
        # Nothing typed yet — prompt the user instead of calling the API.
        st.write("Please enter a question to get started!")
    else:
        # Answer the question through the OpenAI chat endpoint.
        st.write(get_chat_response(query))

# --- Sidebar --------------------------------------------------------------
st.sidebar.header("About")
st.sidebar.text("This is a personalized study assistant chatbot.")