# khankashif's picture
# Create app.py
# 70a0e1f verified
import os
import re

import streamlit as st
from dotenv import load_dotenv
from groq import Groq
# Load environment variables from .env so secrets (e.g. the Groq API key)
# are available via os.environ before the client is constructed.
load_dotenv()
# Set up Groq client — Groq() takes no explicit key here, so it presumably
# reads GROQ_API_KEY from the environment populated above (verify against
# the groq SDK defaults).
client = Groq()
# Define a function to get the response from the Llama model
def get_response(user_input):
    """Request a streamed chat completion from the Groq-hosted model and return the full text.

    Args:
        user_input: The user's question (already prefixed with the chosen subject
            by the caller).

    Returns:
        The complete response text. ``<think>...</think>`` chain-of-thought blocks
        emitted by the DeepSeek-R1 distill model are stripped so only the final
        answer is shown to the student.
    """
    completion = client.chat.completions.create(
        model="deepseek-r1-distill-llama-70b",
        messages=[{"role": "user", "content": user_input}],
        temperature=0.6,
        max_completion_tokens=4096,
        top_p=0.95,
        stream=True,
        stop=None,
    )
    # Accumulate streamed chunks with join (delta.content can be None on
    # some chunks, e.g. the final one, hence the `or ""`).
    response = "".join(chunk.choices[0].delta.content or "" for chunk in completion)
    # DeepSeek-R1 models prepend their reasoning inside <think> tags; remove
    # it (DOTALL so the match spans newlines) and trim surrounding whitespace.
    return re.sub(r"<think>.*?</think>", "", response, flags=re.DOTALL).strip()
# Streamlit UI for user interaction
# ---- Streamlit UI ----------------------------------------------------------
st.title("Educational Assistant Chatbot")
st.write("Ask me anything related to Math, English (Nouns), and other educational topics!")

# Subject picker; the hint shown below mirrors the selected subject.
subject_choice = st.selectbox("Choose Subject:", ["Math", "English", "Science", "History", "Geography"])

# Closed set of subjects -> prompt line; a dict lookup replaces the original
# five-way if/elif chain (every subject in the selectbox has an entry).
_SUBJECT_PROMPTS = {
    "Math": "Ask me any math question!",
    "English": "Ask me about Nouns or other English topics!",
    "Science": "Ask me about Science topics!",
    "History": "Ask me about History topics!",
    "Geography": "Ask me about Geography!",
}
st.write(_SUBJECT_PROMPTS[subject_choice])

user_input = st.text_input("Your Question:")
if user_input:
    # Prefix the subject so the model answers in the right context.
    question = f"Subject: {subject_choice}. Question: {user_input}"
    response = get_response(question)
    st.write(f"**Bot Response:** {response}")