# Source: nsgupta1 — "Modularize" (commit 7dff9c2, verified; ~1.4 kB)
import numpy as np
import pandas as pd
import streamlit as st

from code_executor import execute_code
from question_handler import find_top_question, generate_detailed_prompt
from utils.constants import metadata_path, embeddings_path
from utils.openai_client import generate_response
# Load the question metadata (CSV) and the precomputed question embeddings
# (NumPy array) once at startup; both are passed to find_top_question for
# similarity-based retrieval.
metadata = pd.read_csv(metadata_path)
embeddings = np.load(embeddings_path)
# --- Main page title ---
st.title("Real-World Programming Question Mock Interview")

# Sidebar form: collect the retrieval parameters and trigger question
# generation on submit. (Indentation restored — the pasted source had the
# form/if bodies flattened to column 0, which is a SyntaxError.)
with st.sidebar.form(key="input_form"):
    company = st.text_input("Company", value="Google")
    difficulty = st.selectbox("Difficulty", ["Easy", "Medium", "Hard"], index=1)
    topic = st.text_input("Topic", value="Binary Search")
    generate_button = st.form_submit_button(label="Generate")

if generate_button:
    # Build a free-text query, retrieve the closest stored question by
    # embedding similarity, expand it into a detailed prompt, and ask the
    # LLM for the final interview question.
    query = f"{company} {difficulty} {topic}"
    top_question = find_top_question(query, metadata, embeddings)
    detailed_prompt = generate_detailed_prompt(top_question)
    response = generate_response(detailed_prompt)
    # Store in session state so the question survives Streamlit reruns
    # triggered by other widget interactions.
    st.session_state.generated_question = response
# --- Sidebar: ad-hoc Python code interpreter for the candidate's solution ---
st.sidebar.markdown("## Python Code Interpreter")
code_input = st.sidebar.text_area("Write your Python code here:", height=300)
if st.sidebar.button("Run Code"):
    # SECURITY NOTE(review): execute_code runs user-supplied Python. Confirm
    # it is sandboxed/resource-limited before exposing this app publicly.
    execute_code(code_input)
# Display generated questions and follow-up chat logic here...