File size: 5,927 Bytes
1768be1
 
 
 
 
 
 
 
 
 
b8579f2
1768be1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
import pandas as pd
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationChain
from langchain_google_genai.chat_models import ChatGoogleGenerativeAI
import os
from dotenv import load_dotenv

# Load environment variables from a local .env file (expects GEMINI_KEY).
load_dotenv()
app = FastAPI()

# Enable CORS for all origins (for development)
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# too permissive for production — restrict origins before deploying.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Load Excel file
# Question bank read once at import time. Columns used by the routes below:
# problem_level, problem_statement, problem_link.
question_df = pd.read_excel("coding_questions.xlsx")

# Load Gemini API Key
google_api_key = os.getenv("GEMINI_KEY")
if not google_api_key:
    raise ValueError("Please set the GEMINI_KEY environment variable in your .env file.")


# Initialize Gemini model
llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0.7, google_api_key=google_api_key)

# Store conversation memories for each session
# Maps session_id -> ConversationChain; entries persist until /reset-session
# deletes them, so memory grows with the number of active sessions.
session_memories = {}

def get_conversation_for_session(session_id: str):
    """Return the ConversationChain for *session_id*, creating it on first use.

    Each session gets its own ConversationBufferMemory so chat histories
    never leak between sessions.
    """
    conversation = session_memories.get(session_id)
    if conversation is None:
        conversation = ConversationChain(llm=llm, memory=ConversationBufferMemory())
        session_memories[session_id] = conversation
    return conversation

# ---------- MODELS ----------
class Message(BaseModel):
    """Request body for the chat endpoints (/daily-question, /interview)."""
    # Raw user text; /interview treats the "__start_hr__" and
    # "__start_mock__" prefixes as interview-start commands.
    message: str
    # Selects which conversation memory to use; omitted callers share "default".
    session_id: str = "default"

class ResetSession(BaseModel):
    """Request body for /reset-session."""
    # The session whose conversation memory should be discarded.
    session_id: str

# ---------- ROUTES ----------
@app.post("/daily-question")
def get_daily_questions(msg: Message):
    """Return one randomly sampled question per difficulty level.

    The reply is a single HTML string (rendered by the chat frontend) with
    a bold level label, the problem statement, and a styled practice link
    for each question. The request body is only used to echo session_id.
    """
    # Lowercase the level column once instead of recomputing it per level.
    levels = question_df["problem_level"].str.lower()

    questions = []
    for level in ("easy", "medium", "hard"):
        pool = question_df[levels == level]
        if pool.empty:
            # Skip a difficulty with no rows instead of letting .sample(1)
            # raise a ValueError and 500 the whole request.
            continue
        q = pool.sample(1).iloc[0]
        questions.append({
            "level": q["problem_level"],
            "statement": q["problem_statement"],
            "link": q["problem_link"],
        })

    # Format for chat frontend as HTML with <a> tags and blue color
    formatted = "<br><br>".join(
        f"<b>{q['level']}</b>: {q['statement']}<br>"
        f"<a href='{q['link']}' target='_blank' rel='noopener noreferrer' style='color:#2563eb;'>Practice the question</a>"
        for q in questions
    )
    return {"reply": formatted, "session_id": msg.session_id}

@app.post("/interview")
async def interview_flow(msg: Message):
    """Drive the HR / mock-technical interview chat for one session.

    Two magic prefixes start an interview:
      * "__start_hr__<name>"         — begin an HR interview for <name>
      * "__start_mock__<tech stack>" — begin a technical mock for <tech stack>
    Any other message is forwarded to the session's ongoing conversation.
    """
    # Get conversation for this session (always retrieve it first)
    conversation = get_conversation_for_session(msg.session_id)

    if msg.message.startswith("__start_hr__"):
        # Slice off only the leading marker; .replace() would also delete
        # any later occurrence of the marker embedded in the name itself.
        name = msg.message[len("__start_hr__"):].strip()
        conversation.memory.save_context({"input": "name"}, {"output": name})
        hr_intro_prompt = f"""
    You are a professional HR (Maha Laxmi), for 10 years in ProMVP (It provides innovative solutions for businesses and startups). You conduct interviews for candidates applying for various positions.
    Today you are interviewing {name}.

    1. Ask the candidate about their background, skills, and experiences.
    2. Ask about their career goals and why they are interested in this position.
    3. Ask about their strengths and weaknesses.
    4. Ask about their problem-solving skills and how they handle challenges.
    5. Ask about their teamwork and communication skills.
    6. Ask about their availability and salary expectations.
    7. Conclude the interview by thanking the candidate for their time and explaining the next steps in the hiring process.
    8. You will ask questions one by one, and the candidate will respond to each question.
    9. If the candidate hints to exit the interview, you will conclude the interview politely.
    Note: You will keep the questions short and concise
    """
        question = conversation.predict(input=hr_intro_prompt)
        return {"reply": question, "session_id": msg.session_id}

    elif msg.message.startswith("__start_mock__"):
        # Same prefix-slicing fix as the HR branch above.
        tech_stack = msg.message[len("__start_mock__"):].strip()
        conversation.memory.save_context({"input": "Tech stack"}, {"output": tech_stack})
        prompt = f"""
    You are a technical interviewer with expertise in {tech_stack} for 10 years.
        1. You will conduct a mock technical interview for a beginner candidate with skills in {tech_stack}.
        2. You will ask 10 questions in total, covering the fundamentals of {tech_stack}.
        3. You will ask questions one by one, and the candidate will respond to each question.

        Note: You will keep the questions short and concise
  
    If the candidate hints to exit the interview, you will conclude the interview politely.
    """
        question = conversation.predict(input=prompt)
        return {"reply": question, "session_id": msg.session_id}

    # For all other messages, use the session-specific conversation
    response = conversation.predict(input=msg.message)
    return {"reply": response, "session_id": msg.session_id}

@app.post("/reset-session")
def reset_session(reset_data: ResetSession):
    """Discard the stored conversation memory for the given session id."""
    session_id = reset_data.session_id
    # pop with a default is a no-op when the session was never created,
    # so resetting an unknown session still succeeds.
    session_memories.pop(session_id, None)
    return {"message": f"Session {session_id} has been reset", "session_id": session_id}

@app.get("/")
def root():
    """Landing endpoint — confirms the service is up."""
    welcome = {"message": "Welcome to the AI Interview Agent (Gemini Edition)"}
    return welcome