# Raheel31's picture
# Upload 5 files
# 435ca27 verified
import os
import sys
import gradio as gr
from huggingface_hub import InferenceClient

# Make the project root importable so the local `rag` package resolves
# when this script is executed from inside its own directory.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from rag.logger import get_logger
from rag.analysis_chain import retriever, hf_llm, analyze_resume_against_job

# Module-level logger. NOTE(review): never used below — wire into the
# handlers or remove.
logger = get_logger(__name__)
# -----------------------------------
# Load HuggingFace API key
# -----------------------------------
# Fail fast at import time: the chat client is unusable without credentials.
HF_API_TOKEN = os.getenv("HUGGINGFACE_API_TOKEN")
if not HF_API_TOKEN:
    raise RuntimeError("Environment variable HUGGINGFACE_API_TOKEN is missing!")

# Hosted-inference client used for the streaming chat completion below.
# NOTE(review): `model` is a deliberate placeholder — insert a model id
# before deployment, otherwise `chat_completion` calls will fail.
client = InferenceClient(
    token=HF_API_TOKEN,
    model=""  # When in use insert model name as parameter here
)
# -----------------------------------
# System Prompt
# -----------------------------------
# Base persona prepended to every prompt sent to the LLM.
# Fix: "aquisition" -> "acquisition" (typo in the persona instruction).
DEFAULT_SYSTEM_MESSAGE = """
You are a helpful resume-analysis chatbot.
You can perform the following tasks on the data you have:
1. Job description analysis using the RAG pipeline.
2. Candidate summarization using the vectorstore *WHICH YOU ALREADY HAVE*.
3. General conversation.
Always respond clearly and professionally as if you were a talent acquisition specialist.
"""
# -----------------------------------
# INTENT DETECTOR
# -----------------------------------
def detect_intent(user_message: str):
    """Classify a user message as 'job_analysis', 'candidate_info' or 'general'.

    Purely rule-based: case-insensitive substring matching against two
    keyword families, checked in order — job-description phrases win over
    candidate phrases; no match falls through to 'general'.
    """
    text = user_message.lower()

    # Ordered (intent, phrases) table; first family with a hit decides.
    keyword_table = (
        ("job_analysis", (
            "responsibilities", "requirements", "we are looking for",
            "qualifications", "role description", "job description",
            "candidate must", "skills required", "apply", "position",
            "looking for a", "experience required",
        )),
        ("candidate_info", (
            "candidate", "tell me about him", "tell me about her", "profile summary",
            "summary", "skills", "experience", "background", "what can he do",
            "what is his experience", "what is his background", "about the candidate", "about his resume",
        )),
    )

    for intent, phrases in keyword_table:
        if any(phrase in text for phrase in phrases):
            return intent

    # --- DEFAULT ---
    return "general"
# -----------------------------------
# BOT RESPONSE
# -----------------------------------
def bot_response(message, history):
    """Gradio ChatInterface callback: route the message by intent and stream a reply.

    Parameters
    ----------
    message : str
        Latest user message.
    history : list
        Prior chat turns supplied by Gradio (unused here).

    Yields
    ------
    str
        The cumulative response text so far (Gradio streaming convention:
        each yield replaces the displayed message).
    """
    system_msg = DEFAULT_SYSTEM_MESSAGE
    # Fixed generation settings for the hosted LLM.
    max_tokens = 500
    temperature = 0.7
    top_p = 0.95

    intent = detect_intent(message)

    # -----------------------------------
    # INTENT 1 -> JOB ANALYSIS USING RAG
    # -----------------------------------
    if intent == "job_analysis":
        # Run the resume-vs-job RAG pipeline; its output becomes the prompt body.
        rag_output = analyze_resume_against_job(
            job_description=message,
            retriever=retriever,
            llm_callable=hf_llm
        )
        prompt = f"{system_msg}\n\n{rag_output}"

    # -----------------------------------
    # INTENT 2 -> CANDIDATE SUMMARY
    # -----------------------------------
    elif intent == "candidate_info":
        # Use LCEL retriever interface (correct for VectorStoreRetriever)
        retrieved_docs = retriever.invoke("candidate overall profile")
        combined = "\n".join([doc.page_content for doc in retrieved_docs])
        # NOTE(review): this branch does not prepend `system_msg` — the
        # summarization prompt fully replaces the persona. Confirm intended.
        prompt = f"""
You are a professional candidate summarization assistant.
Using the resume data below, create a detailed profile summary.
Resume Data:
{combined}
Provide:
- background
- key experiences
- technical + soft skills
- strengths
- ideal job roles
"""

    # -----------------------------------
    # INTENT 3 -> GENERAL CHAT
    # -----------------------------------
    else:
        prompt = f"{system_msg}\nUser: {message}"

    # -----------------------------------
    # STREAMING HF LLM OUTPUT
    # -----------------------------------
    # Accumulate streamed deltas and yield the running text so the UI
    # renders the answer incrementally.
    response = ""
    for chunk in client.chat_completion(
        messages=[{"role": "user", "content": prompt}],
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        stream=True
    ):
        token = chunk.choices[0].delta.content or ""
        response += token
        yield response
# -----------------------------------
# UI: ChatGPT-style interface
# -----------------------------------
# Streaming chat widget; because bot_response is a generator, Gradio
# renders the reply token-by-token.
chatbot = gr.ChatInterface(
    fn=bot_response,
    title="GenAI Career Agent"
)
# -----------------------------------
# Layout (NO LOGIN)
# -----------------------------------
# Wrap the chat widget in a Blocks layout with a header.
with gr.Blocks() as demo:
    gr.Markdown("## Resume Analyst RAG Chatbot")
    gr.Markdown("Uses FAISS + HuggingFace LLM + custom RAG analysis pipeline.")
    chatbot.render()
# NOTE(review): `demo` is never launched here — presumably Hugging Face
# Spaces auto-serves the module-level `demo`; add `demo.launch()` under an
# `if __name__ == "__main__":` guard for local runs — confirm deployment target.