File size: 3,854 Bytes
28ace03
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
a8ee0db
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
import os
import sys
import json
from typing import Dict, List, Any, Annotated
from typing_extensions import TypedDict

from langchain_core.messages import AIMessage, SystemMessage, HumanMessage, ToolMessage
from langchain_groq import ChatGroq
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages
from langgraph.prebuilt import ToolNode 
from langchain_openai import ChatOpenAI

from src.config import read_system_prompt, format_cv
from src.crew.crew_pool import interview_analyser 


class State(TypedDict):
    """LangGraph conversation state.

    The `add_messages` reducer makes LangGraph append returned messages to
    this list instead of replacing it on each node update.
    """

    messages: Annotated[list, add_messages]

class InterviewProcessor:
    """Drives a tool-augmented interview chatbot with LangGraph.

    The compiled graph alternates between a chatbot node (an OpenAI chat
    model with bound tools) and a ToolNode, looping until the model replies
    without requesting a tool call.
    """

    def __init__(self, cv_document: Dict[str, Any], job_offer: Dict[str, Any], conversation_history: List[Dict[str, Any]]):
        """Validate inputs, build the LLM, load the prompt, compile the graph.

        Args:
            cv_document: Parsed CV; must contain a 'candidat' key.
            job_offer: Job-offer fields (entreprise, poste, mission, ...).
            conversation_history: Prior messages prepended on every `run`.

        Raises:
            ValueError: If the CV document lacks 'candidat' or the offer is empty.
        """
        if not cv_document or 'candidat' not in cv_document:
            raise ValueError("Document CV invalide fourni.")
        if not job_offer:
            raise ValueError("Données de l'offre d'emploi non fournies.")

        self.job_offer = job_offer
        self.cv_data = cv_document['candidat']
        self.conversation_history = conversation_history
        self.tools = [interview_analyser]
        self.llm = self._get_llm()
        self.llm_with_tools = self.llm.bind_tools(self.tools)

        self.system_prompt_template = self._load_prompt_template()
        self.graph = self._build_graph()

    def _get_llm(self) -> ChatOpenAI:
        """Build the chat model; reads OPENAI_API_KEY from the environment."""
        return ChatOpenAI(
            temperature=0.6,
            model_name="gpt-4o-mini",
            api_key=os.getenv("OPENAI_API_KEY"),
        )

    def _load_prompt_template(self) -> str:
        """Load the raw system-prompt template text from disk."""
        return read_system_prompt('prompts/rag_prompt_old.txt')

    def _chatbot_node(self, state: State) -> Dict[str, Any]:
        """Graph node: produce the next assistant message.

        If the previous step was a tool execution, relay the tool output as an
        AIMessage without calling the LLM again; otherwise format the system
        prompt from the job offer and CV and invoke the tool-bound model.
        """
        if state["messages"] and isinstance(state["messages"][-1], ToolMessage):
            tool_message = state["messages"][-1]
            # Surface the tool result directly as the assistant's reply.
            return {"messages": [AIMessage(content=tool_message.content)]}

        messages = state["messages"]
        formatted_cv_str = format_cv(self.cv_data)

        # Fill the template, falling back to French placeholders for any
        # missing offer field so `.format` never raises on absent keys.
        system_prompt = self.system_prompt_template.format(
            entreprise=self.job_offer.get('entreprise', 'notre entreprise'),
            poste=self.job_offer.get('poste', 'ce poste'),
            mission=self.job_offer.get('mission', 'Non spécifiée'),
            profil_recherche=self.job_offer.get('profil_recherche', 'Non spécifié'),
            competences=self.job_offer.get('competences', 'Non spécifiées'),
            pole=self.job_offer.get('pole', 'Non spécifié'),
            cv=formatted_cv_str
        )
        llm_messages = [SystemMessage(content=system_prompt)] + messages
        response = self.llm_with_tools.invoke(llm_messages)
        return {"messages": [response]}

    def _route_after_chatbot(self, state: State) -> str:
        """Route to the tool node if the last message requests tool calls.

        Uses getattr because only AI messages carry a `tool_calls` attribute;
        the original attribute access would raise on other message types.
        """
        last_message = state["messages"][-1]
        if getattr(last_message, "tool_calls", None):
            return "call_tool"
        return END

    def _build_graph(self) -> Any:
        """Compile the chatbot <-> tool loop into a runnable graph.

        Return annotation fixed from the builtin `any` (a function, not a
        type) to `typing.Any`.
        """
        graph_builder = StateGraph(State)

        graph_builder.add_node("chatbot", self._chatbot_node)
        graph_builder.add_node("call_tool", ToolNode(self.tools))
        graph_builder.add_edge(START, "chatbot")
        graph_builder.add_conditional_edges(
            "chatbot",
            self._route_after_chatbot,
            {
                "call_tool": "call_tool",
                END: END
            }
        )
        # After a tool runs, return to the chatbot so it can relay the result.
        graph_builder.add_edge("call_tool", "chatbot")
        return graph_builder.compile()

    def run(self, messages: List[Dict[str, Any]]) -> Dict[str, Any]:
        """Invoke the graph on the stored history plus the new messages."""
        initial_state = self.conversation_history + messages
        return self.graph.invoke({"messages": initial_state})