File size: 3,033 Bytes
b55b8d4
 
 
 
 
 
 
 
2c2c90a
b55b8d4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2c2c90a
b55b8d4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2c2c90a
b55b8d4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
fc0ebc0
b55b8d4
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
import os
from langchain_groq import ChatGroq
from langgraph.graph import StateGraph, MessagesState, START, END
from langgraph.checkpoint.memory import MemorySaver
from langchain_core.messages import SystemMessage
from pydantic import BaseModel, ConfigDict, Field
from typing import Optional, List
from .models_loader import llm
from .prompts import introduction_prompt , details_extract_prompt

# Pydantic model for extracted business info
class DetailsFormatter(BaseModel):
    """Structured business details extracted from the chat transcript.

    Bound as a tool schema via ``llm.bind_tools([DetailsFormatter])`` in
    ``IntroductionChatbot.extract_details``; the model returns its
    extraction as this object's fields (delivered as tool-call args).
    """

    business_type: str = Field(description="The type of the business")
    platform: str = Field(description="The platform used for the business")
    target_audience: str = Field(description="The target audience of the business")
    business_goals: str = Field(description="The business goals of the business")
    offerings: str = Field(description="The offerings of the business")
    # NOTE(review): PascalCase is inconsistent with the snake_case fields above,
    # but renaming would change the tool-call arg key downstream consumers see —
    # confirm before fixing.
    Challenges_faced: str = Field(description="The challenges faced by the business")

# State model
class State(BaseModel):
    """Conversation state accumulated across chatbot turns.

    Attributes:
        interactions: Ordered list of ``{'user': ..., 'agent_response': ...}``
            dicts, appended by ``IntroductionChatbot.chat``.
    """

    # default_factory makes the per-instance empty list explicit and idiomatic
    # (a bare `[]` default relies on pydantic deep-copying it per instance).
    interactions: Optional[list] = Field(default_factory=list)
    model_config = ConfigDict(arbitrary_types_allowed=True)

# Global business state (shared across the module; mutated by chat()).
business_state = State()

class IntroductionChatbot:
    """Conversational agent that gathers a user's business details.

    Wraps a single-node LangGraph workflow (START -> chatbot -> END) with an
    in-memory checkpointer, records every exchange in the module-level
    ``business_state``, and can extract a structured summary via the
    ``DetailsFormatter`` tool schema.
    """

    def __init__(self):
        self.memory = MemorySaver()
        self.llm = llm
        self.workflow = self._initialize_workflow()
        self.interact_agent = self.workflow.compile(checkpointer=self.memory)
        self.messages = []

    def _initialize_workflow(self):
        """Build the one-node conversation graph: START -> chatbot -> END."""
        graph = StateGraph(MessagesState)
        graph.add_node("chatbot", self._call_model)
        graph.add_edge(START, "chatbot")
        graph.add_edge("chatbot", END)
        return graph

    def _call_model(self, state):
        """Graph node: prepend the system prompt and invoke the LLM."""
        prompt_messages = [SystemMessage(content=introduction_prompt)]
        prompt_messages.extend(state["messages"])
        reply = self.llm.invoke(prompt_messages)
        return {"messages": [reply]}

    def chat(self, user_input: str):
        """Send one user turn through the agent and return the reply text.

        Side effects: appends to ``self.messages`` and to the shared
        ``business_state.interactions``.
        """
        self.messages.append({"role": "user", "content": user_input})
        run_config = {"configurable": {"thread_id": "1"}}
        result = self.interact_agent.invoke({"messages": [user_input]}, run_config)
        reply = result["messages"][-1].content
        self.messages.append({"role": "assistant", "content": reply})
        business_state.interactions.append({'user': user_input, 'agent_response': reply})
        return reply

    def is_complete(self, latest_response: str) -> bool:
        """Return True once the agent signals it has collected every detail."""
        return "Thanks for providing all your required business details" in latest_response

    def extract_details(self):
        """Extract structured business details from the recorded interactions.

        Returns the first tool call's args dict when the model used the
        ``DetailsFormatter`` tool, the raw text content otherwise, or the
        string "No response" as a last resort.
        """
        prompt = details_extract_prompt(business_state.interactions)
        extraction = self.llm.bind_tools([DetailsFormatter]).invoke(
            [SystemMessage(content=prompt)]
        )

        # Truthy getattr mirrors the original hasattr-and-nonempty check.
        calls = getattr(extraction, 'tool_calls', None)
        if calls:
            return calls[0]['args']
        if hasattr(extraction, 'content'):
            return extraction.content
        return "No response"