import os

from crewai.tools import BaseTool
from pydantic import BaseModel, Field, PrivateAttr
from typing import Type, Any

# NOTE(review): `create_query_engine` is called in PolicyQueryTool._run but is not
# imported in this file as shown — confirm it is imported elsewhere or add the import.


class PolicyQueryToolInput(BaseModel):
    """
    Schema for input to the PolicyQueryTool.

    Attributes:
        UIN (str): The UIN (Unique Identification Number) of the policy.
        question (str): The question to ask about the policy.
    """

    UIN: str = Field(..., description="UIN number of the policy.")
    question: str = Field(..., description="Question about the policy.")


class PolicyQueryTool(BaseTool):
    """
    A custom CrewAI tool to query insurance policy documents by UIN using a vector store.

    Attributes:
        name (str): Name of the tool.
        description (str): Description of the tool's functionality.
        args_schema (Type[BaseModel]): The schema defining expected arguments.
        _vector_store (Any): The vector store used for querying policy documents.
    """

    name: str = "Policy Query Tool"
    description: str = "Answers questions about a specific insurance policy using its UIN number."
    args_schema: Type[BaseModel] = PolicyQueryToolInput
    # Holds the internal vector store object, excluded from Pydantic validation.
    _vector_store: Any = PrivateAttr()

    def __init__(self, vector_store: Any) -> None:
        """
        Initialize the PolicyQueryTool with the provided vector store.

        Args:
            vector_store (Any): A Chroma-based vector store used to perform retrieval.
        """
        super().__init__()
        # Store the vector DB client internally (not exposed via the args schema).
        self._vector_store = vector_store

    def _run(self, **kwargs: Any) -> str:
        """
        Execute the tool with the provided UIN and question.

        Args:
            **kwargs: Should include 'UIN' (policy identifier) and
                'question' (user query), per PolicyQueryToolInput.

        Returns:
            str: The answer to the user's question as generated by the LLM.
        """
        UIN = kwargs.get("UIN")
        question = kwargs.get("question")

        # SECURITY: the API key was previously hardcoded in source. Prefer the
        # OPENROUTER_API_KEY environment variable; the old literal remains only as a
        # backward-compatible fallback — rotate that key and remove the fallback.
        api_key = os.environ.get(
            "OPENROUTER_API_KEY",
            "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",
        )

        # Build a query engine scoped to this UIN using vector similarity with
        # metadata filtering.
        # BUG FIX: the original passed the bare name `vector_store`, which is not
        # defined in this scope (NameError at runtime); use the stored private attr.
        query_engine = create_query_engine(
            UIN=UIN,
            embedding_model="BAAI/bge-small-en-v1.5",
            vector_store=self._vector_store,
            similarity_top_k=10,
            llm_model="deepseek/deepseek-chat-v3-0324:free",
            api_key=api_key,
        )

        # Coerce the engine's response object to str to honor the declared
        # `-> str` return type.
        return str(query_engine.query(question))