Spaces:
Sleeping
Sleeping
Aryan Jain
add main.py for server initialization; update file handling in DatabaseUpdater and improve context retrieval in ToolCall
46d624e | import os | |
| # from groq import Groq | |
| from openai import AsyncOpenAI | |
| import json | |
| from ._tool_call import ToolCall | |
| from ._config import logger | |
| from openai.types.chat.chat_completion_chunk import ChoiceDeltaToolCall, ChoiceDeltaToolCallFunction | |
# Dispatch table: maps the tool names advertised to the model (see the
# *_TOOL schemas defined below) to the ToolCall method name invoked at
# runtime by _handle_required_action.
# NOTE: "contact_us" is intentionally backed by ToolCall.send_email.
AVAILABLE_FUNCTIONS = {
    "contact_us": ToolCall.send_email.__name__,
    "report_missing_context": ToolCall.report_missing_context.__name__,
    "get_context_for_user_query": ToolCall.get_context_for_user_query.__name__,
}
# System prompt for the Sifars assistant. Defines persona, company facts,
# response-style rules, and the tool-usage protocol the model must follow.
INSTRUCTIONS = """
# Objective
You are a Conversational AI assistant developed by **Sifars**, a web development company. Your responses **must** be **short (not more than 50 words), precise, and engaging**. You provide **clear, conversational answers** to queries about **Sifars and its services**.
# About Sifars
Sifars is a pioneering web service provider founded in 2018 by **Jatin Sethi, Munish Kumar, and Sukhwinder Singh**. Headquartered in **Mohali, Punjab, India**, we specialize in **application development using Python, JavaScript, React, Node.js, and more**. We focus on **scalability, innovation, and work-life balance**.
Sifars is specialized in building AI powered web and mobile applications.
- **Email:** [contact@sifars.com](mailto:contact@sifars.com)
- **Address:** D-234, Ground Floor, Phase 8B, Industrial Area, Sector 74, Sahibzada Ajit Singh Nagar, Punjab 160055, India
- **Phone:** [+91 8106 455 950](tel:+918106455950), [+91 8008 296 463](tel:+918008296463), [+91 8896 720 000](tel:+918896720000)
- **Careers:** [Explore opportunities](https://www.sifars.com/en/careers/)
If a user wants to contact us, ask for their **name, email, phone number, and reason for contact**, then call the **"contact_us"** tool with these details.
# Rules
### **Response Length & Style**
- **All responses must be under 50 words.**
- Responses **must** be **short, clear, and engaging** (two sentences per paragraph max).
- **Each sentence must have fewer than 10 words.**
- Avoid **repetition, filler words, or a salesy tone**.
- Use **simple, varied language** in a **friendly, conversational tone**.
### **Handling Queries**
#### ✅ **How to Respond to Queries**
1. **First, check if the chatbot can answer the query using the provided information.**
   - If yes, **answer directly** without calling any tool.
   - If not, proceed to Step 2.
2. **If the context is not enough, and the query is about Sifars:**
   - Call **"get_context_for_user_query"** to fetch relevant details by passing correct query.
   - Normalize the **query**. The query might reference the chat history, so paraphrase it so it stands alone and is understandable without the history.
   - If the retrieved context is enough, **answer the question without calling another tool**.
   - If still insufficient, proceed to Step 3.
3. **If no sufficient information is available even after calling "get_context_for_user_query":**
   - Collect the user’s **name, email, phone number, and reason for contact**.
   - Call **"contact_us"** with these details.
   - Respond: *"Your query has been forwarded to our team. They will reach out soon."*
4. **DO NOT call "get_context_for_user_query" for general or casual conversations.**
#### 🚨 **Handling Job Applications**
- **If a user wants to apply for a job at Sifars:**
  - Ask them to visit our **[Careers Page](https://www.sifars.com/en/careers/)**.
  - Alternatively, they can **email their resume to [hr@sifars.com](mailto:hr@sifars.com)**.
- **DO NOT call "contact_us" or "get_context_for_user_query" for job application queries.**
#### 📌 **Examples of Proper Handling**
✅ **User:** "Who are you?"
✅ **Response:** "I am a chatbot developed by Sifars to assist users."
✅ **User:** "What services does Sifars offer?"
✅ **Response:** "We offer web and application development using Python, JavaScript, React, and more."
✅ **User:** "Where is Sifars located?"
✅ **Response:** "Sifars is headquartered in Mohali, Punjab, India."
✅ **User:** "How can I apply for a job at Sifars?"
✅ **Response:** "You can check our openings at [Careers Page](https://www.sifars.com/en/careers/) or email your resume to [hr@sifars.com](mailto:hr@sifars.com)."
❌ **User:** "What is Sifars' refund policy?" (No info in context)
✅ **Action:**
1. **Check if existing context provides an answer.**
2. **If not, call "get_context_for_user_query"** to fetch more details.
3. **If still insufficient, collect the user’s details (name, email, phone number, and reason for contact).**
4. **Call "contact_us" with these details.**
5. **Respond:** *"Your query has been forwarded to our team. They will reach out soon."*
### **Tool Usage**
- **Tools should only be called if absolutely necessary.**
- **Always check if the chatbot can answer the question first.**
- **"get_context_for_user_query" must only be called for Sifars-related queries that the existing context cannot answer.**
"""
# OpenAI function-tool schema: retrieves context from the database for a
# user query. The model must pass a standalone, history-free query string.
CONTEXT_EXTRACTING_TOOL = {
    "type": "function",
    "function": {
        "name": "get_context_for_user_query",
        "description": "Extract the context from the database regarding the user query",
        "parameters": {
            "type": "object",
            "properties": {
                "query": {
                    "type": "string",
                    # Fixed garbled wording ("refrence", "standalso") so the
                    # model receives a coherent instruction.
                    "description": "This field contains the query for which you need to provide an answer. The query might reference the chat history, so paraphrase it so it stands alone and is understandable without the history."
                },
            },
            "required": ["query"]
        }
    }
}
# OpenAI function-tool schema: collects the user's contact details from the
# chat so the Sifars team can follow up. All five fields are required; the
# email subject is generated by the model and never shown to the user.
CONTACT_TOOL = {
    "type": "function",
    "function": {
        "name": "contact_us",
        "description": "Collect the information dropped by user in the chat to contact the sifars team",
        "parameters": {
            "type": "object",
            "properties": {
                "name": {
                    "type": "string",
                    "description": "This field is the name of the user which you will collect from the user"
                },
                "email": {
                    "type": "string",
                    "description": "This field is the email of the user which you will collect from the user"
                },
                "phone_number": {
                    "type": "string",
                    "description": "This field is the phone number of the user which you will collect from the user"
                },
                "reason_for_contact": {
                    "type": "string",
                    # Typos fixed ("seperate", trailing "..") in model-facing text.
                    "description": "This field will contain a paragraph which is the reason for contact of the user which you will collect. This is separate from the subject of the email. Keep it same as what user has given in the chat. Do not change it or paraphrase it."
                },
                "subject": {
                    "type": "string",
                    # Typo fixed ("contructed") in model-facing text.
                    "description": "This is the subject of the email which will be dynamically constructed by you using the reason for contact provided. It is mandatory that you do not mention this field to the user."
                }
            },
            "required": ["name", "email", "phone_number", "reason_for_contact", "subject"]
        }
    }
}
# OpenAI function-tool schema: reports a query the assistant could not
# answer so the Sifars team can fill the knowledge-base gap.
REPORT_MISSING_CONTEXT_TOOL = {
    "type": "function",
    "function": {
        "name": "report_missing_context",
        "description": "Report the context missing in the chat to the sifars team",
        "parameters": {
            "type": "object",
            "properties": {
                "unresolved_query": {
                    "type": "string",
                    # "provided query" corrected to "provided context": the tool
                    # exists because the CONTEXT was insufficient, not the query.
                    "description": "This field contains the query for which you could not provide an answer because the provided context was not enough."
                },
            },
            "required": ["unresolved_query"]
        }
    }
}
class ChatClient:
    """Async client for an OpenAI-compatible chat-completions API.

    Streams assistant text to the caller, accumulates any streamed tool-call
    fragments, executes the tools through ToolCall, and recursively continues
    the conversation with the tool results appended.
    """

    def __init__(
        self,
        model: "str | None" = None,
        max_tokens: int = 4096,
        stream: bool = True,
        system_message: str = INSTRUCTIONS,
    ):
        """
        Args:
            model: Model name; falls back to the OPENAI_MODEL env var.
                Resolved here at call time (the old default froze the env
                value at import time).
            max_tokens: Per-request completion token cap.
            stream: Whether to stream responses (the chunk loop assumes True).
            system_message: System prompt prepended to every conversation.
        """
        self.client = AsyncOpenAI(
            base_url=os.getenv("OPENAI_API_BASE_URL"),
            api_key=os.getenv("OPENAI_API_KEY"),
        )
        self.model = model or os.getenv("OPENAI_MODEL")
        self.max_tokens = max_tokens
        self.stream = stream
        self.system_message = system_message

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc, traceback):
        # Release the HTTP resources held by the SDK client (was a no-op,
        # leaking the underlying connection pool).
        await self.client.close()

    async def create_chat_completions(
        self,
        messages: list,
        model: "str | None" = None,
    ):
        """Stream a chat completion, transparently resolving tool calls.

        Yields text chunks as they arrive. If the model emits tool calls,
        they are executed and the conversation continues recursively with
        the SAME model (the old code fell back to the env default on
        recursion). NOTE: mutates `messages` in place.
        """
        model = model or self.model
        # Corrected log text: this client targets an OpenAI-compatible
        # endpoint, not Groq specifically.
        logger.info("Calling chat completions API...")
        response = await self.client.chat.completions.create(
            messages=[
                {"role": "system", "content": self.system_message},
                *messages,
            ],
            model=model,
            max_tokens=self.max_tokens,
            stream=self.stream,
            temperature=0.7,
            tools=[CONTACT_TOOL, REPORT_MISSING_CONTEXT_TOOL, CONTEXT_EXTRACTING_TOOL],
            tool_choice="auto",
        )
        logger.info("Chat completions API called successfully.")
        # Tool-call arguments arrive fragmented across chunks; accumulate
        # them keyed by the tool call's index.
        tool_calls_by_index = {}
        async for chunk in response:
            if not chunk.choices:
                # Some providers send keep-alive/usage chunks with no choices.
                continue
            delta = chunk.choices[0].delta
            if delta and delta.content:
                yield delta.content
            elif delta and delta.tool_calls:
                for tool_call in delta.tool_calls:
                    if tool_call.index not in tool_calls_by_index:
                        # First fragment carries id/type/name.
                        tool_calls_by_index[tool_call.index] = {
                            "id": tool_call.id,
                            "type": tool_call.type,
                            "function": {
                                "name": tool_call.function.name,
                                "arguments": "",
                            },
                        }
                    if tool_call.function.arguments:
                        tool_calls_by_index[tool_call.index]["function"]["arguments"] += tool_call.function.arguments
        if tool_calls_by_index:
            complete_tool_calls = [
                ChoiceDeltaToolCall(
                    index=idx,
                    id=call["id"],
                    type=call["type"],
                    function=ChoiceDeltaToolCallFunction(
                        name=call["function"]["name"],
                        arguments=call["function"]["arguments"],
                    ),
                )
                for idx, call in tool_calls_by_index.items()
            ]
            tool_calls_output = await self._handle_required_action(
                tool_calls=complete_tool_calls,
            )
            # Record the assistant turn as plain API-serializable dicts
            # (the old code appended pydantic delta objects) with the
            # required null content for tool-call turns.
            messages.append({
                "role": "assistant",
                "content": None,
                "tool_calls": list(tool_calls_by_index.values()),
            })
            messages.extend(tool_calls_output)
            async for chunk in self.create_chat_completions(messages=messages, model=model):
                yield chunk

    async def _handle_required_action(self, tool_calls: list):
        """Execute each completed tool call and return "tool" role messages.

        Args:
            tool_calls: Completed ChoiceDeltaToolCall objects.

        Returns:
            One {"role": "tool", ...} message per executed call, carrying
            the tool's result (or an error placeholder) back to the model.
        """
        tool_calls_output = []
        for tool in tool_calls:
            if tool.type != "function":
                continue
            try:
                function_to_call = AVAILABLE_FUNCTIONS[tool.function.name]
                function_arguments = json.loads(tool.function.arguments) if tool.function.arguments else {}
                logger.info(f"Calling tool: {function_to_call}")
                async with ToolCall() as tool_call:
                    function_response = await getattr(tool_call, function_to_call)(
                        function_arguments
                    )
                logger.info("Tool call completed.")
            except Exception as e:
                # logger.exception preserves the traceback; a best-effort
                # placeholder keeps the conversation alive.
                logger.exception(e)
                function_response = "Unable to call the tool."
            tool_calls_output.append(
                {
                    "role": "tool",
                    "tool_call_id": tool.id,
                    "name": tool.function.name,
                    "content": (
                        str(function_response)
                        if function_response
                        else "No results found."
                    ),
                }
            )
        return tool_calls_output