# rentbot / llm_handler.py
# (removed web-scrape residue: "mgbam's picture" / "Update llm_handler.py" / "056dfed verified"
#  — these were Hugging Face UI lines, not code, and broke the module at import)
# rentbot/llm_handler.py
import os
from openai import AsyncOpenAI
import json
# Shared async OpenAI client. The API key is read from the environment once,
# at import time.
# NOTE(review): if OPENAI_API_KEY is unset, os.getenv returns None and the
# client constructor will raise at import — confirm deployment always sets it.
client = AsyncOpenAI(api_key=os.getenv("OPENAI_API_KEY"))
# Tool schema advertised to the model: a single "create_event" function the
# LLM may call to book an apartment-viewing slot on the calendar.
tools = [
    {
        "type": "function",
        "function": {
            "name": "create_event",
            "description": "Create a calendar event to book an apartment viewing.",
            "parameters": {
                "type": "object",
                "properties": {
                    "start_time": {
                        "type": "string",
                        "description": "The start time of the event in ISO 8601 format, e.g., 2025-07-18T14:00:00",
                    },
                    "duration_minutes": {
                        "type": "integer",
                        "description": "The duration of the event in minutes.",
                        "default": 30,
                    },
                    "summary": {
                        "type": "string",
                        "description": "A short summary or name for the event, e.g., 'Unit 5B viewing'",
                    },
                },
                "required": ["start_time", "summary"],
            },
        },
    }
]
async def get_llm_response(messages: list, async_chunk_handler):
    """
    Call the OpenAI chat-completions API with streaming enabled.

    Text deltas are forwarded to ``async_chunk_handler`` as they arrive;
    tool-call fragments are merged across chunks keyed by their ``index``.

    Args:
        messages: Conversation history in OpenAI chat format.
        async_chunk_handler: Async callable awaited with each text chunk.

    Returns:
        A ``(assistant_message, tool_calls)`` tuple. ``assistant_message`` is
        a dict suitable for appending back onto ``messages``; ``tool_calls``
        is a (possibly empty) list of accumulated tool-call dicts. On any API
        failure a canned apology is streamed to the handler and returned with
        an empty tool-call list instead of raising.
    """
    try:
        stream = await client.chat.completions.create(
            model="gpt-4o-mini",
            messages=messages,
            stream=True,
            tools=tools,
            tool_choice="auto",
        )
        full_response = ""
        tool_calls = []
        async for chunk in stream:
            delta = chunk.choices[0].delta
            if delta and delta.content:
                text_chunk = delta.content
                full_response += text_chunk
                # Forward new text to the caller as soon as it arrives.
                await async_chunk_handler(text_chunk)
            if delta and delta.tool_calls:
                # BUGFIX: fragments must be merged by the chunk's `index`
                # field, not by enumerate() position. With parallel tool
                # calls, a later delta may carry ONLY the newly-started call
                # (index 1) as its sole element, so position-based merging
                # would append its arguments onto the first call. The old
                # code also sized the list once, from the first delta, and
                # could never grow it for calls that start later.
                for tc in delta.tool_calls:
                    while len(tool_calls) <= tc.index:
                        tool_calls.append(
                            {
                                "id": None,
                                "type": "function",
                                "function": {"name": None, "arguments": ""},
                            }
                        )
                    entry = tool_calls[tc.index]
                    if tc.id:
                        entry["id"] = tc.id
                    if tc.function:
                        if tc.function.name:
                            entry["function"]["name"] = tc.function.name
                        if tc.function.arguments:
                            entry["function"]["arguments"] += tc.function.arguments
        # Build the assistant message to append to the conversation history.
        assistant_message = {"role": "assistant", "content": full_response}
        if tool_calls:
            assistant_message["tool_calls"] = tool_calls
        return assistant_message, tool_calls
    except Exception as e:
        # Boundary handler: never let an API failure crash the caller; the
        # caller still receives a well-formed (message, tool_calls) pair.
        print(f"Error in get_llm_response: {e}")
        error_message = "I'm having a little trouble right now. Please try again in a moment."
        await async_chunk_handler(error_message)
        return {"role": "assistant", "content": error_message}, []