# rentbot/llm_handler.py
import os
from openai import AsyncOpenAI
import json
client = AsyncOpenAI(api_key=os.getenv("OPENAI_API_KEY"))
# Definition of the tool the LLM can use
# Tool (function-calling) schema advertised to the model: a single
# `create_event` function the LLM can invoke to book an apartment viewing.
tools = [
    {
        "type": "function",
        "function": {
            "name": "create_event",
            "description": "Create a calendar event to book an apartment viewing.",
            "parameters": {
                "type": "object",
                "properties": {
                    # ISO-8601 start timestamp; required.
                    "start_time": {
                        "type": "string",
                        "description": "The start time of the event in ISO 8601 format, e.g., 2025-07-18T14:00:00",
                    },
                    # Optional; the model may omit it, defaulting to 30 minutes.
                    "duration_minutes": {
                        "type": "integer",
                        "description": "The duration of the event in minutes.",
                        "default": 30,
                    },
                    # Human-readable event title; required.
                    "summary": {
                        "type": "string",
                        "description": "A short summary or name for the event, e.g., 'Unit 5B viewing'",
                    },
                },
                "required": ["start_time", "summary"],
            },
        },
    },
]
async def get_llm_response(messages: list, async_chunk_handler):
    """Call the OpenAI chat API, streaming text chunks to a handler.

    This is a regular async function, NOT a generator: it drives the
    stream internally and returns only once the response is complete.

    Args:
        messages: Conversation history in OpenAI chat-message format.
        async_chunk_handler: Async callable invoked with each streamed
            text chunk (and with a fallback message on failure).

    Returns:
        A ``(assistant_message, tool_calls)`` tuple: the assistant
        message dict to append to the conversation, and a (possibly
        empty) list of fully-accumulated tool-call dicts.
    """
    try:
        stream = await client.chat.completions.create(
            model="gpt-4o-mini",
            messages=messages,
            stream=True,
            tools=tools,
            tool_choice="auto",
        )
        full_response = ""
        tool_calls = []
        async for chunk in stream:
            delta = chunk.choices[0].delta
            if delta and delta.content:
                text_chunk = delta.content
                full_response += text_chunk
                # Forward each text fragment to the caller as it arrives.
                await async_chunk_handler(text_chunk)
            if delta and delta.tool_calls:
                # Tool-call data arrives spread across many chunks. Each
                # fragment carries an explicit `.index` identifying which
                # call it belongs to — use that (NOT enumerate), because a
                # delta may contain only the call currently being updated,
                # and additional calls can start in later deltas.
                for tc in delta.tool_calls:
                    # Grow the accumulator if a new tool call starts
                    # mid-stream (fixes IndexError with multiple calls).
                    while len(tool_calls) <= tc.index:
                        tool_calls.append({
                            "id": None,
                            "type": "function",
                            "function": {"name": None, "arguments": ""},
                        })
                    entry = tool_calls[tc.index]
                    # `id` and `name` are sent once; arguments stream in
                    # pieces and must be concatenated.
                    if tc.id:
                        entry["id"] = tc.id
                    if tc.function.name:
                        entry["function"]["name"] = tc.function.name
                    if tc.function.arguments:
                        entry["function"]["arguments"] += tc.function.arguments
        # Construct the final assistant message object.
        assistant_message = {"role": "assistant", "content": full_response}
        if tool_calls:
            assistant_message["tool_calls"] = tool_calls
        return assistant_message, tool_calls
    except Exception as e:
        # Top-level boundary: log, tell the user something friendly, and
        # return a well-formed message so the caller's state stays valid.
        print(f"Error in get_llm_response: {e}")
        error_message = "I'm having a little trouble right now. Please try again in a moment."
        await async_chunk_handler(error_message)
        return {"role": "assistant", "content": error_message}, []