mgbam commited on
Commit
eae2044
·
verified ·
1 Parent(s): 8fc1b10

Create llm_handler.py

Browse files
Files changed (1) hide show
  1. llm_handler.py +85 -0
llm_handler.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# rentbot/llm_handler.py
import os
from openai import AsyncOpenAI

# Async OpenAI client; the key is read from the OPENAI_API_KEY environment
# variable at import time (getenv returns None if unset — requests would then
# fail at call time rather than here).
client = AsyncOpenAI(api_key=os.getenv("OPENAI_API_KEY"))

# Definition of the tool the LLM can use.
# OpenAI function-calling schema: a single `create_event` tool for booking
# apartment viewings. `start_time` and `summary` are required; the model may
# omit `duration_minutes` (schema default: 30 — NOTE(review): JSON-Schema
# defaults are advisory; the caller should apply 30 itself if the argument
# is absent).
tools = [
    {
        "type": "function",
        "function": {
            "name": "create_event",
            "description": "Create a calendar event to book an apartment viewing.",
            "parameters": {
                "type": "object",
                "properties": {
                    "start_time": {
                        "type": "string",
                        "description": "The start time of the event in ISO 8601 format, e.g., 2025-07-18T14:00:00",
                    },
                    "duration_minutes": {
                        "type": "integer",
                        "description": "The duration of the event in minutes.",
                        "default": 30
                    },
                    "summary": {
                        "type": "string",
                        "description": "A short summary or name for the event, e.g., 'Unit 5B viewing'",
                    },
                },
                "required": ["start_time", "summary"],
            },
        },
    }
]
36
+
37
async def get_llm_response(messages: list):
    """
    Stream a chat completion from OpenAI, yielding text chunks as they arrive.

    Args:
        messages: The conversation history in OpenAI chat format
            (list of ``{"role": ..., "content": ...}`` dicts).

    Yields:
        str: each assistant text chunk, for real-time TTS.
        tuple: as the FINAL item, ``(assistant_message, tool_calls)`` where
            ``assistant_message`` is a dict ready to append to the history
            and ``tool_calls`` is the (possibly empty) list of accumulated
            tool-call objects.

    Note:
        An ``async def`` containing ``yield`` is an async generator, and an
        async generator cannot ``return`` a value (SyntaxError per PEP 525),
        so the final result is yielded as the last item instead. Callers
        should treat the last yielded item (a tuple) specially.
    """
    try:
        stream = await client.chat.completions.create(
            model="gpt-4o-mini",
            messages=messages,
            stream=True,
            tools=tools,
            tool_choice="auto",
        )

        full_response = ""
        tool_calls = []

        async for chunk in stream:
            delta = chunk.choices[0].delta

            if delta.content:
                full_response += delta.content
                yield delta.content  # yield text chunks for real-time TTS

            if delta.tool_calls:
                # Streamed tool-call deltas carry an explicit ``.index`` naming
                # which call they extend; a later delta need not repeat earlier
                # calls, so positional enumerate() would mis-merge fragments.
                for tc_chunk in delta.tool_calls:
                    idx = tc_chunk.index
                    if idx >= len(tool_calls):
                        # First fragment of a new tool call: keep the chunk
                        # object itself as the accumulator.
                        tool_calls.append(tc_chunk)
                    elif tc_chunk.function and tc_chunk.function.arguments:
                        # Subsequent fragments stream the JSON arguments
                        # piecemeal; concatenate them onto the accumulator.
                        tool_calls[idx].function.arguments += tc_chunk.function.arguments

        # Construct the final assistant message object for the history.
        assistant_message = {"role": "assistant", "content": full_response}
        if tool_calls:
            assistant_message["tool_calls"] = [
                {
                    "id": tc.id,
                    "type": "function",
                    "function": {"name": tc.function.name, "arguments": tc.function.arguments},
                }
                for tc in tool_calls
            ]

        # Async generators cannot `return` a value — yield the result last.
        yield assistant_message, tool_calls

    except Exception as e:
        # Best-effort boundary: log and hand back a friendly fallback message
        # instead of crashing the voice pipeline.
        print(f"Error in get_llm_response: {e}")
        yield {"role": "assistant", "content": "I'm having a little trouble right now. Please try again in a moment."}, []