# Chainlit teacher-agent demo (Hugging Face Space)
import chainlit as cl
import asyncio
import os

from dotenv import load_dotenv, find_dotenv
from agents import (
    Agent,
    Runner,
    AsyncOpenAI,
    set_default_openai_api,
    set_default_openai_client,
)
from openai.types.responses import ResponseTextDeltaEvent

# Load environment variables (expects 'gemini_api_key') from a .env file.
load_dotenv(find_dotenv())

# Gemini is reached through its OpenAI-compatible endpoint, so the standard
# AsyncOpenAI client works unmodified.
base_url = 'https://generativelanguage.googleapis.com/v1beta/openai/'
api_key = os.getenv('gemini_api_key')
if not api_key:
    # Fail fast with a clear message instead of an opaque auth error
    # on the first model call.
    raise RuntimeError("Environment variable 'gemini_api_key' is not set")
model = 'gemini-2.0-flash'

client = AsyncOpenAI(base_url=base_url, api_key=api_key)
# Tracing would try to export to the OpenAI platform; disable it since we
# are not using OpenAI's own backend.
set_default_openai_client(client, use_for_tracing=False)
# Gemini's OpenAI compatibility layer speaks the Chat Completions API,
# not the Responses API.
set_default_openai_api('chat_completions')

# A single shared agent; handlers below reference this module-level object.
agent = Agent(
    name='Teacher',
    instructions='You are a teacher that teaches any topic in detail',
    model=model,
)
@cl.on_chat_start
async def on_chat_start():
    """Initialize an empty conversation history and greet the user.

    Runs once per new chat session. Stores the history list in the
    Chainlit user session so `handle` can accumulate turns.
    """
    cl.user_session.set("history", [])
    # Fixed grammar in the user-facing greeting ("This your" -> "This is your").
    await cl.Message(
        "Hi! This is your Teacher. Please provide a brief description of "
        "the topic you would like to learn about."
    ).send()
@cl.on_message
async def handle(message: cl.Message):
    """Stream the agent's reply to the user and persist the conversation.

    Appends the incoming user turn to the session history, streams the
    model's answer token by token into a Chainlit message, then records
    the reply under the OpenAI-standard ``assistant`` role (the previous
    ``teacher`` role is not a recognized chat role).
    """
    # Guard against a session miss returning None.
    history = cl.user_session.get("history") or []
    history.append({"role": "user", "content": message.content})

    msg = cl.Message(content="")
    # Pass the structured history list rather than its str() repr so the
    # model receives proper role-tagged turns.
    result = Runner.run_streamed(agent, history)
    async for event in result.stream_events():
        # Only raw response events with text deltas carry streamable tokens.
        if event.type == "raw_response_event" and isinstance(event.data, ResponseTextDeltaEvent):
            await msg.stream_token(event.data.delta)

    history.append({"role": "assistant", "content": msg.content})
    cl.user_session.set("history", history)
    await msg.update()