# NOTE: Hugging Face Spaces file-viewer chrome (space status, file size,
# commit-hash gutter, and line-number gutter) was captured here by the
# extraction tool; it is not part of the source and has been removed.
# import os
# import logging
# from fastapi import FastAPI
# from fastapi.middleware.cors import CORSMiddleware
# from fastapi.responses import JSONResponse, FileResponse
# from fastapi.staticfiles import StaticFiles
# from dotenv import load_dotenv
# from openai import AsyncOpenAI
# # Import your custom modules
# from chainlit.auth import create_jwt
# import chainlit as cl
# import uvicorn
# # Load environment variables from .env file
# load_dotenv()
# # Initialize logging
# logging.basicConfig(level=logging.INFO)
# logger = logging.getLogger(__name__) # Use __name__ to get the root logger
# # Initialize FastAPI app
# app = FastAPI()
# # # CORS middleware setup
# # app.add_middleware(
# # CORSMiddleware,
# # allow_origins=["*"], # Specify domains or use ["*"] for open access
# # allow_credentials=True,
# # allow_methods=["*"], # Specify methods or use ["*"] for all methods
# # allow_headers=["*"], # Specify headers or use ["*"] for all headers
# # )
# client = AsyncOpenAI(api_key=os.environ["OPENAI_API_KEY"])
# settings = {
# "model": "gpt-3.5-turbo",
# "temperature": 0.7,
# "max_tokens": 500,
# "top_p": 1,
# "frequency_penalty": 0,
# "presence_penalty": 0,
# }
# app.mount("/", StaticFiles(directory="static", html=True), name="static")
# logger.info("Static files are being served from the 'static' directory.")
# @app.get("/")
# def read_root():
# # Serve your static HTML file at the root.
# logger.info(f"Serving static file for path")
# return FileResponse('static/index.html')
# @app.get("/api")
# def read_api():
# # This endpoint simply returns a JSON message.
# return {"message": "Hello from the FastAPI API!"}
# @app.get("/custom-auth")
# def custom_auth():
# # Verify the user's identity with custom logic.
# token = create_jwt(cl.User(identifier="Test User"))
# logger.info("Custom auth token generated.")
# print("teeeeeee", token)
# return JSONResponse({"token": token})
# @cl.on_chat_start
# async def on_chat_start():
# cl.user_session.set(
# "message_history",
# [{"role": "system", "content": "You are a helpful assistant."}],
# )
# await cl.Message(content="Connected to Chainlit!").send()
# logger.info("Chat started with Chainlit.")
# @cl.on_message
# async def on_message(message: cl.Message):
# message_history = cl.user_session.get("message_history")
# message_history.append({"role": "user", "content": message.content})
# msg = cl.Message(content="")
# await msg.send()
# stream = await client.chat.completions.create(
# messages=message_history, stream=True, **settings
# )
# async for part in stream:
# if token := part.choices[0].delta.content or "":
# await msg.stream_token(token)
# message_history.append({"role": "assistant", "content": msg.content})
# await msg.update()
# logger.info("Message processed and response sent.")
#########################################################################################################################
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse, JSONResponse
import os
import logging
from fastapi.middleware.cors import CORSMiddleware
from openai import AsyncOpenAI
# Import your custom modules
from chainlit.auth import create_jwt
from dotenv import load_dotenv
import chainlit as cl
import uvicorn
import asyncio
from socket import gaierror
# --- Module-level application setup -------------------------------------
# Load environment variables from a local .env file (e.g. OPENAI_API_KEY).
load_dotenv()

# Initialize logging for the whole process.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)  # Use __name__ to get this module's logger

# FastAPI application instance; the routes below register themselves on it.
app = FastAPI()

# CORS middleware setup — currently wide open.
# NOTE(review): browsers reject allow_credentials=True combined with
# allow_origins=["*"] per the CORS spec; list explicit origins if
# credentialed requests are actually needed.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Specify domains or use ["*"] for open access
    allow_credentials=True,
    allow_methods=["*"],  # Specify methods or use ["*"] for all methods
    allow_headers=["*"],  # Specify headers or use ["*"] for all headers
)

# Async OpenAI client. Raises KeyError at import time if OPENAI_API_KEY is
# not set (load_dotenv above may supply it from .env).
client = AsyncOpenAI(api_key=os.environ["OPENAI_API_KEY"])

# Default chat-completion parameters forwarded to the OpenAI API on every call.
settings = {
    "model": "gpt-3.5-turbo",
    "temperature": 0.7,
    "max_tokens": 500,
    "top_p": 1,
    "frequency_penalty": 0,
    "presence_penalty": 0,
}

# Assuming your static files are in a directory named 'static'.
# app.mount("/static", StaticFiles(directory="static"), name="static")
# app.mount("/assets", StaticFiles(directory="static/assets"), name="assets")
@app.get("/")
def read_root():
    """Root endpoint: returns a JSON greeting.

    NOTE(review): the old comment claimed this serves a static HTML file,
    but the StaticFiles mounts above are commented out — it currently
    returns the same JSON payload as /api.
    """
    return {"message": "Hello from the FastAPI API!"}
@app.get("/api")
def read_api():
    """Return a simple JSON greeting from the API endpoint."""
    payload = {"message": "Hello from the FastAPI API!"}
    return payload
@app.get("/custom-auth")
def custom_auth():
    """Issue a Chainlit JWT for a hard-coded test user.

    Returns:
        JSONResponse of the form ``{"token": <jwt>}``.

    NOTE(review): the identifier is fixed to "Test User" — replace with
    real identity verification before production use.
    """
    token = create_jwt(cl.User(identifier="Test User"))
    # Removed leftover debug `print("teeeeeee", token)`: it leaked the raw
    # JWT to stdout/container logs on every request.
    logger.info("Custom auth token generated.")
    return JSONResponse({"token": token})
@cl.on_chat_start
async def on_chat_start():
    """Seed the per-session message history and greet the user."""
    initial_history = [
        {"role": "system", "content": "You are a helpful assistant."},
    ]
    cl.user_session.set("message_history", initial_history)
    greeting = cl.Message(content="Connected to Chainlit!")
    await greeting.send()
    logger.info("Chat started with Chainlit.")
@cl.on_message
async def on_message(message: cl.Message):
    """Stream an OpenAI chat completion back to the user.

    Appends the incoming user message to the session history, streams the
    model's reply token-by-token into a Chainlit message, then records the
    completed reply in the history.
    """
    message_history = cl.user_session.get("message_history")
    message_history.append({"role": "user", "content": message.content})
    msg = cl.Message(content="")
    await msg.send()
    try:
        stream = await client.chat.completions.create(
            messages=message_history, stream=True, **settings
        )
        async for part in stream:
            # delta.content is None on role/finish chunks; skip those.
            if token := part.choices[0].delta.content or "":
                await msg.stream_token(token)
    except gaierror as e:
        logger.error(f"Network error during OpenAI API call: {e}")
        # Chainlit Message.update() takes no content argument; mutate the
        # message then push the update.
        msg.content = "Sorry, there was a network error."
        await msg.update()
        # Early return: previously control fell through and appended the
        # error text to message_history as an assistant turn, called
        # update() a second time, and logged success.
        return
    except asyncio.TimeoutError:
        logger.error("Timeout error during OpenAI API call.")
        msg.content = "Sorry, the request timed out."
        await msg.update()
        return
    except Exception as e:
        logger.error(f"Unexpected error during OpenAI API call: {e}")
        msg.content = "Sorry, an unexpected error occurred."
        await msg.update()
        return
    message_history.append({"role": "assistant", "content": msg.content})
    await msg.update()
    logger.info("Message processed and response sent.")