hamza82 commited on
Commit
5ede693
·
verified ·
1 Parent(s): 3a4efe5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +92 -92
app.py CHANGED
@@ -1,111 +1,111 @@
1
- import os
2
- import logging
3
- from fastapi import FastAPI
4
- from fastapi.middleware.cors import CORSMiddleware
5
- from fastapi.responses import JSONResponse, FileResponse
6
- from fastapi.staticfiles import StaticFiles
7
- from dotenv import load_dotenv
8
- from openai import AsyncOpenAI
9
 
10
- # Import your custom modules
11
- from chainlit.auth import create_jwt
12
- import chainlit as cl
13
- import uvicorn
14
- # Load environment variables from .env file
15
- load_dotenv()
16
 
17
- # Initialize logging
18
- logging.basicConfig(level=logging.INFO)
19
- logger = logging.getLogger(__name__) # Use __name__ to get the root logger
20
 
21
- # Initialize FastAPI app
22
- app = FastAPI()
23
 
24
- # CORS middleware setup
25
- app.add_middleware(
26
- CORSMiddleware,
27
- allow_origins=["*"], # Specify domains or use ["*"] for open access
28
- allow_credentials=True,
29
- allow_methods=["*"], # Specify methods or use ["*"] for all methods
30
- allow_headers=["*"], # Specify headers or use ["*"] for all headers
31
- )
32
-
33
- client = AsyncOpenAI(api_key=os.environ["OPENAI_API_KEY"])
34
-
35
- settings = {
36
- "model": "gpt-3.5-turbo",
37
- "temperature": 0.7,
38
- "max_tokens": 500,
39
- "top_p": 1,
40
- "frequency_penalty": 0,
41
- "presence_penalty": 0,
42
- }
43
-
44
- app.mount("/", StaticFiles(directory="static", html=True), name="static")
45
- logger.info("Static files are being served from the 'static' directory.")
46
 
47
- @app.get("/")
48
- def read_root():
49
- """
50
- Catch-all route to serve index.html for any undefined routes,
51
- allowing client-side routing to function properly.
52
- """
53
- logger.info(f"Serving static file for path")
54
- return FileResponse('static/index.html')
55
 
56
- @app.get("/api")
57
- def custom_auth():
58
- # Verify the user's identity with custom logic.
59
- token = create_jwt(cl.User(identifier="Test User"))
60
- logger.info("Custom auth token generated.")
61
- return JSONResponse({"token": token})
62
 
63
- @cl.on_chat_start
64
- async def on_chat_start():
65
- cl.user_session.set(
66
- "message_history",
67
- [{"role": "system", "content": "You are a helpful assistant."}],
68
- )
69
- await cl.Message(content="Connected to Chainlit!").send()
70
- logger.info("Chat started with Chainlit.")
71
 
72
- @cl.on_message
73
- async def on_message(message: cl.Message):
74
- message_history = cl.user_session.get("message_history")
75
- message_history.append({"role": "user", "content": message.content})
76
 
77
- msg = cl.Message(content="")
78
- await msg.send()
79
 
80
- stream = await client.chat.completions.create(
81
- messages=message_history, stream=True, **settings
82
- )
83
 
84
- async for part in stream:
85
- if token := part.choices[0].delta.content or "":
86
- await msg.stream_token(token)
87
 
88
- message_history.append({"role": "assistant", "content": msg.content})
89
- await msg.update()
90
- logger.info("Message processed and response sent.")
91
 
92
 
93
 
94
- # from fastapi import FastAPI
95
- # from fastapi.staticfiles import StaticFiles
96
- # from fastapi.responses import FileResponse
97
 
98
- # app = FastAPI()
99
 
100
- # # Mount the 'static' directory to serve static files.
101
- # # Assuming your static files are in a directory named 'static'.
102
- # app.mount("/static", StaticFiles(directory="static"), name="static")
103
 
104
- # @app.get("/")
105
- # def read_root():
106
- # # Serve your static HTML file at the root.
107
- # return FileResponse('static/index.html')
108
 
109
- # @app.get("/api")
110
- # def read_api():
111
- # return {"message": "Hello from the FastAPI API!"}
 
1
+ # import os
2
+ # import logging
3
+ # from fastapi import FastAPI
4
+ # from fastapi.middleware.cors import CORSMiddleware
5
+ # from fastapi.responses import JSONResponse, FileResponse
6
+ # from fastapi.staticfiles import StaticFiles
7
+ # from dotenv import load_dotenv
8
+ # from openai import AsyncOpenAI
9
 
10
+ # # Import your custom modules
11
+ # from chainlit.auth import create_jwt
12
+ # import chainlit as cl
13
+ # import uvicorn
14
+ # # Load environment variables from .env file
15
+ # load_dotenv()
16
 
17
+ # # Initialize logging
18
+ # logging.basicConfig(level=logging.INFO)
19
+ # logger = logging.getLogger(__name__) # Use __name__ to get the root logger
20
 
21
+ # # Initialize FastAPI app
22
+ # app = FastAPI()
23
 
24
+ # # CORS middleware setup
25
+ # app.add_middleware(
26
+ # CORSMiddleware,
27
+ # allow_origins=["*"], # Specify domains or use ["*"] for open access
28
+ # allow_credentials=True,
29
+ # allow_methods=["*"], # Specify methods or use ["*"] for all methods
30
+ # allow_headers=["*"], # Specify headers or use ["*"] for all headers
31
+ # )
32
+
33
+ # client = AsyncOpenAI(api_key=os.environ["OPENAI_API_KEY"])
34
+
35
+ # settings = {
36
+ # "model": "gpt-3.5-turbo",
37
+ # "temperature": 0.7,
38
+ # "max_tokens": 500,
39
+ # "top_p": 1,
40
+ # "frequency_penalty": 0,
41
+ # "presence_penalty": 0,
42
+ # }
43
+
44
+ # app.mount("/", StaticFiles(directory="static", html=True), name="static")
45
+ # logger.info("Static files are being served from the 'static' directory.")
46
 
47
+ # @app.get("/")
48
+ # def read_root():
49
+ # """
50
+ # Catch-all route to serve index.html for any undefined routes,
51
+ # allowing client-side routing to function properly.
52
+ # """
53
+ # logger.info(f"Serving static file for path")
54
+ # return FileResponse('static/index.html')
55
 
56
+ # @app.get("/api")
57
+ # def custom_auth():
58
+ # # Verify the user's identity with custom logic.
59
+ # token = create_jwt(cl.User(identifier="Test User"))
60
+ # logger.info("Custom auth token generated.")
61
+ # return JSONResponse({"token": token})
62
 
63
+ # @cl.on_chat_start
64
+ # async def on_chat_start():
65
+ # cl.user_session.set(
66
+ # "message_history",
67
+ # [{"role": "system", "content": "You are a helpful assistant."}],
68
+ # )
69
+ # await cl.Message(content="Connected to Chainlit!").send()
70
+ # logger.info("Chat started with Chainlit.")
71
 
72
+ # @cl.on_message
73
+ # async def on_message(message: cl.Message):
74
+ # message_history = cl.user_session.get("message_history")
75
+ # message_history.append({"role": "user", "content": message.content})
76
 
77
+ # msg = cl.Message(content="")
78
+ # await msg.send()
79
 
80
+ # stream = await client.chat.completions.create(
81
+ # messages=message_history, stream=True, **settings
82
+ # )
83
 
84
+ # async for part in stream:
85
+ # if token := part.choices[0].delta.content or "":
86
+ # await msg.stream_token(token)
87
 
88
+ # message_history.append({"role": "assistant", "content": msg.content})
89
+ # await msg.update()
90
+ # logger.info("Message processed and response sent.")
91
 
92
 
93
 
94
+ from fastapi import FastAPI
95
+ from fastapi.staticfiles import StaticFiles
96
+ from fastapi.responses import FileResponse
97
 
98
# Application setup: create the FastAPI instance and expose the local
# 'static' directory under the /static URL prefix.
app = FastAPI()

# Serve static assets (HTML/JS/CSS) at /static/...; the 'static' directory
# is resolved relative to the working directory — TODO confirm deploy layout.
app.mount("/static", StaticFiles(directory="static"), name="static")
103
 
104
@app.get("/")
def read_root():
    """Serve the static HTML entry point at the site root."""
    # Hand back index.html directly; FileResponse sets the Content-Type.
    return FileResponse('static/index.html')
108
 
109
@app.get("/api")
def read_api():
    """Demo API endpoint returning a fixed JSON greeting."""
    greeting = {"message": "Hello from the FastAPI API!"}
    return greeting