axz91 committed on
Commit
063a120
Β·
1 Parent(s): a5f1edb

Add application file

Browse files
.chainlit/.langchain.db ADDED
Binary file (12.3 kB). View file
 
.chainlit/config.toml ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [project]
2
+ # If true (default), the app will be available to anonymous users.
3
+ # If false, users will need to authenticate and be part of the project to use the app.
4
+ public = true
5
+
6
+ # The project ID (found on https://cloud.chainlit.io).
7
+ # The project ID is required when public is set to false or when using the cloud database.
8
+ #id = ""
9
+
10
+ # Uncomment if you want to persist the chats.
11
+ # local will create a database in your .chainlit directory (requires node.js installed).
12
+ # cloud will use the Chainlit cloud database.
13
+ # custom will use your custom client.
14
+ # database = "local"
15
+
16
+ # Whether to enable telemetry (default: true). No personal data is collected.
17
+ enable_telemetry = true
18
+
19
+ # List of environment variables to be provided by each user to use the app.
20
+ user_env = []
21
+
22
+ # Duration (in seconds) during which the session is saved when the connection is lost
23
+ session_timeout = 3600
24
+
25
+ [UI]
26
+ # Name of the app and chatbot.
27
+ name = "Chatbot"
28
+
29
+ # Description of the app and chatbot. This is used for HTML tags.
30
+ # description = ""
31
+
32
+ # The default value for the expand messages settings.
33
+ default_expand_messages = false
34
+
35
+ # Hide the chain of thought details from the user in the UI.
36
+ hide_cot = false
37
+
38
+ # Link to your github repo. This will add a github button in the UI's header.
39
+ # github = ""
40
+
41
+ # Override default MUI light theme. (Check theme.ts)
42
+ [UI.theme.light]
43
+ #background = "#FAFAFA"
44
+ #paper = "#FFFFFF"
45
+
46
+ [UI.theme.light.primary]
47
+ #main = "#F80061"
48
+ #dark = "#980039"
49
+ #light = "#FFE7EB"
50
+
51
+ # Override default MUI dark theme. (Check theme.ts)
52
+ [UI.theme.dark]
53
+ #background = "#FAFAFA"
54
+ #paper = "#FFFFFF"
55
+
56
+ [UI.theme.dark.primary]
57
+ #main = "#F80061"
58
+ #dark = "#980039"
59
+ #light = "#FFE7EB"
60
+
61
+
62
+ [meta]
63
+ generated_by = "0.6.3"
__pycache__/app.cpython-311.pyc ADDED
Binary file (4.03 kB). View file
 
app.py CHANGED
@@ -38,6 +38,8 @@ vector_store = SupabaseVectorStore(
38
  )
39
 
40
 
 
 
41
  # rebuild storage context
42
  storage_context = StorageContext.from_defaults(vector_store=vector_store)
43
 
@@ -72,7 +74,10 @@ from llama_index import get_response_synthesizer
72
  @cl.on_chat_start
73
  async def factory():
74
 
75
-
 
 
 
76
  query_engine = index.as_query_engine(
77
  service_context=service_context,
78
  streaming=True,
@@ -81,20 +86,19 @@ async def factory():
81
  cl.user_session.set("query_engine", query_engine)
82
 
83
 
84
- @cl.on_message
85
- async def main(message):
86
 
87
 
 
 
88
  query_engine = cl.user_session.get("query_engine") # type: RetrieverQueryEngine
89
  response = await cl.make_async(query_engine.query)(message)
90
 
91
  response_message = cl.Message(content="")
92
 
93
  for token in response.response_gen:
94
- response_message.content = response.response_txt
95
 
96
  if response.response_txt:
97
  response_message.content = response.response_txt
98
 
99
- await response_message.send()
100
-
 
38
  )
39
 
40
 
41
+
42
+
43
  # rebuild storage context
44
  storage_context = StorageContext.from_defaults(vector_store=vector_store)
45
 
 
74
  @cl.on_chat_start
75
  async def factory():
76
 
77
+
78
+
79
+ # Substitute your connection string here
80
+
81
  query_engine = index.as_query_engine(
82
  service_context=service_context,
83
  streaming=True,
 
86
  cl.user_session.set("query_engine", query_engine)
87
 
88
 
 
 
89
 
90
 
91
+ @cl.on_message
92
+ async def main(message):
93
  query_engine = cl.user_session.get("query_engine") # type: RetrieverQueryEngine
94
  response = await cl.make_async(query_engine.query)(message)
95
 
96
  response_message = cl.Message(content="")
97
 
98
  for token in response.response_gen:
99
+ await response_message.stream_token(token=token)
100
 
101
  if response.response_txt:
102
  response_message.content = response.response_txt
103
 
104
+ await response_message.send()
 
chainlit.md ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Welcome to Chainlit! πŸš€πŸ€–
2
+
3
+ Hi there, Developer! πŸ‘‹ We're excited to have you on board. Chainlit is a powerful tool designed to help you prototype, debug and share applications built on top of LLMs.
4
+
5
+ ## Useful Links πŸ”—
6
+
7
+ - **Documentation:** Get started with our comprehensive [Chainlit Documentation](https://docs.chainlit.io) πŸ“š
8
+ - **Discord Community:** Join our friendly [Chainlit Discord](https://discord.gg/k73SQ3FyUh) to ask questions, share your projects, and connect with other developers! πŸ’¬
9
+
10
+ We can't wait to see what you create with Chainlit! Happy coding! πŸ’»πŸ˜Š
11
+
12
+ ## Welcome screen
13
+
14
+ To modify the welcome screen, edit the `chainlit.md` file at the root of your project. If you do not want a welcome screen, just leave this file empty.