oskardotid committed on
Commit
483a574
·
1 Parent(s): 9d6d431

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +48 -10
app.py CHANGED
@@ -79,16 +79,54 @@ retrieval_augmented_qa_pipeline = RetrievalAugmentedQAPipeline(
79
  llm=chat_openai
80
  )
81
 
82
- @cl.on_chat_start # marks a function that will be executed at the start of a user session
83
- async def start_chat():
84
- retrieval_augmented_qa_pipeline = RetrievalAugmentedQAPipeline(vector_db_retriever=vector_db, llm=chat_openai, wandb_project=wandb_project)
85
- cl.user_session.set("pipeline", retrieval_augmented_qa_pipeline)
86
-
87
- @cl.on_message # marks a function that should be run each time the chatbot receives a message from a user
88
- async def main(message: str):
89
- retrieval_augmented_qa_pipeline = cl.user_session.get("pipeline")
90
- completion = retrieval_augmented_qa_pipeline.run_pipeline(message)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
91
 
92
- await cl.Message(content=completion).send()
 
93
 
94
 
 
79
  llm=chat_openai
80
  )
81
 
82
+ @cl.on_message
83
+ async def main(message: cl.Message):
84
+ settings = cl.user_session.get("settings")
85
+
86
+ client = AsyncOpenAI()
87
+
88
+ context_list = vector_db.search_by_text(message.content, k=4)
89
+ context_prompt = ""
90
+ for context in context_list:
91
+ context_prompt += context[0] + "\n"
92
+ formatted_system_prompt = raqa_prompt.create_message(context=context_prompt)
93
+ formatted_user_prompt = user_prompt.create_message(user_query=message.content)
94
+ print(formatted_system_prompt)
95
+ print(formatted_user_prompt)
96
+ prompt = Prompt(
97
+ provider=ChatOpenAI.id,
98
+ messages=[
99
+ PromptMessage(
100
+ role="system",
101
+ template=RAQA_PROMPT_TEMPLATE,
102
+ formatted=formatted_system_prompt['content'],
103
+ ),
104
+ PromptMessage(
105
+ role="user",
106
+ template=USER_PROMPT_TEMPLATE,
107
+ formatted=formatted_user_prompt['content'],
108
+ ),
109
+ ],
110
+ inputs={"context": context_prompt,
111
+ "user_query": message.content},
112
+ settings=settings,
113
+ )
114
+
115
+ msg = cl.Message(content="")
116
+
117
+ async for stream_resp in await client.chat.completions.create(
118
+ messages=[m.to_openai() for m in prompt.messages], stream=True, **settings
119
+ ):
120
+ token = stream_resp.choices[0].delta.content
121
+ if not token:
122
+ token = ""
123
+ await msg.stream_token(token)
124
+
125
+ # Update the prompt object with the completion
126
+ prompt.completion = msg.content
127
+ msg.prompt = prompt
128
 
129
+ # Send and close the message stream
130
+ await msg.send()
131
 
132