anl139 committed on
Commit
cf08f6f
·
verified ·
1 Parent(s): 43551fe

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +126 -59
app.py CHANGED
@@ -104,101 +104,168 @@ retriever = ensemble_retriever
104
  # Prepare Retrieval and Generation Chain
105
  # -------------------------------
106
 
107
- # Pull the prompt from the hub; ensure that the prompt exists at the specified location
 
 
108
  prompt = hub.pull("rlm/rag-prompt")
109
 
110
- # Initialize the language model (adjust the model name as needed)
111
  llm = ChatOpenAI(model_name="gpt-4o-mini", temperature=0)
112
 
113
- # Create the document chain (the "stuff" chain that combines retrieved documents)
114
- question_answer_chain = create_stuff_documents_chain(llm, prompt)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
115
 
116
- # Create the retrieval augmented generation (RAG) chain using the retriever and document chain
 
117
  rag_chain = create_retrieval_chain(retriever, question_answer_chain)
118
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
119
 
120
- # -------------------------------
121
- # Define the Chat Callback Function
122
- # -------------------------------
123
 
124
- def message_and_history(user_message, history):
125
- """
126
- Processes the user input, performs retrieval and generation,
127
- and updates the conversation history.
128
- """
129
- # Initialize history if empty
130
- if not history:
131
- history = [{"role": "assistant", "content": "<b>LA2050 Navigator:</b><br> Welcome to the LA2050 ideas hub! How can I help you today?"}]
132
-
133
- # Append the user's message to history
134
- history.append({"role": "user", "content": user_message})
135
-
136
- # Simulate a brief delay (optional)
137
  time.sleep(1)
138
-
139
- # If the input is empty, return an error message
140
- if not user_message.strip():
141
  history.append({"role": "assistant", "content": "<b>LA2050 Navigator:</b><br> Please enter a valid message."})
142
  yield history, history
143
  return
144
 
145
  try:
146
- # Invoke the RAG chain with the user's input
147
- response = rag_chain.invoke({"input": user_message})
148
  answer = response["answer"]
149
  except Exception as e:
150
  answer = f"An error occurred: {e}"
151
 
152
- # Prepare a dynamic response that simulates streaming text
153
  dynamic_message = {"role": "assistant", "content": "<b>LA2050 Navigator:</b><br> "}
154
  history.append(dynamic_message)
155
 
156
- # Stream the answer character by character (this loop yields intermediate updates)
157
  for character in answer:
158
  dynamic_message["content"] += character
159
  yield history, history
160
 
161
- # Finalize the answer and yield the final history
162
  history[-1]["content"] = f"<b>LA2050 Navigator:</b><br> {answer}"
163
  yield history, history
164
 
165
 
166
- # -------------------------------
167
- # Set Up the Gradio Interface
168
- # -------------------------------
169
-
170
- # Define a custom green theme for the interface
171
- green_theme = gr.themes.Base(
172
- primary_hue=gr.themes.Color(
173
- c50="#00A168", c100="#57B485", c200="#D7ECE0", c300="#FFFFFF",
174
- c400="#EAE9E9", c500="#000000", c600="#3A905E", c700="#2A774A",
175
- c800="#1A5E36", c900="#0A4512", c950="#052A08"
176
- )
177
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
178
 
179
- with gr.Blocks(theme=green_theme) as block:
180
  gr.HTML('<div class="chat-header"><h1>LA2050 Navigator</h1></div>')
181
-
182
- # Initialize the chatbot with a welcome message
183
- chatbot = gr.Chatbot(
184
- value=[{"role": "assistant", "content": "<b>LA2050 Navigator:</b><br> Welcome to the LA2050 ideas hub! How can I help you today?"}],
185
- type="messages"
186
- )
187
-
188
- # Use a Gradio State to keep track of the conversation history
189
  state = gr.State([])
190
-
191
- # Textbox for user input
192
- user_input_box = gr.Textbox(placeholder="Type a message", scale=3, show_label=False)
193
-
194
- # When the textbox is submitted, run the callback function
195
- user_input_box.submit(
 
 
 
 
 
196
  message_and_history,
197
- inputs=[user_input_box, state],
198
  outputs=[chatbot, state]
199
  ).then(
200
- lambda: "", inputs=[], outputs=user_input_box
201
  )
202
 
203
  block.launch(debug=True, share=True)
204
-
 
104
  # Prepare Retrieval and Generation Chain
105
  # -------------------------------
106
 
107
+ #### RETRIEVAL and GENERATION ####
108
+
109
+ # Prompt
110
  prompt = hub.pull("rlm/rag-prompt")
111
 
112
+ # LLM
113
  llm = ChatOpenAI(model_name="gpt-4o-mini", temperature=0)
114
 
115
+ # Post-processing
116
+ def format_docs(docs):
117
+ return "\n\n".join(doc.page_content for doc in docs)
118
+
119
+ # Chain
120
+ rag_chain = (
121
+ {"context": retriever | format_docs, "question": RunnablePassthrough()}
122
+ | prompt
123
+ | llm
124
+ | StrOutputParser()
125
+ )
126
+ system_prompt = (
127
+ "You are the LA2050 Navigator, an AI-powered chatbot designed to help users explore organizations and community initiatives within the Goldhirsh Foundation’s LA2050 Ideas Hub."
128
+ "Your role is to provide personalized recommendations, guide users toward supporting these organizations and initiatives, and answer relevant questions about the Goldhirsh Foundation, LA2050, and its projects."
129
+ "Adhere to these instructions when interacting with the user:"
130
+ "Start each session by introducing yourself and explaining your purpose before the user inputs any message."
131
+ "Provide tailored recommendations at a time by default unless the user requests a different number."
132
+ "Include: the full name of the organization; a brief (1-2 sentence) description; link to the organization personal websites/social"
133
+ "in the data files; suggestions for how the user can engage, such as donating, volunteering, or learning more."
134
+ "In your recommendations, prioritize nonprofit organizations awarded by the Goldhirsh Foundation (designated “winner”), as well as organizations with multiple proposal submissions."
135
+ "Use the data files as your primary source of information when answering questions. If information is unavailable in the files, acknowledge this and guide the user to relevant resources."
136
+ "Maintain a polite, helpful, respectful, and enthusiastic tone at all times to create a positive user experience."
137
+ "Politely redirect the user back to relevant topics if they ask unrelated questions, keeping the conversation aligned with your purpose."
138
+ "Use the following information about the data files to help you:"
139
+ "The “ideas” are community initiatives submitted by organizations to apply for grants and support from the Goldhirsh Foundation."
140
+ "There are currently no winners for the 2024 data, although this information can be found on the LA2050 Ideas Hub website. "
141
+ "Only projects labeled as “winner” in the Project Ranking column should be identified as winning projects."
142
+ "\n\n{context}"
143
+ )
144
+ prompt = ChatPromptTemplate.from_messages(
145
+ [
146
+ ("system", system_prompt),
147
+ ("human", "{input}"),
148
+ ]
149
+ )
150
 
151
+
152
+ question_answer_chain = create_stuff_documents_chain(llm, prompt)
153
  rag_chain = create_retrieval_chain(retriever, question_answer_chain)
154
 
155
# -------------------------------
# Custom green theme for the Gradio interface
# -------------------------------

# LA2050 brand palette: greens plus neutral black/white shades.
_la2050_greens = gr.themes.Color(
    c50="#00A168",
    c100="#57B485",
    c200="#D7ECE0",
    c300="#FFFFFF",
    c400="#EAE9E9",
    c500="#000000",
    c600="#3A905E",
    c700="#2A774A",
    c800="#1A5E36",
    c900="#0A4512",
    c950="#052A08",
)

# Base theme with the palette as primary hue and Space Grotesk as the font,
# then explicit overrides for backgrounds, accents, and buttons.
green_theme = gr.themes.Base(
    primary_hue=_la2050_greens,
    font=[gr.themes.GoogleFont('Space Grotesk'), 'ui-sans-serif', 'system-ui', 'sans-serif'],
).set(
    body_background_fill='#00A168',
    body_text_color='#000000',
    background_fill_primary='#FFFFFF',
    background_fill_secondary='#FFFFFF',
    border_color_accent='#57B485',
    border_color_accent_subdued='#EAE9E9',
    color_accent='#57B485',
    color_accent_soft='#D7ECE0',
    checkbox_background_color='#FFFFFF',
    button_primary_background_fill='#57B485',
    button_primary_background_fill_hover='#3A905E',
    button_secondary_background_fill='#D7ECE0',
    button_secondary_text_color='#000000',
)
185
 
 
 
 
186
 
187
+
188
# integrate gradio with RAG logic
def message_and_history(message, history):
    """
    Gradio chat callback: run the user's text through the RAG chain and
    stream the answer back one character at a time.

    Args:
        message: dict submitted by gr.MultimodalTextbox; only the "text"
            key is read here (attached files are currently ignored).
        history: list of {"role", "content"} dicts (Gradio "messages" format),
            or an empty/None value on the first turn.

    Yields:
        (history, history) tuples — one per streamed character, then a final
        tuple with the complete answer.
    """
    # Seed the conversation with the welcome banner on the first turn.
    history = history or [{"role": "assistant", "content": "<b>LA2050 Navigator:</b><br> Welcome to the LA2050 ideas hub! How can I help you today?"}]

    user_input = message.get("text", "")

    # Reject blank/whitespace input up front — before the artificial delay
    # and before recording an empty user turn in the history.
    if not user_input.strip():
        history.append({"role": "assistant", "content": "<b>LA2050 Navigator:</b><br> Please enter a valid message."})
        yield history, history
        return

    history.append({"role": "user", "content": user_input})

    # Brief pause so the user's bubble renders before the answer streams in.
    time.sleep(1)

    try:
        response = rag_chain.invoke({"input": user_input})
        answer = response["answer"]
    except Exception as e:
        # Surface retrieval/LLM failures in the chat instead of crashing the UI.
        answer = f"An error occurred: {e}"

    # Stream the answer character by character to simulate typing.
    dynamic_message = {"role": "assistant", "content": "<b>LA2050 Navigator:</b><br> "}
    history.append(dynamic_message)
    for character in answer:
        dynamic_message["content"] += character
        yield history, history

    # Final yield with the complete, cleanly formatted answer.
    history[-1]["content"] = f"<b>LA2050 Navigator:</b><br> {answer}"
    yield history, history
216
 
217
 
218
# Force the app into Gradio's light mode by rewriting the URL query string
# on load (reloads once if the theme param is not already "light").
js_func = """
function refresh() {
    const url = new URL(window.location);

    if (url.searchParams.get('__theme') !== 'light') {
        url.searchParams.set('__theme', 'light');
        window.location.href = url.href;
    }
}
"""

# Custom CSS: white, centered chat header.
# Fixes vs. previous version: removed the stray '""' at the start of the
# literal (it was `css = \"\"\"\"\"`), removed an unmatched closing brace
# that broke the stylesheet, and replaced the invalid `text-color`
# property with `color`.
css = """
.chat-header {
    color: #FFFFFF;
    text-align: center;
}
.gradio-container .prose .chat-header h1 {
    color: #FFFFFF;
    text-align: center;
}
"""
241
+
242
# setup gradio interface
with gr.Blocks(theme=green_theme, js=js_func, css=css) as block:
    gr.HTML('<div class="chat-header"><h1>LA2050 Navigator</h1></div>')

    # Chat window, pre-seeded with the welcome message.
    chatbot = gr.Chatbot(
        value=[{"role": "assistant", "content": "<b>LA2050 Navigator:</b><br> Welcome to the LA2050 ideas hub! How can I help you today?"}],
        type="messages",
        bubble_full_width=False,
    )

    # Conversation history carried between turns.
    state = gr.State([])

    # Multimodal input box; submits a dict with "text" and "files" keys.
    message = gr.MultimodalTextbox(
        interactive=True,
        file_count="multiple",
        placeholder="Type a message",
        label="",
        elem_classes="custom-textbox",
        scale=3,
        show_label=False,
    )

    # On submit: run the RAG callback, then clear the input box.
    # A MultimodalTextbox holds a dict value, so it must be cleared with an
    # empty dict — the previous `lambda: ""` handed it a bare string, which
    # is not a valid value for this component.
    message.submit(
        message_and_history,
        inputs=[message, state],
        outputs=[chatbot, state]
    ).then(
        lambda: {"text": "", "files": []}, inputs=[], outputs=message
    )

block.launch(debug=True, share=True)