manabb committed on
Commit
5b4edca
·
verified ·
1 Parent(s): 4bcda75

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +65 -11
app.py CHANGED
@@ -1,3 +1,4 @@
 
1
  import gradio as gr
2
  import time
3
 
@@ -34,7 +35,7 @@ summarizer = pipeline("summarization", model="google/flan-t5-small", device_map=
34
  def intelligently_show_context_with_pages_resources(context, query, docs, top_n=3):
35
  """ Intelligently extract paragraphs with PAGE NUMBERS + RESOURCE names """
36
  display_context = []
37
- display_context.append("📄 **FULL CONTEXT with Pages & Resources**\n")
38
  display_context.append("=" * 120)
39
 
40
  paragraphs = [p.strip() for p in re.split(r'\n\s*\n', context) if p.strip()]
@@ -121,21 +122,67 @@ def show_history_compact(limit=3):
121
  output += "\n" + "─" * 60
122
 
123
  return output
124
- def summarize_with_flan_t5(query):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
125
  user_repo_id = "manabb/nrl"
126
  msg=""
127
  """Generate bullet summary + context + HISTORY TRACKING."""
128
  try:
129
-
130
- docs = retriever.invoke(query)
131
- print(f"✅ Retrieved {len(docs)} docs")
 
 
 
 
132
  context = "\n".join([doc.page_content for doc in docs])
133
 
134
  bullet_prompt = f"""Summarize as 4-6 bullet points:
135
 
136
  {context[:900]}
137
 
138
- BULLETS:"""
139
 
140
  bullet_summary = summarizer(bullet_prompt, max_length=200, min_length=50, do_sample=False)[0]['summary_text']
141
 
@@ -147,9 +194,9 @@ BULLETS:"""
147
  # ✅ COMBINE HISTORY + CURRENT RESULT
148
  history_section = show_history_compact(limit=3) # Last 3 queries
149
  combined_result = f"""
150
- 🤖 **YOUR ANSWER: "{query}"**
151
 
152
- 📋 **BULLET SUMMARY:**
153
  {bullet_summary}
154
 
155
  📄 **INTELLIGENT CONTEXT:**
@@ -168,7 +215,8 @@ BULLETS:"""
168
  finally:
169
  if os.path.exists("temp_faiss"):
170
  shutil.rmtree("temp_faiss")
171
- return msg
 
172
 
173
 
174
  def login(user, pwd):
@@ -194,6 +242,9 @@ def load_resources():
194
  gr.update(visible=False) # hide tabs
195
  )
196
 
 
 
 
197
  with gr.Blocks() as demo:
198
  status = gr.Markdown("# 🚀 NRL AI Space for commercial department - Guwahati")
199
 
@@ -208,13 +259,15 @@ with gr.Blocks() as demo:
208
  # ---- Tabs Container (initially hidden) ----
209
  with gr.Column(visible=False) as tabs_panel:
210
  with gr.Tab("📄 ASK on manual of procurement of Goods"):
 
 
 
211
  query_input1 = gr.Textbox(label="❓ Your Question pls", placeholder="e.g., What is Gem?")
212
  query_btn1 = gr.Button("🧠 Get Answer", variant="primary")
213
- answer_output1 = gr.Textbox(label="✅ Answer", lines=15)
214
  query_btn1.click(
215
  fn=summarize_with_flan_t5,
216
  inputs=query_input1,
217
- outputs=answer_output1 # answers with bullet, smart context and history
218
  )
219
 
220
  with gr.Tab("📊 Upcoming functionality-1"):
@@ -236,3 +289,4 @@ with gr.Blocks() as demo:
236
  )
237
 
238
  demo.launch()
 
 
1
+ #R&D
2
  import gradio as gr
3
  import time
4
 
 
35
  def intelligently_show_context_with_pages_resources(context, query, docs, top_n=3):
36
  """ Intelligently extract paragraphs with PAGE NUMBERS + RESOURCE names """
37
  display_context = []
38
+ display_context.append("📄 ****\n")
39
  display_context.append("=" * 120)
40
 
41
  paragraphs = [p.strip() for p in re.split(r'\n\s*\n', context) if p.strip()]
 
122
  output += "\n" + "─" * 60
123
 
124
  return output
125
# ======================== optimized the question ========================
def reframe_question_with_history(user_question, history_limit=3):
    """Rewrite *user_question* into a single, self-contained question.

    Folds the most recent entries of the module-global ``HISTORY`` list into
    the prompt so that follow-up questions become standalone queries before
    retrieval.

    Args:
        user_question: Raw question string typed by the user.
        history_limit: How many of the most recent HISTORY entries to include.

    Returns:
        The optimized question string, or the original ``user_question`` when
        the model produces an empty or very short rewrite.
    """
    # Build a short textual summary of the recent conversation.
    # NOTE(review): assumes each HISTORY entry is a dict with 'query' and
    # 'timestamp' keys — confirm against the code that appends to HISTORY.
    history_context = ""
    recent_history = HISTORY[-history_limit:] if HISTORY else []

    if recent_history:
        history_context = "Recent conversation context:\n"
        for entry in recent_history:
            history_context += f"- {entry['query'][:80]}... ({entry['timestamp']})\n"
        history_context += "\n"

    # Reframing prompt
    reframe_prompt = f"""Generate a single, comprehensive question that best captures the information needed to address the user's query or intent and includes the context from the conversation history.

User's question: {user_question}

{history_context}Only output the optimized question.

OPTIMIZED QUESTION:"""

    # Build the lightweight FLAN-T5 reframing pipeline once and cache it on
    # the function object: constructing a pipeline per call reloads the model
    # from disk on every query.
    reframer = getattr(reframe_question_with_history, "_reframer", None)
    if reframer is None:
        reframer = pipeline("text2text-generation", model="google/flan-t5-small", device_map="cpu")
        reframe_question_with_history._reframer = reframer

    # Greedy decoding. The original call also passed max_length=512 and
    # temperature=0.1, but max_new_tokens overrides max_length and
    # temperature is ignored when do_sample=False, so both are dropped to
    # avoid conflicting-parameter warnings.
    reframed = reframer(
        reframe_prompt,
        max_new_tokens=100,
        do_sample=False
    )[0]['generated_text']

    # text2text pipelines normally return only the generated answer; if the
    # model echoes the prompt, keep only the part after the marker.
    optimized_question = reframed.split("OPTIMIZED QUESTION:")[-1].strip()
    if not optimized_question or len(optimized_question) < 10:
        optimized_question = user_question  # Fallback to the raw question

    return optimized_question
165
+
166
+ #========================main function
167
+ def summarize_with_flan_t5(query,history):
168
  user_repo_id = "manabb/nrl"
169
  msg=""
170
  """Generate bullet summary + context + HISTORY TRACKING."""
171
  try:
172
+ # REFRAME QUESTION WITH HISTORY
173
+ #print("🔄 Reframing question with history...")
174
+ optimized_query = reframe_question_with_history(query)
175
+ #print(f"📝 Original: {query}")
176
+ #print(f"📝 Optimized: {optimized_query}")
177
+ docs = retriever.invoke(optimized_query)
178
+ #print(f"✅ Retrieved {len(docs)} docs")
179
  context = "\n".join([doc.page_content for doc in docs])
180
 
181
  bullet_prompt = f"""Summarize as 4-6 bullet points:
182
 
183
  {context[:900]}
184
 
185
+ Main Points:"""
186
 
187
  bullet_summary = summarizer(bullet_prompt, max_length=200, min_length=50, do_sample=False)[0]['summary_text']
188
 
 
194
  # ✅ COMBINE HISTORY + CURRENT RESULT
195
  history_section = show_history_compact(limit=3) # Last 3 queries
196
  combined_result = f"""
197
+ 🤖 **YOUR Querry: "{query}"**
198
 
199
+ 📋 **SUMMARY:**
200
  {bullet_summary}
201
 
202
  📄 **INTELLIGENT CONTEXT:**
 
215
  finally:
216
  if os.path.exists("temp_faiss"):
217
  shutil.rmtree("temp_faiss")
218
+ history.append(msg)
219
+ return history," "
220
 
221
 
222
  def login(user, pwd):
 
242
  gr.update(visible=False) # hide tabs
243
  )
244
 
245
+
246
+ #==========================gradio
247
+
248
  with gr.Blocks() as demo:
249
  status = gr.Markdown("# 🚀 NRL AI Space for commercial department - Guwahati")
250
 
 
259
  # ---- Tabs Container (initially hidden) ----
260
  with gr.Column(visible=False) as tabs_panel:
261
  with gr.Tab("📄 ASK on manual of procurement of Goods"):
262
+
263
+
264
+ answer_output1 = gr.Textbox(label="✅ Answer", height=500)
265
  query_input1 = gr.Textbox(label="❓ Your Question pls", placeholder="e.g., What is Gem?")
266
  query_btn1 = gr.Button("🧠 Get Answer", variant="primary")
 
267
  query_btn1.click(
268
  fn=summarize_with_flan_t5,
269
  inputs=query_input1,
270
+ outputs=[answer_output1,query_input1] # answers with bullet, smart context and history
271
  )
272
 
273
  with gr.Tab("📊 Upcoming functionality-1"):
 
289
  )
290
 
291
  demo.launch()
292
+