manabb committed on
Commit
3e7de78
Β·
verified Β·
1 Parent(s): 1582a08

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -9
app.py CHANGED
@@ -325,16 +325,21 @@ def bePrepare1():
325
  return "I am ready, ask me questions with model google flan-t5."
326
 
327
def ask_question(query):
    """Answer *query* with the TinyLlama RetrievalQA chain.

    Args:
        query: The user's question string.

    Returns:
        The chain's answer text, or an instruction message when the
        retrieval chain has not been initialised yet (button not clicked).
    """
    # Guard: the chain is built lazily by the "prepare" button handler;
    # until then `qa_chain` is falsy and we cannot answer anything.
    if not qa_chain:
        # Fixed typos in the user-facing message ("clik" -> "click",
        # "udated" -> "updated", "tiny Lama" -> "TinyLlama").
        return "❌ Please click the button to get the updated resources with TinyLlama."
    response = qa_chain.invoke({"query": query})
    return response["result"]
 
333
def ask_question1(query):
    """Answer *query* with the google/flan-t5 RetrievalQA chain.

    Args:
        query: The user's question string.

    Returns:
        The chain's answer text, or an instruction message when the
        retrieval chain has not been initialised yet (button not clicked).
    """
    # Guard: `qa_chain1` is only created after the user clicks the
    # corresponding "prepare" button in the UI.
    if not qa_chain1:
        # Fixed typos in the user-facing message ("clik" -> "click",
        # "udated" -> "updated").
        return "❌ Please click the button to get the updated resources with google flan-t5."
    response1 = qa_chain1.invoke({"query": query})
    return response1["result"]
 
 
338
  #====================
339
  # Gradio UI
340
  with gr.Blocks() as demo:
@@ -342,7 +347,7 @@ with gr.Blocks() as demo:
342
  with gr.Row():
343
  # LEFT COLUMN: Document Management
344
  with gr.Column(scale=1):
345
- gr.Markdown("## 🧠 Using TinyLama Model")
346
  with gr.Row():
347
  Index_processing_output=gr.Textbox(label="πŸ“ Status for tiny lama", interactive=False)
348
  with gr.Row():
@@ -351,11 +356,12 @@ with gr.Blocks() as demo:
351
  with gr.Row():
352
  query_input = gr.Textbox(label="❓ Your Question pls")
353
  query_btn = gr.Button("🧠 Get Answer")
 
354
  answer_output = gr.Textbox(label="βœ… Answer", lines=4)
355
  query_btn.click(ask_question, inputs=query_input, outputs=answer_output)
356
  # RIGHT COLUMN: Document Management
357
  with gr.Column(scale=2):
358
- gr.Markdown("## 🧠 Using google flan-t5")
359
  Index_processing_output1=gr.Textbox(label="πŸ“ Status for google flan-t5", interactive=False)
360
  Index_processing_btn1 = gr.Button("πŸ”„ Clik to get the udated resources with google flan-t5")
361
  Index_processing_btn1.click(bePrepare1, inputs=None, outputs=Index_processing_output1)
 
325
  return "I am ready, ask me questions with model google flan-t5."
326
 
327
def ask_question(query):
    """Answer *query* with the TinyLlama RetrievalQA chain.

    Args:
        query: The user's question string (may be empty/blank).

    Returns:
        The chain's answer text; a "blank question" notice when *query*
        is empty; or an instruction message when the retrieval chain has
        not been initialised yet (button not clicked).
    """
    # BUG FIX: the original set `msg` to the "not ready" message when
    # `qa_chain` was falsy but then *still* called `qa_chain.invoke(...)`
    # whenever `query` was non-empty, crashing with AttributeError on None.
    # Early returns make each case mutually exclusive.
    if not qa_chain:
        # Fixed typos: "clik" -> "click", "udated" -> "updated",
        # "tiny Lama" -> "TinyLlama".
        return "❌ Please click the button to get the updated resources with TinyLlama."
    if not query:
        # Normalised to match ask_question1 (dropped stray trailing space).
        return "Blank question!"
    response = qa_chain.invoke({"query": query})
    return response["result"]
335
def ask_question1(query):
    """Answer *query* with the google/flan-t5 RetrievalQA chain.

    Args:
        query: The user's question string (may be empty/blank).

    Returns:
        The chain's answer text; a "blank question" notice when *query*
        is empty; or an instruction message when the retrieval chain has
        not been initialised yet (button not clicked).
    """
    # BUG FIX: the original assigned the "not ready" message when
    # `qa_chain1` was falsy but then still executed
    # `qa_chain1.invoke(...)` for any non-empty query, crashing on None.
    # Guard clauses make the three outcomes mutually exclusive.
    if not qa_chain1:
        # Fixed typos: "clik" -> "click", "udated" -> "updated".
        return "❌ Please click the button to get the updated resources with google flan-t5."
    if not query:
        return "Blank question!"
    response1 = qa_chain1.invoke({"query": query})
    return response1["result"]
343
  #====================
344
  # Gradio UI
345
  with gr.Blocks() as demo:
 
347
  with gr.Row():
348
  # LEFT COLUMN: Document Management
349
  with gr.Column(scale=1):
350
+ gr.Markdown("## 🧠 Using heavy TinyLama Model")
351
  with gr.Row():
352
  Index_processing_output=gr.Textbox(label="πŸ“ Status for tiny lama", interactive=False)
353
  with gr.Row():
 
356
  with gr.Row():
357
  query_input = gr.Textbox(label="❓ Your Question pls")
358
  query_btn = gr.Button("🧠 Get Answer")
359
+ with gr.Row():
360
  answer_output = gr.Textbox(label="βœ… Answer", lines=4)
361
  query_btn.click(ask_question, inputs=query_input, outputs=answer_output)
362
  # RIGHT COLUMN: Document Management
363
  with gr.Column(scale=2):
364
+ gr.Markdown("## 🧠 Using ligth model - google flan-t5")
365
  Index_processing_output1=gr.Textbox(label="πŸ“ Status for google flan-t5", interactive=False)
366
  Index_processing_btn1 = gr.Button("πŸ”„ Clik to get the udated resources with google flan-t5")
367
  Index_processing_btn1.click(bePrepare1, inputs=None, outputs=Index_processing_output1)