bestroi committed on
Commit
6c9d68a
·
verified ·
1 Parent(s): 371ffd7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +46 -43
app.py CHANGED
@@ -549,49 +549,52 @@ with gr.Blocks(title="Archaeological Query Engine") as app:
549
  )
550
 
551
  # Add new tab for AI-generated answers using Groq API
552
- with gr.TabItem("AI Answers (Groq API)"):
553
- gr.Markdown(
554
- "Ask questions about the dataset and get AI-generated answers using the Groq API "
555
- "with the deepseek-r1-distill-llama-70b model."
556
- )
557
-
558
- with gr.Row():
559
- with gr.Column():
560
- ai_city_dropdown = gr.Dropdown(
561
- choices=city_names,
562
- value=city_names[0] if city_names else None,
563
- label="Select City"
564
- )
565
- question_input = gr.Textbox(
566
- label="Ask a Question",
567
- placeholder="E.g., What was the historical significance of this ancient city?",
568
- lines=3
569
- )
570
- max_sources_slider = gr.Slider(
571
- minimum=1,
572
- maximum=10,
573
- value=3,
574
- step=1,
575
- label="Maximum Number of Sources to Consider",
576
- info="Higher values may provide more comprehensive answers but will take longer"
577
- )
578
- temperature_slider = gr.Slider(
579
- minimum=0.0,
580
- maximum=1.0,
581
- value=0.3,
582
- step=0.1,
583
- label="Temperature",
584
- info="Lower values create more focused answers, higher values create more creative ones"
585
- )
586
- generate_button = gr.Button("Generate Answer")
587
-
588
- with gr.Column():
589
- answer_output = gr.HTML(
590
- label="AI-Generated Answer",
591
- value="",
592
- elem_classes=["results-output"]
593
- )
594
-
 
 
 
595
  # Function to handle the Generate Answer button click
596
  def on_generate_answer(city, question, max_sources, temperature):
597
  if not question or not question.strip():
 
549
  )
550
 
551
  # Add new tab for AI-generated answers using Groq API
552
+ with gr.TabItem("AI Answers (Groq API)"):
553
+ gr.Markdown("Ask questions about the dataset and get AI-generated answers using the Groq API with the deepseek-r1-distill-llama-70b model.")
554
+
555
+ with gr.Row():
556
+ with gr.Column():
557
+ # API key is now hardcoded in the code
558
+
559
+ ai_city_dropdown = gr.Dropdown(
560
+ choices=city_names,
561
+ value=city_names[0] if city_names else None,
562
+ label="Select City"
563
+ )
564
+
565
+ question_input = gr.Textbox(
566
+ label="Ask a Question",
567
+ placeholder="E.g., What was the historical significance of this ancient city?",
568
+ lines=3
569
+ )
570
+
571
+ max_sources_slider = gr.Slider(
572
+ minimum=1,
573
+ maximum=10,
574
+ value=3,
575
+ step=1,
576
+ label="Maximum Number of Sources to Consider",
577
+ info="Higher values may provide more comprehensive answers but will take longer"
578
+ )
579
+
580
+ temperature_slider = gr.Slider(
581
+ minimum=0.0,
582
+ maximum=1.0,
583
+ value=0.3,
584
+ step=0.1,
585
+ label="Temperature",
586
+ info="Lower values create more focused answers, higher values create more creative ones"
587
+ )
588
+
589
+ generate_button = gr.Button("Generate Answer")
590
+
591
+ with gr.Column():
592
+ answer_output = gr.HTML(
593
+ label="AI-Generated Answer",
594
+ value="",
595
+ elem_classes=["results-output"]
596
+ )
597
+
598
  # Function to handle the Generate Answer button click
599
  def on_generate_answer(city, question, max_sources, temperature):
600
  if not question or not question.strip():