# rameshmoorthy's picture
# Update app.py
# 13e196e verified
import gradio as gr
import logging
from sentence_transformers import CrossEncoder
from phi.agent import Agent
from phi.model.groq import Groq
from backend.semantic_search import table, retriever
import numpy as np
from time import perf_counter
import os
import time
# Logging setup
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# API Key setup: read the Groq key from the environment. The app can still
# start without it, but the agent is disabled (see create_phi_agent below).
api_key = os.getenv("GROQ_API_KEY")
if not api_key:
    logger.error("GROQ_API_KEY not found.")
    print("Warning: GROQ_API_KEY not found. Set it in environment variables.")
else:
    # NOTE(review): re-exporting a value that already came from os.environ is
    # effectively a no-op; presumably kept for parity with other deployments.
    os.environ["GROQ_API_KEY"] = api_key
# Initialize Phi Agent
def create_phi_agent():
if not api_key:
return None
agent = Agent(
name="Science Education Assistant",
role="You are a helpful science tutor for 10th-grade students",
instructions=[
"You are an expert science teacher specializing in 10th-grade curriculum.",
"Provide clear, accurate, and age-appropriate explanations.",
"Use simple language and examples that students can understand.",
"Focus on concepts from physics, chemistry, and biology.",
"Structure responses with headings and bullet points when helpful.",
"Encourage learning and curiosity."
],
model=Groq(id="llama3-70b-8192", api_key=api_key),
markdown=True
)
return agent
phi_agent = create_phi_agent()
# Response Generation Function
# Response Generation Function

# Cache for cross-encoder rerankers so each heavyweight model is loaded at
# most once per process. (Previously a CrossEncoder was re-instantiated on
# every single request, which is very slow and memory-churning.)
_CROSS_ENCODER_CACHE = {}


def _get_cross_encoder(cross_encoder_choice):
    """Return a cached CrossEncoder for the given UI choice, loading it on first use."""
    model_name = (
        'BAAI/bge-reranker-base'
        if cross_encoder_choice == '(ACCURATE) BGE reranker'
        else 'cross-encoder/ms-marco-MiniLM-L-6-v2'
    )
    if model_name not in _CROSS_ENCODER_CACHE:
        _CROSS_ENCODER_CACHE[model_name] = CrossEncoder(model_name)
    return _CROSS_ENCODER_CACHE[model_name]


def retrieve_and_generate_response(query, cross_encoder_choice, history=None):
    """Generate response using semantic search and LLM.

    Args:
        query: The student's question.
        cross_encoder_choice: Reranker label from the UI radio
            ('(ACCURATE) BGE reranker' or '(FAST) MiniLM-L6v2').
        history: Optional list of [user_msg, bot_msg] pairs (Gradio tuple format).

    Returns:
        The model's answer as a string, or a human-readable error message.
    """
    top_rerank = 25  # candidates pulled from the vector store
    top_k_rank = 20  # candidates kept after cross-encoder re-ranking
    if not query.strip():
        return "Please provide a valid question."
    try:
        start_time = perf_counter()

        # Encode query and fetch nearest documents from the vector table.
        query_vec = retriever.encode(query)
        documents = table.search(query_vec, vector_column_name="vector").limit(top_rerank).to_list()
        documents = [doc["text"] for doc in documents]

        # Re-rank documents using the (cached) cross-encoder, best first.
        cross_encoder_model = _get_cross_encoder(cross_encoder_choice)
        query_doc_pair = [[query, doc] for doc in documents]
        cross_scores = cross_encoder_model.predict(query_doc_pair)
        sim_scores_argsort = list(reversed(np.argsort(cross_scores)))
        documents = [documents[idx] for idx in sim_scores_argsort[:top_k_rank]]

        # Create context from the top 10 documents.
        context = "\n\n".join(documents[:10]) if documents else ""
        context = f"Context information from educational materials:\n{context}\n\n"

        # Add up to the last 2 completed exchanges for conversational context.
        history_context = ""
        if history:
            for user_msg, bot_msg in history[-2:]:
                if user_msg and bot_msg:
                    history_context += f"Previous Q: {user_msg}\nPrevious A: {bot_msg}\n"

        # Create full prompt
        full_prompt = f"{history_context}{context}Question: {query}\n\nPlease answer the question using the context provided above. If the context doesn't contain relevant information, use your general knowledge about 10th-grade science topics."

        # Generate response
        if not phi_agent:
            return "Chatbot not configured properly. Please check GROQ_API_KEY."
        response = phi_agent.run(full_prompt)
        response_text = response.content if hasattr(response, 'content') else str(response)
        logger.info(f"Response generation took {perf_counter() - start_time:.2f} seconds")
        return response_text
    except Exception as e:
        logger.error(f"Error in response generation: {e}")
        return f"Error generating response: {str(e)}"
# Main chat function that works with Gradio
def science_chat_function(message, history, cross_encoder_choice):
"""Main chat function for the science tutor"""
if not message.strip():
return "", history
# Generate response using your existing logic
response = retrieve_and_generate_response(message, cross_encoder_choice, history)
# Add to history
history.append([message, response])
return "", history
# Gradio Interface
with gr.Blocks(title="Science Chatbot", theme='gradio/soft') as chatbot_app:
# Header section
with gr.Row():
with gr.Column(scale=10):
gr.HTML("""
<div style="color: #FF4500;">
<h1>Welcome! I am your friend!</h1>
<h1>Ask me! I will help you</h1>
<h1><span style="color: #008000">I AM A CHATBOT FOR 10TH SCIENCE</span></h1>
</div>
<p style="font-family: sans-serif; font-size: 16px;">
A free chatbot developed by K.M.RAMYASRI, TGT, GHS.SUTHUKENY using Phi Agent & Groq LLMs for 10th-grade students
</p>
<p style="font-family: Arial, sans-serif; font-size: 14px;">
Suggestions may be sent to <a href="mailto:ramyasriraman2019@gmail.com" style="color: #00008B; font-style: italic;">ramyadevi1607@yahoo.com</a>.
</p>
""")
with gr.Column(scale=3):
try:
gr.Image(value='logo.png', height=200, width=200, show_label=False)
except:
gr.HTML("<div style='height: 200px; width: 200px; background-color: #f0f0f0; display: flex; align-items: center; justify-content: center;'>Logo</div>")
# Cross-encoder selection
cross_encoder = gr.Radio(
choices=['(FAST) MiniLM-L6v2', '(ACCURATE) BGE reranker'],
value='(ACCURATE) BGE reranker',
label="Embeddings Model",
info="Select the model for document ranking"
)
# Main chat interface
chatbot = gr.Chatbot(
label="Science Tutor Conversation",
height=400
)
with gr.Row():
msg = gr.Textbox(
placeholder="Ask your science question here...",
show_label=False,
scale=4
)
submit_btn = gr.Button("Submit", scale=1, variant="primary")
clear = gr.Button("Clear Conversation")
# Event handlers
def submit_message(message, history, cross_encoder_choice):
return science_chat_function(message, history, cross_encoder_choice)
def clear_chat():
return [], ""
# Connect events
submit_btn.click(
fn=submit_message,
inputs=[msg, chatbot, cross_encoder],
outputs=[msg, chatbot]
)
msg.submit(
fn=submit_message,
inputs=[msg, chatbot, cross_encoder],
outputs=[msg, chatbot]
)
clear.click(
fn=clear_chat,
outputs=[chatbot, msg]
)
# Example questions
gr.Examples(
examples=[
'CAN U SAY THE DIFFERENCES BETWEEN METALS AND NON METALS?',
'WHAT IS IONIC BOND?',
'EXPLAIN ASEXUAL REPRODUCTION',
'What is photosynthesis?',
'Explain Newton\'s laws of motion'
],
inputs=msg,
label="Try these example questions:"
)
if __name__ == "__main__":
chatbot_app.launch(server_name="0.0.0.0", server_port=7860)
# import gradio as gr
# import logging
# from sentence_transformers import CrossEncoder
# from phi.agent import Agent
# from phi.model.groq import Groq
# from backend.semantic_search import table, retriever
# import numpy as np
# from time import perf_counter
# import os
# import time
# # Logging setup
# logging.basicConfig(level=logging.INFO)
# logger = logging.getLogger(__name__)
# # API Key setup
# api_key = os.getenv("GROQ_API_KEY")
# if not api_key:
# logger.error("GROQ_API_KEY not found.")
# gr.Warning("GROQ_API_KEY not found. Set it in 'Repository secrets'.")
# else:
# os.environ["GROQ_API_KEY"] = api_key
# # Initialize Phi Agent
# def create_phi_agent():
# if not api_key:
# return None
# agent = Agent(
# name="Science Education Assistant",
# role="You are a helpful science tutor for 10th-grade students",
# instructions=[
# "You are an expert science teacher specializing in 10th-grade curriculum.",
# "Provide clear, accurate, and age-appropriate explanations.",
# "Use simple language and examples that students can understand.",
# "Focus on concepts from physics, chemistry, and biology.",
# "Structure responses with headings and bullet points when helpful.",
# "Encourage learning and curiosity."
# ],
# model=Groq(id="llama3-70b-8192", api_key=api_key),
# markdown=True
# )
# return agent
# phi_agent = create_phi_agent()
# # Response Generation
# def retrieve_and_generate_response(query, cross_encoder_choice, history=None):
# top_rerank = 25
# top_k_rank = 20
# if not query:
# return "Please provide a valid question."
# try:
# start_time = perf_counter()
# query_vec = retriever.encode(query)
# documents = table.search(query_vec, vector_column_name="vector").limit(top_rerank).to_list()
# documents = [doc["text"] for doc in documents]
# cross_encoder1 = CrossEncoder('BAAI/bge-reranker-base') if cross_encoder_choice == '(ACCURATE) BGE reranker' else CrossEncoder('cross-encoder/ms-marco-MiniLM-L-6-v2')
# query_doc_pair = [[query, doc] for doc in documents]
# cross_scores = cross_encoder1.predict(query_doc_pair)
# sim_scores_argsort = list(reversed(np.argsort(cross_scores)))
# documents = [documents[idx] for idx in sim_scores_argsort[:top_k_rank]]
# context = "\n\n".join(documents[:10]) if documents else ""
# context = f"Context information from educational materials:\n{context}\n\n"
# history_context = ""
# if history:
# for msg in history[-4:]: # Limit to last 4 messages (2 exchanges)
# if msg["role"] == "user" and msg["content"]:
# history_context += f"Previous Q: {msg['content']}\n"
# elif msg["role"] == "assistant" and msg["content"]:
# history_context += f"Previous A: {msg['content']}\n"
# full_prompt = f"{history_context}{context}Question: {query}\n\nPlease answer the question using the context provided above. If the context doesn't contain relevant information, use your general knowledge about 10th-grade science topics."
# if not phi_agent:
# return "Chatbot not configured properly."
# response = phi_agent.run(full_prompt)
# logger.info(f"Response generation took {perf_counter() - start_time:.2f} seconds")
# return response.content if hasattr(response, 'content') else str(response)
# except Exception as e:
# logger.error(f"Error in response generation: {e}")
# return f"Error: {str(e)}"
# # Gradio Interface with Message-Based Chatbot
# with gr.Blocks(title="Science Chatbot", theme='gradio/soft') as chatbot_app:
# with gr.Row():
# with gr.Column(scale=10):
# gr.HTML("""
# <div style="color: #FF4500;">
# <h1>Welcome! I am your friend!</h1>
# <h1>Ask me! I will help you</h1>
# <h1><span style="color: #008000">I AM A CHATBOT FOR 10TH SCIENCE</span></h1>
# </div>
# <p style="font-family: sans-serif; font-size: 16px;">
# A free chatbot developed by K.M.RAMYASRI, TGT, GHS.SUTHUKENY using Phi Agent & Groq LLMs for 10th-grade students
# </p>
# <p style="font-family: Arial, sans-serif; font-size: 14px;">
# Suggestions may be sent to <a href="mailto:ramyasriraman2019@gmail.com" style="color: #00008B; font-style: italic;">ramyadevi1607@yahoo.com</a>.
# </p>
# """)
# with gr.Column(scale=3):
# gr.Image(value='logo.png', height=200, width=200, show_label=False)
# chatbot = gr.Chatbot(
# type="messages",
# elem_id="chatbot",
# avatar_images=(
# 'https://aui.atlassian.com/aui/8.8/docs/images/avatar-person.svg',
# 'https://huggingface.co/datasets/huggingface/brand-assets/resolve/main/hf-logo.svg'
# ),
# bubble_full_width=False,
# show_copy_button=True,
# show_share_button=True,
# label="Conversation"
# )
# with gr.Row():
# query = gr.Textbox(
# scale=3,
# show_label=False,
# placeholder="Enter text and press enter",
# container=False
# )
# submit_btn = gr.Button(value="Submit", scale=1, variant="primary")
# cross_encoder = gr.Radio(
# choices=['(FAST) MiniLM-L6v2', '(ACCURATE) BGE reranker'],
# value='(ACCURATE) BGE reranker',
# label="Embeddings",
# info="Select the model for document ranking"
# )
# clear = gr.Button("Clear")
# def user(user_message, history: list):
# if not user_message.strip():
# gr.Warning("Please submit a non-empty question")
# return "", history
# history = history or []
# return "", history + [{"role": "user", "content": user_message}]
# def bot(history: list, cross_encoder: str):
# query = history[-1]["content"] if history and history[-1]["role"] == "user" else ""
# if not query:
# return history
# response = retrieve_and_generate_response(query, cross_encoder, history[:-1])
# history.append({"role": "assistant", "content": ""})
# for character in response:
# history[-1]["content"] += character
# time.sleep(0.02) # Faster streaming for better UX
# yield history
# query.submit(user, [query, chatbot], [query, chatbot], queue=False).then(
# bot, [chatbot, cross_encoder], chatbot
# )
# submit_btn.click(user, [query, chatbot], [query, chatbot], queue=False).then(
# bot, [chatbot, cross_encoder], chatbot
# )
# clear.click(lambda: [], None, chatbot, queue=False)
# examples = [
# 'CAN U SAY THE DIFFERENCES BETWEEN METALS AND NON METALS?',
# 'WHAT IS IONIC BOND?',
# 'EXPLAIN ASEXUAL REPRODUCTION'
# ]
# gr.Examples(examples, query)
# if __name__ == "__main__":
# chatbot_app.launch(server_name="0.0.0.0", server_port=7860)# import gradio as gr
# import logging
# from sentence_transformers import CrossEncoder
# from phi.agent import Agent
# from phi.model.groq import Groq
# from backend.semantic_search import table, retriever
# import numpy as np
# from time import perf_counter
# import os
# # Logging setup
# logging.basicConfig(level=logging.INFO)
# logger = logging.getLogger(__name__)
# # API Key setup
# api_key = os.getenv("GROQ_API_KEY")
# if not api_key:
# logger.error("GROQ_API_KEY not found.")
# gr.Warning("GROQ_API_KEY not found. Set it in 'Repository secrets'.")
# else:
# os.environ["GROQ_API_KEY"] = api_key
# # Initialize Phi Agent
# def create_phi_agent():
# if not api_key:
# return None
# agent = Agent(
# name="Science Education Assistant",
# role="You are a helpful science tutor for 10th-grade students",
# instructions=[
# "You are an expert science teacher specializing in 10th-grade curriculum.",
# "Provide clear, accurate, and age-appropriate explanations.",
# "Use simple language and examples that students can understand.",
# "Focus on concepts from physics, chemistry, and biology.",
# "Structure responses with headings and bullet points when helpful.",
# "Encourage learning and curiosity."
# ],
# model=Groq(id="llama3-70b-8192", api_key=api_key),
# markdown=True
# )
# return agent
# phi_agent = create_phi_agent()
# # Response Generation
# def retrieve_and_generate_response(query, cross_encoder_choice, history=None):
# top_rerank = 25
# top_k_rank = 20
# if not query:
# return "Please provide a valid question."
# try:
# start_time = perf_counter()
# query_vec = retriever.encode(query)
# documents = table.search(query_vec, vector_column_name="vector").limit(top_rerank).to_list()
# documents = [doc["text"] for doc in documents]
# cross_encoder1 = CrossEncoder('BAAI/bge-reranker-base') if cross_encoder_choice == '(ACCURATE) BGE reranker' else CrossEncoder('cross-encoder/ms-marco-MiniLM-L-6-v2')
# query_doc_pair = [[query, doc] for doc in documents]
# cross_scores = cross_encoder1.predict(query_doc_pair)
# sim_scores_argsort = list(reversed(np.argsort(cross_scores)))
# documents = [documents[idx] for idx in sim_scores_argsort[:top_k_rank]]
# context = "\n\n".join(documents[:10]) if documents else ""
# context = f"Context information from educational materials:\n{context}\n\n"
# history_context = ""
# if history:
# for user_msg, bot_msg in history[-2:]:
# if user_msg and bot_msg:
# history_context += f"Previous Q: {user_msg}\nPrevious A: {bot_msg}\n\n"
# full_prompt = f"{history_context}{context}Question: {query}\n\nPlease answer the question using the context provided above. If the context doesn't contain relevant information, use your general knowledge about 10th-grade science topics."
# if not phi_agent:
# return "Chatbot not configured properly."
# response = phi_agent.run(full_prompt)
# logger.info(f"Response generation took {perf_counter() - start_time:.2f} seconds")
# return response.content if hasattr(response, 'content') else str(response)
# except Exception as e:
# logger.error(f"Error in response generation: {e}")
# return f"Error: {str(e)}"
# # Gradio Interface with Chatbot
# with gr.Blocks(title="Science Chatbot", theme='gradio/soft') as chatbot_app:
# history_state = gr.State([])
# with gr.Row():
# with gr.Column(scale=10):
# gr.HTML("""
# <div style="color: #FF4500;">
# <h1>Welcome! I am your friend!</h1>
# <h1>Ask me! I will help you</h1>
# <h1><span style="color: #008000">I AM A CHATBOT FOR 10TH SCIENCE</span></h1>
# </div>
# <p style="font-family: sans-serif; font-size: 16px;">
# A free chatbot developed by K.M.RAMYASRI, TGT, GHS.SUTHUKENY using Phi Agent & Groq LLMs for 10th-grade students
# </p>
# <p style="font-family: Arial, sans-serif; font-size: 14px;">
# Suggestions may be sent to <a href="mailto:ramyasriraman2019@gmail.com" style="color: #00008B; font-style: italic;">ramyadevi1607@yahoo.com</a>.
# </p>
# """)
# with gr.Column(scale=3):
# gr.Image(value='logo.png', height=200, width=200, show_label=False)
# chatbot = gr.Chatbot(
# [],
# elem_id="chatbot",
# avatar_images=(
# 'https://aui.atlassian.com/aui/8.8/docs/images/avatar-person.svg',
# 'https://huggingface.co/datasets/huggingface/brand-assets/resolve/main/hf-logo.svg'
# ),
# bubble_full_width=False,
# show_copy_button=True,
# show_share_button=True,
# label="Conversation"
# )
# with gr.Row():
# query = gr.Textbox(
# scale=3,
# show_label=False,
# placeholder="Enter text and press enter",
# container=False
# )
# submit_btn = gr.Button(value="Submit", scale=1, variant="primary")
# cross_encoder = gr.Radio(
# choices=['(FAST) MiniLM-L6v2', '(ACCURATE) BGE reranker'],
# value='(ACCURATE) BGE reranker',
# label="Embeddings",
# info="Select the model for document ranking"
# )
# def handle_query(txt, cross_encoder, history_state):
# if not txt.strip():
# gr.Warning("Please submit a non-empty question")
# return history_state, ""
# history = history_state or []
# history.append((txt, ""))
# response = retrieve_and_generate_response(txt, cross_encoder, history[:-1])
# history[-1] = (txt, response)
# history_state[:] = history
# return history, ""
# submit_btn.click(
# fn=handle_query,
# inputs=[query, cross_encoder, history_state],
# outputs=[chatbot, query]
# )
# query.submit(
# fn=handle_query,
# inputs=[query, cross_encoder, history_state],
# outputs=[chatbot, query]
# )
# examples = [
# 'CAN U SAY THE DIFFERENCES BETWEEN METALS AND NON METALS?',
# 'WHAT IS IONIC BOND?',
# 'EXPLAIN ASEXUAL REPRODUCTION'
# ]
# gr.Examples(examples, query)
# if __name__ == "__main__":
# chatbot_app.launch(server_name="0.0.0.0", server_port=7860)# import gradio as gr
# import logging
# from sentence_transformers import CrossEncoder
# from phi.agent import Agent
# from phi.model.groq import Groq
# from backend.semantic_search import table, retriever
# import numpy as np
# from time import perf_counter
# import os
# # Logging setup
# logging.basicConfig(level=logging.INFO)
# logger = logging.getLogger(__name__)
# # API Key setup
# api_key = os.getenv("GROQ_API_KEY")
# if not api_key:
# logger.error("GROQ_API_KEY not found.")
# gr.Warning("GROQ_API_KEY not found. Set it in 'Repository secrets'.")
# else:
# os.environ["GROQ_API_KEY"] = api_key
# # Initialize Phi Agent
# def create_phi_agent():
# if not api_key:
# return None
# agent = Agent(
# name="Science Education Assistant",
# role="You are a helpful science tutor for 10th-grade students",
# instructions=[
# "You are an expert science teacher specializing in 10th-grade curriculum.",
# "Provide clear, accurate, and age-appropriate explanations.",
# "Use simple language and examples that students can understand.",
# "Focus on concepts from physics, chemistry, and biology.",
# "Structure responses with headings and bullet points when helpful.",
# "Encourage learning and curiosity."
# ],
# model=Groq(id="llama3-70b-8192", api_key=api_key),
# markdown=True
# )
# return agent
# phi_agent = create_phi_agent()
# # Response Generation
# def retrieve_and_generate_response(query, cross_encoder_choice, history=None):
# top_rerank = 25
# top_k_rank = 20
# if not query:
# return "Please provide a valid question."
# try:
# start_time = perf_counter()
# query_vec = retriever.encode(query)
# documents = table.search(query_vec, vector_column_name="vector").limit(top_rerank).to_list()
# documents = [doc["text"] for doc in documents]
# cross_encoder1 = CrossEncoder('BAAI/bge-reranker-base') if cross_encoder_choice == '(ACCURATE) BGE reranker' else CrossEncoder('cross-encoder/ms-marco-MiniLM-L-6-v2')
# query_doc_pair = [[query, doc] for doc in documents]
# cross_scores = cross_encoder1.predict(query_doc_pair)
# sim_scores_argsort = list(reversed(np.argsort(cross_scores)))
# documents = [documents[idx] for idx in sim_scores_argsort[:top_k_rank]]
# context = "\n\n".join(documents[:10]) if documents else ""
# context = f"Context information from educational materials:\n{context}\n\n"
# history_context = ""
# if history:
# for user_msg, bot_msg in history[-2:]:
# if user_msg and bot_msg:
# history_context += f"Previous Q: {user_msg}\nPrevious A: {bot_msg}\n\n"
# full_prompt = f"{history_context}{context}Question: {query}\n\nPlease answer the question using the context provided above. If the context doesn't contain relevant information, use your general knowledge about 10th-grade science topics."
# if not phi_agent:
# return "Chatbot not configured properly."
# response = phi_agent.run(full_prompt)
# logger.info(f"Response generation took {perf_counter() - start_time:.2f} seconds")
# return response.content if hasattr(response, 'content') else str(response)
# except Exception as e:
# logger.error(f"Error in response generation: {e}")
# return f"Error: {str(e)}"
# # Gradio Interface with Chatbot
# with gr.Blocks(title="Science Chatbot", theme=gr.themes.Default(primary_hue="cyan", secondary_hue="yellow", neutral_hue="purple")) as chatbot_app:
# gr.HTML("""
# <center>
# <h1><span style="color: purple;">Science Chatbot for 10th Grade Students</span></h1>
# <h2>AI-powered Science Tutor</h2>
# <i>⚠️ Ask any question from 10th-grade science (physics, chemistry, biology) and get clear, accurate answers! ⚠️</i>
# </center>
# """)
# chatbot = gr.Chatbot(label="Conversation")
# with gr.Row():
# query = gr.Textbox(label="Ask a Science Question", placeholder="E.g., What is an ionic bond?", show_label=False)
# submit_btn = gr.Button("Submit", variant="primary")
# cross_encoder = gr.Radio(choices=['(FAST) MiniLM-L6v2', '(ACCURATE) BGE reranker'], value='(ACCURATE) BGE reranker', label="Embeddings")
# def handle_query(user_input, history, cross_encoder_choice):
# if not user_input.strip():
# return history, ""
# history = history or []
# response = retrieve_and_generate_response(user_input, cross_encoder_choice, history)
# history.append([user_input, response])
# return history, ""
# submit_btn.click(
# fn=handle_query,
# inputs=[query, chatbot, cross_encoder],
# outputs=[chatbot, query]
# )
# if __name__ == "__main__":
# chatbot_app.queue().launch(server_name="0.0.0.0", server_port=7860)# import gradio as gr
# # import logging
# from sentence_transformers import CrossEncoder
# from phi.agent import Agent
# from phi.model.groq import Groq
# from backend.semantic_search import table, retriever
# import numpy as np
# from time import perf_counter
# import os
# # Logging setup
# logging.basicConfig(level=logging.INFO)
# logger = logging.getLogger(__name__)
# # API Key setup
# api_key = os.getenv("GROQ_API_KEY")
# if not api_key:
# logger.error("GROQ_API_KEY not found.")
# gr.Warning("GROQ_API_KEY not found. Set it in 'Repository secrets'.")
# else:
# os.environ["GROQ_API_KEY"] = api_key
# # Initialize Phi Agent
# def create_phi_agent():
# if not api_key:
# return None
# agent = Agent(
# name="Science Education Assistant",
# role="You are a helpful science tutor for 10th-grade students",
# instructions=[
# "You are an expert science teacher specializing in 10th-grade curriculum.",
# "Provide clear, accurate, and age-appropriate explanations.",
# "Use simple language and examples that students can understand.",
# "Focus on concepts from physics, chemistry, and biology.",
# "Structure responses with headings and bullet points when helpful.",
# "Encourage learning and curiosity."
# ],
# model=Groq(id="llama3-70b-8192", api_key=api_key),
# markdown=True
# )
# return agent
# phi_agent = create_phi_agent()
# # Response Generation
# def retrieve_and_generate_response(query, cross_encoder_choice, history=None):
# top_rerank = 25
# top_k_rank = 20
# if not query:
# return "Please provide a valid question."
# try:
# start_time = perf_counter()
# query_vec = retriever.encode(query)
# documents = table.search(query_vec, vector_column_name="vector").limit(top_rerank).to_list()
# documents = [doc["text"] for doc in documents]
# cross_encoder1 = CrossEncoder('BAAI/bge-reranker-base') if cross_encoder_choice == '(ACCURATE) BGE reranker' else CrossEncoder('cross-encoder/ms-marco-MiniLM-L-6-v2')
# query_doc_pair = [[query, doc] for doc in documents]
# cross_scores = cross_encoder1.predict(query_doc_pair)
# sim_scores_argsort = list(reversed(np.argsort(cross_scores)))
# documents = [documents[idx] for idx in sim_scores_argsort[:top_k_rank]]
# context = "\n\n".join(documents[:10]) if documents else ""
# context = f"Context information from educational materials:\n{context}\n\n"
# history_context = ""
# if history:
# for user_msg, bot_msg in history[-2:]:
# if user_msg and bot_msg:
# history_context += f"Previous Q: {user_msg}\nPrevious A: {bot_msg}\n\n"
# full_prompt = f"{history_context}{context}Question: {query}\n\nPlease answer the question using the context provided above. If the context doesn't contain relevant information, use your general knowledge about 10th-grade science topics."
# if not phi_agent:
# return "Chatbot not configured properly."
# response = phi_agent.run(full_prompt)
# logger.info(f"Response generation took {perf_counter() - start_time:.2f} seconds")
# return response.content if hasattr(response, 'content') else str(response)
# except Exception as e:
# logger.error(f"Error in response generation: {e}")
# return f"Error: {str(e)}"
# # Gradio Interface (Inspired by Quiz App)
# with gr.Blocks(title="Science Chatbot", theme=gr.themes.Default(primary_hue="cyan", secondary_hue="yellow", neutral_hue="purple")) as chatbot_app:
# gr.HTML("""
# <center>
# <h1><span style="color: purple;">Science Chatbot for 10th Grade Students</span></h1>
# <h2>AI-powered Science Tutor</h2>
# <i>⚠️ Ask any question from 10th-grade science (physics, chemistry, biology) and get clear, accurate answers! ⚠️</i>
# </center>
# """)
# with gr.Row():
# query = gr.Textbox(label="Ask a Science Question", placeholder="E.g., What is an ionic bond?")
# submit_btn = gr.Button("Submit", variant="primary")
# response_output = gr.Textbox(label="Answer", interactive=False)
# cross_encoder = gr.Radio(choices=['(FAST) MiniLM-L6v2', '(ACCURATE) BGE reranker'], value='(ACCURATE) BGE reranker', label="Embeddings")
# history_state = gr.State(value=[])
# def handle_query(user_input, history, cross_encoder_choice):
# if not user_input.strip():
# return history, "Please enter a valid question.", history
# response = retrieve_and_generate_response(user_input, cross_encoder_choice, history)
# history.append([user_input, response])
# return history, response, history[-2:] # Limit history to last 2 exchanges for context
# submit_btn.click(
# fn=handle_query,
# inputs=[query, history_state, cross_encoder],
# outputs=[history_state, response_output, history_state]
# )
# if __name__ == "__main__":
# chatbot_app.queue().launch(server_name="0.0.0.0", server_port=7860)# import gradio as gr