import gradio as gr
import logging
from sentence_transformers import CrossEncoder
from phi.agent import Agent
from phi.model.groq import Groq
from backend.semantic_search import table, retriever
import numpy as np
from time import perf_counter
import os
import time

# Logging setup
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# API key setup: read once at import time so the agent can be built eagerly.
api_key = os.getenv("GROQ_API_KEY")
if not api_key:
    logger.error("GROQ_API_KEY not found.")
    print("Warning: GROQ_API_KEY not found. Set it in environment variables.")
else:
    os.environ["GROQ_API_KEY"] = api_key


def create_phi_agent():
    """Build the Groq-backed tutoring agent.

    Returns:
        Agent configured as a 10th-grade science tutor, or None when no
        GROQ_API_KEY is available (the UI then reports a config error).
    """
    if not api_key:
        return None
    return Agent(
        name="Science Education Assistant",
        role="You are a helpful science tutor for 10th-grade students",
        instructions=[
            "You are an expert science teacher specializing in 10th-grade curriculum.",
            "Provide clear, accurate, and age-appropriate explanations.",
            "Use simple language and examples that students can understand.",
            "Focus on concepts from physics, chemistry, and biology.",
            "Structure responses with headings and bullet points when helpful.",
            "Encourage learning and curiosity.",
        ],
        model=Groq(id="llama3-70b-8192", api_key=api_key),
        markdown=True,
    )


phi_agent = create_phi_agent()

# Cache for re-ranker models. The original code constructed a new
# CrossEncoder on every request, reloading the model weights per query;
# loading once and reusing is much faster and behaviorally identical.
_CROSS_ENCODER_CACHE = {}


def _get_cross_encoder(cross_encoder_choice):
    """Return a cached CrossEncoder matching the UI radio-button choice."""
    model_name = (
        'BAAI/bge-reranker-base'
        if cross_encoder_choice == '(ACCURATE) BGE reranker'
        else 'cross-encoder/ms-marco-MiniLM-L-6-v2'
    )
    if model_name not in _CROSS_ENCODER_CACHE:
        _CROSS_ENCODER_CACHE[model_name] = CrossEncoder(model_name)
    return _CROSS_ENCODER_CACHE[model_name]


def retrieve_and_generate_response(query, cross_encoder_choice, history=None):
    """Generate an answer via semantic search + cross-encoder re-ranking + LLM.

    Args:
        query: The student's question.
        cross_encoder_choice: Radio value selecting the re-ranker model.
        history: Optional list of (user_msg, bot_msg) pairs; the last two
            exchanges are prepended to the prompt for conversational context.

    Returns:
        The generated answer text, or a human-readable error message.
    """
    top_rerank = 25   # candidates pulled from the vector store
    top_k_rank = 20   # candidates kept after cross-encoder re-ranking

    if not query.strip():
        return "Please provide a valid question."

    try:
        start_time = perf_counter()

        # Vector search over the document table.
        query_vec = retriever.encode(query)
        documents = (
            table.search(query_vec, vector_column_name="vector")
            .limit(top_rerank)
            .to_list()
        )
        documents = [doc["text"] for doc in documents]

        # Re-rank with the (cached) cross-encoder; highest score first.
        cross_encoder_model = _get_cross_encoder(cross_encoder_choice)
        query_doc_pairs = [[query, doc] for doc in documents]
        cross_scores = cross_encoder_model.predict(query_doc_pairs)
        ranked_indices = list(reversed(np.argsort(cross_scores)))
        documents = [documents[idx] for idx in ranked_indices[:top_k_rank]]

        # Build the context section from the 10 best documents.
        context = "\n\n".join(documents[:10]) if documents else ""
        context = f"Context information from educational materials:\n{context}\n\n"

        # Include the last two exchanges so follow-up questions make sense.
        history_context = ""
        if history:
            for user_msg, bot_msg in history[-2:]:
                if user_msg and bot_msg:
                    history_context += f"Previous Q: {user_msg}\nPrevious A: {bot_msg}\n"

        full_prompt = (
            f"{history_context}{context}Question: {query}\n\n"
            "Please answer the question using the context provided above. "
            "If the context doesn't contain relevant information, use your "
            "general knowledge about 10th-grade science topics."
        )

        if not phi_agent:
            return "Chatbot not configured properly. Please check GROQ_API_KEY."

        response = phi_agent.run(full_prompt)
        response_text = (
            response.content if hasattr(response, 'content') else str(response)
        )
        logger.info(
            f"Response generation took {perf_counter() - start_time:.2f} seconds"
        )
        return response_text

    except Exception as e:
        logger.error(f"Error in response generation: {e}")
        return f"Error generating response: {str(e)}"


def science_chat_function(message, history, cross_encoder_choice):
    """Gradio chat handler: append (message, response) to history.

    Returns ("", updated_history) so the textbox is cleared after submit.
    """
    if not message.strip():
        return "", history
    response = retrieve_and_generate_response(message, cross_encoder_choice, history)
    history.append([message, response])
    return "", history


# Gradio interface.
# NOTE(review): in the original (whitespace-mangled) file the Blocks context
# contained only the header; the chatbot widget, input wiring, examples and
# launch() call existed only in commented-out legacy code, so the handler was
# never reachable. The wiring below is restored from that commented version —
# confirm widget choices against the intended UI.
with gr.Blocks(title="Science Chatbot", theme='gradio/soft') as chatbot_app:
    with gr.Row():
        with gr.Column(scale=10):
            gr.HTML(
                """
                <h1>A free chatbot developed by K.M.RAMYASRI, TGT, GHS.SUTHUKENY
                using Phi Agent &amp; Groq LLMs for 10th-grade students</h1>
                <h2>Suggestions may be sent to
                <a href="mailto:ramyadevi1607@yahoo.com">ramyadevi1607@yahoo.com</a>.</h2>
                """
            )
        with gr.Column(scale=3):
            try:
                gr.Image(value='logo.png', height=200, width=200, show_label=False)
            except Exception:
                # Fall back to a text header when logo.png is unavailable.
                gr.HTML(
                    "<h2>A free chatbot developed by K.M.RAMYASRI, TGT, "
                    "GHS.SUTHUKENY using Phi Agent &amp; Groq LLMs for "
                    "10th-grade students</h2>"
                )

    chatbot = gr.Chatbot(
        label="Conversation",
        show_copy_button=True,
        avatar_images=(
            'https://aui.atlassian.com/aui/8.8/docs/images/avatar-person.svg',
            'https://huggingface.co/datasets/huggingface/brand-assets/resolve/main/hf-logo.svg',
        ),
    )
    with gr.Row():
        query = gr.Textbox(
            scale=3,
            show_label=False,
            placeholder="Enter text and press enter",
            container=False,
        )
        submit_btn = gr.Button(value="Submit", scale=1, variant="primary")
    cross_encoder = gr.Radio(
        choices=['(FAST) MiniLM-L6v2', '(ACCURATE) BGE reranker'],
        value='(ACCURATE) BGE reranker',
        label="Embeddings",
        info="Select the model for document ranking",
    )
    clear = gr.Button("Clear")

    # Both Enter-in-textbox and the Submit button run the same handler.
    query.submit(
        science_chat_function,
        inputs=[query, chatbot, cross_encoder],
        outputs=[query, chatbot],
    )
    submit_btn.click(
        science_chat_function,
        inputs=[query, chatbot, cross_encoder],
        outputs=[query, chatbot],
    )
    clear.click(lambda: [], None, chatbot, queue=False)

    gr.Examples(
        [
            'CAN U SAY THE DIFFERENCES BETWEEN METALS AND NON METALS?',
            'WHAT IS IONIC BOND?',
            'EXPLAIN ASEXUAL REPRODUCTION',
        ],
        query,
    )

if __name__ == "__main__":
    chatbot_app.launch(server_name="0.0.0.0", server_port=7860)
gr.Examples(examples, query) # if __name__ == "__main__": # chatbot_app.launch(server_name="0.0.0.0", server_port=7860)# import gradio as gr # import logging # from sentence_transformers import CrossEncoder # from phi.agent import Agent # from phi.model.groq import Groq # from backend.semantic_search import table, retriever # import numpy as np # from time import perf_counter # import os # # Logging setup # logging.basicConfig(level=logging.INFO) # logger = logging.getLogger(__name__) # # API Key setup # api_key = os.getenv("GROQ_API_KEY") # if not api_key: # logger.error("GROQ_API_KEY not found.") # gr.Warning("GROQ_API_KEY not found. Set it in 'Repository secrets'.") # else: # os.environ["GROQ_API_KEY"] = api_key # # Initialize Phi Agent # def create_phi_agent(): # if not api_key: # return None # agent = Agent( # name="Science Education Assistant", # role="You are a helpful science tutor for 10th-grade students", # instructions=[ # "You are an expert science teacher specializing in 10th-grade curriculum.", # "Provide clear, accurate, and age-appropriate explanations.", # "Use simple language and examples that students can understand.", # "Focus on concepts from physics, chemistry, and biology.", # "Structure responses with headings and bullet points when helpful.", # "Encourage learning and curiosity." # ], # model=Groq(id="llama3-70b-8192", api_key=api_key), # markdown=True # ) # return agent # phi_agent = create_phi_agent() # # Response Generation # def retrieve_and_generate_response(query, cross_encoder_choice, history=None): # top_rerank = 25 # top_k_rank = 20 # if not query: # return "Please provide a valid question." 
# try: # start_time = perf_counter() # query_vec = retriever.encode(query) # documents = table.search(query_vec, vector_column_name="vector").limit(top_rerank).to_list() # documents = [doc["text"] for doc in documents] # cross_encoder1 = CrossEncoder('BAAI/bge-reranker-base') if cross_encoder_choice == '(ACCURATE) BGE reranker' else CrossEncoder('cross-encoder/ms-marco-MiniLM-L-6-v2') # query_doc_pair = [[query, doc] for doc in documents] # cross_scores = cross_encoder1.predict(query_doc_pair) # sim_scores_argsort = list(reversed(np.argsort(cross_scores))) # documents = [documents[idx] for idx in sim_scores_argsort[:top_k_rank]] # context = "\n\n".join(documents[:10]) if documents else "" # context = f"Context information from educational materials:\n{context}\n\n" # history_context = "" # if history: # for user_msg, bot_msg in history[-2:]: # if user_msg and bot_msg: # history_context += f"Previous Q: {user_msg}\nPrevious A: {bot_msg}\n\n" # full_prompt = f"{history_context}{context}Question: {query}\n\nPlease answer the question using the context provided above. If the context doesn't contain relevant information, use your general knowledge about 10th-grade science topics." # if not phi_agent: # return "Chatbot not configured properly." # response = phi_agent.run(full_prompt) # logger.info(f"Response generation took {perf_counter() - start_time:.2f} seconds") # return response.content if hasattr(response, 'content') else str(response) # except Exception as e: # logger.error(f"Error in response generation: {e}") # return f"Error: {str(e)}" # # Gradio Interface with Chatbot # with gr.Blocks(title="Science Chatbot", theme='gradio/soft') as chatbot_app: # history_state = gr.State([]) # with gr.Row(): # with gr.Column(scale=10): # gr.HTML(""" ## A free chatbot developed by K.M.RAMYASRI, TGT, GHS.SUTHUKENY using Phi Agent & Groq LLMs for 10th-grade students #
## Suggestions may be sent to ramyadevi1607@yahoo.com. #
# """) # with gr.Column(scale=3): # gr.Image(value='logo.png', height=200, width=200, show_label=False) # chatbot = gr.Chatbot( # [], # elem_id="chatbot", # avatar_images=( # 'https://aui.atlassian.com/aui/8.8/docs/images/avatar-person.svg', # 'https://huggingface.co/datasets/huggingface/brand-assets/resolve/main/hf-logo.svg' # ), # bubble_full_width=False, # show_copy_button=True, # show_share_button=True, # label="Conversation" # ) # with gr.Row(): # query = gr.Textbox( # scale=3, # show_label=False, # placeholder="Enter text and press enter", # container=False # ) # submit_btn = gr.Button(value="Submit", scale=1, variant="primary") # cross_encoder = gr.Radio( # choices=['(FAST) MiniLM-L6v2', '(ACCURATE) BGE reranker'], # value='(ACCURATE) BGE reranker', # label="Embeddings", # info="Select the model for document ranking" # ) # def handle_query(txt, cross_encoder, history_state): # if not txt.strip(): # gr.Warning("Please submit a non-empty question") # return history_state, "" # history = history_state or [] # history.append((txt, "")) # response = retrieve_and_generate_response(txt, cross_encoder, history[:-1]) # history[-1] = (txt, response) # history_state[:] = history # return history, "" # submit_btn.click( # fn=handle_query, # inputs=[query, cross_encoder, history_state], # outputs=[chatbot, query] # ) # query.submit( # fn=handle_query, # inputs=[query, cross_encoder, history_state], # outputs=[chatbot, query] # ) # examples = [ # 'CAN U SAY THE DIFFERENCES BETWEEN METALS AND NON METALS?', # 'WHAT IS IONIC BOND?', # 'EXPLAIN ASEXUAL REPRODUCTION' # ] # gr.Examples(examples, query) # if __name__ == "__main__": # chatbot_app.launch(server_name="0.0.0.0", server_port=7860)# import gradio as gr # import logging # from sentence_transformers import CrossEncoder # from phi.agent import Agent # from phi.model.groq import Groq # from backend.semantic_search import table, retriever # import numpy as np # from time import perf_counter # import os # # Logging setup # 
logging.basicConfig(level=logging.INFO) # logger = logging.getLogger(__name__) # # API Key setup # api_key = os.getenv("GROQ_API_KEY") # if not api_key: # logger.error("GROQ_API_KEY not found.") # gr.Warning("GROQ_API_KEY not found. Set it in 'Repository secrets'.") # else: # os.environ["GROQ_API_KEY"] = api_key # # Initialize Phi Agent # def create_phi_agent(): # if not api_key: # return None # agent = Agent( # name="Science Education Assistant", # role="You are a helpful science tutor for 10th-grade students", # instructions=[ # "You are an expert science teacher specializing in 10th-grade curriculum.", # "Provide clear, accurate, and age-appropriate explanations.", # "Use simple language and examples that students can understand.", # "Focus on concepts from physics, chemistry, and biology.", # "Structure responses with headings and bullet points when helpful.", # "Encourage learning and curiosity." # ], # model=Groq(id="llama3-70b-8192", api_key=api_key), # markdown=True # ) # return agent # phi_agent = create_phi_agent() # # Response Generation # def retrieve_and_generate_response(query, cross_encoder_choice, history=None): # top_rerank = 25 # top_k_rank = 20 # if not query: # return "Please provide a valid question." 
# try: # start_time = perf_counter() # query_vec = retriever.encode(query) # documents = table.search(query_vec, vector_column_name="vector").limit(top_rerank).to_list() # documents = [doc["text"] for doc in documents] # cross_encoder1 = CrossEncoder('BAAI/bge-reranker-base') if cross_encoder_choice == '(ACCURATE) BGE reranker' else CrossEncoder('cross-encoder/ms-marco-MiniLM-L-6-v2') # query_doc_pair = [[query, doc] for doc in documents] # cross_scores = cross_encoder1.predict(query_doc_pair) # sim_scores_argsort = list(reversed(np.argsort(cross_scores))) # documents = [documents[idx] for idx in sim_scores_argsort[:top_k_rank]] # context = "\n\n".join(documents[:10]) if documents else "" # context = f"Context information from educational materials:\n{context}\n\n" # history_context = "" # if history: # for user_msg, bot_msg in history[-2:]: # if user_msg and bot_msg: # history_context += f"Previous Q: {user_msg}\nPrevious A: {bot_msg}\n\n" # full_prompt = f"{history_context}{context}Question: {query}\n\nPlease answer the question using the context provided above. If the context doesn't contain relevant information, use your general knowledge about 10th-grade science topics." # if not phi_agent: # return "Chatbot not configured properly." # response = phi_agent.run(full_prompt) # logger.info(f"Response generation took {perf_counter() - start_time:.2f} seconds") # return response.content if hasattr(response, 'content') else str(response) # except Exception as e: # logger.error(f"Error in response generation: {e}") # return f"Error: {str(e)}" # # Gradio Interface with Chatbot # with gr.Blocks(title="Science Chatbot", theme=gr.themes.Default(primary_hue="cyan", secondary_hue="yellow", neutral_hue="purple")) as chatbot_app: # gr.HTML(""" #