Spaces:
Running
Running
# Import necessary libraries
import logging
import os

# NOTE(review): TF_CPP_MIN_LOG_LEVEL only suppresses TensorFlow C++ logs if it
# is set BEFORE any module that loads TensorFlow is imported. The original set
# it after the climateqa imports, which may have made it a no-op.
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"  # Suppresses INFO and WARNING logs

import gradio as gr

from climateqa.engine.embeddings import get_embeddings_function
from climateqa.engine.llm import get_llm
from climateqa.engine.reranker import get_reranker
from climateqa.engine.vectorstore import get_azure_search_vectorstore
from mcp_service import make_retrieve_data_mcp, make_retrieve_graphs_mcp
from utils import create_user_id

# basicConfig(level=WARNING) already sets the root logger's level, so the
# original extra getLogger().setLevel(WARNING) call was redundant.
logging.basicConfig(level=logging.WARNING)
# Load environment variables in local mode. python-dotenv is an optional
# development dependency; in a deployed environment configuration comes from
# real environment variables, so a missing package is expected and ignored.
try:
    from dotenv import load_dotenv

    load_dotenv()
except ImportError:
    # Original caught bare `Exception`, which would also hide genuine errors
    # raised while parsing the .env file; only the missing-package case is
    # an expected, ignorable condition.
    pass
# Set up Gradio Theme
theme = gr.themes.Base(
    primary_hue="blue",
    secondary_hue="red",
    font=[gr.themes.GoogleFont("Poppins"), "ui-sans-serif", "system-ui", "sans-serif"],
)

# Anonymous identifier for this process/session.
user_id = create_user_id()

# Create vectorstore and retriever — one Azure AI Search index per corpus:
# IPx reports, OWID graphs (indexed on their description text), and regions.
embeddings_function = get_embeddings_function()
vectorstore = get_azure_search_vectorstore(embeddings=embeddings_function, index_name="climateqa-ipx")
vectorstore_graphs = get_azure_search_vectorstore(embeddings=embeddings_function, index_name="climateqa-owid", text_key="description")
vectorstore_region = get_azure_search_vectorstore(embeddings=embeddings_function, index_name="climateqa-v2")

llm = get_llm(provider="azure", max_tokens=3000, temperature=0.0, streaming=True)

# os.getenv avoids the KeyError the original os.environ["GRADIO_ENV"] lookup
# raised whenever the variable was unset. Both branches currently select the
# same reranker; the scaffold is kept so local/deployed can diverge later.
if os.getenv("GRADIO_ENV") == "local":
    reranker = get_reranker("nano")
else:
    reranker = get_reranker("nano")
def mcp_server():
    """Build and return the Gradio Blocks app exposing the MCP tools.

    The data-retrieval tool is registered as an API endpoint; the graph
    tool is constructed but its registration is currently disabled.
    """
    with gr.Blocks() as mcp_only:
        data_tool = make_retrieve_data_mcp(vectorstore=vectorstore, reranker=reranker)
        gr.api(data_tool)
        graphs_tool = make_retrieve_graphs_mcp(vectorstore=vectorstore_graphs)
        # gr.api(graphs_tool)
    return mcp_only
# Build and launch the MCP server. Host and port are overridable through the
# HOST / PORT environment variables; defaults (0.0.0.0:7860) match the
# standard Hugging Face Spaces configuration, so behavior is unchanged when
# PORT is unset.
mcp_demo = mcp_server()
mcp_demo.launch(
    server_name=os.getenv("HOST", "0.0.0.0"),
    server_port=int(os.getenv("PORT", "7860")),
    mcp_server=True,
)