# Streamlit front-end for the MCP Research Companion (Hugging Face Space).
# Third-party UI framework.
import streamlit as st

# Local application modules: rendering components and the orchestration layer.
from components.graph_view import render_graph
from components.notebook_view import render_notebook
from components.paper_list import render_paper_list
from components.sidebar import render_sidebar
from orchestrator.dispatcher import Dispatcher
from orchestrator.gemini import gemini_generate, gemini_generate_code
def main():
    """Render the MCP Research Companion Streamlit app.

    Wires the sidebar controls to three features: Gemini research Q&A,
    MCP server code generation, and paper search with notebook/graph views.
    """
    st.set_page_config(
        # NOTE(review): original title emoji was mojibake ("π"); replaced
        # with a plausible glyph — confirm intended icon.
        page_title="🔎 MCP Research Companion",
        layout="wide",
        initial_sidebar_state="expanded",
    )

    # Sidebar returns the full control state as one tuple (updated API).
    (query, num_results, theme, search_clicked, gemini_prompt,
     mcp_codegen_prompt, codegen_clicked) = render_sidebar()

    if theme == "Dark":
        _apply_dark_theme()

    # -- Gemini Q&A --
    # NOTE(review): this fires whenever the prompt text is non-empty (no
    # click guard, unlike codegen) — the sidebar exposes no Q&A button flag,
    # so the behavior is kept as-is.
    if gemini_prompt:
        st.header("💡 Gemini Research Q&A")  # was mojibake "π‘"
        with st.spinner("Gemini is thinking..."):
            answer = gemini_generate(gemini_prompt)
        st.success(answer)

    # -- MCP Code Generation --
    if mcp_codegen_prompt and codegen_clicked:
        _render_codegen(mcp_codegen_prompt)

    # -- Search and Display Papers --
    if search_clicked and query:
        _render_search_results(query, num_results)


def _apply_dark_theme():
    """Inject minimal dark-mode CSS overrides into the page."""
    st.markdown(
        """
        <style>
        body {background-color: #0E1117; color: #E6E1DC;}
        .stButton>button {background-color: #2563EB; color: white;}
        </style>
        """,
        unsafe_allow_html=True,
    )


def _render_codegen(prompt):
    """Generate MCP server code with Gemini and offer it for download.

    Args:
        prompt: the user's description of the MCP server to generate.
    """
    st.header("🛠️ Gemini MCP Server Code Generation")  # was mojibake "π οΈ"
    with st.spinner("Gemini is coding your MCP server..."):
        system_instruction = (
            "You are an expert in Model Context Protocol (MCP) server development. "
            "Generate clean, production-ready Python code for an MCP server as described below. "
            "Use best practices and include all necessary imports and comments."
        )
        code_result = gemini_generate_code(system_instruction, prompt)
    st.code(code_result, language="python")
    st.download_button(
        "Download code as mcp_server.py",
        code_result,
        file_name="mcp_server.py",
        mime="text/x-python",
    )


def _render_search_results(query, num_results):
    """Search papers via the dispatcher and render the list, an on-demand
    Gemini summary of the first hit, and its notebook and graph views.

    Args:
        query: free-text search query from the sidebar.
        num_results: maximum number of papers to fetch.
    """
    dispatcher = Dispatcher()
    with st.spinner("Searching MCP servers..."):
        papers = dispatcher.search_papers(query, limit=num_results)
    render_paper_list(papers)

    # Guard clause: nothing found — skip all detail views.
    if not papers:
        return

    first_paper = papers[0]
    st.subheader("Gemini-Powered Abstract Summarizer")
    if st.button("Summarize Abstract with Gemini"):
        with st.spinner("Gemini is generating summary..."):
            # Fix: send an explicit summarization instruction rather than the
            # bare abstract, so Gemini actually summarizes as the UI promises.
            summary = gemini_generate(
                "Summarize the following research paper abstract:\n\n"
                + first_paper["abstract"]
            )
        st.success(summary)

    notebook_cells = dispatcher.get_notebook_cells(first_paper["id"])
    render_notebook(notebook_cells)
    graph_data = dispatcher.get_graph(first_paper["id"])
    render_graph(graph_data)
# Script entry point: run the app when this module is executed directly.
if __name__ == "__main__":
    main()