# Hugging Face Spaces page banner captured during export (status: Running) — not part of the source.
| from __future__ import annotations | |
| import os | |
| import gradio as gr | |
| import health | |
| from layout import CELL_CSS, cell | |
| from problem_cell import render_problem_cell | |
| from solution_cell import render_solution_cell | |
| from setup_cell import render_setup_cell | |
| from context_biased_transcription_cell import render_context_biased_transcription_cell | |
| from media_analysis_cell import render_media_analysis_cell | |
| from translation_cell import render_translation_cell | |
| from wrap_up_cell import render_wrap_up_cell | |
def render_health_panel(gemini_api_key: str | None = None) -> str:
    """Build the HTML health notice for the health-check cell.

    Thin adapter so the Gradio click handler has a module-level callable;
    all real work is delegated to :mod:`health`.

    Args:
        gemini_api_key: Optional API key forwarded to the health module.

    Returns:
        An HTML string describing the current health status.
    """
    notice = health.render_health_notice(gemini_api_key)
    return notice
def create_app() -> gr.Blocks:
    """Create the Gradio Blocks application used in the Hugging Face Space.

    The layout is intentionally notebook-like: each conceptual unit
    (problem, health, demos, wrap-up) is encapsulated in its own module
    and rendered as a "cell" to keep the main app glue straightforward.

    Returns:
        The assembled (but not yet launched) ``gr.Blocks`` application.
    """
    with gr.Blocks(title="Aileen3 Demo") as demo:
        # Inject the shared per-cell CSS once at the top of the page.
        gr.HTML(f"<style>{CELL_CSS}</style>")

        # NOTE(review): several string literals below contain mojibake
        # (e.g. "π", "β") from a past encoding round-trip — the original
        # emoji/punctuation cannot be recovered from this file alone.
        with cell("π Introduction"):
            gr.Markdown(
                """
                # Aileen 3 Core

                <div style="display: flex; justify-content: center; gap: 10px;">
                <a href="https://ndurner.de/links/aileen3-linkedin">
                <img alt="LinkedIn post" src="https://img.shields.io/badge/π LinkedIn-Post-blue?logo=linkedin">
                </a>
                <a href="https://youtu.be/r56najKVS4I">
                <img alt="Demo video" src="https://img.shields.io/badge/YouTube-MCP%20demo%20video-red?logo=youtube">
                </a>
                </div>

                Large Language Models (LLMs) rely on tools β sometimes provided by MCP servers β to interact with the outside world.
                Aileen 3 Core is an MCP server focused on **Information Foraging**: mining high-noise sources for novel insights and turning
                them into dense briefings you can consume quickly. Grounded in cognitive science, Aileen 3 models novelty as prediction error against
                explicit priors such as user expectations, facts from an AI Memory Bank, or media context.

                > **"Information is surprises. You learn something when things donβt turn out the way you expected."** βΈΊ Roger Schank

                To that end, the Aileen 3 Core MCP server provides media access and analysis services backed by Google Gemini.
                This Space shows a small, notebook-style slice of that system so you can verify the core MCP server is reachable and working.

                Today, Aileen 3 Core is a contender in the **[MCP's 1st Birthday β Hosted by Anthropic and Gradio](https://huggingface.co/MCP-1st-Birthday)**
                hackathon. It also powers the **[Aileen 3 Agent](https://github.com/ndurner/aileen3-agent)** project, a
                [capstone project](https://ndurner.de/links/aileen3-kaggle-writeup) for the
                [*AI Agents Intensive Course with Google*](https://www.kaggle.com/learn-guide/5-day-agents). Its predecessor, Aileen 2, explored autonomous
                personalized summarization of German parliament proceedings and won Honorable Mention in the
                [Generative AI Agents Developer Contest by NVIDIA and LangChain](https://www.nvidia.com/en-in/ai-data-science/generative-ai/developer-contest-with-langchain/).

                For more details on setup, usage, and background, see the full project README. Aileen 3 Core has been tested with Claude Desktop and Aileen 3 Agent.
                """
            )

        with cell("π§ How this demo is organized"):
            gr.Markdown(
                """
                Think of this interface as a lightweight Jupyter notebook: instead of code cells, you get a sequence of interactive βcellsβ that walk through key aspects of the Aileen 3 Core MCP server.

                - The **Introduction** cell gives you the high-level context from the README.
                - The **Health check** cell below connects to the MCP server used by the Space and shows whether it can be started successfully.
                - Future cells will demonstrate the problem statement and core information-foraging capabilities.
                """
            )

        # Static narrative cells (no inputs).
        render_problem_cell()
        render_solution_cell()

        # The setup cell returns the textbox holding the user's Gemini API
        # key; it is threaded into every downstream demo cell below.
        gemini_key_box = render_setup_cell()

        with cell("π©π»ββοΈ Health check"):
            # Placeholder HTML until the user explicitly runs the check.
            health_panel = gr.HTML(value=health.render_placeholder_notice())
            run_button = gr.Button("Run health check", variant="primary")
            run_button.click(
                fn=render_health_panel,
                inputs=gemini_key_box,
                outputs=health_panel,
                # queue=False: the health check is quick, so bypass the
                # shared request queue for snappier feedback.
                queue=False,
            )

        # Interactive demo cells, each taking the Gemini key textbox.
        render_context_biased_transcription_cell(gemini_key_box)
        render_media_analysis_cell(gemini_key_box)
        render_translation_cell(gemini_key_box)

        render_wrap_up_cell()

    return demo
def main() -> None:
    """Entry point: build the app and serve it.

    Host and port come from the environment when set
    (``GRADIO_SERVER_NAME``/``HOST`` and ``GRADIO_SERVER_PORT``/``PORT``),
    falling back to ``0.0.0.0:7860`` — including when the port variable
    holds a non-numeric value.
    """
    default_port = 7860
    env = os.environ
    host = env.get("GRADIO_SERVER_NAME") or env.get("HOST") or "0.0.0.0"
    raw_port = env.get("GRADIO_SERVER_PORT") or env.get("PORT") or str(default_port)
    try:
        port = int(raw_port)
    except ValueError:
        # Non-numeric override: fall back rather than crash at startup.
        port = default_port
    create_app().launch(server_name=host, server_port=port)


if __name__ == "__main__":
    main()