Spaces:
Sleeping
Sleeping
A newer version of the Gradio SDK is available: 6.13.0
Quick Integration Guide - LlamaIndex
30-Second Setup
from src.core import EcoMCPKnowledgeBase
# 1. Initialize the knowledge-base wrapper (no documents indexed yet)
kb = EcoMCPKnowledgeBase()
# 2. Load and index every document under the given directory
kb.initialize("./docs")
# 3. Add structured product records to the index (list of product objects)
kb.add_products(your_products_list)
# 4. Search: returns the top_k highest-scoring matches for the query
results = kb.search("laptop", top_k=5)
Integration Points
Server/MCP Handler
from src.core import initialize_knowledge_base, get_knowledge_base
# Startup: build the singleton knowledge base once, at server boot
initialize_knowledge_base("./docs")
# In handler: fetch the already-initialized singleton per request
kb = get_knowledge_base()
results = kb.search(user_query)
Gradio UI
import gradio as gr
from src.core import get_knowledge_base
def search_interface(query, search_type):
    """Route a UI query to product or documentation search.

    Args:
        query: Free-text search string from the textbox.
        search_type: Radio selection; "Products" routes to product search,
            anything else falls through to documentation search.

    Returns:
        A blank-line-separated string with each hit's score and the first
        200 characters of its content.
    """
    kb = get_knowledge_base()
    if search_type == "Products":
        results = kb.search_products(query)
    else:
        results = kb.search_documentation(query)
    # Truncate each hit to 200 chars so the output textbox stays readable.
    return "\n\n".join([f"Score: {r.score:.2f}\n{r.content[:200]}" for r in results])
# Minimal two-input Gradio app wired to the search function above.
gr.Interface(
    fn=search_interface,
    inputs=[
        gr.Textbox(label="Search"),
        gr.Radio(["Products", "Documentation"]),
    ],
    outputs="text",
).launch()
API Endpoint
from fastapi import FastAPI
from src.core import get_knowledge_base
app = FastAPI()


@app.post("/search")
def search(query: str, top_k: int = 5):
    """Search the knowledge base over HTTP.

    Args:
        query: Free-text search string.
        top_k: Maximum number of results to return (default 5).

    Returns:
        A JSON-serializable list of result dicts (via each result's
        ``to_dict()``).
    """
    kb = get_knowledge_base()
    results = kb.search(query, top_k=top_k)
    return [r.to_dict() for r in results]
Configuration
from src.core import IndexConfig, EcoMCPKnowledgeBase
# Tune indexing behavior before constructing the knowledge base.
config = IndexConfig(
    embedding_model="text-embedding-3-small",  # OpenAI embedding model name
    chunk_size=1024,                           # characters/tokens per chunk
    use_pinecone=False,                        # keep the vector index local
)
kb = EcoMCPKnowledgeBase(config=config)
Environment
# Required: used for embeddings and any LLM calls
export OPENAI_API_KEY=sk-...
export PINECONE_API_KEY=... # Optional
Documentation
- Full Guide: docs/LLAMA_INDEX_GUIDE.md
- Examples: src/core/examples.py
- Tests: tests/test_llama_integration.py