import logging
import os
from typing import Any

import gradio as gr
import nest_asyncio
import requests
from llama_index.tools.mcp import BasicMCPClient, McpToolSpec
|
|
| |
# Module-level logger for this integration script.
logger = logging.getLogger(__name__)

# Default text-generation model and the Hugging Face Inference API endpoint template.
DEFAULT_HUGGINGFACE_MODEL = "Eric1227/dolphin-2.5-mixtral-8x7b-MLX-6bit"
HUGGINGFACE_API_URL = "https://api-inference.huggingface.co/models/{model_name}"
# SECURITY: the original hard-coded a live "hf_..." token in source control.
# Read it from the environment instead; that token should be revoked.
API_KEY = os.environ.get("HUGGINGFACE_API_KEY", "")

# Allow re-entrant event loops (llama_index may run asyncio inside an
# already-running loop, e.g. under Gradio or a notebook).
nest_asyncio.apply()

# Remote MCP server endpoint. The original value was malformed
# ("http://https://..." — two schemes); a single https scheme is valid.
REMOTE_MCP_URL = "https://binary1ne-mcpserver.hf.space"
mcp_client = BasicMCPClient(REMOTE_MCP_URL)
mcp_tool = McpToolSpec(client=mcp_client)
|
|
| |
def query_huggingface_api(prompt: str, model_name: str = DEFAULT_HUGGINGFACE_MODEL) -> str:
    """Send *prompt* to the Hugging Face Inference API and return the generated text.

    Args:
        prompt: Text forwarded as the model input.
        model_name: Hugging Face model repo id; defaults to DEFAULT_HUGGINGFACE_MODEL.

    Returns:
        The model's generated text, or a generic error string on any failure
        (network error, non-200 status, or unexpected response shape).
    """
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json",
    }
    payload = {"inputs": prompt}

    try:
        # A timeout prevents the Gradio callback from hanging indefinitely
        # on a slow or unreachable inference endpoint.
        response = requests.post(
            HUGGINGFACE_API_URL.format(model_name=model_name),
            headers=headers,
            json=payload,
            timeout=60,
        )
    except requests.RequestException as exc:
        logger.error("Network error calling Hugging Face API: %s", exc)
        return "Error processing your request."

    if response.status_code == 200:
        try:
            return response.json()[0]["generated_text"]
        except (ValueError, KeyError, IndexError, TypeError) as exc:
            # The API can return an error object instead of a list of
            # generations; don't let that crash the UI callback.
            logger.error("Unexpected Hugging Face API response shape: %s", exc)
            return "Error processing your request."

    logger.error("Error from Hugging Face API: %s, %s", response.status_code, response.text)
    return "Error processing your request."
|
|
| |
def interact_with_mcp(input_text: str) -> str:
    """Forward *input_text* to the remote MCP server and return its reply text.

    Any failure (transport error, bad payload, missing key) is logged and
    collapsed into a single fixed failure message for the UI.
    """
    try:
        result = mcp_client.query(input_text)
        return result['response']
    except Exception as exc:
        logger.error(f"Error interacting with MCP: {str(exc)}")
        return "MCP interaction failed."
|
|
| |
def generate_response_with_mcp(prompt: str) -> str:
    """Run *prompt* through the HF model, pass the output to MCP, combine both.

    Returns a single string containing the model's text followed by the
    MCP server's response.
    """
    model_text = query_huggingface_api(prompt)
    mcp_text = interact_with_mcp(model_text)
    return f"Model Response: {model_text}\n\nMCP Response: {mcp_text}"
|
|
| |
def launch_gradio_interface():
    """Build the Gradio UI and start serving it.

    Wires a prompt textbox through generate_response_with_mcp into an
    output textbox via a single button click.
    """
    with gr.Blocks() as demo:
        gr.Markdown("### Hugging Face Model + Remote MCP Integration")

        with gr.Row():
            prompt_box = gr.Textbox(label="Enter Your Prompt", placeholder="Type something here...")
            result_box = gr.Textbox(label="Generated Response")

        generate_btn = gr.Button("Generate Response")
        generate_btn.click(generate_response_with_mcp, inputs=prompt_box, outputs=result_box)

    demo.launch()
|
|
if __name__ == "__main__":
    # Script entry point: build and serve the Gradio app.
    launch_gradio_interface()
|
|