import gradio as gr
import httpx
import json
import asyncio
import os
import sys
from dotenv import load_dotenv
from typing import List, Dict, Any

# MCP imports
try:
    from mcp.server import Server
    from mcp.server.stdio import stdio_server
    from mcp.types import Tool, TextContent
    MCP_AVAILABLE = True
except ImportError:
    MCP_AVAILABLE = False
    print("MCP not available. Install with: pip install mcp")

# Load environment variables
load_dotenv()
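
# A minimal .env file read by load_dotenv() above might look like this
# (variable names are the ones queried below; values are placeholders):
#   ANTHROPIC_API_KEY=sk-ant-...
#   MISTRAL_API_KEY=...
#   HYPERBOLIC_API_KEY=...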


class ConversationTransfer:
    def __init__(self):
        self.anthropic_key = os.getenv("ANTHROPIC_API_KEY")
        self.mistral_key = os.getenv("MISTRAL_API_KEY")
        self.hyperbolic_key = os.getenv("HYPERBOLIC_API_KEY")

        # Print status
        print("API Keys Status:")
        print(f"  Anthropic:  {'✅' if self.anthropic_key else '❌'}")
        print(f"  Mistral:    {'✅' if self.mistral_key else '❌'}")
        print(f"  Hyperbolic: {'✅' if self.hyperbolic_key else '❌'}")

    def parse_conversation(self, text: str) -> List[Dict]:
        """Parse conversation history from JSON or plain text."""
        try:
            # Try JSON first
            data = json.loads(text)
            if isinstance(data, list):
                return data
            else:
                return [data]
        except json.JSONDecodeError:
            # Fall back to plain-text parsing
            return self._parse_plain_text(text)

    def _parse_plain_text(self, text: str) -> List[Dict]:
        """Parse a plain-text conversation into role/content messages."""
        messages = []
        lines = text.strip().split('\n')
        current_role = "user"
        current_content = ""

        for line in lines:
            line = line.strip()
            if not line:
                continue

            # Check for role indicators
            if any(line.lower().startswith(prefix) for prefix in ['user:', 'human:', 'you:']):
                if current_content:
                    messages.append({"role": current_role, "content": current_content.strip()})
                current_role = "user"
                current_content = line.split(':', 1)[1].strip() if ':' in line else line
            elif any(line.lower().startswith(prefix) for prefix in ['assistant:', 'ai:', 'bot:', 'claude:', 'gpt:', 'chatgpt:']):
                if current_content:
                    messages.append({"role": current_role, "content": current_content.strip()})
                current_role = "assistant"
                current_content = line.split(':', 1)[1].strip() if ':' in line else line
            else:
                current_content += " " + line

        if current_content:
            messages.append({"role": current_role, "content": current_content.strip()})
        return messages
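
    # Illustrative behaviour of the plain-text fallback (not executed here):
    #   "User: Hello\nAssistant: Hi there!"
    # parses to:
    #   [{"role": "user", "content": "Hello"},
    #    {"role": "assistant", "content": "Hi there!"}]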

    async def send_to_anthropic(self, messages: List[Dict]) -> str:
        """Send conversation to Anthropic Claude."""
        if not self.anthropic_key:
            return "❌ Anthropic API key not configured"

        # Add transfer context as a system prompt (Anthropic takes it as a top-level field)
        system_msg = "This conversation was transferred from another LLM. Please continue the conversation naturally, maintaining the same tone and context."
        user_messages = [msg for msg in messages if msg["role"] != "system"]

        headers = {
            "x-api-key": self.anthropic_key,
            "content-type": "application/json",
            "anthropic-version": "2023-06-01"
        }
        payload = {
            "model": "claude-3-haiku-20240307",
            "max_tokens": 1000,
            "system": system_msg,
            "messages": user_messages
        }

        try:
            async with httpx.AsyncClient(timeout=30.0) as client:
                response = await client.post(
                    "https://api.anthropic.com/v1/messages",
                    headers=headers,
                    json=payload
                )
                response.raise_for_status()
                result = response.json()
                return result["content"][0]["text"]
        except Exception as e:
            return f"❌ Error calling Anthropic: {str(e)}"
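
    # The Messages API response body is expected to look roughly like
    #   {"role": "assistant", "content": [{"type": "text", "text": "..."}], ...}
    # so the indexing above assumes at least one text block in "content".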

    async def send_to_mistral(self, messages: List[Dict]) -> str:
        """Send conversation to Mistral."""
        if not self.mistral_key:
            return "❌ Mistral API key not configured"

        # Add transfer context as a leading system message
        system_msg = {"role": "system", "content": "This conversation was transferred from another LLM. Please continue the conversation naturally."}
        all_messages = [system_msg] + messages

        headers = {
            "Authorization": f"Bearer {self.mistral_key}",
            "Content-Type": "application/json"
        }
        payload = {
            "model": "mistral-small",
            "messages": all_messages,
            "max_tokens": 1000
        }

        try:
            async with httpx.AsyncClient(timeout=30.0) as client:
                response = await client.post(
                    "https://api.mistral.ai/v1/chat/completions",
                    headers=headers,
                    json=payload
                )
                response.raise_for_status()
                result = response.json()
                return result["choices"][0]["message"]["content"]
        except Exception as e:
            return f"❌ Error calling Mistral: {str(e)}"

    async def send_to_hyperbolic(self, messages: List[Dict]) -> str:
        """Send conversation to Hyperbolic Labs."""
        if not self.hyperbolic_key:
            return "❌ Hyperbolic API key not configured"

        # Add transfer context as a leading system message
        system_msg = {"role": "system", "content": "This conversation was transferred from another LLM. Please continue naturally."}
        all_messages = [system_msg] + messages

        headers = {
            "Authorization": f"Bearer {self.hyperbolic_key}",
            "Content-Type": "application/json"
        }
        payload = {
            "model": "meta-llama/Llama-2-7b-chat-hf",
            "messages": all_messages,
            "max_tokens": 1000
        }

        try:
            async with httpx.AsyncClient(timeout=30.0) as client:
                response = await client.post(
                    "https://api.hyperbolic.xyz/v1/chat/completions",
                    headers=headers,
                    json=payload
                )
                response.raise_for_status()
                result = response.json()
                return result["choices"][0]["message"]["content"]
        except Exception as e:
            return f"❌ Error calling Hyperbolic: {str(e)}"

    async def transfer_conversation(self, history_text: str, source_provider: str, target_provider: str) -> str:
        """Main transfer function: parse the history and forward it to the target provider."""
        if not history_text.strip():
            return "❌ Please provide conversation history"

        # Parse conversation
        try:
            messages = self.parse_conversation(history_text)
            if not messages:
                return "❌ Could not parse conversation history"
        except Exception as e:
            return f"❌ Error parsing conversation: {str(e)}"

        # Build result
        result = "**Transferring Conversation**\n"
        result += f"  From: {source_provider}\n"
        result += f"  To: {target_provider}\n"
        result += f"  Messages: {len(messages)}\n\n"

        # Show a preview of the parsed messages
        if messages:
            result += "**Conversation Preview:**\n"
            for msg in messages[:2]:  # Show the first 2 messages
                content_preview = msg['content'][:100] + "..." if len(msg['content']) > 100 else msg['content']
                result += f"  {msg['role']}: {content_preview}\n"
            if len(messages) > 2:
                result += f"  ... and {len(messages) - 2} more messages\n"
            result += "\n"

        # Transfer to target provider
        try:
            if target_provider.lower() == "anthropic":
                response = await self.send_to_anthropic(messages)
            elif target_provider.lower() == "mistral":
                response = await self.send_to_mistral(messages)
            elif target_provider.lower() == "hyperbolic":
                response = await self.send_to_hyperbolic(messages)
            else:
                return f"❌ Unsupported target provider: {target_provider}"

            result += "✅ **Transfer Successful!**\n\n"
            result += f"**Response from {target_provider.title()}:**\n"
            result += f"{response}"
            return result
        except Exception as e:
            return f"❌ Transfer failed: {str(e)}"


# Initialize the transfer tool
transfer_tool = ConversationTransfer()
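
# Illustrative direct call (requires the corresponding API key to be set):
#   asyncio.run(transfer_tool.transfer_conversation("User: Hi\nAssistant: Hello!", "ChatGPT", "anthropic"))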

# MCP Server Setup (if available)
if MCP_AVAILABLE:
    server = Server("conversation-transfer")

    # Register the tool-listing and tool-call handlers with the server
    @server.list_tools()
    async def list_tools() -> List[Tool]:
        return [
            Tool(
                name="transfer_conversation",
                description="Transfer conversation history from one LLM provider to another",
                inputSchema={
                    "type": "object",
                    "properties": {
                        "history_text": {
                            "type": "string",
                            "description": "Conversation history in JSON or plain text format"
                        },
                        "source_provider": {
                            "type": "string",
                            "description": "Source LLM provider (e.g., 'ChatGPT', 'Claude', 'Gemini')"
                        },
                        "target_provider": {
                            "type": "string",
                            "description": "Target LLM provider",
                            "enum": ["anthropic", "mistral", "hyperbolic"]
                        }
                    },
                    "required": ["history_text", "source_provider", "target_provider"]
                }
            )
        ]

    @server.call_tool()
    async def call_tool(name: str, arguments: Dict[str, Any]) -> List[TextContent]:
        if name == "transfer_conversation":
            result = await transfer_tool.transfer_conversation(
                arguments["history_text"],
                arguments["source_provider"],
                arguments["target_provider"]
            )
            return [TextContent(type="text", text=result)]
        else:
            raise ValueError(f"Unknown tool: {name}")
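
    # Illustrative only: an MCP client (e.g. Claude Desktop) could launch this script as a
    # stdio server with a config entry along these lines, assuming the file is saved as
    # app.py (the exact schema depends on the client):
    #   {"mcpServers": {"conversation-transfer": {"command": "python", "args": ["app.py", "mcp"]}}}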


def transfer_sync(history_text, source_provider, target_provider):
    """Synchronous wrapper around the async transfer function for the Gradio callback."""
    return asyncio.run(transfer_tool.transfer_conversation(history_text, source_provider, target_provider))
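
# Note: current Gradio releases also accept async functions as event handlers, so the
# coroutine could be wired to the button directly; the sync wrapper is kept for clarity.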


# Create Gradio interface
def create_interface():
    with gr.Blocks(title="LLM Conversation Transfer", theme=gr.themes.Default()) as interface:
        gr.Markdown("# LLM Conversation Transfer Tool")
        gr.Markdown("**Seamlessly transfer conversations between different LLM providers!**")

        with gr.Row():
            with gr.Column(scale=2):
                history_input = gr.Textbox(
                    label="Conversation History",
                    placeholder="""Paste your conversation here...

Examples:
• Plain text: "User: Hello\nAssistant: Hi there!"
• JSON: [{"role": "user", "content": "Hello"}]
• ChatGPT export format""",
                    lines=10,
                    max_lines=25
                )

                with gr.Row():
                    source_dropdown = gr.Dropdown(
                        choices=["ChatGPT", "Claude", "Gemini", "Mistral", "Other"],
                        label="Source Provider",
                        value="ChatGPT"
                    )
                    target_dropdown = gr.Dropdown(
                        choices=["anthropic", "mistral", "hyperbolic"],
                        label="Target Provider",
                        value="anthropic"
                    )

                transfer_btn = gr.Button("Transfer Conversation", variant="primary", size="lg")

            with gr.Column(scale=1):
                gr.Markdown("### Quick Guide")
                gr.Markdown("""
**1. Get Your Conversation**
- Copy from ChatGPT, Claude, etc.
- Export as JSON or plain text

**2. Paste & Select**
- Paste in the text box
- Choose source and target

**3. Transfer!**
- Click the button
- Get a response from the new LLM

### Supported Providers
- ✅ **Anthropic** (Claude)
- ✅ **Mistral AI**
- ✅ **Hyperbolic Labs**

### Status
""")

                # API key / MCP status shown in the sidebar
                status_text = "**API Keys:**\n"
                status_text += f"- Anthropic: {'✅' if transfer_tool.anthropic_key else '❌'}\n"
                status_text += f"- Mistral: {'✅' if transfer_tool.mistral_key else '❌'}\n"
                status_text += f"- Hyperbolic: {'✅' if transfer_tool.hyperbolic_key else '❌'}\n"
                status_text += f"- MCP Server: {'✅' if MCP_AVAILABLE else '❌'}"
                gr.Markdown(status_text)

        output = gr.Textbox(
            label="Transfer Result",
            lines=12,
            max_lines=25,
            interactive=False
        )

        transfer_btn.click(
            fn=transfer_sync,
            inputs=[history_input, source_dropdown, target_dropdown],
            outputs=output
        )

        # Add examples
        with gr.Row():
            gr.Examples(
                examples=[
                    [
                        "User: What is Python programming?\nAssistant: Python is a high-level, interpreted programming language known for its simple syntax and readability. It's widely used in web development, data science, AI, and automation.",
                        "ChatGPT",
                        "anthropic"
                    ],
                    [
                        '[{"role": "user", "content": "Explain quantum computing in simple terms"}, {"role": "assistant", "content": "Quantum computing uses quantum mechanical phenomena like superposition and entanglement to process information in ways that classical computers cannot."}]',
                        "Other",
                        "mistral"
                    ],
                    [
                        "Human: Write a haiku about programming\nClaude: Code flows like water\nBugs hide in logic's shadows\nDebug brings the light",
                        "Claude",
                        "hyperbolic"
                    ]
                ],
                inputs=[history_input, source_dropdown, target_dropdown],
                label="Try These Examples"
            )

    return interface


# Main execution
if __name__ == "__main__":
    print("Starting LLM Conversation Transfer Tool...")

    # Check if running as an MCP server
    if len(sys.argv) > 1 and sys.argv[1] == "mcp":
        if MCP_AVAILABLE:
            print("Running as MCP Server...")

            async def run_mcp_server():
                # stdio_server() is an async context manager yielding the
                # (read_stream, write_stream) pair the MCP server runs over
                async with stdio_server() as (read_stream, write_stream):
                    await server.run(read_stream, write_stream, server.create_initialization_options())

            asyncio.run(run_mcp_server())
        else:
            print("MCP not available. Install with: pip install mcp")
            sys.exit(1)
    else:
        # Run the Gradio interface
        print("Starting Gradio Interface...")
        interface = create_interface()
        interface.launch(
            share=False,              # Disable share link
            server_name="127.0.0.1",  # Use localhost instead of 0.0.0.0
            server_port=7860,
            show_error=True,
            inbrowser=True            # Auto-open browser
        )
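
# Packages imported by this script (a Space's requirements.txt would need at least these):
#   gradio, httpx, python-dotenv, mcp (optional; only needed for MCP server mode)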