Spaces:
Paused
Paused
| import os | |
| import json | |
| import requests | |
| import logging | |
| from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer | |
| from urllib.parse import parse_qs, urlparse | |
| from typing import Dict, Any, Tuple | |
| from dotenv import load_dotenv | |
# Load environment variables from a local .env file (if present) so the
# API URL / key below can be configured without exporting shell variables.
load_dotenv()

# Configure logging for the whole module; handlers log through `logger`.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Constants
# Upstream Anthropic Messages endpoint; overridable via ANTHROPIC_API_URL.
ANTHROPIC_API_URL = os.getenv(
    'ANTHROPIC_API_URL',
    'https://relay.stagwellmarketingcloud.io/anthropic/v1/messages'
)
# Bearer token sent to the relay; None when ANTHROPIC_API_KEY is unset
# (requests would then carry "Bearer None" — configure the key in .env).
API_KEY = os.getenv('ANTHROPIC_API_KEY')
def transform_to_anthropic_request(request_data: Dict[str, Any]) -> Dict[str, Any]:
    """Convert an OpenAI-style payload into the Anthropic Messages format.

    A payload that already carries a ``messages`` list is passed through
    unchanged; otherwise a legacy ``prompt`` string is wrapped as a single
    user message. Model and token defaults fill in any missing fields.
    """
    if 'messages' in request_data:
        messages = request_data['messages']
    else:
        # Legacy completions shape: wrap the bare prompt as one user turn.
        messages = [{
            "role": "user",
            "content": request_data.get('prompt', '')
        }]
    return {
        "model": request_data.get('model', 'claude-3-5-sonnet-20240620'),
        "max_tokens": request_data.get('max_tokens', 1024),
        "messages": messages
    }
def make_anthropic_request(
    request_data: Dict[str, Any]
) -> Tuple[Dict[str, Any], int]:
    """Forward a (possibly OpenAI-style) request to the Anthropic API.

    Args:
        request_data: Incoming JSON body; transformed via
            transform_to_anthropic_request before sending.

    Returns:
        (parsed JSON response, 200) on success, or
        ({"error": message}, status) on failure — the upstream HTTP status
        when one is available, 500 otherwise.
    """
    headers = {
        'Authorization': f'Bearer {API_KEY}',
        'Content-Type': 'application/json',
    }
    try:
        anthropic_request = transform_to_anthropic_request(request_data)
        # Lazy %-args: serialization only happens if INFO is enabled.
        logger.info(
            "Sending request to Anthropic API: %s", json.dumps(anthropic_request)
        )
        response = requests.post(
            ANTHROPIC_API_URL,
            headers=headers,
            json=anthropic_request,
            timeout=30
        )
        response.raise_for_status()
        return response.json(), 200
    except requests.RequestException as e:
        error_msg = f"Error communicating with Anthropic API: {str(e)}"
        upstream = getattr(e, 'response', None)
        if upstream is not None:
            error_msg += f"\nResponse: {upstream.text}"
        logger.error(error_msg)
        # BUG FIX: requests.Response.__bool__ mirrors `ok`, so an error
        # response (4xx/5xx) is falsy — the old `if e.response else 500`
        # always discarded the real upstream status. Check identity instead.
        status = upstream.status_code if upstream is not None else 500
        return {"error": error_msg}, status
    except Exception as e:
        # Last-resort boundary handler: log and surface as a 500.
        error_msg = f"Unexpected error: {str(e)}"
        logger.error(error_msg)
        return {"error": error_msg}, 500
class AnthropicProxyHandler(SimpleHTTPRequestHandler):
    """Serve an OpenAI-compatible HTTP facade over the Anthropic Messages API.

    Routes:
        GET  /v1                  -> liveness probe
        GET  /v1/models           -> static single-model listing
        POST /v1/completions      -> legacy text-completion proxy
        POST /v1/chat/completions -> chat-completion proxy
    Any other GET falls through to static file serving.
    """

    def send_json_response(self, data: Dict[str, Any], status: int = 200):
        """Helper method to send JSON responses with the given HTTP status."""
        self.send_response(status)
        self.send_header('Content-Type', 'application/json')
        self.end_headers()
        self.wfile.write(json.dumps(data).encode('utf-8'))

    def do_GET(self):
        """Handle GET requests."""
        if self.path == "/v1":
            self.send_json_response({"status": True})
        elif self.path == "/v1/models":
            # Static OpenAI-style model listing; only one model is exposed.
            self.send_json_response({
                "data": [{
                    "id": "claude-3-5-sonnet-20240620",
                    "object": "model",
                    "created": 1706745202,
                    "owned_by": "anthropic"
                }]
            })
        else:
            # Anything else is served as a static file by the base class.
            super().do_GET()

    def do_POST(self):
        """Handle POST requests."""
        content_length = int(self.headers.get('Content-Length', 0))
        request_body = self.rfile.read(content_length)
        try:
            request_data = json.loads(request_body)
        except json.JSONDecodeError as e:
            # BUG FIX: a malformed (or empty) body used to raise out of the
            # handler with no response to the client; answer 400 instead.
            self.send_json_response(
                {"error": f"Invalid JSON body: {e}"}, status=400
            )
            return
        if self.path == "/v1/completions":
            logger.info(f"Received completion request: {json.dumps(request_data)}")
            response, status_code = make_anthropic_request(request_data)
            if status_code != 200:
                self.send_json_response(response, status_code)
                return
            # NOTE(review): assumes the upstream reply always carries
            # content[0].text — confirm against the Anthropic Messages schema.
            completion_response = {
                "id": "cmpl-" + response.get('id', 'default'),
                "object": "text_completion",
                "created": response.get('created', 0),
                "choices": [{
                    "text": response['content'][0]['text'],
                    "index": 0,
                    "finish_reason": "stop"
                }],
                # Token accounting is not mapped from the upstream reply.
                "usage": {
                    "prompt_tokens": -1,
                    "completion_tokens": -1,
                    "total_tokens": -1
                }
            }
            self.send_json_response(completion_response)
        elif self.path == "/v1/chat/completions":
            logger.info(
                f"Received chat completion request: {json.dumps(request_data)}"
            )
            response, status_code = make_anthropic_request(request_data)
            if status_code != 200:
                self.send_json_response(response, status_code)
                return
            # NOTE(review): same content[0].text shape assumption as above.
            chat_response = {
                "id": "chatcmpl-" + response.get('id', 'default'),
                "object": "chat.completion",
                "created": response.get('created', 0),
                "choices": [{
                    "index": 0,
                    "message": {
                        "role": "assistant",
                        "content": response['content'][0]['text']
                    },
                    "finish_reason": "stop"
                }],
                "usage": {
                    "prompt_tokens": -1,
                    "completion_tokens": -1,
                    "total_tokens": -1
                }
            }
            self.send_json_response(chat_response)
        else:
            self.send_json_response(
                {"error": "Invalid endpoint"},
                status=404
            )
def run_server(port: int = 7860):
    """Run the proxy HTTP server, blocking until the process is stopped.

    Args:
        port: TCP port to bind on all interfaces (default 7860).
    """
    httpd = ThreadingHTTPServer(("", port), AnthropicProxyHandler)
    logger.info(f"Starting server on port {port}")
    httpd.serve_forever()


if __name__ == "__main__":
    run_server()