import json
import os


def generate_portkey_config(local: bool = True) -> str:
    """Build a Portkey gateway configuration and return it as a JSON string.

    The config enables fallback routing across a list of model targets,
    semantic response caching (max_age 10000), and 3 retry attempts.

    Args:
        local: When True, each target is addressed directly with a provider
            name plus an API key read from the environment. When False,
            Portkey virtual keys are used where available.

    Returns:
        A JSON-encoded Portkey config dict.
    """
    if local:
        # Direct provider credentials. Fallback order:
        # Claude 3.5 Sonnet -> GPT-4o -> Claude 3 Opus.
        targets = [
            {
                "provider": "anthropic",
                "api_key": os.getenv("ANTHROPIC_API_KEY"),
                "override_params": {"model": "claude-3-5-sonnet-20240620"},
            },
            {
                "provider": "openai",
                "api_key": os.getenv("OPENAI_API_KEY"),
                "override_params": {"model": "gpt-4o"},
            },
            {
                "provider": "anthropic",
                "api_key": os.getenv("ANTHROPIC_API_KEY"),
                "override_params": {"model": "claude-3-opus-20240229"},
            },
        ]
    else:
        # Hosted mode mixes Portkey virtual keys with one direct API key.
        # NOTE(review): "POETRY_ANTHROPIC_API_KEY" looks like a typo for
        # "PORTKEY_..." — kept byte-identical to preserve behavior; confirm
        # against the deployment environment before renaming.
        # NOTE(review): "PORTKEY_ANTHROPIC_API_KEY" and
        # "PORTKEY_MISTRAL_API_KEY" are consumed as *virtual_key* values
        # despite their _API_KEY suffix — verify these env vars hold
        # Portkey virtual keys, not raw provider keys.
        targets = [
            {
                "virtual_key": os.getenv("PORTKEY_OPENAI_VIRTUAL_KEY"),
                "override_params": {"model": "gpt-4o"},
            },
            {
                "provider": "anthropic",
                "api_key": os.getenv("POETRY_ANTHROPIC_API_KEY"),
                "override_params": {"model": "claude-3-5-sonnet-20240620"},
            },
            {
                "virtual_key": os.getenv("PORTKEY_ANTHROPIC_API_KEY"),
                "override_params": {"model": "claude-3-opus-20240229"},
            },
            {
                "virtual_key": os.getenv("PORTKEY_MISTRAL_API_KEY"),
                "override_params": {"model": "codestral-latest"},
            },
        ]

    # Shared routing/caching/retry settings; key order matches the original
    # so the serialized JSON is unchanged.
    config = {
        "strategy": {"mode": "fallback"},
        "cache": {"mode": "semantic", "max_age": 10000},
        "retry": {"attempts": 3},
        "targets": targets,
    }
    return json.dumps(config)