File size: 3,495 Bytes
66bc554 25d025b 66bc554 25d025b 66bc554 25d025b 66bc554 25d025b 66bc554 25d025b 66bc554 25d025b 66bc554 25d025b 66bc554 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 |
import json
import os
def generate_portkey_config(local: bool = True) -> str:
    """Build a Portkey gateway configuration and return it as a JSON string.

    The config enables a fallback strategy (targets are tried in order until
    one succeeds), semantic response caching, and automatic retries.

    Args:
        local: If True, targets authenticate directly with provider API keys
            read from the environment (``ANTHROPIC_API_KEY``,
            ``OPENAI_API_KEY``). If False, targets mostly use Portkey virtual
            keys (``PORTKEY_*`` environment variables).

    Returns:
        The configuration serialized with ``json.dumps``. Missing environment
        variables yield ``null`` key fields in the output (``os.getenv``
        returns ``None``).
    """
    if local:
        # Direct provider credentials; fallback order: Sonnet -> GPT-4o -> Opus.
        targets = [
            {
                "provider": "anthropic",
                "api_key": os.getenv("ANTHROPIC_API_KEY"),
                "override_params": {"model": "claude-3-5-sonnet-20240620"},
            },
            {
                "provider": "openai",
                "api_key": os.getenv("OPENAI_API_KEY"),
                "override_params": {"model": "gpt-4o"},
            },
            {
                "provider": "anthropic",
                "api_key": os.getenv("ANTHROPIC_API_KEY"),
                "override_params": {"model": "claude-3-opus-20240229"},
            },
        ]
    else:
        # Hosted mode: route through Portkey virtual keys where available.
        targets = [
            {
                "virtual_key": os.getenv("PORTKEY_OPENAI_VIRTUAL_KEY"),
                "override_params": {"model": "gpt-4o"},
            },
            {
                "provider": "anthropic",
                # NOTE(review): "POETRY_ANTHROPIC_API_KEY" looks like a typo
                # for "PORTKEY_ANTHROPIC_API_KEY" — kept as-is to preserve
                # runtime behavior; confirm against the deployment environment.
                "api_key": os.getenv("POETRY_ANTHROPIC_API_KEY"),
                "override_params": {"model": "claude-3-5-sonnet-20240620"},
            },
            {
                "virtual_key": os.getenv("PORTKEY_ANTHROPIC_API_KEY"),
                "override_params": {"model": "claude-3-opus-20240229"},
            },
            {
                "virtual_key": os.getenv("PORTKEY_MISTRAL_API_KEY"),
                "override_params": {"model": "codestral-latest"},
            },
        ]

    # Strategy, cache, and retry settings are identical in both modes; only
    # the target list differs.
    config = {
        "strategy": {"mode": "fallback"},
        "cache": {"mode": "semantic", "max_age": 10000},
        "retry": {"attempts": 3},
        "targets": targets,
    }
    return json.dumps(config)
|