Spaces:
Running
Running
Anirudh Esthuri
committed on
Commit
·
b3b1df7
1
Parent(s):
422aadb
Add GPT-5 model and update Claude models to Haiku 4.5 and Sonnet 4.5
Browse files- llm.py +2 -2
- model_config.py +6 -4
llm.py
CHANGED
|
@@ -94,7 +94,7 @@ def chat(messages, persona):
|
|
| 94 |
"max_completion_tokens": 4000,
|
| 95 |
}
|
| 96 |
# Some newer OpenAI models only support the default temperature.
|
| 97 |
-
if MODEL_STRING not in {"gpt-5-nano", "gpt-5-mini"}:
|
| 98 |
request_kwargs["temperature"] = 0.3
|
| 99 |
|
| 100 |
response = client.chat.completions.create(**request_kwargs)
|
|
@@ -375,7 +375,7 @@ def check_credentials():
|
|
| 375 |
bedrock_runtime = get_bedrock_client()
|
| 376 |
# Try a simple test invocation to verify credentials
|
| 377 |
test_response = bedrock_runtime.invoke_model(
|
| 378 |
-
modelId="anthropic.claude-
|
| 379 |
contentType="application/json",
|
| 380 |
accept="application/json",
|
| 381 |
body=json.dumps({
|
|
|
|
| 94 |
"max_completion_tokens": 4000,
|
| 95 |
}
|
| 96 |
# Some newer OpenAI models only support the default temperature.
|
| 97 |
+
if MODEL_STRING not in {"gpt-5", "gpt-5-nano", "gpt-5-mini"}:
|
| 98 |
request_kwargs["temperature"] = 0.3
|
| 99 |
|
| 100 |
response = client.chat.completions.create(**request_kwargs)
|
|
|
|
| 375 |
bedrock_runtime = get_bedrock_client()
|
| 376 |
# Try a simple test invocation to verify credentials
|
| 377 |
test_response = bedrock_runtime.invoke_model(
|
| 378 |
+
modelId="anthropic.claude-haiku-4-5-20251001-v1:0",
|
| 379 |
contentType="application/json",
|
| 380 |
accept="application/json",
|
| 381 |
body=json.dumps({
|
model_config.py
CHANGED
|
@@ -1,12 +1,13 @@
|
|
| 1 |
PROVIDER_MODEL_MAP = {
|
| 2 |
"openai": [
|
| 3 |
"gpt-4.1-mini",
|
|
|
|
| 4 |
"gpt-5-mini",
|
| 5 |
"gpt-5-nano",
|
| 6 |
],
|
| 7 |
"anthropic": [
|
| 8 |
-
"anthropic.claude-
|
| 9 |
-
"anthropic.claude-sonnet-4-
|
| 10 |
],
|
| 11 |
}
|
| 12 |
|
|
@@ -20,10 +21,11 @@ MODEL_TO_PROVIDER = {
|
|
| 20 |
# Model display names with categories
|
| 21 |
MODEL_DISPLAY_NAMES = {
|
| 22 |
"gpt-4.1-mini": "OpenAI - GPT-4.1 Mini",
|
|
|
|
| 23 |
"gpt-5-mini": "OpenAI - GPT-5 Mini",
|
| 24 |
"gpt-5-nano": "OpenAI - GPT-5 Nano",
|
| 25 |
-
"anthropic.claude-
|
| 26 |
-
"anthropic.claude-sonnet-4-
|
| 27 |
}
|
| 28 |
|
| 29 |
MODEL_CHOICES = [model for models in PROVIDER_MODEL_MAP.values() for model in models]
|
|
|
|
| 1 |
PROVIDER_MODEL_MAP = {
|
| 2 |
"openai": [
|
| 3 |
"gpt-4.1-mini",
|
| 4 |
+
"gpt-5",
|
| 5 |
"gpt-5-mini",
|
| 6 |
"gpt-5-nano",
|
| 7 |
],
|
| 8 |
"anthropic": [
|
| 9 |
+
"anthropic.claude-haiku-4-5-20251001-v1:0",
|
| 10 |
+
"anthropic.claude-sonnet-4-5-20250929-v1:0",
|
| 11 |
],
|
| 12 |
}
|
| 13 |
|
|
|
|
| 21 |
# Model display names with categories
|
| 22 |
MODEL_DISPLAY_NAMES = {
|
| 23 |
"gpt-4.1-mini": "OpenAI - GPT-4.1 Mini",
|
| 24 |
+
"gpt-5": "OpenAI - GPT-5",
|
| 25 |
"gpt-5-mini": "OpenAI - GPT-5 Mini",
|
| 26 |
"gpt-5-nano": "OpenAI - GPT-5 Nano",
|
| 27 |
+
"anthropic.claude-haiku-4-5-20251001-v1:0": "AWS Bedrock - Anthropic - Claude Haiku 4.5",
|
| 28 |
+
"anthropic.claude-sonnet-4-5-20250929-v1:0": "AWS Bedrock - Anthropic - Claude Sonnet 4.5",
|
| 29 |
}
|
| 30 |
|
| 31 |
MODEL_CHOICES = [model for models in PROVIDER_MODEL_MAP.values() for model in models]
|