Spaces: mistral / magistral — Running
Browse files

app.py (CHANGED)
@@ -1625,6 +1625,11 @@ AVAILABLE_MODELS = [
1625 |         "id": "mistral-medium-2508",
1626 |         "description": "Mistral Medium 2508 model via Mistral API for general tasks and coding"
1627 |     },
1628 |     {
1629 |         "name": "Gemini 2.5 Flash",
1630 |         "id": "gemini-2.5-flash",
@@ -1683,7 +1688,7 @@ AVAILABLE_MODELS = [
1683 | ]
1684 |
1685 | # Default model selection
1686 | - DEFAULT_MODEL_NAME = "   [old value truncated in page capture]
1687 |   DEFAULT_MODEL = None
1688 |   for _m in AVAILABLE_MODELS:
1689 |       if _m.get("name") == DEFAULT_MODEL_NAME:
@@ -1827,7 +1832,7 @@ def get_inference_client(model_id, provider="auto"):
1827 |         api_key=os.getenv("STEP_API_KEY"),
1828 |         base_url="https://api.stepfun.com/v1"
1829 |     )
1830 | - elif model_id == "codestral-2508" or model_id == "mistral-medium-2508":
1831 |       # Use Mistral client for Mistral models
1832 |       return Mistral(api_key=os.getenv("MISTRAL_API_KEY"))
1833 |   elif model_id == "gemini-2.5-flash":
@@ -5665,7 +5670,7 @@ This will help me create a better design for you."""
5665 |     messages.append({'role': 'user', 'content': enhanced_query})
5666 |     try:
5667 |         # Handle Mistral API method difference
5668 | -     if _current_model["id"] in ("codestral-2508", "mistral-medium-2508"):
5669 |           completion = client.chat.stream(
5670 |               model=get_real_model_id(_current_model["id"]),
5671 |               messages=messages,
@@ -5709,7 +5714,7 @@ This will help me create a better design for you."""
5709 |     for chunk in completion:
5710 |         # Handle different response formats for Mistral vs others
5711 |         chunk_content = None
5712 | -     if _current_model["id"] in ("codestral-2508", "mistral-medium-2508"):
5713 |           # Mistral format: chunk.data.choices[0].delta.content
5714 |           if (
5715 |               hasattr(chunk, "data") and chunk.data and
Updated file (after change):

1625 |         "id": "mistral-medium-2508",
1626 |         "description": "Mistral Medium 2508 model via Mistral API for general tasks and coding"
1627 |     },
1628 | +   {
1629 | +       "name": "Magistral Medium 2509",
1630 | +       "id": "magistral-medium-2509",
1631 | +       "description": "Magistral Medium 2509 model via Mistral API for advanced code generation and reasoning"
1632 | +   },
1633 |     {
1634 |         "name": "Gemini 2.5 Flash",
1635 |         "id": "gemini-2.5-flash",
1688 | ]
1689 |
1690 | # Default model selection
1691 | + DEFAULT_MODEL_NAME = "Magistral Medium 2509"
1692 |   DEFAULT_MODEL = None
1693 |   for _m in AVAILABLE_MODELS:
1694 |       if _m.get("name") == DEFAULT_MODEL_NAME:
1832 |         api_key=os.getenv("STEP_API_KEY"),
1833 |         base_url="https://api.stepfun.com/v1"
1834 |     )
1835 | + elif model_id == "codestral-2508" or model_id == "mistral-medium-2508" or model_id == "magistral-medium-2509":
1836 |       # Use Mistral client for Mistral models
1837 |       return Mistral(api_key=os.getenv("MISTRAL_API_KEY"))
1838 |   elif model_id == "gemini-2.5-flash":
5670 |     messages.append({'role': 'user', 'content': enhanced_query})
5671 |     try:
5672 |         # Handle Mistral API method difference
5673 | +     if _current_model["id"] in ("codestral-2508", "mistral-medium-2508", "magistral-medium-2509"):
5674 |           completion = client.chat.stream(
5675 |               model=get_real_model_id(_current_model["id"]),
5676 |               messages=messages,
5714 |     for chunk in completion:
5715 |         # Handle different response formats for Mistral vs others
5716 |         chunk_content = None
5717 | +     if _current_model["id"] in ("codestral-2508", "mistral-medium-2508", "magistral-medium-2509"):
5718 |           # Mistral format: chunk.data.choices[0].delta.content
5719 |           if (
5720 |               hasattr(chunk, "data") and chunk.data and