Spaces:
Sleeping
Sleeping
Update mistral_api.py
Browse files — mistral_api.py (+4, −2)
mistral_api.py
CHANGED
|
@@ -39,11 +39,13 @@ COMPLETIONS_MODEL = "mistral-large-latest"
|
|
| 39 |
def get_response(messages: list[dict], model: str=COMPLETIONS_MODEL,
|
| 40 |
temperature=0, max_tokens=800) -> str:
|
| 41 |
"""Chat completion using Mistral models.
|
| 42 |
-
https://docs.mistral.ai/capabilities/completion/
|
|
|
|
|
|
|
| 43 |
response = client.chat.complete(
|
| 44 |
model=model,
|
| 45 |
messages=messages,
|
| 46 |
-
max_tokens=
|
| 47 |
temperature=temperature,
|
| 48 |
# stream=True
|
| 49 |
)
|
|
|
|
| 39 |
def get_response(messages: list[dict], model: str=COMPLETIONS_MODEL,
|
| 40 |
temperature=0, max_tokens=800) -> str:
|
| 41 |
"""Chat completion using Mistral models.
|
| 42 |
+
https://docs.mistral.ai/capabilities/completion/
|
| 43 |
+
https://docs.mistral.ai/api/#tag/chat
|
| 44 |
+
"""
|
| 45 |
response = client.chat.complete(
|
| 46 |
model=model,
|
| 47 |
messages=messages,
|
| 48 |
+
max_tokens=max_tokens,
|
| 49 |
temperature=temperature,
|
| 50 |
# stream=True
|
| 51 |
)
|