Spaces:
Sleeping
Sleeping
Update mistral_hf_wrapper.py
Browse files- mistral_hf_wrapper.py +16 -17
mistral_hf_wrapper.py
CHANGED
|
@@ -1,21 +1,20 @@
|
|
| 1 |
import os
|
| 2 |
import requests
|
| 3 |
|
| 4 |
-
|
| 5 |
-
|
|
|
|
|
|
|
| 6 |
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
json
|
| 19 |
-
)
|
| 20 |
-
response.raise_for_status()
|
| 21 |
-
return response.json()["generated_text"].strip()
|
|
|
|
| 1 |
import os
|
| 2 |
import requests
|
| 3 |
|
| 4 |
+
class MistralInference:
    """Client for a Mistral model served via the Hugging Face Inference API.

    Configuration is read from the environment:
      * ``HF_MISTRAL_URL`` -- endpoint URL of the hosted model
      * ``HF_TOKEN``       -- API token sent as a Bearer credential

    NOTE(review): ``os.getenv`` returns ``None`` when either variable is
    unset; the failure then only surfaces later inside ``requests.post`` --
    confirm the deployment environment always provides both.
    """

    # Seconds to wait for the endpoint. Without an explicit timeout,
    # requests.post can block forever on a stalled connection.
    REQUEST_TIMEOUT = 60

    def __init__(self) -> None:
        # Endpoint URL and token come from the environment; may be None if unset.
        self.api_url = os.getenv("HF_MISTRAL_URL")
        self.api_token = os.getenv("HF_TOKEN")

    def run(self, prompt: str) -> str:
        """Send ``prompt`` to the hosted model and return its generated text.

        Args:
            prompt: The raw prompt string forwarded as the ``inputs`` field.

        Returns:
            The ``generated_text`` of the first result object in the
            endpoint's JSON response.

        Raises:
            requests.HTTPError: if the endpoint responds with a non-2xx status.
            requests.Timeout: if the endpoint does not answer within
                ``REQUEST_TIMEOUT`` seconds.
        """
        headers = {
            "Authorization": f"Bearer {self.api_token}",
            "Content-Type": "application/json",
        }
        payload = {
            "inputs": prompt,
            "parameters": {"max_new_tokens": 512},
        }
        # Fix: the original call passed no timeout, so a hung endpoint would
        # block the caller indefinitely.
        response = requests.post(
            self.api_url,
            headers=headers,
            json=payload,
            timeout=self.REQUEST_TIMEOUT,
        )
        response.raise_for_status()
        # HF text-generation endpoints return a list of result objects.
        return response.json()[0]["generated_text"]
|
|
|
|
|
|
|
|
|