Spaces:
Sleeping
Sleeping
Update utils/inference.py
Browse files — utils/inference.py (+6, −1)
utils/inference.py
CHANGED
|
@@ -1,6 +1,11 @@
|
|
| 1 |
from huggingface_hub import InferenceClient
|
|
|
|
| 2 |
|
| 3 |
-
client = InferenceClient(
|
|
|
|
|
|
|
|
|
|
|
|
|
| 4 |
|
| 5 |
def call_model(prompt: str) -> str:
    """Send *prompt* to the shared inference client and return the generated text.

    Uses fixed generation settings (max_new_tokens=2048, temperature=0.3) and
    asks the backend not to echo the prompt back (return_full_text=False).
    """
    completion = client.text_generation(
        prompt,
        max_new_tokens=2048,
        temperature=0.3,
        return_full_text=False,
    )
    return completion
|
|
|
|
| 1 |
from huggingface_hub import InferenceClient
|
| 2 |
+
import os
|
| 3 |
|
| 4 |
+
# Module-level Hugging Face inference client shared by call_model below.
# NOTE(review): the token is read from the HF_TOKEN environment variable; if it
# is unset this passes token=None and the client falls back to unauthenticated
# access — confirm that is the intended behavior for this Space.
client = InferenceClient(
    provider="hf-inference",
    token=os.environ.get("HF_TOKEN"),
    model="mistralai/Mistral-7B-Instruct-v0.2",
)
|
| 9 |
|
| 10 |
def call_model(prompt: str) -> str:
    """Send *prompt* to the shared inference client and return the generated text.

    Uses fixed generation settings (max_new_tokens=2048, temperature=0.3) and
    asks the backend not to echo the prompt back (return_full_text=False).
    """
    completion = client.text_generation(
        prompt,
        max_new_tokens=2048,
        temperature=0.3,
        return_full_text=False,
    )
    return completion
|