from huggingface_hub import InferenceClient
import os
def query_model(prompt):
    """Send *prompt* to Mistral-7B-Instruct via the HF Inference API.

    Args:
        prompt: Text prompt forwarded verbatim to the model.

    Returns:
        The generated text on success, or a human-readable error string.
        Errors are returned (not raised) because the caller displays the
        return value directly.
    """
    # Read the token at call time so it can be configured after import.
    hf_token = os.getenv("HF_TOKEN")
    if not hf_token:
        return "⚠ HF_TOKEN not found."
    # Only the client construction and network call sit inside the try:
    # the env lookup above cannot raise, so keep the guarded region minimal.
    try:
        client = InferenceClient(
            model="mistralai/Mistral-7B-Instruct-v0.2",
            token=hf_token,
        )
        return client.text_generation(
            prompt,
            max_new_tokens=300,
            temperature=0.7,
        )
    except Exception as e:  # boundary: fold any API failure into a UI string
        return f"Error: {str(e)}"