import requests
import os
import time
# Hugging Face Inference API endpoint for Mistral-7B-Instruct v0.2.
API_URL = "https://router.huggingface.co/hf-inference/models/mistralai/Mistral-7B-Instruct-v0.2"
# Token is read from the environment; if HF_TOKEN is unset this is None and
# the f-string below renders a literal "Bearer None" header — TODO confirm
# the deployment always provides HF_TOKEN.
HF_TOKEN = os.getenv("HF_TOKEN")
headers = {
"Authorization": f"Bearer {HF_TOKEN}"
}
def query_model(prompt, max_retries=10, request_timeout=120):
    """Query the Mistral-7B-Instruct model through the HF Inference API.

    While the model is warming up the API returns a JSON object with an
    "estimated_time" field; this function sleeps that long and retries,
    up to *max_retries* attempts (the previous version looped forever).

    Args:
        prompt: Text prompt sent as the model "inputs".
        max_retries: Maximum attempts before giving up, so a model that
            never finishes loading cannot hang the caller indefinitely.
        request_timeout: Per-request timeout in seconds; without it a
            stalled connection would block forever.

    Returns:
        The generated text on success, or an "Error: ..." string
        describing the failure (the original error-string convention
        is preserved for callers that match on the "Error:" prefix).
    """
    payload = {
        "inputs": prompt,
        "parameters": {
            "max_new_tokens": 800,
            "temperature": 0.7
        }
    }
    for _ in range(max_retries):
        try:
            response = requests.post(
                API_URL, headers=headers, json=payload, timeout=request_timeout
            )
        except requests.RequestException as exc:
            # Network failure / timeout — report instead of crashing.
            return f"Error: {exc}"
        try:
            result = response.json()
        except ValueError:
            # Non-JSON body (e.g. an HTML error page from a gateway).
            return f"Error: HTTP {response.status_code}: {response.text[:200]}"
        # Success response: a list of generation dicts.
        if isinstance(result, list):
            return result[0]["generated_text"]
        # Model still loading: wait the advertised warm-up time, then retry.
        if "estimated_time" in result:
            time.sleep(result["estimated_time"])
        else:
            return f"Error: {result}"
    return f"Error: model not ready after {max_retries} attempts"