from huggingface_hub import InferenceClient
import os
def query_model(prompt):
    """Send *prompt* to a hosted Llama-3 chat model and return its reply.

    The request carries a fixed "certified professional fitness trainer"
    system message, so every answer is framed as fitness advice.
    Authentication reads the HF_TOKEN environment variable; if it is unset
    the client is constructed with token=None (unauthenticated request).

    Args:
        prompt: The user's message to the model.

    Returns:
        The assistant's reply text on success, or a string of the form
        "Error: <details>" on any failure (network error, bad/missing
        token, API error). Callers display this string directly.
    """
    try:
        hf_token = os.getenv("HF_TOKEN")  # may be None — HF client accepts that
        client = InferenceClient(
            model="meta-llama/Meta-Llama-3-8B-Instruct",
            token=hf_token,
        )
        response = client.chat_completion(
            messages=[
                {"role": "system", "content": "You are a certified professional fitness trainer."},
                {"role": "user", "content": prompt},
            ],
            max_tokens=2400,
            temperature=0.6,  # moderate temperature keeps advice focused
            top_p=0.9,
        )
        return response.choices[0].message.content
    except Exception as e:
        # Broad catch is deliberate: the error text is returned to the UI
        # instead of crashing the app. NOTE(review): consider logging `e` too.
        return f"Error: {str(e)}"