Hugging Face Space — status: Sleeping
| from huggingface_hub import InferenceClient | |
| import os | |
def query_model(prompt, model="mistralai/Mistral-7B-Instruct-v0.2",
                max_new_tokens=300, temperature=0.7):
    """Query a hosted text-generation model via the HF Inference API.

    Parameters
    ----------
    prompt : str
        The instruction/prompt sent to the model.
    model : str, optional
        Hub model id to query. Defaults to Mistral-7B-Instruct-v0.2,
        the value the original hard-coded.
    max_new_tokens : int, optional
        Cap on generated tokens passed through to the API (default 300).
    temperature : float, optional
        Sampling temperature passed through to the API (default 0.7).

    Returns
    -------
    str
        The generated text on success, the literal warning
        "⚠ HF_TOKEN not found." when the token env var is unset, or an
        "Error: ..." string when the API call fails. This function
        never raises — it is a UI-facing helper.
    """
    # Fail fast before touching the network: check credentials outside the
    # try so a missing token is reported as the specific warning and can
    # never be swallowed into the generic "Error: ..." message.
    hf_token = os.getenv("HF_TOKEN")
    if not hf_token:
        return "⚠ HF_TOKEN not found."

    try:
        # NOTE(review): the client is rebuilt on every call; fine for a
        # low-traffic demo Space, hoist to module level if this gets hot.
        client = InferenceClient(model=model, token=hf_token)
        response = client.text_generation(
            prompt,
            max_new_tokens=max_new_tokens,
            temperature=temperature,
        )
        return response
    except Exception as e:
        # Boundary-level catch: surface any network/API failure as a
        # readable string for display instead of crashing the UI.
        return f"Error: {str(e)}"