# Hugging Face Space (status at capture time: Sleeping)
import time

import requests
import streamlit as st

# Hosted inference endpoint for Mistral-7B-Instruct-v0.2.
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2"

# Auth header built from the Space's secret store; HF_TOKEN must be configured
# in the Space settings or st.secrets raises KeyError at import time.
headers = {
    "Authorization": f"Bearer {st.secrets['HF_TOKEN']}",
}
def query_model(prompt, timeout=60):
    """Send *prompt* to the hosted Mistral-7B-Instruct model and return text.

    Parameters:
        prompt: The instruction/prompt string forwarded as the model input.
        timeout: Seconds to wait for the HTTP response before giving up
            (new keyword with a default, so existing callers are unaffected).

    Returns:
        The generated text on success, otherwise a human-readable error
        string (this function never raises — errors surface as strings so
        the Streamlit UI can render them directly).
    """
    payload = {
        "inputs": prompt,
        "parameters": {
            "max_new_tokens": 800,
            "temperature": 0.7,
        },
    }
    # A missing timeout would hang the app indefinitely if the endpoint
    # stalls; network/connection failures are reported, not raised.
    try:
        response = requests.post(API_URL, headers=headers, json=payload, timeout=timeout)
    except requests.RequestException as exc:
        return f"⚠ Model Error: {exc}"
    # Gateway/5xx responses often return HTML, which .json() cannot parse —
    # guard the decode instead of crashing the app.
    try:
        result = response.json()
    except ValueError:
        return "Unexpected response from model."
    # Error payload (e.g. model still loading) comes back as a dict.
    if isinstance(result, dict) and "error" in result:
        return f"⚠ Model Error: {result['error']}"
    # Normal success response: a list of generation dicts.
    if isinstance(result, list) and len(result) > 0:
        return result[0]["generated_text"]
    return "Unexpected response from model."