File size: 797 Bytes
4ad42f2
5f1d48b
 
4ad42f2
 
5f1d48b
 
 
 
4ad42f2
 
 
 
 
 
 
 
 
 
 
5f1d48b
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
import requests
import streamlit as st
import time

# Hugging Face Inference API endpoint for the Mistral-7B-Instruct-v0.2 model.
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2"

# Auth header built from the Streamlit secrets store; requires an
# `HF_TOKEN` entry in .streamlit/secrets.toml (raises KeyError at
# import time if it is missing).
headers = {
    "Authorization": f"Bearer {st.secrets['HF_TOKEN']}"
}

def query_model(prompt):
    """Send *prompt* to the Hugging Face Inference API and return generated text.

    Parameters
    ----------
    prompt : str
        The instruction text forwarded verbatim as the model input.

    Returns
    -------
    str
        The model's generated text on success, or a human-readable
        error description on failure. Callers always receive a string;
        this function never raises for network or payload problems.
    """
    payload = {
        "inputs": prompt,
        "parameters": {
            "max_new_tokens": 800,
            "temperature": 0.7
        }
    }

    try:
        # Explicit timeout so a slow/unreachable endpoint cannot hang
        # the Streamlit app indefinitely.
        response = requests.post(API_URL, headers=headers, json=payload, timeout=60)
    except requests.RequestException as exc:
        # Connection errors, timeouts, DNS failures, etc.
        return f"⚠ Request failed: {exc}"

    try:
        result = response.json()
    except ValueError:
        # Non-JSON body, e.g. an HTML error page from a gateway.
        return f"⚠ Unexpected response (HTTP {response.status_code})."

    # Error payload from the API (model still loading, bad token,
    # rate limiting, ...) arrives as {"error": "..."}.
    if isinstance(result, dict) and "error" in result:
        return f"⚠ Model Error: {result['error']}"

    # Normal success: a non-empty list whose first item holds the text.
    if isinstance(result, list) and len(result) > 0:
        return result[0]["generated_text"]

    return "Unexpected response from model."