# my-mistral-chat / app.py
# LifeHelperAI — Update app.py (commit c40c931, verified)
import streamlit as st
import requests
import os
# Hugging Face API token: prefer the environment variable; fall back to the
# Streamlit secrets store (st.secrets raises KeyError if neither is set).
HF_TOKEN = os.getenv("HF_TOKEN")
if not HF_TOKEN:
    HF_TOKEN = st.secrets["HF_TOKEN"]

# Inference endpoint for a small, freely hosted model.
API_URL = "https://api-inference.huggingface.co/models/gpt2"
headers = {"Authorization": f"Bearer {HF_TOKEN}"}
def query(payload):
    """POST *payload* to the Hugging Face Inference API and return parsed JSON.

    Writes the HTTP status code and raw response body to the Streamlit page
    for debugging. Always returns a JSON-decoded value on success, or a dict
    with an ``"error"`` key when the request fails or the body is not JSON.
    """
    try:
        # Without a timeout, a stalled endpoint would hang the Streamlit
        # script run forever; cap the wait and surface failures in the UI
        # instead of crashing with a raw traceback.
        response = requests.post(API_URL, headers=headers, json=payload, timeout=30)
    except requests.RequestException as exc:
        return {"error": f"Request failed: {exc}"}

    # Debugging info shown directly on the page.
    st.write("🔍 Debug - Status Code:", response.status_code)
    st.write("🔍 Debug - Raw Response:", response.text)

    try:
        return response.json()
    except ValueError:
        return {"error": "Invalid JSON response from Hugging Face"}
# --- Streamlit UI ---
st.title("💡 Offline AI Chat App (HF Inference)")

user_input = st.text_area("Ask me something:")

if st.button("Send"):
    if user_input.strip():
        output = query({"inputs": user_input})

        # The API returns either {"error": ...} or a list of generations.
        if "error" in output:
            st.error(f"⚠️ Error: {output['error']}")
        elif (
            isinstance(output, list)
            and output  # guard: an empty list would make output[0] raise IndexError
            and "generated_text" in output[0]
        ):
            # Extract the clean generated text from the first candidate.
            st.success("🤖 Reply:")
            st.write(output[0]["generated_text"])
        else:
            st.warning("⚠️ Unexpected response format. Check debug output above.")
    else:
        st.warning("Please enter a message first!")