# Hugging Face Spaces page residue ("Spaces: / Sleeping" status chrome),
# kept as a comment so the file remains valid Python.
import streamlit as st
import requests
import os

# Load the Hugging Face API token: environment variable first, then
# Streamlit secrets. NOTE(review): st.secrets["HF_TOKEN"] raises a
# KeyError/StreamlitSecretNotFoundError if the key is missing from
# secrets.toml — confirm that hard failure is intended.
HF_TOKEN = os.getenv("HF_TOKEN") or st.secrets["HF_TOKEN"]

# Hosted Inference API endpoint for the free gpt2 text-generation model.
API_URL = "https://api-inference.huggingface.co/models/gpt2"
headers = {"Authorization": f"Bearer {HF_TOKEN}"}
def query(payload):
    """POST *payload* to the HF Inference API and return the decoded JSON.

    Returns the parsed response body (list or dict, depending on the API),
    or a ``{"error": ...}`` dict when the body is not valid JSON.
    """
    response = requests.post(API_URL, headers=headers, json=payload)
    # Surface raw response details in the UI while debugging API issues.
    st.write("🔍 Debug - Status Code:", response.status_code)
    st.write("🔍 Debug - Raw Response:", response.text)
    try:
        return response.json()
    except ValueError:
        # Non-JSON body (e.g. an HTML error page while the model loads).
        return {"error": "Invalid JSON response from Hugging Face"}
# --- Streamlit UI ---
st.title("💡 Offline AI Chat App (HF Inference)")

user_input = st.text_area("Ask me something:")

if st.button("Send"):
    if user_input.strip():
        output = query({"inputs": user_input})
        # The API returns either {"error": ...} or a list of generations.
        if "error" in output:
            st.error(f"⚠️ Error: {output['error']}")
        # Guard against an empty list before indexing output[0]
        # (the original code raised IndexError on an empty response).
        elif isinstance(output, list) and output and "generated_text" in output[0]:
            # Successful generation: show the model's text.
            st.success("🤖 Reply:")
            st.write(output[0]["generated_text"])
        else:
            st.warning("⚠️ Unexpected response format. Check debug output above.")
    else:
        st.warning("Please enter a message first!")