Spaces:
Paused
Paused
import os
import streamlit as st
import tempfile
import requests
import json

# Removed imports: PIL.Image, io, base64, pandas, zipfile, PyPDF2 —
# no longer needed since the file-manager feature was dropped.

# ----------------------------------------------------
# Workaround for a PermissionError on hosted platforms:
# force Streamlit to keep its temporary/config files in the
# writable /tmp area instead of a read-only home directory.
# ----------------------------------------------------

# 1. Create a temporary, writable home directory for Streamlit.
TEMP_STREAMLIT_HOME = os.path.join(tempfile.gettempdir(), "st_config_workaround")
os.makedirs(TEMP_STREAMLIT_HOME, exist_ok=True)

# 2. Point Streamlit at it and disable usage-stats collection.
os.environ["STREAMLIT_HOME"] = TEMP_STREAMLIT_HOME
os.environ["STREAMLIT_GATHER_USAGE_STATS"] = "false"

# 3. Drop a minimal config.toml so Streamlit does not attempt to write one.
CONFIG_PATH = os.path.join(TEMP_STREAMLIT_HOME, "config.toml")
CONFIG_CONTENT = """
[browser]
gatherUsageStats = false
"""
if not os.path.exists(CONFIG_PATH):
    try:
        with open(CONFIG_PATH, "w") as f:
            f.write(CONFIG_CONTENT)
    except Exception as e:
        # Best effort only — the environment variables above are the primary fix.
        print(f"WARNUNG: Konnte config.toml nicht schreiben: {e}")

# ----------------------------------------------------
# End of workarounds
# ----------------------------------------------------
# --- Configuration ---
# NOTE: the 'wide' layout setting is kept intentionally.
st.set_page_config(page_title="OpenRouter Minimal Chat UI", layout="wide")

OPENROUTER_API_BASE = "https://openrouter.ai/api/v1"

# --- Page title ---
st.title("💸 OpenRouter Minimal Chat Interface")
st.markdown("""
**Willkommen im OpenRouter Minimal Chat Interface!**
Chatte mit **kostenlosen (Free-Tier)** Modellen über die OpenRouter API. Nur Text-Chat.
""")

# --- Session-state management ---
# (st.session_state.uploaded_content was removed along with the file manager.)
st.session_state.setdefault("messages", [])
| # --- Context-Length Fetch (Wird beibehalten, da für den Slider wichtig) --- | |
def fetch_model_contexts(api_key):
    """Fetch all OpenRouter models and their context lengths.

    Args:
        api_key: OpenRouter API key; falsy values short-circuit to {}.

    Returns:
        Dict mapping model id -> context_length (4096 when the API omits
        the field). Returns {} on any network/parse failure or non-200
        status — callers treat the result as best-effort.
    """
    if not api_key:
        return {}
    headers = {"Authorization": f"Bearer {api_key}"}
    try:
        res = requests.get(f"{OPENROUTER_API_BASE}/models", headers=headers, timeout=10)
        if res.status_code != 200:
            return {}
        return {
            m["id"]: m.get("context_length", 4096)
            for m in res.json().get("data", [])
            if m.get("id")  # FIX: skip malformed entries (original stored a None key)
        }
    except Exception:
        # Best effort: the UI falls back to a 4096-token default.
        return {}
# --- Sidebar: API key, model choice, generation settings ---
with st.sidebar:
    st.header("⚙️ API Settings")
    api_key = st.text_input("OpenRouter API Key", type="password")

    # Manually curated list of free-tier models.
    FREE_MODEL_LIST = [
        "cognitivecomputations/dolphin-mistral-24b-venice-edition:free",
        "deepseek/deepseek-chat-v3.1:free",
        "nvidia/nemotron-nano-9b-v2:free",
        "google/gemma-3-27b-it:free",
        "openai/gpt-oss-20b:free",
        "qwen/qwen3-coder:free",
        "qwen/qwen2.5-vl-72b-instruct:free",
        "nousresearch/deephermes-3-llama-3-8b-preview:free",
    ]
    model = st.selectbox("Wähle ein Modell", FREE_MODEL_LIST, index=0)

    # Fetch context length for the selected model (falls back to 4096
    # when the API is unreachable or the key is missing).
    model_contexts = fetch_model_contexts(api_key)
    default_ctx = model_contexts.get(model, 4096)

    temperature = st.slider("Temperature", 0.0, 1.0, 0.7)
    max_tokens = st.slider(
        f"Max Output Tokens (max {default_ctx})",
        1,
        min(default_ctx, 128000),
        min(512, default_ctx)
    )
    st.caption(f"🔢 Model Context Length (Fallback 4096): {default_ctx}")

    if st.button("🔄 Chat Reset"):
        st.session_state.messages = []
        st.success("Chat-Verlauf gelöscht.")
        # FIX: st.experimental_rerun() was deprecated and removed in recent
        # Streamlit releases; st.rerun() is the supported replacement (the
        # file already uses modern APIs like st.chat_input/st.chat_message).
        st.rerun()

    st.markdown("""
---
**Minimal UI:** Nur Text-Chat.
""")
# --- File-upload section removed entirely ---

# --- Render the stored chat history ---
for entry in st.session_state.messages:
    with st.chat_message(entry["role"]):
        st.markdown(entry["content"])
# --- API request helper ---
def call_openrouter(model, messages, temp, max_tok, key):
    """Send a chat-completion request to OpenRouter and return the reply text.

    Args:
        model: model id string.
        messages: list of {"role", "content"} dicts (full chat history).
        temp: sampling temperature.
        max_tok: maximum number of output tokens.
        key: OpenRouter API key.

    Returns:
        The assistant's reply text.

    Raises:
        Exception: on HTTP errors or a malformed API response (callers
        display str(e) in the UI).
    """
    headers = {
        "Authorization": f"Bearer {key}",
        "Content-Type": "application/json",
        "Referer": "https://aicodecraft.io",
        "X-Title": "OpenRouter-Minimal-Interface",
    }
    payload = {
        "model": model,
        "messages": messages,
        "temperature": temp,
        "max_tokens": max_tok,
    }
    # FIX: json=payload lets requests serialize the body (instead of manual
    # json.dumps), and the timeout prevents the UI from hanging forever on
    # a stalled connection.
    res = requests.post(
        f"{OPENROUTER_API_BASE}/chat/completions",
        headers=headers,
        json=payload,
        timeout=120,
    )
    if res.status_code == 200:
        try:
            return res.json()["choices"][0]["message"]["content"]
        except (KeyError, IndexError):
            raise Exception("Fehlerhafte API-Antwort: Konnte Antworttext nicht extrahieren.")
    # Error path: try to extract a human-readable message from the body.
    try:
        err = res.json()
        msg = err.get("error", {}).get("message", res.text)
    except Exception:  # FIX: bare except would also swallow SystemExit/KeyboardInterrupt
        msg = res.text
    raise Exception(f"API Error {res.status_code}: {msg}")
# --- Chat input: append, display, and answer the user's message ---
if prompt := st.chat_input("Deine Nachricht..."):
    # Guard: an API key is required before anything is sent.
    if not api_key:
        st.warning("Bitte trage deinen OpenRouter API Key in der Sidebar ein.")
        st.stop()

    # Record and echo the user's message.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Build the API payload from the complete stored history.
    history = [{"role": m["role"], "content": m["content"]} for m in st.session_state.messages]

    # Generate and render the assistant's reply; errors are shown inline
    # and recorded in the history so the conversation stays consistent.
    with st.chat_message("assistant"):
        with st.spinner(f"Fragend {model}..."):
            try:
                reply = call_openrouter(model, history, temperature, max_tokens, api_key)
                st.markdown(reply)
                st.session_state.messages.append({"role": "assistant", "content": reply})
            except Exception as e:
                st.error(str(e))
                st.session_state.messages.append({"role": "assistant", "content": f"❌ {str(e)}"})