# Hugging Face Spaces page chrome ("Spaces: Running") — not part of the app source.
# Standard library
import base64
import io
import json
import os
import zipfile

# Third-party
import pandas as pd
import PyPDF2
import requests
import streamlit as st
from PIL import Image
# --- Configuration ---
st.set_page_config(page_title="OpenRouter Free Interface", layout="wide", initial_sidebar_state="expanded")
# Base URL for all OpenRouter REST calls (models listing + chat completions).
OPENROUTER_API_BASE = "https://openrouter.ai/api/v1"
# --- Page Title ---
st.title("💸 OpenRouter Free-Tier Interface")
st.markdown("""
**Willkommen im All-OpenRouter-Free-Interface Deluxe!**
Chatte mit **kostenlosen (Free-Tier)** Modellen über die OpenRouter API.
Alle Modelle unterliegen den OpenRouter-Ratenbegrenzungen.
""")
# --- Session State Management ---
# Streamlit re-executes this script on every interaction; session_state
# persists the chat transcript and the processed upload across reruns.
if "messages" not in st.session_state:
    st.session_state.messages = []  # list of {"role": ..., "content": ...} dicts
if "uploaded_content" not in st.session_state:
    st.session_state.uploaded_content = None  # result dict from process_file(), or None
# --- File Processing ---
def encode_image(image):
    """Serialize a PIL image to JPEG and return it as a base64 ASCII string."""
    with io.BytesIO() as jpeg_buffer:
        image.save(jpeg_buffer, format="JPEG")
        raw_bytes = jpeg_buffer.getvalue()
    return base64.b64encode(raw_bytes).decode("utf-8")
def process_file(uploaded_file):
    """Convert an uploaded file into a uniform dict for the chat pipeline.

    Parameters
    ----------
    uploaded_file : file-like object with a ``.name`` attribute
        The object returned by ``st.file_uploader``.

    Returns
    -------
    dict
        ``{"type": "image", "content": PIL.Image}`` for pictures,
        ``{"type": "text", "content": str}`` for readable documents, or
        ``{"type": "error", "content": str}`` when the file cannot be read.
    """
    file_type = uploaded_file.name.split('.')[-1].lower()
    # Extensions also used to pick readable members out of ZIP archives.
    text_exts = ('.txt', '.csv', '.py', '.html', '.js', '.css', '.json', '.xml', '.sql', '.xlsx')
    # Extensions read directly as UTF-8 text (tables are handled separately below;
    # this is the same effective set the old list-comprehension check produced).
    plain_text_types = {'txt', 'py', 'html', 'js', 'css', 'json', 'xml', 'sql'}

    if file_type in ("jpg", "jpeg", "png"):
        # Force RGB so a later JPEG re-encode cannot fail on alpha channels.
        return {"type": "image", "content": Image.open(uploaded_file).convert('RGB')}
    if file_type in plain_text_types:
        return {"type": "text", "content": uploaded_file.read().decode("utf-8", errors="ignore")}
    if file_type in ("csv", "xlsx"):
        try:
            df = pd.read_csv(uploaded_file) if file_type == "csv" else pd.read_excel(uploaded_file)
            return {"type": "text", "content": df.to_string()}
        except Exception as e:
            return {"type": "error", "content": f"Fehler beim Lesen der Tabelle: {e}"}
    if file_type == "pdf":
        try:
            reader = PyPDF2.PdfReader(uploaded_file)
            # extract_text() may return None for image-only pages; substitute "".
            return {"type": "text", "content": "".join(page.extract_text() or "" for page in reader.pages)}
        except Exception as e:
            return {"type": "error", "content": f"PDF Fehler: {e}"}
    if file_type == "zip":
        try:
            with zipfile.ZipFile(uploaded_file) as z:
                parts = []
                for f in z.infolist():
                    if not f.is_dir() and f.filename.lower().endswith(text_exts):
                        parts.append(f"\n📄 {f.filename}:\n")
                        parts.append(z.read(f.filename).decode("utf-8", errors="ignore"))
                # Bug fix: the old code seeded the buffer with "ZIP Contents:\n",
                # so the "no readable files" fallback could never trigger.
                if not parts:
                    return {"type": "text", "content": "ZIP enthält keine lesbaren Textdateien."}
                return {"type": "text", "content": "ZIP Contents:\n" + "".join(parts)}
        except Exception as e:
            return {"type": "error", "content": f"ZIP Fehler: {e}"}
    return {"type": "error", "content": "Nicht unterstütztes Dateiformat."}
# --- Context-Length Fetch (WITH CACHING) ---
# Cache for 1 hour to save API calls. Bug fix: the comments promised caching,
# but the decorator was missing, so every rerun re-fetched the model list.
@st.cache_data(ttl=3600)
def fetch_model_contexts(api_key):
    """Fetch ``{model_id: context_length}`` for all models visible to this key.

    Returns an empty dict when the key is missing or the request fails, so
    callers can fall back to a conservative default context size.
    """
    if not api_key:
        return {}
    headers = {"Authorization": f"Bearer {api_key}"}
    try:
        res = requests.get(f"{OPENROUTER_API_BASE}/models", headers=headers, timeout=10)
        contexts = {}
        if res.status_code == 200:
            for m in res.json().get("data", []):
                # Fall back to 4096 tokens when the API omits context_length.
                contexts[m.get("id")] = m.get("context_length", 4096)
        return contexts
    except Exception:
        # Fail soft: no st.warning here — a warning raised inside a cached
        # function would be replayed from cache; callers treat {} as "unknown".
        return {}
# --- Sidebar ---
with st.sidebar:
    st.header("⚙️ API Settings")
    api_key = st.text_input("OpenRouter API Key", type="password")
    # Free models (fallback list, not fetched from the API).
    # NOTE(review): only the first entry carries the ":free" suffix — the other
    # IDs may resolve to paid variants on OpenRouter; verify against the model list.
    FREE_MODEL_LIST = [
        "cognitivecomputations/dolphin-mistral-24b-venice-edition:free",
        "deepseek/deepseek-chat-v3",
        "google/gemma-2-9b-it",
        "mistralai/mistral-7b-instruct-v0.2",
        "qwen/qwen2-72b-instruct",
        "nousresearch/nous-hermes-2-mixtral-8x7b-dpo",
    ]
    model = st.selectbox("Wähle ein Modell", FREE_MODEL_LIST, index=0)
    # Adapt the context size automatically (re-fetched only when needed).
    model_contexts = fetch_model_contexts(api_key)
    default_ctx = model_contexts.get(model, 4096)  # conservative default when unknown
    temperature = st.slider("Temperature", 0.0, 1.0, 0.7)
    # Cap the slider at the model's context length (hard ceiling 32000);
    # default output budget is 512 tokens (or less for tiny contexts).
    max_tokens = st.slider(
        f"Max Output Tokens (max {default_ctx})",
        1,
        min(default_ctx, 32000),
        min(512, default_ctx)
    )
    st.caption(f"🔢 Model Context Length: {default_ctx}")
    st.markdown("---")
    # Full reset: clears both the transcript and the attachment.
    if st.button("🔄 Chat Reset (Full)"):
        st.session_state.messages = []
        st.session_state.uploaded_content = None
        # Rerun to clear st.file_uploader and return the app to its initial state.
        # NOTE(review): st.experimental_rerun() was removed in newer Streamlit
        # releases in favor of st.rerun() — confirm the pinned Streamlit version.
        st.experimental_rerun()
    st.markdown("""
---
🧠 **Hinweis:** Dein API-Key wird nur **lokal** verwendet, um Anfragen an OpenRouter zu authentifizieren.
""")
# --- File Upload ---
uploaded_file = st.file_uploader("Upload File (optional)",
    type=["jpg", "jpeg", "png", "txt", "pdf", "zip", "csv", "xlsx", "html", "css", "js", "py"])
# Process only once: reruns reuse the dict already stashed in session_state.
if uploaded_file and st.session_state.uploaded_content is None:
    st.session_state.uploaded_content = process_file(uploaded_file)
    # Rerun so the preview below appears immediately.
    st.experimental_rerun()
if st.session_state.uploaded_content:
    processed = st.session_state.uploaded_content
    st.subheader("📎 Current Attachment:")
    # Render the preview according to the "type" tag set by process_file().
    if processed["type"] == "image":
        st.image(processed["content"], caption="Attached Image", width=300)
    elif processed["type"] == "text":
        st.text_area("File Preview", processed["content"], height=150)
    elif processed["type"] == "error":
        st.error(processed["content"])
    # Remove only the attachment (keeps the chat transcript).
    if st.button("❌ Remove Attachment"):
        st.session_state.uploaded_content = None
        st.experimental_rerun()
# --- Render chat history ---
for msg in st.session_state.messages:
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])
# --- API Call ---
def call_openrouter(model, messages, temp, max_tok, key):
    """Send a chat-completion request to OpenRouter and return the reply text.

    Parameters
    ----------
    model : str
        OpenRouter model ID.
    messages : list
        OpenAI-style chat messages (str or multimodal list content).
    temp : float
        Sampling temperature.
    max_tok : int
        Maximum number of output tokens.
    key : str
        OpenRouter API key.

    Raises
    ------
    Exception
        With a human-readable message on HTTP errors or malformed responses
        (the caller displays ``str(e)`` in the chat UI).
    """
    headers = {
        "Authorization": f"Bearer {key}",
        "Content-Type": "application/json",
        # Bug fix: OpenRouter's attribution header is "HTTP-Referer" per its
        # API docs; a plain "Referer" header is not what the API inspects.
        "HTTP-Referer": "https://aicodecraft.io",
        "X-Title": "OpenRouter-Free-Interface",
    }
    payload = {
        "model": model,
        "messages": messages,
        "temperature": temp,
        "max_tokens": max_tok,
    }
    # json= lets requests serialize the payload and set the charset itself;
    # the timeout stops a stalled connection from hanging the Streamlit worker.
    res = requests.post(f"{OPENROUTER_API_BASE}/chat/completions",
                        headers=headers, json=payload, timeout=120)
    if res.status_code == 200:
        try:
            return res.json()["choices"][0]["message"]["content"]
        except (KeyError, IndexError, ValueError):
            raise Exception("Fehlerhafte API-Antwort: Konnte Antworttext nicht extrahieren.")
    # Try to surface the structured error message; fall back to the raw body.
    try:
        msg = res.json().get("error", {}).get("message", res.text)
    except (ValueError, AttributeError):
        msg = res.text
    raise Exception(f"API Error {res.status_code}: {msg}")
# --- Chat Input ---
if prompt := st.chat_input("Deine Nachricht..."):
    if not api_key:
        st.warning("Bitte trage deinen OpenRouter API Key in der Sidebar ein.")
        st.stop()  # abort this rerun; nothing is sent without a key
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)
    # Shallow-copy the transcript into fresh dicts so the attachment below is
    # added only to the outgoing payload, not to the stored chat history.
    messages = [{"role": m["role"], "content": m["content"]} for m in st.session_state.messages]
    if st.session_state.uploaded_content:
        # NOTE(review): the attachment persists in session_state, so it is
        # re-attached to every subsequent prompt until removed — confirm intended.
        content = st.session_state.uploaded_content
        if content["type"] == "image":
            base64_img = encode_image(content["content"])
            # OpenRouter multimodality (OpenAI schema): replace the last user
            # message's string content with a text + image_url part list.
            messages[-1]["content"] = [
                {"type": "text", "text": prompt},
                {"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,{base64_img}"}}
            ]
        elif content["type"] == "text":
            # Simply append text-file contents to the latest prompt.
            messages[-1]["content"] += f"\n\n[Attached File Content]\n{content['content']}"
    with st.chat_message("assistant"):
        with st.spinner(f"Fragend {model}..."):
            try:
                reply = call_openrouter(model, messages, temperature, max_tokens, api_key)
                st.markdown(reply)
                st.session_state.messages.append({"role": "assistant", "content": reply})
            except Exception as e:
                # Show the error inline and keep it in the transcript so the
                # user sees which turn failed.
                st.error(str(e))
                st.session_state.messages.append({"role": "assistant", "content": f"❌ {str(e)}"})