import streamlit as st
import requests
import json
from PIL import Image
import io
import base64
import pandas as pd
import zipfile
import PyPDF2
import os  # Reserved for future use of environment variables

# --- Configuration ---
st.set_page_config(page_title="OpenRouter Free Interface", layout="wide")

OPENROUTER_API_BASE = "https://openrouter.ai/api/v1"

# --- Page Title ---
st.title("💸 OpenRouter Free-Tier Interface")
st.markdown("""
**Welcome to the All-OpenRouter-Free-Interface Deluxe!**
Chat with **free-tier** models via the OpenRouter API.
All models are subject to OpenRouter's rate limits.
""")

# --- Session State Management ---
# Use the single key 'messages' for the chat history
if "messages" not in st.session_state:
    st.session_state.messages = []
if "uploaded_content" not in st.session_state:
    st.session_state.uploaded_content = None

# --- File Processing (multimodal, based on the Gemini UI) ---
def encode_image(image):
    """Encodes a PIL image object as a Base64 string."""
    buf = io.BytesIO()
    image.save(buf, format="JPEG")
    return base64.b64encode(buf.getvalue()).decode("utf-8")

def process_file(uploaded_file):
    """Processes the uploaded file (text, image, PDF, ZIP) and extracts its content."""
    file_type = uploaded_file.name.split('.')[-1].lower()
    # Extensions treated as plain text; .csv and .xlsx are handled separately via pandas
    text_exts = ('.txt', '.csv', '.py', '.html', '.js', '.css', '.json', '.xml', '.sql')

    if file_type in ["jpg", "jpeg", "png"]:
        return {"type": "image", "content": Image.open(uploaded_file).convert('RGB')}

    if file_type in [ext.lstrip('.') for ext in text_exts if ext != '.csv']:
        return {"type": "text", "content": uploaded_file.read().decode("utf-8", errors="ignore")}

    if file_type in ["csv", "xlsx"]:
        try:
            if file_type == "csv":
                df = pd.read_csv(uploaded_file)
            else:  # xlsx
                df = pd.read_excel(uploaded_file)
            return {"type": "text", "content": df.to_string()}
        except Exception as e:
            return {"type": "error", "content": f"Error reading the table: {e}"}

    if file_type == "pdf":
        try:
            reader = PyPDF2.PdfReader(uploaded_file)
            # Extract the text of all pages and join it
            return {"type": "text", "content": "".join(page.extract_text() or "" for page in reader.pages)}
        except Exception as e:
            return {"type": "error", "content": f"PDF error: {e}"}

    if file_type == "zip":
        try:
            with zipfile.ZipFile(uploaded_file) as z:
                content = "ZIP Contents:\n"
                found_any = False
                for f in z.infolist():
                    if not f.is_dir() and f.filename.lower().endswith(text_exts):
                        found_any = True
                        content += f"\n📄 {f.filename}:\n"
                        # Make sure reading and decoding stays robust
                        content += z.read(f.filename).decode("utf-8", errors="ignore")
                if not found_any:
                    return {"type": "text", "content": "ZIP contains no readable text files."}
                return {"type": "text", "content": content}
        except Exception as e:
            return {"type": "error", "content": f"ZIP error: {e}"}

    return {"type": "error", "content": "Unsupported file format."}

# --- Sidebar ---
with st.sidebar:
    st.header("⚙️ API Settings")
    api_key = st.text_input("OpenRouter API Key", type="password")

    # --- Manually curated models (interchangeable with the fetch_free_models sketch above) ---
    FREE_MODEL_LIST = [
        "cognitivecomputations/dolphin-mistral-24b-venice-edition:free",
        "deepseek/deepseek-chat-v3",
        "google/gemma-2-9b-it",
        "mistralai/mistral-7b-instruct-v0.2",
        "qwen/qwen2-72b-instruct",
        "nousresearch/nous-hermes-2-mixtral-8x7b-dpo",  # An example of a large free model
    ]
    model = st.selectbox("Choose a model", FREE_MODEL_LIST, index=0)
    temperature = st.slider("Temperature", 0.0, 1.0, 0.7)
    max_tokens = st.slider("Max Tokens", 1, 4096, 512)

    if st.button("🔄 Chat Reset"):
        st.session_state.messages = []
        st.session_state.uploaded_content = None  # Also drop the attachment
        st.success("Chat history and attachment cleared.")

    st.markdown("""
    ---
    🧠 **Note:** These models are **free**, but may be restricted by rate limits.
    Your API key is only used **locally**.
    """)

# --- File Upload ---
uploaded_file = st.file_uploader(
    "Upload File (optional)",
    type=["jpg", "jpeg", "png", "txt", "pdf", "zip", "csv", "xlsx", "html", "css", "js", "py"],
)

# Process the uploaded file once and show a preview
if uploaded_file and st.session_state.uploaded_content is None:
    st.session_state.uploaded_content = process_file(uploaded_file)

if st.session_state.uploaded_content:
    processed = st.session_state.uploaded_content
    st.subheader("📎 Current Attachment:")
    if processed["type"] == "image":
        st.image(processed["content"], caption="Attached Image", width=300)
    elif processed["type"] == "text":
        st.text_area("File Preview", processed["content"], height=150)
    elif processed["type"] == "error":
        st.error(processed["content"])
    if st.button("❌ Remove Attachment"):
        st.session_state.uploaded_content = None
        st.rerun()  # Needed so the file_uploader is visually reset

# --- Display chat history ---
for msg in st.session_state.messages:
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])

# --- API Request Function ---
def call_openrouter(model, messages, temp, max_tok, key):
    """Sends the request to OpenRouter (OpenAI-compatible chat completions schema)."""
    headers = {
        "Authorization": f"Bearer {key}",
        "Content-Type": "application/json",
        # Optional attribution headers recommended by OpenRouter
        "HTTP-Referer": "https://aicodecraft.io",
        "X-Title": "OpenRouter-Free-Interface",
    }
    payload = {
        "model": model,
        "messages": messages,
        "temperature": temp,
        "max_tokens": max_tok,
    }
    res = requests.post(
        f"{OPENROUTER_API_BASE}/chat/completions",
        headers=headers,
        json=payload,
        timeout=120,
    )
    if res.status_code == 200:
        try:
            # Extract the assistant reply from the first choice
            return res.json()["choices"][0]["message"]["content"]
        except (KeyError, IndexError):
            raise Exception("Malformed API response: could not extract the reply text.")
    else:
        try:
            err = res.json()
            # Try to surface the specific error message
            msg = err.get("error", {}).get("message", res.text)
        except ValueError:
            msg = res.text
        raise Exception(f"API Error {res.status_code}: {msg}")

# --- Chat Input ---
if prompt := st.chat_input("Your message..."):
    if not api_key:
        st.warning("Please enter your OpenRouter API key in the sidebar.")
        st.stop()

    # Add the message to the history and display it
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Prepare the API messages (full chat history)
    messages = [{"role": m["role"], "content": m["content"]} for m in st.session_state.messages]

    # Attach the file, if present (multimodal handling)
    if st.session_state.uploaded_content:
        content = st.session_state.uploaded_content
        # OpenRouter/OpenAI multimodality: images as an 'image_url' part with Base64 data
        if content["type"] == "image":
            base64_img = encode_image(content["content"])
            # Build the multimodal content for the API
            messages[-1]["content"] = [
                {"type": "text", "text": prompt},
                {"type": "image_url", "image_url": {"url": f"data:image/jpeg;base64,{base64_img}"}},
            ]
        # Simply append text file content to the last prompt
        elif content["type"] == "text":
            messages[-1]["content"] += f"\n\n[Attached File Content]\n{content['content']}"

    # Generate the reply
    with st.chat_message("assistant"):
        with st.spinner(f"Querying {model}..."):
            try:
                reply = call_openrouter(model, messages, temperature, max_tokens, api_key)
                st.markdown(reply)
                st.session_state.messages.append({"role": "assistant", "content": reply})
            except Exception as e:
                st.error(str(e))
                st.session_state.messages.append({"role": "assistant", "content": f"❌ {str(e)}"})