| | import os |
| | import shutil |
| | import tempfile |
| | import threading |
| | import gradio as gr |
| | from google import genai |
| | from google.genai import types |
| |
|
# Gemini API client; reads the key from the GEMINI_API_KEY env var.
# (A missing/invalid key fails on the first request, not at construction.)
client = genai.Client(api_key=os.environ.get("GEMINI_API_KEY"))
# Model id used for every chat session created by this app.
MODEL = "gemini-3.1-flash-lite-preview"


# Enable Google Search grounding for the model.
# NOTE(review): the Python SDK conventionally spells this snake_case
# (google_search=types.GoogleSearch()); the camelCase alias presumably works
# via pydantic field aliases — confirm against the installed google-genai version.
tools = [types.Tool(googleSearch=types.GoogleSearch())]
# Shared generation config: minimal "thinking" effort plus the search tool.
generate_content_config = types.GenerateContentConfig(
    thinking_config=types.ThinkingConfig(thinking_level="MINIMAL"),
    tools=tools,
)
| |
|
def respond(message, history, state):
    """Stream a Gemini reply for a multimodal message (text and/or files).

    Gradio generator callback: yields ``(history, state)`` tuples so the
    chatbot updates incrementally while the model streams tokens.

    Args:
        message: MultimodalTextbox value, a dict with "text" and "files"
            keys (may be None on the very first event).
        history: Chatbot message list in "messages" format, or None.
        state: Per-session dict holding the Gemini chat object and a lock,
            created lazily on first call.

    Yields:
        (history, state) pairs as the assistant reply streams in.
    """
    # Lazily create the per-session chat + lock on first use.
    if state is None:
        state = {
            "chat": client.chats.create(model=MODEL, config=generate_content_config),
            "lock": threading.Lock(),
        }

    history = history or []
    text = (message or {}).get("text", "")
    files = (message or {}).get("files", [])
    parts = []

    for file_path in files:
        parts.append(_upload_to_gemini(file_path))
        # Echo the uploaded file into the visible chat history.
        history.append({"role": "user", "content": gr.FileData(path=file_path)})

    if text:
        parts.append(text)
        history.append({"role": "user", "content": text})

    # Nothing to send (empty submit): just re-emit the current state.
    if not parts:
        yield history, state
        return

    # Placeholder assistant bubble that the stream fills in below.
    history.append({"role": "assistant", "content": ""})
    yield history, state

    # Serialize access to the chat session: a single SDK chat object is not
    # safe for concurrent send_message_stream calls from the same session.
    with state["lock"]:
        out = ""
        for chunk in state["chat"].send_message_stream(parts):
            if getattr(chunk, "text", None):
                out += chunk.text
                history[-1]["content"] = out
                yield history, state


def _upload_to_gemini(file_path):
    """Upload one local file to the Gemini File API; return the File object.

    The upload goes through an ASCII-safe, uniquely named temp copy because
    the File API may reject non-ASCII display names. The copy is always
    removed, even when the upload raises.
    """
    base_name = os.path.basename(file_path)
    # Drop non-ASCII characters from the display name.
    safe_name = base_name.encode("ascii", "ignore").decode("ascii")
    if not safe_name.strip(". "):
        # Name vanished entirely; keep the original extension when present
        # (the previous code hard-coded ".pdf" regardless of file type).
        ext = os.path.splitext(base_name)[1] or ".bin"
        safe_name = f"upload_file{ext}"

    # BUG FIX: a fixed path f"safe_{safe_name}" in the shared temp dir
    # collided (and clobbered) when two sessions uploaded files with the
    # same name concurrently; mkstemp yields a unique path per call.
    fd, safe_temp_path = tempfile.mkstemp(suffix=f"_{safe_name}")
    os.close(fd)
    try:
        shutil.copy2(file_path, safe_temp_path)
        return client.files.upload(
            file=safe_temp_path,
            config=types.UploadFileConfig(display_name=safe_name),
        )
    finally:
        if os.path.exists(safe_temp_path):
            os.remove(safe_temp_path)
| |
|
def cleanup_gemini_files():
    """Delete every file stored via the Gemini File API and report the count."""
    # Snapshot the names first so we never delete while the pager iterates.
    pending = [remote.name for remote in client.files.list()]
    for name in pending:
        client.files.delete(name=name)

    deleted_count = len(pending)
    # User-facing toast (German), same wording as before.
    gr.Info(f"{deleted_count} Datei(en) erfolgreich gelöscht.")
| |
|
def clear_msg():
    """Return an empty MultimodalTextbox value (clears text and file list)."""
    empty_value = {"text": "", "files": []}
    return empty_value
| |
|
with gr.Blocks() as demo:
    gr.Markdown("# Gemini AI Chat (Gradio 6.x & File API)")

    # Conversation view plus a combined text/file input box.
    # (Creation order matters: it determines on-screen layout.)
    chat_panel = gr.Chatbot(height=600)
    composer = gr.MultimodalTextbox(
        placeholder="Schreib was oder lade Dateien hoch…",
        file_count="multiple",
    )
    session = gr.State(None)

    # Stream the reply, then wipe the input box once respond() finishes.
    submit_event = composer.submit(
        respond,
        inputs=[composer, chat_panel, session],
        outputs=[chat_panel, session],
    )
    submit_event.then(clear_msg, outputs=composer)

    with gr.Row():
        clear_btn = gr.Button("Clear Chat")
        cleanup_btn = gr.Button("🗑️ Server-Dateien bereinigen")

    # Reset history, session state, and the input box in one shot.
    clear_btn.click(
        lambda: ([], None, {"text": "", "files": []}),
        outputs=[chat_panel, session, composer],
    )

    cleanup_btn.click(fn=cleanup_gemini_files)
| |
|
| | if __name__ == "__main__": |
| | demo.launch() |