File size: 1,073 Bytes
d922793 | 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 |
import gradio as gr
import requests
def predict_with_model(message, history, model_name):
    """Send the conversation to a local Ollama server and return the reply.

    Args:
        message: The user's newest chat message.
        history: Prior turns as provided by ``gr.ChatInterface`` — either a
            list of ``{"role", "content"}`` dicts (messages format) or a list
            of ``(user, assistant)`` pairs (tuples format). May be None/empty.
        model_name: Name of the Ollama model to query (from the dropdown).

    Returns:
        The assistant's reply text, or a German error string on failure
        (the UI displays it instead of crashing).
    """
    url = 'http://localhost:11434/api/chat'

    # Bug fix: the original discarded `history`, so the model never saw
    # earlier turns. Rebuild the full message list so the chat has memory.
    messages = []
    for turn in history or []:
        if isinstance(turn, dict):
            # messages-format history: pass role/content through as-is
            messages.append({'role': turn['role'], 'content': turn['content']})
        else:
            # tuples-format history: (user_text, assistant_text) pairs
            user_text, assistant_text = turn
            if user_text:
                messages.append({'role': 'user', 'content': user_text})
            if assistant_text:
                messages.append({'role': 'assistant', 'content': assistant_text})
    messages.append({'role': 'user', 'content': message})

    payload = {
        'model': model_name,
        'messages': messages,
        'stream': False
    }
    try:
        # Timeout added so a stalled/absent Ollama server cannot hang the UI;
        # generous because local LLM generation can legitimately be slow.
        response = requests.post(url, json=payload, timeout=300)
        response.raise_for_status()
        return response.json()['message']['content']
    except (requests.RequestException, KeyError, ValueError) as e:
        # RequestException: network/HTTP errors; KeyError/ValueError: the
        # server answered with unexpected or non-JSON content.
        return f"Fehler mit Modell {model_name}: {str(e)}"
# Models that the local Ollama instance is expected to serve; the first
# entry is preselected in the dropdown.
available_models = [
    "deepseek-coder-v2",
    "dolphin-mistral",
    "worm-r1",
    "deepseek-r1:7b",
]

# Assemble the UI: a model selector on top, wired into the chat widget
# as an extra input so every request carries the chosen model name.
with gr.Blocks(theme="soft") as multi_model_app:
    gr.Markdown("# Ollama Multi-Model Chat")

    model_dropdown = gr.Dropdown(
        label="Wähle ein Modell",
        value=available_models[0],
        choices=available_models,
    )

    gr.ChatInterface(
        fn=predict_with_model,
        description="Wähle dein bevorzugtes Modell aus dem Dropdown.",
        title="DeepSeek & Friends",
        additional_inputs=[model_dropdown],
    )

# Start the local Gradio server (blocks until shut down).
multi_model_app.launch()
|