File size: 4,886 Bytes
240219c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
import json
import os
import re
import subprocess
from typing import Any, Dict, List

import gradio as gr


def list_models() -> List[Dict[str, Any]]:
    """List locally available Ollama models.

    Runs ``ollama list`` and parses its column-aligned table output.

    Returns:
        A list of dicts with keys ``name``, ``id``, ``size`` and
        ``modified``; on failure, a single-element list whose dict
        carries an ``error`` key with the message.
    """
    try:
        result = subprocess.run(
            ["ollama", "list"],
            capture_output=True,
            text=True,
            check=True
        )
        models = []
        for line in result.stdout.splitlines():
            # Skip the header row and blank lines.
            if not line.strip() or line.startswith("NAME"):
                continue
            # `ollama list` pads columns with runs of spaces (not tabs),
            # and SIZE itself contains a single space ("4.7 GB"), so the
            # original split("\t") raised ValueError on real output.
            # Split on 2+ whitespace chars instead, capping at 4 fields.
            parts = re.split(r"\s{2,}", line.strip(), maxsplit=3)
            if len(parts) < 4:
                continue  # malformed row — skip rather than crash
            name, model_id, size, modified = parts
            models.append({
                "name": name,
                "id": model_id,  # renamed local: `id` shadowed the builtin
                "size": size,
                "modified": modified
            })
        return models
    except Exception as e:
        # Surface the failure in the same JSON-friendly shape the UI expects.
        return [{"error": str(e)}]


def pull_model(model_name: str) -> str:
    """Download an Ollama model via ``ollama pull``.

    Args:
        model_name: Name of the model to fetch (e.g. ``llama3``).

    Returns:
        A success message including the command output, or an error
        message describing what went wrong.
    """
    command = ["ollama", "pull", model_name]
    try:
        completed = subprocess.run(
            command,
            capture_output=True,
            text=True,
            check=True,
        )
    except subprocess.CalledProcessError as exc:
        # `ollama` ran but exited non-zero — relay its stderr.
        return f"Błąd podczas pobierania modelu: {exc.stderr}"
    except Exception as exc:
        # Anything else (e.g. the binary is missing entirely).
        return f"Nieoczekiwany błąd: {str(exc)}"
    return f"Pomyślnie pobrano model: {model_name}\n{completed.stdout}"


def run_model(prompt: str, model_name: str = "llama3") -> str:
    """Run an Ollama model on a single prompt and return its output.

    Args:
        prompt: The text to send to the model.
        model_name: Model to invoke (defaults to ``llama3``).

    Returns:
        The model's stdout on success, otherwise a human-readable
        error message (never raises).
    """
    try:
        result = subprocess.run(
            ["ollama", "run", model_name, prompt],
            capture_output=True,
            text=True,
            check=True,
            timeout=60  # 1 minute timeout
        )
        return result.stdout
    except subprocess.TimeoutExpired:
        # Previously fell through to the generic handler, yielding an
        # uninformative message; report the timeout explicitly.
        return f"Przekroczono limit czasu (60 s) dla modelu: {model_name}"
    except subprocess.CalledProcessError as e:
        return f"Błąd podczas uruchamiania modelu:\n{e.stderr}"
    except Exception as e:
        return f"Nieoczekiwany błąd: {str(e)}"


def get_model_info(model_name: str) -> str:
    """Return the output of ``ollama show`` for the given model.

    Args:
        model_name: Name of the model to inspect.

    Returns:
        The raw ``ollama show`` output, or an error message on failure.
    """
    command = ["ollama", "show", model_name]
    try:
        completed = subprocess.run(
            command,
            capture_output=True,
            text=True,
            check=True,
        )
    except Exception as exc:
        # Collapse every failure mode into a single user-facing message,
        # matching the original's broad handler.
        return f"Błąd podczas pobierania informacji o modelu: {str(exc)}"
    return completed.stdout


# Top-level Gradio UI: four tabs, each wiring one of the Ollama CLI
# helpers above to simple textbox/button components. `demo` is the
# module-level app object launched from the __main__ guard below.
with gr.Blocks(title="Ollama Server Example", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🦙 Ollama Server Example\nProsty serwer Ollama na Hugging Face Spaces z interfejsem Gradio")

    with gr.Tabs():
        # Tab 1: send a prompt to a chosen model via run_model().
        with gr.Tab("🔄 Uruchom Model"):
            with gr.Row():
                model_input = gr.Textbox(
                    label="Nazwa modelu (domyślnie: llama3)",
                    value="llama3",
                    interactive=True
                )
                prompt_input = gr.Textbox(
                    label="Prompt",
                    placeholder="Wprowadź tekst...",
                    lines=3
                )

            run_btn = gr.Button("🚀 Uruchom Model", variant="primary")
            output_text = gr.Textbox(label="Wynik", lines=10)

            # NOTE: run_model(prompt, model_name) — the inputs list order
            # must match the function's parameter order.
            run_btn.click(
                fn=run_model,
                inputs=[prompt_input, model_input],
                outputs=output_text
            )

        # Tab 2: download a model via pull_model().
        with gr.Tab("📦 Pobierz Model"):
            with gr.Row():
                pull_model_input = gr.Textbox(
                    label="Nazwa modelu do pobrania (np. llama3, mistral)",
                    placeholder="llama3"
                )

            pull_btn = gr.Button("💾 Pobierz Model", variant="secondary")
            pull_output = gr.Textbox(label="Status pobierania", lines=5)

            pull_btn.click(
                fn=pull_model,
                inputs=pull_model_input,
                outputs=pull_output
            )

        # Tab 3: show installed models as JSON via list_models().
        with gr.Tab("📊 Lista Modeli"):
            refresh_btn = gr.Button("🔄 Odśwież Listę Modeli", variant="primary")
            models_table = gr.JSON(label="Dostępne modele")

            refresh_btn.click(
                fn=list_models,
                outputs=models_table
            )

            # Load models on startup (demo.load fires once when the page opens).
            demo.load(
                fn=list_models,
                outputs=models_table
            )

        # Tab 4: dump `ollama show` output for a model via get_model_info().
        with gr.Tab("ℹ️ Informacje o Modelu"):
            with gr.Row():
                info_model_input = gr.Textbox(
                    label="Nazwa modelu",
                    value="llama3"
                )

            info_btn = gr.Button("📋 Pobierz Informacje", variant="secondary")
            info_output = gr.Textbox(label="Informacje o modelu", lines=15)

            info_btn.click(
                fn=get_model_info,
                inputs=info_model_input,
                outputs=info_output
            )


if __name__ == "__main__":
    # Start the Gradio server only when executed as a script, not on import.
    demo.launch()