File size: 395 Bytes
90aaac1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
import gradio as gr
from llama_cpp import Llama

# Load the local GGUF model once at startup so all requests share one instance.
MODEL_PATH = "mini_llama.gguf"
llm = Llama(model_path=MODEL_PATH)

def chat(message, history):
    """Generate a model reply for the Gradio chat callback.

    Args:
        message: The latest user message.
        history: Prior turns supplied by ``gr.ChatInterface`` — either
            ``(user, assistant)`` tuples or role/content dicts, depending
            on the Gradio version.

    Returns:
        The raw completion text produced by the model.
    """
    # Bug fix: the original ignored `history`, so the model answered every
    # turn with no conversation context. Build a plain "role: text"
    # transcript prompt from the prior turns instead.
    # NOTE(review): this is a generic transcript format — if the model has
    # its own chat template, adapt the prompt to it.
    if not history:
        # First turn: send the bare message, exactly as the original did.
        prompt = message
    else:
        lines = []
        for turn in history:
            if isinstance(turn, dict):
                # "messages" format (newer Gradio): {"role": ..., "content": ...}
                lines.append(f"{turn.get('role', 'user')}: {turn.get('content', '')}")
            else:
                # Tuple format (older Gradio): (user_text, assistant_text)
                user_text, bot_text = turn
                if user_text:
                    lines.append(f"user: {user_text}")
                if bot_text:
                    lines.append(f"assistant: {bot_text}")
        lines.append(f"user: {message}")
        lines.append("assistant:")
        prompt = "\n".join(lines)

    output = llm(prompt, max_tokens=500)
    return output['choices'][0]['text']

# Build the chat UI and launch it with a public share link.
demo = gr.ChatInterface(
    fn=chat,
    title="Mini Llama",
    description="Чат с моделью",
)
demo.launch(share=True)