# Hugging Face Spaces app — previous revision failed with "Build error".
| import subprocess | |
| import gradio as gr | |
# Path to the llama.cpp CLI binary produced by the CMake build.
# NOTE(review): the original code stored this binary path in MODEL_PATH and
# then invoked the long-removed "/app/llama.cpp/main" executable, passing the
# llama-cli binary itself as the -m model argument — the likely cause of the
# build/runtime error. Binary and model are now kept separate.
LLAMA_BIN = "/app/llama.cpp/build/bin/llama-cli"

# Path to the GGUF model file.
# TODO(review): confirm the real model location inside the image; the original
# code never pointed -m at an actual .gguf file.
MODEL_PATH = "/app/model.gguf"


def generate(prompt: str, temperature: float) -> str:
    """Run llama-cli once for *prompt* and return the generated text.

    Args:
        prompt: Text prompt, forwarded verbatim via ``-p``.
        temperature: Sampling temperature, forwarded via ``--temp``.

    Returns:
        llama-cli's stdout on success; on a non-zero exit code, the exit
        code plus captured stderr, so failures show up in the UI instead
        of an empty output box.
    """
    cmd = [
        LLAMA_BIN,
        "-m", MODEL_PATH,
        "-p", prompt,
        "--temp", str(temperature),
        "-n", "512",
        "--ctx-size", "512",
        "--no-mmap",
    ]
    # List argv with shell=False (the default): the user-supplied prompt is
    # never interpreted by a shell.
    result = subprocess.run(
        cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
    )
    if result.returncode != 0:
        # Surface the failure instead of silently returning an empty string.
        return f"[llama-cli exited with code {result.returncode}]\n{result.stderr}"
    return result.stdout
# Wire the generator into a minimal two-input Gradio UI and serve it on the
# port Hugging Face Spaces expects (7860), bound to all interfaces.
prompt_box = gr.Textbox(label="Prompt", lines=4)
temp_slider = gr.Slider(0.01, 2.0, value=0.8, step=0.01, label="Temperature")
output_box = gr.Textbox(label="Output", lines=10)

iface = gr.Interface(
    fn=generate,
    inputs=[prompt_box, temp_slider],
    outputs=output_box,
    title="SLiNeP 7M GGUF",
    description="Tiny 7M parameter GGUF running with llama.cpp",
)
iface.launch(server_name="0.0.0.0", server_port=7860)