Hugging Face Space (status: Runtime error)
import gradio as gr
import time
import ctypes  # to call into the llama.cpp C API directly
import llama_cpp
from llama_cpp import Llama
from huggingface_hub import hf_hub_download  # fetch model weights from the Hugging Face Hub

# Download the GGML-quantized Vigogne chat model once and load it.
# n_ctx=2048 gives a larger context window than the library default.
llm = Llama(
    model_path=hf_hub_download(
        repo_id="TheBloke/Vigogne-2-7B-Chat-GGML",
        filename="vigogne-2-7b-chat.ggmlv3.q4_1.bin",
    ),
    n_ctx=2048,
)
# NOTE(review): this module-level `history` is never read — gr.ChatInterface
# passes its own per-session history into the callback below. Kept for
# backward compatibility with any external reference.
history = []


def generate_text(input_text, history):
    """Generate one chat reply for gr.ChatInterface.

    Parameters
    ----------
    input_text : str
        The user's latest message; used verbatim as the model prompt.
    history : list
        Conversation history supplied by ChatInterface. It is only
        printed for debugging and is NOT folded into the prompt, so the
        model has no memory of earlier turns.

    Returns
    -------
    str
        The model's completion text.
    """
    print("history ", history)
    print("input ", input_text)
    # echo=False so the completion does not include the user's prompt:
    # the original echo=True made the bot repeat the question back into
    # the chat window before its answer.
    output = llm(input_text, max_tokens=1024, stop=["Q:", "\n"], echo=False)
    response = output['choices'][0]['text']
    return response
# Wire the generator into a chat UI. The queue serializes requests
# (one concurrent generation — llama.cpp inference here is single-model,
# in-process) and holds at most 5 waiting requests.
# NOTE(review): `concurrency_count` was removed in Gradio 4.x (replaced
# by `default_concurrency_limit`); if this Space runs Gradio >= 4 this
# call raises a TypeError at startup and is the likely cause of the
# "Runtime error" banner — confirm the pinned gradio version.
demo = gr.ChatInterface(generate_text)
demo.queue(concurrency_count=1, max_size=5)
demo.launch()