| import gradio as gr | |
| from llama_cpp import Llama | |
| from typing import Optional | |
| import gradio as gr | |
# Download (on first run) and load the GGUF-quantized model from the Hugging
# Face Hub via the llama.cpp bindings. This executes at import time, so module
# import is slow and needs network access the first time the weights are fetched.
llm = Llama.from_pretrained(
    repo_id="Haary/USK_Mistral_7B_Unsloth_GGUF",
    filename="unsloth.Q4_K_M.gguf"
)
class Chat:
    """Stateful chat session that accumulates the history sent to the model.

    Maintains a running list of ``{"role": ..., "content": ...}`` dicts,
    optionally seeded with a system prompt, so the model receives the full
    conversation on every turn.
    """

    def __init__(self, system: Optional[str] = None):
        """Start a new conversation.

        Args:
            system: Optional system prompt; when given it is stored and
                prepended to the message history as a ``system`` turn.
        """
        self.system = system
        self.messages = []
        if system is not None:
            self.messages.append({
                "role": "system",
                "content": system
            })

    def prompt(self, content: str) -> str:
        """Send *content* as a user turn and return the assistant's reply.

        Both the user message and the model's reply are appended to
        ``self.messages`` so later turns carry the whole context.

        Args:
            content: The user's message text.

        Returns:
            The assistant's reply text.
        """
        self.messages.append({
            "role": "user",
            "content": content
        })
        # BUG FIX: the original passed a hard-coded test message here instead
        # of the accumulated history, so the model ignored the caller's input
        # and the stored conversation entirely.
        response = llm.create_chat_completion(messages=self.messages)
        response_content = response["choices"][0]["message"]["content"]
        self.messages.append({
            "role": "assistant",
            "content": response_content
        })
        return response_content
# Single module-level session shared by every Gradio request — all users of
# the app share one conversation history. NOTE(review): acceptable for a demo;
# per-user state would need gr.State — confirm intent.
chat = Chat(system="You are a helpful assistant.")
def respond(message, chat_history):
    """Gradio submit callback: run one chat turn and clear the input box.

    Appends the ``(user, bot)`` pair to the chatbot transcript in place and
    returns ``("", updated_history)`` so the textbox resets and the chatbot
    component refreshes.
    """
    reply = chat.prompt(content=message)
    chat_history.append((message, reply))
    return "", chat_history
# Build the Gradio UI. Component creation order inside Blocks determines the
# on-page layout: chatbot transcript on top, then the textbox, then the button.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()      # conversation transcript shown as (user, bot) pairs
    msg = gr.Textbox()          # user input; cleared after each submit
    clear = gr.Button("Clear")
    # Pressing Enter in the textbox calls respond(); its two outputs reset the
    # textbox to "" and push the updated history into the chatbot component.
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    # Clear wipes the visible transcript only. NOTE(review): it does not reset
    # chat.messages, so the model still remembers earlier turns — confirm if
    # that is intended.
    clear.click(lambda: None, None, chatbot, queue=False)
# Start the web server only when run as a script; debug=True surfaces errors
# in the browser and console during development.
if __name__ == "__main__":
    demo.launch(debug=True)