Spaces:
Sleeping
Sleeping
| import gradio as gr | |
| from transformers import pipeline | |
# --- Model pipeline ---------------------------------------------------------
# Text-generation pipeline backed by the Hulk810154/Kai model on the Hub.
# NOTE(security): trust_remote_code=True runs Python shipped inside the model
# repository — acceptable only because this Space deliberately trusts its own
# model; do not copy this flag for arbitrary third-party models.
chat_pipe = pipeline(
    task="text-generation",
    model="Hulk810154/Kai",
    trust_remote_code=True,
)
# --- Chat handler -----------------------------------------------------------
def chat_fn(message, history):
    """Generate one assistant reply for a ``gr.ChatInterface`` turn.

    Args:
        message: The user's latest message as a plain string.
        history: Prior conversation turns. ChatInterface owns and updates
            this itself; the pipeline call here is stateless, so it is
            accepted but not used.

    Returns:
        The model's reply as a string. ChatInterface expects ``fn`` to return
        the response text only — the previous ``return history, history``
        (a gr.Blocks/Chatbot idiom) handed it a tuple and broke the chat UI.
    """
    output = chat_pipe(message, max_new_tokens=128, do_sample=True)[0]["generated_text"]
    # HF text-generation pipelines echo the prompt at the head of
    # generated_text; strip it so only the newly generated reply is shown.
    if output.startswith(message):
        output = output[len(message):].lstrip()
    return output
# --- Gradio chat UI ---------------------------------------------------------
# ChatInterface wires chat_fn into a ready-made chat page (input box,
# message history, retry/clear controls).
demo = gr.ChatInterface(
    fn=chat_fn,
    title="🧠 Kai AGI Text Chat",
    description="Text-only chat with the Hulk810154/Kai model on Hugging Face Spaces.",
)

# Guard the launch so importing this module (e.g. from tests or tooling) does
# not start a web server. Hugging Face Spaces executes app.py as __main__,
# so the deployed behavior is unchanged.
if __name__ == "__main__":
    demo.launch()