# NOTE(review): the lines below were Hugging Face Spaces page residue
# (status banner "Runtime error", file size, commit hashes, line-number
# gutter) captured by extraction — preserved here as a comment so the
# file parses. Space status at capture time: Runtime error; 816 bytes.
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
# Hugging Face Hub repo id of the causal language model to serve.
model_id = "psy191190/LimitlessAGI" # e.g. your Llama 3.2 3B Instruct
# Tokenizer must come from the same repo so apply_chat_template emits
# the chat format this model was trained on.
tokenizer = AutoTokenizer.from_pretrained(model_id)
# device_map="auto" lets accelerate place weights on GPU when one is
# available, falling back to CPU otherwise.
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")
# Shared text-generation pipeline used by the chat handler below;
# max_new_tokens caps the length of each generated reply.
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=1024)
def respond(message, history):
    """Chat handler for gr.ChatInterface.

    Fix: ChatInterface calls its fn as ``fn(message, history)``; the original
    single-parameter signature raised ``TypeError`` on every turn (the Space's
    "Runtime error"). The incoming user message is now also appended to the
    prompt, which the original never did.

    Parameters
    ----------
    message : str
        The user's latest chat message.
    history : list
        Prior turns — either ``{"role", "content"}`` dicts (type="messages")
        or ``(user, assistant)`` pairs (legacy tuple format); both are handled
        so this works regardless of how ChatInterface is configured.

    Yields
    ------
    str
        The model's reply, with the echoed prompt stripped off.
    """
    messages = []
    for turn in history:
        if isinstance(turn, dict):
            messages.append({"role": turn["role"], "content": turn["content"]})
        else:
            # Legacy tuple format: (user_text, assistant_text); either side
            # may be None (e.g. an as-yet-unanswered user turn).
            user_text, assistant_text = turn
            if user_text is not None:
                messages.append({"role": "user", "content": user_text})
            if assistant_text is not None:
                messages.append({"role": "assistant", "content": assistant_text})
    # The new message is not part of `history` yet — add it explicitly.
    messages.append({"role": "user", "content": message})
    prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    # text-generation pipelines return the prompt + completion; slice off
    # the prompt to keep only the newly generated reply.
    output = pipe(prompt)[0]["generated_text"]
    response = output[len(prompt):].strip()
    yield response
# Fix: removed a stray " |" gutter artifact after launch() that made this
# line a syntax error. type="messages" makes ChatInterface pass history as
# {"role", "content"} dicts — the format respond reads.
with gr.Blocks() as demo:
    gr.ChatInterface(respond, type="messages")
# queue() serializes generation requests so concurrent users don't
# contend for the single model instance.
demo.queue().launch()