# NOTE: Hugging Face Space page residue removed (status "Sleeping", file size 624 bytes, commit hashes).
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
# Model checkpoint to serve: Falcon-RW-1B, a small causal LM from TII.
model_name = "tiiuae/falcon-rw-1b"
# Downloads (or loads from cache) the tokenizer and weights at import time,
# so the Space is ready before the UI launches.
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
# Module-level text-generation pipeline shared by every chat() call.
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
def chat(user_input):
    """Generate a single-turn reply to *user_input* with the Falcon pipeline.

    Parameters
    ----------
    user_input : str
        The user's prompt text from the Gradio textbox.

    Returns
    -------
    str
        The model's continuation with the echoed prompt removed and
        surrounding whitespace stripped.
    """
    outputs = pipe(user_input, max_new_tokens=100, do_sample=True, temperature=0.7)
    response = outputs[0]["generated_text"]
    # text-generation pipelines prepend the prompt to the generated text.
    # Only slice it off when it is actually there — blindly dropping
    # len(user_input) characters would mangle the reply if the pipeline
    # ever normalizes or reformats the prompt.
    if response.startswith(user_input):
        response = response[len(user_input):]
    return response.strip()
# Build the single-textbox demo UI and start the Gradio server.
demo = gr.Interface(
    fn=chat,
    inputs="text",
    outputs="text",
    title="Falcon 1B Chatbot",
    description="Ask me anything!",
)
demo.launch()