import gradio as gr
from transformers import pipeline
# Load an open-access, instruction-tuned model once at module import.
# device_map="auto" asks transformers to place the weights on whatever
# hardware is available (GPU if present, else CPU) — NOTE(review): this
# path requires `accelerate` to be installed; confirm the Space's deps.
pipe = pipeline("text-generation", model="tiiuae/falcon-7b-instruct", device_map="auto")
def ask(question):
    """Send *question* to the model and return only the assistant's reply.

    The pipeline echoes the prompt in ``generated_text``, so everything up
    to and including the last ``"Assistant:"`` marker is stripped off.
    """
    chat_prompt = f"User: {question}\nAssistant:"
    outputs = pipe(chat_prompt, max_new_tokens=200, do_sample=True, temperature=0.7)
    full_text = outputs[0]["generated_text"]
    # rpartition yields ("", "", full_text) when the marker is absent, so the
    # reply falls back to the whole generation — same as split(...)[-1].
    _, _, reply = full_text.rpartition("Assistant:")
    return reply.strip()
# Wire the model up to a minimal web UI: one text box in, one text box out.
demo = gr.Interface(
    fn=ask,
    inputs="text",
    outputs="text",
    title="🧠 Ask This LLM!",
    description="Ask about any topic.",
)
# Start the Gradio server only when executed as a script, not on import.
if __name__ == "__main__":
    demo.launch()