Spaces: Sleeping
# Third-party dependencies.
import gradio as gr
from transformers import pipeline

# Build the text-generation pipeline around an open-access instruct model;
# device_map="auto" lets Accelerate place the weights on whatever hardware
# is available (GPU if present, otherwise CPU).
pipe = pipeline(
    "text-generation",
    model="tiiuae/falcon-7b-instruct",
    device_map="auto",
)
def ask(question):
    """Send *question* to the model and return only the assistant's reply."""
    # Frame the question as a minimal chat transcript so the instruct model
    # continues from the "Assistant:" turn.
    chat_prompt = f"User: {question}\nAssistant:"
    generations = pipe(
        chat_prompt,
        max_new_tokens=200,
        do_sample=True,
        temperature=0.7,
    )
    full_text = generations[0]["generated_text"]
    # The pipeline echoes the prompt in its output; keep only the text after
    # the final "Assistant:" marker and trim surrounding whitespace.
    return full_text.split("Assistant:")[-1].strip()
# Wire the handler into a minimal one-box Gradio UI.
demo = gr.Interface(
    fn=ask,
    inputs="text",
    outputs="text",
    title="🧠 Ask This LLM!",
    description="Ask about any topic.",
)

if __name__ == "__main__":
    demo.launch()