Spaces:
Runtime error
Runtime error
| # -*- coding: utf-8 -*- | |
| """falcon-7b-instruct.ipynb | |
| Automatically generated by Colaboratory. | |
| Original file is located at | |
| https://colab.research.google.com/drive/1VY6GckUeF6uVKiDGymZZgtxiO7plJvo4 | |
| https://huggingface.co/tiiuae/falcon-7b-instruct""" | |
| """## Import libraries""" | |
| from transformers import AutoTokenizer, AutoModelForCausalLM | |
| import transformers | |
| import torch | |
| import gradio as gr | |
| import random | |
| """## Define the model""" | |
| model = "tiiuae/falcon-7b-instruct" | |
| tokenizer = AutoTokenizer.from_pretrained(model) | |
| pipeline = transformers.pipeline( | |
| "text-generation", | |
| model=model, | |
| tokenizer=tokenizer, | |
| torch_dtype=torch.bfloat16, | |
| trust_remote_code=True, | |
| device_map="auto", | |
| ) | |
| """## Generate Text Function""" | |
| def generate_text(text): | |
| sequences = pipeline( | |
| text, | |
| max_length=200, | |
| do_sample=True, | |
| top_k=10, | |
| num_return_sequences=1, | |
| eos_token_id=tokenizer.eos_token_id, | |
| ) | |
| for seq in sequences: | |
| return(f"Result: {seq['generated_text']}") | |
# Create a Gradio interface that exposes generate_text as a simple
# text-in / text-out chatbot UI.
# NOTE(review): this chunk never calls iface.launch(); presumably launch
# happens elsewhere (or Gradio auto-launches in the hosting environment) —
# confirm before deploying.
iface = gr.Interface(
    fn=generate_text,
    inputs="text",
    outputs="text",
    title="ChatBot",
    description="Type anything you want to chat",
)