# Scraped from a Hugging Face Space; the Space's build status was "Runtime error".
import torch
import transformers
from transformers import AutoModelForCausalLM, AutoTokenizer
| class Model(): | |
| def __init__(self, model="tiiuae/falcon-7b-instruct") -> None: | |
| pass | |
| self.tokenizer = AutoTokenizer.from_pretrained(model) | |
| self.pipeline = transformers.pipeline( | |
| "text-generation", | |
| model=model, | |
| tokenizer=self.tokenizer, | |
| torch_dtype=torch.bfloat16, | |
| trust_remote_code=True, | |
| device_map="auto", | |
| ) | |
| def gen(self, content, temp=0.0, max_length=500): | |
| sequences = self.pipeline( | |
| content, | |
| max_length=max_length, | |
| do_sample=False, | |
| temperature=temp, | |
| num_return_sequences=1, | |
| eos_token_id=self.tokenizer.eos_token_id, | |
| ) | |
| return '\n'.join([seq['generated_text'] for seq in sequences]) |