Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
|
@@ -10,12 +10,10 @@ When in legal doubt, you better call BLOOM! Ask BLOOM any legal question:
|
|
| 10 |
title = "Better Call Bloom!"
|
| 11 |
examples = [["Adventurer is approached by a mysterious stranger in the tavern for a new quest."]]
|
| 12 |
|
| 13 |
-
|
| 14 |
-
|
| 15 |
tokenizer = BloomTokenizerFast.from_pretrained("tomrb/bettercallbloom-3b")
|
| 16 |
-
|
| 17 |
|
| 18 |
-
generator = pipeline('text-generation', model=
|
| 19 |
|
| 20 |
|
| 21 |
def preprocess(text):
|
|
@@ -26,7 +24,7 @@ def preprocess(text):
|
|
| 26 |
def generate(text):
|
| 27 |
|
| 28 |
preprocessed_text = preprocess(text)
|
| 29 |
-
result = generator(preprocessed_text, max_length=
|
| 30 |
#output = re.split(r'\nQuestion:|Answer #|Title:',result[0]['generated_text'])[2]
|
| 31 |
output = result[0]['generated_text']
|
| 32 |
return output
|
|
|
|
# App metadata shown by the Gradio UI: page title plus one example prompt.
title = "Better Call Bloom!"
examples = [["Adventurer is approached by a mysterious stranger in the tavern for a new quest."]]

# Load the fine-tuned BLOOM tokenizer and the model weights in 8-bit
# quantized form. NOTE(review): load_in_8bit requires the bitsandbytes
# package and a GPU; device_map="auto" lets accelerate place layers
# across available devices — confirm the Space hardware supports this.
tokenizer = BloomTokenizerFast.from_pretrained("tomrb/bettercallbloom-3b")
model_8bit = BloomForCausalLM.from_pretrained("tomrb/bettercallbloom-3b",device_map="auto",load_in_8bit=True)

# Text-generation pipeline wired to the quantized model and its tokenizer.
generator = pipeline('text-generation', model=model_8bit, tokenizer=tokenizer)
|
| 17 |
|
| 18 |
|
| 19 |
def preprocess(text):
|
|
|
|
def generate(text):
    """Generate an answer for a user question via the BLOOM pipeline.

    Args:
        text: Raw user-supplied question string.

    Returns:
        The pipeline's full generated text (str) — note this includes the
        preprocessed prompt itself, not just the model's continuation.
    """
    preprocessed_text = preprocess(text)
    # max_length=256 bounds total token count (prompt + continuation) to
    # keep latency reasonable on the hosted Space.
    result = generator(preprocessed_text, max_length=256)
    output = result[0]['generated_text']
    return output
|