Upload folder using huggingface_hub
Browse files
README.md
CHANGED
|
@@ -24,8 +24,7 @@ from transformers import pipeline
|
|
| 24 |
pipe = pipeline(
|
| 25 |
"text-generation", model="bart1259/MiniCOTMath"
|
| 26 |
)
|
| 27 |
- print(pipe("Input: (5 + 5)
- ", max_new_tokens=100)[0]["generated_text"])
|
| 29 |
```
|
| 30 |
|
| 31 |
Outputs:
|
|
@@ -65,8 +64,7 @@ class StopCriteria(StoppingCriteria):
|
|
| 65 |
def __iter__(self):
|
| 66 |
yield self
|
| 67 |
|
| 68 |
- prompt = "Input: (5 + 5)
- "
|
| 70 |
|
| 71 |
tokenizer = AutoTokenizer.from_pretrained("bart1259/MiniCOTMath")
|
| 72 |
model = AutoModelForCausalLM.from_pretrained("bart1259/MiniCOTMath").cuda()
|
|
|
|
| 24 |
pipe = pipeline(
|
| 25 |
"text-generation", model="bart1259/MiniCOTMath"
|
| 26 |
)
|
| 27 |
+ print(pipe("Input: (5 + 5)\n", max_new_tokens=100)[0]["generated_text"])
|
|
|
|
| 28 |
```
|
| 29 |
|
| 30 |
Outputs:
|
|
|
|
| 64 |
def __iter__(self):
|
| 65 |
yield self
|
| 66 |
|
| 67 |
+ prompt = "Input: (5 + 5)\n"
|
|
|
|
| 68 |
|
| 69 |
tokenizer = AutoTokenizer.from_pretrained("bart1259/MiniCOTMath")
|
| 70 |
model = AutoModelForCausalLM.from_pretrained("bart1259/MiniCOTMath").cuda()
|