Commit
·
9bedbb7
1
Parent(s):
a64eeb1
Update README.md
Browse files
README.md
CHANGED
|
@@ -89,11 +89,11 @@ Below shows a code example on how to use this model
|
|
| 89 |
import torch
|
| 90 |
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
|
| 91 |
|
| 92 |
-
tokenizer = AutoTokenizer.from_pretrained("pankajmathur/
|
| 93 |
model = AutoModelForCausalLM.from_pretrained(
|
| 94 |
-
"pankajmathur/
|
| 95 |
torch_dtype=torch.float16,
|
| 96 |
-
|
| 97 |
low_cpu_mem_usage=True,
|
| 98 |
device_map="auto"
|
| 99 |
)
|
|
@@ -127,11 +127,11 @@ Below shows a code example on how to use this model
|
|
| 127 |
import torch
|
| 128 |
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
|
| 129 |
|
| 130 |
-
tokenizer = AutoTokenizer.from_pretrained("pankajmathur/
|
| 131 |
model = AutoModelForCausalLM.from_pretrained(
|
| 132 |
-
"pankajmathur/
|
| 133 |
torch_dtype=torch.float16,
|
| 134 |
-
|
| 135 |
low_cpu_mem_usage=True,
|
| 136 |
device_map="auto"
|
| 137 |
)
|
|
@@ -163,13 +163,14 @@ Exercise caution and cross-check information when necessary.
|
|
| 163 |
Please kindly cite using the following BibTeX:
|
| 164 |
|
| 165 |
```
|
| 166 |
-
@misc{
|
| 167 |
author = {Pankaj Mathur},
|
| 168 |
-
title = {
|
|
|
|
| 169 |
year = {2023},
|
| 170 |
publisher = {HuggingFace},
|
| 171 |
journal = {HuggingFace repository},
|
| 172 |
-
howpublished = {\url{https://huggingface.co/pankajmathur/
|
| 173 |
}
|
| 174 |
```
|
| 175 |
|
|
|
|
| 89 |
import torch
|
| 90 |
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
|
| 91 |
|
| 92 |
+
tokenizer = AutoTokenizer.from_pretrained("pankajmathur/model_101")
|
| 93 |
model = AutoModelForCausalLM.from_pretrained(
|
| 94 |
+
"pankajmathur/model_101",
|
| 95 |
torch_dtype=torch.float16,
|
| 96 |
+
load_in_4bit=True,
|
| 97 |
low_cpu_mem_usage=True,
|
| 98 |
device_map="auto"
|
| 99 |
)
|
|
|
|
| 127 |
import torch
|
| 128 |
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
|
| 129 |
|
| 130 |
+
tokenizer = AutoTokenizer.from_pretrained("pankajmathur/model_101")
|
| 131 |
model = AutoModelForCausalLM.from_pretrained(
|
| 132 |
+
"pankajmathur/model_101",
|
| 133 |
torch_dtype=torch.float16,
|
| 134 |
+
load_in_4bit=True,
|
| 135 |
low_cpu_mem_usage=True,
|
| 136 |
device_map="auto"
|
| 137 |
)
|
|
|
|
| 163 |
Please kindly cite using the following BibTeX:
|
| 164 |
|
| 165 |
```
|
| 166 |
+
@misc{model_101,
|
| 167 |
author = {Pankaj Mathur},
|
| 168 |
+
title = {model_101: A hybrid (explain + instruct) style Llama2-70b model},
|
| 169 |
+
month = {August},
|
| 170 |
year = {2023},
|
| 171 |
publisher = {HuggingFace},
|
| 172 |
journal = {HuggingFace repository},
|
| 173 |
+
howpublished = {\url{https://huggingface.co/pankajmathur/model_101}},
|
| 174 |
}
|
| 175 |
```
|
| 176 |
|