Instructions to use jumelet/test with libraries, inference providers, notebooks, and local apps. Follow these links to get started.
- Libraries
- Transformers
How to use jumelet/test with Transformers:
# Use a pipeline as a high-level helper
from transformers import pipeline
pipe = pipeline("text-generation", model="jumelet/test")

# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM
tokenizer = AutoTokenizer.from_pretrained("jumelet/test")
model = AutoModelForCausalLM.from_pretrained("jumelet/test")
- Notebooks
- Google Colab
- Kaggle
- Local Apps
- vLLM
How to use jumelet/test with vLLM:
Install from pip and serve model
# Install vLLM from pip:
pip install vllm
# Start the vLLM server:
vllm serve "jumelet/test"
# Call the server using curl (OpenAI-compatible API):
curl -X POST "http://localhost:8000/v1/completions" \ -H "Content-Type: application/json" \ --data '{ "model": "jumelet/test", "prompt": "Once upon a time,", "max_tokens": 512, "temperature": 0.5 }'
Use Docker
docker model run hf.co/jumelet/test
- SGLang
How to use jumelet/test with SGLang:
Install from pip and serve model
# Install SGLang from pip:
pip install sglang
# Start the SGLang server:
python3 -m sglang.launch_server \ --model-path "jumelet/test" \ --host 0.0.0.0 \ --port 30000
# Call the server using curl (OpenAI-compatible API):
curl -X POST "http://localhost:30000/v1/completions" \ -H "Content-Type: application/json" \ --data '{ "model": "jumelet/test", "prompt": "Once upon a time,", "max_tokens": 512, "temperature": 0.5 }'
Use Docker images
docker run --gpus all \ --shm-size 32g \ -p 30000:30000 \ -v ~/.cache/huggingface:/root/.cache/huggingface \ --env "HF_TOKEN=<secret>" \ --ipc=host \ lmsysorg/sglang:latest \ python3 -m sglang.launch_server \ --model-path "jumelet/test" \ --host 0.0.0.0 \ --port 30000
# Call the server using curl (OpenAI-compatible API):
curl -X POST "http://localhost:30000/v1/completions" \ -H "Content-Type: application/json" \ --data '{ "model": "jumelet/test", "prompt": "Once upon a time,", "max_tokens": 512, "temperature": 0.5 }'
- Docker Model Runner
How to use jumelet/test with Docker Model Runner:
docker model run hf.co/jumelet/test
Training in progress, step 1
Browse files
- config.json +5 -5
- pytorch_model.bin +2 -2
- tokenizer.json +63 -77
- training_args.bin +2 -2
config.json
CHANGED
|
@@ -19,11 +19,11 @@
|
|
| 19 |
"layer_norm_epsilon": 1e-05,
|
| 20 |
"model_type": "gpt2",
|
| 21 |
"n_ctx": 1024,
|
| 22 |
-
"n_embd":
|
| 23 |
-
"n_head":
|
| 24 |
"n_inner": null,
|
| 25 |
-
"n_layer":
|
| 26 |
-
"n_positions":
|
| 27 |
"reorder_and_upcast_attn": false,
|
| 28 |
"resid_pdrop": 0.1,
|
| 29 |
"scale_attn_by_inverse_layer_idx": false,
|
|
@@ -42,5 +42,5 @@
|
|
| 42 |
"torch_dtype": "float32",
|
| 43 |
"transformers_version": "4.30.2",
|
| 44 |
"use_cache": true,
|
| 45 |
-
"vocab_size":
|
| 46 |
}
|
|
|
|
| 19 |
"layer_norm_epsilon": 1e-05,
|
| 20 |
"model_type": "gpt2",
|
| 21 |
"n_ctx": 1024,
|
| 22 |
+
"n_embd": 16,
|
| 23 |
+
"n_head": 2,
|
| 24 |
"n_inner": null,
|
| 25 |
+
"n_layer": 2,
|
| 26 |
+
"n_positions": 30,
|
| 27 |
"reorder_and_upcast_attn": false,
|
| 28 |
"resid_pdrop": 0.1,
|
| 29 |
"scale_attn_by_inverse_layer_idx": false,
|
|
|
|
| 42 |
"torch_dtype": "float32",
|
| 43 |
"transformers_version": "4.30.2",
|
| 44 |
"use_cache": true,
|
| 45 |
+
"vocab_size": 74
|
| 46 |
}
|
pytorch_model.bin
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
-
size
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e1e2aeb84675e10641cb83695751b8230c6a0d837f0543c153fd94ca800344df
|
| 3 |
+
size 41986
|
tokenizer.json
CHANGED
|
@@ -96,88 +96,74 @@
|
|
| 96 |
"<bos>": 3,
|
| 97 |
".": 4,
|
| 98 |
"the": 5,
|
| 99 |
-
"
|
| 100 |
-
"
|
| 101 |
"was": 8,
|
| 102 |
-
"
|
| 103 |
-
"
|
| 104 |
-
"
|
| 105 |
-
"
|
| 106 |
-
"
|
| 107 |
-
"
|
| 108 |
"a": 15,
|
| 109 |
-
"
|
| 110 |
-
"
|
| 111 |
-
"
|
| 112 |
-
"
|
| 113 |
-
"
|
| 114 |
-
"
|
| 115 |
"by": 22,
|
| 116 |
"knew": 23,
|
| 117 |
-
"
|
| 118 |
-
"
|
| 119 |
-
"
|
| 120 |
-
"
|
| 121 |
-
"
|
| 122 |
-
"
|
| 123 |
-
"
|
| 124 |
-
"
|
| 125 |
-
"
|
| 126 |
-
"
|
| 127 |
-
"
|
| 128 |
-
"
|
| 129 |
-
"
|
| 130 |
-
"
|
| 131 |
-
"
|
| 132 |
-
"
|
| 133 |
-
"
|
| 134 |
-
"
|
| 135 |
-
"
|
| 136 |
-
"
|
| 137 |
-
"
|
| 138 |
"find": 45,
|
| 139 |
-
"
|
| 140 |
-
"
|
| 141 |
-
"
|
| 142 |
-
"
|
| 143 |
-
"
|
| 144 |
-
"
|
| 145 |
-
"
|
| 146 |
-
"
|
| 147 |
-
"
|
| 148 |
-
"
|
| 149 |
-
"
|
| 150 |
-
"
|
| 151 |
-
"
|
| 152 |
-
"
|
| 153 |
-
"
|
| 154 |
-
"
|
| 155 |
-
"
|
| 156 |
-
"
|
| 157 |
-
"
|
| 158 |
-
"
|
| 159 |
-
"
|
| 160 |
-
"
|
| 161 |
-
"
|
| 162 |
-
"
|
| 163 |
-
"
|
| 164 |
-
"
|
| 165 |
-
"
|
| 166 |
-
"
|
| 167 |
-
"child": 74,
|
| 168 |
-
"like": 75,
|
| 169 |
-
"lower": 76,
|
| 170 |
-
"lip": 77,
|
| 171 |
-
"hazel": 78,
|
| 172 |
-
"grass": 79,
|
| 173 |
-
"than": 80,
|
| 174 |
-
"darker": 81,
|
| 175 |
-
"own": 82,
|
| 176 |
-
"baby": 83,
|
| 177 |
-
"eyes": 84,
|
| 178 |
-
"quivered": 85,
|
| 179 |
-
"from": 86,
|
| 180 |
-
"silky": 87
|
| 181 |
},
|
| 182 |
"unk_token": "<unk>"
|
| 183 |
}
|
|
|
|
| 96 |
"<bos>": 3,
|
| 97 |
".": 4,
|
| 98 |
"the": 5,
|
| 99 |
+
",": 6,
|
| 100 |
+
"and": 7,
|
| 101 |
"was": 8,
|
| 102 |
+
"he": 9,
|
| 103 |
+
"in": 10,
|
| 104 |
+
"had": 11,
|
| 105 |
+
"n't": 12,
|
| 106 |
+
"it": 13,
|
| 107 |
+
"ditch": 14,
|
| 108 |
"a": 15,
|
| 109 |
+
"Bible": 16,
|
| 110 |
+
"cows": 17,
|
| 111 |
+
"to": 18,
|
| 112 |
+
"They": 19,
|
| 113 |
+
"class": 20,
|
| 114 |
+
"But": 21,
|
| 115 |
"by": 22,
|
| 116 |
"knew": 23,
|
| 117 |
+
"of": 24,
|
| 118 |
+
":": 25,
|
| 119 |
+
"Oh": 26,
|
| 120 |
+
"did": 27,
|
| 121 |
+
"finally": 28,
|
| 122 |
+
"mattered": 29,
|
| 123 |
+
"they": 30,
|
| 124 |
+
"He": 31,
|
| 125 |
+
"happened": 32,
|
| 126 |
+
"here": 33,
|
| 127 |
+
"there": 34,
|
| 128 |
+
"'s": 35,
|
| 129 |
+
"The": 36,
|
| 130 |
+
"Today": 37,
|
| 131 |
+
"Tuesday": 38,
|
| 132 |
+
"care": 39,
|
| 133 |
+
"justice": 40,
|
| 134 |
+
"now": 41,
|
| 135 |
+
"blackened": 42,
|
| 136 |
+
"boy": 43,
|
| 137 |
+
"child": 44,
|
| 138 |
"find": 45,
|
| 139 |
+
"intended": 46,
|
| 140 |
+
"none": 47,
|
| 141 |
+
"Because": 48,
|
| 142 |
+
"finger": 49,
|
| 143 |
+
"fingers": 50,
|
| 144 |
+
"sure": 51,
|
| 145 |
+
"that": 52,
|
| 146 |
+
"told": 53,
|
| 147 |
+
"were": 54,
|
| 148 |
+
"world": 55,
|
| 149 |
+
"darker": 56,
|
| 150 |
+
"grass": 57,
|
| 151 |
+
"narrowed": 58,
|
| 152 |
+
"them": 59,
|
| 153 |
+
"virtue": 60,
|
| 154 |
+
"And": 61,
|
| 155 |
+
"help": 62,
|
| 156 |
+
"meant": 63,
|
| 157 |
+
"moved": 64,
|
| 158 |
+
"not": 65,
|
| 159 |
+
"really": 66,
|
| 160 |
+
"should": 67,
|
| 161 |
+
"eyes": 68,
|
| 162 |
+
"his": 69,
|
| 163 |
+
"leave": 70,
|
| 164 |
+
"quite": 71,
|
| 165 |
+
"than": 72,
|
| 166 |
+
"what": 73
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 167 |
},
|
| 168 |
"unk_token": "<unk>"
|
| 169 |
}
|
training_args.bin
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
-
size
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:e68f36608ebbacbe4b53d31e2b20b322c94ec07e610585b26ac4d0ee319c2210
|
| 3 |
+
size 4408
|