Outer Step 2044. Inner Step 14. Batch Size 252
Files changed:
- config.json (+5 -5)
- model.safetensors (+1 -1)
config.json CHANGED
@@ -268,17 +268,17 @@
     "AutoModelForCausalLM": "distributed/optimized-gpt2-500m--modeling_gpt_optimized.GPTOptim"
   },
   "block_list": [
-
-
-
-
+    3988057,
+    3988060,
+    3988063,
+    3988067
   ],
   "block_size": 1024,
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
-  "inner_step":
+  "inner_step": 14,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt_optimized",
   "n_embd": 1280,
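This commit writes distributed-training bookkeeping straight into config.json: the block_list of chain blocks the update was reported in, and the inner_step counter within the current outer step. As a minimal sketch (not part of this repo's tooling), the fields can be read back from the raw config via huggingface_hub; the repo id "distributed/optimized-gpt2-500m" is an assumption inferred from the auto_map entry above.

```python
import json
from huggingface_hub import hf_hub_download

# Repo id is inferred from the auto_map entry in the diff above;
# it may not match the actual repo hosting this commit.
path = hf_hub_download(
    repo_id="distributed/optimized-gpt2-500m",
    filename="config.json",
)
with open(path) as f:
    config = json.load(f)

# Fields touched by this commit:
print(config["block_list"])  # [3988057, 3988060, 3988063, 3988067]
print(config["inner_step"])  # 14
```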
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4a28996d170fb945589afb5e59f00b0430146c927d499ca29031f213f8ee7168
 size 4040701744
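The model.safetensors entry tracked in git is a git-lfs pointer, not the weights themselves: "oid sha256:..." is the SHA-256 digest of the full blob and "size" is its byte count. Below is a minimal sketch of verifying a downloaded copy against this pointer; the local path "model.safetensors" is an assumption about where the weights were fetched to.

```python
import hashlib

# Values copied from the LFS pointer in the diff above.
EXPECTED_OID = "4a28996d170fb945589afb5e59f00b0430146c927d499ca29031f213f8ee7168"
EXPECTED_SIZE = 4040701744

h = hashlib.sha256()
size = 0
# Path is an assumption: wherever the real weights file was downloaded.
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        h.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"size mismatch: {size} != {EXPECTED_SIZE}"
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("model.safetensors matches its git-lfs pointer")
```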