ccore committed on
Commit
6a72de9
·
verified ·
1 Parent(s): 197bbcb

Training in progress, step 1000

Browse files
Files changed (3) hide show
  1. config.json +6 -9
  2. model.safetensors +2 -2
  3. training_args.bin +1 -1
config.json CHANGED
@@ -1,7 +1,5 @@
1
  {
2
- "_name_or_path": "facebook/opt-125m",
3
  "_remove_final_layer_norm": false,
4
- "activation_dropout": 0.0,
5
  "activation_function": "relu",
6
  "architectures": [
7
  "OPTForCausalLM"
@@ -12,20 +10,19 @@
12
  "dropout": 0.1,
13
  "enable_bias": true,
14
  "eos_token_id": 2,
15
- "ffn_dim": 3072,
16
- "hidden_size": 768,
17
  "init_std": 0.02,
18
  "layer_norm_elementwise_affine": true,
19
  "layerdrop": 0.0,
20
  "max_position_embeddings": 2048,
21
  "model_type": "opt",
22
- "num_attention_heads": 12,
23
- "num_hidden_layers": 12,
24
  "pad_token_id": 1,
25
- "prefix": "</s>",
26
  "torch_dtype": "float32",
27
  "transformers_version": "4.42.4",
28
  "use_cache": true,
29
- "vocab_size": 50272,
30
- "word_embed_proj_dim": 768
31
  }
 
1
  {
 
2
  "_remove_final_layer_norm": false,
 
3
  "activation_function": "relu",
4
  "architectures": [
5
  "OPTForCausalLM"
 
10
  "dropout": 0.1,
11
  "enable_bias": true,
12
  "eos_token_id": 2,
13
+ "ffn_dim": 512,
14
+ "hidden_size": 256,
15
  "init_std": 0.02,
16
  "layer_norm_elementwise_affine": true,
17
  "layerdrop": 0.0,
18
  "max_position_embeddings": 2048,
19
  "model_type": "opt",
20
+ "num_attention_heads": 2,
21
+ "num_hidden_layers": 4,
22
  "pad_token_id": 1,
 
23
  "torch_dtype": "float32",
24
  "transformers_version": "4.42.4",
25
  "use_cache": true,
26
+ "vocab_size": 50265,
27
+ "word_embed_proj_dim": 256
28
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:5b00f1e99410e83225955f691179166e02e35fa62d9698bff3b15dedae8bbafd
3
- size 500979600
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1e1f48148b128785941b6f9cb0bd73013e4cf73f6eef0d3c698adfb6da7c02a1
3
+ size 62013896
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:27d6d579cc7b5769d9826ba14c0c95626a70e6009f76aac0e14196ab8ee13de6
3
  size 5112
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1ef461fb370e080f40e9c6c5d5b49ab5fb711afa9a896465203446818d1a4924
3
  size 5112