6023oji committed
Commit f50d15a · verified · 1 Parent(s): a177422

Update config.json

Files changed (1): config.json (+3 -2)
config.json CHANGED

@@ -1,4 +1,5 @@
 {
+  "_name_or_path": "6023oji/DPO_MBTI",
   "activation": "gelu",
   "alibi": true,
   "apply_residual_connection_post_layernorm": false,
@@ -29,12 +30,12 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 24,
   "num_kv_heads": 32,
-  "num_ln_in_parallel_attn": null,
   "parallel_attn": false,
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "torch_dtype": "float32",
   "transformers_version": "4.52.4",
   "use_cache": true,
-  "vocab_size": 50304
+  "vocab_size": 50304,
+  "tokenizer_class": "AutoTokenizer"
 }
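
For context, a minimal sketch (not part of the commit) of what this change enables, assuming the Hub repo id `6023oji/DPO_MBTI` taken from the `_name_or_path` field in the diff: with `tokenizer_class` set in config.json, `AutoTokenizer` knows which tokenizer class to instantiate for the checkpoint.

```python
# Minimal sketch, not from the commit itself: assumes the Hub repo id
# "6023oji/DPO_MBTI" taken from the "_name_or_path" field in the diff.
from transformers import AutoConfig, AutoTokenizer

# Reads the updated config.json from the Hub.
config = AutoConfig.from_pretrained("6023oji/DPO_MBTI")
print(config.vocab_size)       # 50304, per the updated config.json
print(config.tokenizer_class)  # "AutoTokenizer", the field added here

# With "tokenizer_class" present in config.json, AutoTokenizer can resolve
# the tokenizer for this checkpoint without extra arguments.
tokenizer = AutoTokenizer.from_pretrained("6023oji/DPO_MBTI")
```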