TenFate committed on
Commit
0fbf52b
·
verified ·
1 Parent(s): 648d87a

Delete config.json

Browse files
Files changed (1) hide show
  1. config.json +0 -43
config.json DELETED
@@ -1,43 +0,0 @@
1
- {
2
- "_name_or_path": "TenFate/ZDXD",
3
- "model_type": "chatglm",
4
- "architectures": [
5
- "ChatGLMModel"
6
- ],
7
- "auto_map": {
8
- "AutoConfig": "configuration_zdxdllm.ChatGLMConfig",
9
- "AutoModel": "modeling_zdxdllm.ChatGLMForConditionalGeneration",
10
- "AutoModelForCausalLM": "modeling_zdxdllm.ChatGLMForConditionalGeneration",
11
- "AutoModelForSeq2SeqLM": "modeling_zdxdllm.ChatGLMForConditionalGeneration",
12
- "AutoModelForSequenceClassification": "modeling_zdxdllm.ChatGLMForSequenceClassification"
13
- },
14
- "add_bias_linear": false,
15
- "add_qkv_bias": true,
16
- "apply_query_key_layer_scaling": true,
17
- "apply_residual_connection_post_layernorm": false,
18
- "attention_dropout": 0.0,
19
- "attention_softmax_in_fp32": true,
20
- "attn_implementation": "sdpa",
21
- "bias_dropout_fusion": true,
22
- "ffn_hidden_size": 13696,
23
- "fp32_residual_connection": false,
24
- "hidden_dropout": 0.0,
25
- "hidden_size": 4096,
26
- "kv_channels": 128,
27
- "layernorm_epsilon": 0.00000015625,
28
- "multi_query_attention": true,
29
- "multi_query_group_num": 2,
30
- "num_attention_heads": 32,
31
- "num_layers": 40,
32
- "original_rope": true,
33
- "padded_vocab_size": 151552,
34
- "post_layer_norm": true,
35
- "rmsnorm": true,
36
- "seq_length": 8192,
37
- "use_cache": true,
38
- "torch_dtype": "bfloat16",
39
- "transformers_version": "4.42.4",
40
- "tie_word_embeddings": false,
41
- "eos_token_id": [151329, 151336, 151338],
42
- "pad_token_id": 151329
43
- }