JakeOh committed on
Commit
2d6e01c
·
verified ·
1 Parent(s): 003aa2e

Upload config

Browse files
Files changed (1) hide show
  1. config.json +4 -4
config.json CHANGED
@@ -10,8 +10,8 @@
10
  "attention_layer_norm_with_affine": true,
11
  "auto_map": {
12
  "AutoConfig": "configuration_llada.LLaDAConfig",
13
- "AutoModel": "GSAI-ML/LLaDA-8B-Base--modeling_llada.LLaDAModelLM",
14
- "AutoModelForCausalLM": "GSAI-ML/LLaDA-8B-Base--modeling_llada.LLaDAModelLM"
15
  },
16
  "bias_for_layer_norm": false,
17
  "block_group_size": 1,
@@ -19,7 +19,7 @@
19
  "d_model": 768,
20
  "dtype": "float32",
21
  "embedding_dropout": 0.0,
22
- "embedding_size": 10,
23
  "eos_token_id": 9,
24
  "flash_attention": false,
25
  "include_bias": false,
@@ -50,6 +50,6 @@
50
  "scale_logits": false,
51
  "transformers_version": "4.57.1",
52
  "use_cache": false,
53
- "vocab_size": 10,
54
  "weight_tying": false
55
  }
 
10
  "attention_layer_norm_with_affine": true,
11
  "auto_map": {
12
  "AutoConfig": "configuration_llada.LLaDAConfig",
13
+ "AutoModel": "modeling_llada.LLaDAModelLM",
14
+ "AutoModelForCausalLM": "modeling_llada.LLaDAModelLM"
15
  },
16
  "bias_for_layer_norm": false,
17
  "block_group_size": 1,
 
19
  "d_model": 768,
20
  "dtype": "float32",
21
  "embedding_dropout": 0.0,
22
+ "embedding_size": 50259,
23
  "eos_token_id": 9,
24
  "flash_attention": false,
25
  "include_bias": false,
 
50
  "scale_logits": false,
51
  "transformers_version": "4.57.1",
52
  "use_cache": false,
53
+ "vocab_size": 50259,
54
  "weight_tying": false
55
  }