{
  "architectures": [
    "MiniCPMForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "openbmb/MiniCPM4-8B--configuration_minicpm.MiniCPMConfig",
    "AutoModel": "openbmb/MiniCPM4-8B--modeling_minicpm.MiniCPMModel",
    "AutoModelForCausalLM": "openbmb/MiniCPM4-8B--modeling_minicpm.MiniCPMForCausalLM",
    "AutoModelForSeq2SeqLM": "openbmb/MiniCPM4-8B--modeling_minicpm.MiniCPMForCausalLM",
    "AutoModelForSequenceClassification": "openbmb/MiniCPM4-8B--modeling_minicpm.MiniCPMForSequenceClassification"
  },
  "bos_token_id": 1,
  "dim_model_base": 32,
  "eos_token_id": [
    2,
    73440
  ],
  "hidden_act": "silu",
  "hidden_size": 64,
  "initializer_range": 0.1,
  "intermediate_size": 128,
  "max_position_embeddings": 32768,
  "model_type": "minicpm",
  "mup_denominator": 32,
  "num_attention_heads": 2,
  "num_hidden_layers": 2,
  "num_key_value_heads": 1,
  "pad_token_id": 2,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-06,
  "rope_scaling": {
    "long_factor": [
      0.9977997200264581,
      1.014658295992452,
      1.0349680404997148,
      1.059429246056193,
      1.0888815016813513,
      1.1243301355211495,
      1.166977103606075,
      1.2182568066927284,
      1.2798772354275727,
      1.3538666751582975,
      1.4426259039919596,
      1.5489853358570191,
      1.6762658237220625,
      1.8283407612492941,
      2.0096956085876183,
      2.225478927469756
    ],
    "original_max_position_embeddings": 32768,
    "rope_type": "longrope",
    "short_factor": [
      0.9977997200264581,
      1.014658295992452,
      1.0349680404997148,
      1.059429246056193,
      1.0888815016813513,
      1.1243301355211495,
      1.166977103606075,
      1.2182568066927284,
      1.2798772354275727,
      1.3538666751582975,
      1.4426259039919596,
      1.5489853358570191,
      1.6762658237220625,
      1.8283407612492941,
      2.0096956085876183,
      2.225478927469756
    ]
  },
  "rope_theta": 10000.0,
  "scale_depth": 1.4,
  "scale_emb": 12,
  "sparse_config": null,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.51.3",
  "use_cache": true,
  "vocab_size": 73448
}
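
The auto_map entries above route the transformers Auto* classes to custom model code hosted in the openbmb/MiniCPM4-8B repository, so loading this config requires opting into remote code execution. A minimal sketch, assuming the Hugging Face transformers library is installed and this config.json sits in a local directory (the path below is hypothetical):

from transformers import AutoConfig

# trust_remote_code=True is needed because auto_map points at
# configuration_minicpm.MiniCPMConfig in the openbmb/MiniCPM4-8B repo,
# not at a config class built into transformers.
config = AutoConfig.from_pretrained(
    "path/to/this/directory",  # hypothetical: folder containing this config.json
    trust_remote_code=True,
)

print(config.model_type)    # "minicpm"
print(config.hidden_size)   # 64
print(config.num_hidden_layers)  # 2

Note the tiny dimensions (hidden_size 64, 2 layers, 2 attention heads): this appears to be a miniature variant of the MiniCPM4-8B architecture, e.g. for testing, rather than the full-size model's config.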