icxcn committed on
Commit
f5611af
·
verified ·
1 Parent(s): abf49e3

Upload config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. config.json +24 -11
config.json CHANGED
@@ -1,16 +1,29 @@
1
  {
2
  "model_type": "hydra-bitnet",
3
- "vocab_size": 32000,
4
- "hidden_size": 192,
5
- "num_hidden_layers": 4,
 
6
  "num_experts": 4,
7
  "top_k_experts": 2,
8
- "num_compression_classes": 4,
9
- "num_security_classes": 2,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
  "max_position_embeddings": 512,
11
- "quantization_bits": 1.58,
12
- "architectures": [
13
- "HydraBitNetForSequenceClassification"
14
- ],
15
- "torch_dtype": "float32"
16
- }
 
1
  {
2
  "model_type": "hydra-bitnet",
3
+ "architecture": "BitNetMoE",
4
+ "vocab_size": 256,
5
+ "hidden_size": 256,
6
+ "num_hidden_layers": 6,
7
  "num_experts": 4,
8
  "top_k_experts": 2,
9
+ "intermediate_size": 512,
10
+ "quantization": "1.58-bit",
11
+ "quantization_config": {
12
+ "bits": 1.58,
13
+ "weight_values": [-1, 0, 1],
14
+ "activation_bits": 8
15
+ },
16
+ "task_heads": {
17
+ "compression": {
18
+ "num_labels": 4,
19
+ "labels": ["NONE", "BPE", "BROTLI", "ZLIB"]
20
+ },
21
+ "security": {
22
+ "num_labels": 2,
23
+ "labels": ["SAFE", "UNSAFE"]
24
+ }
25
+ },
26
  "max_position_embeddings": 512,
27
+ "torch_dtype": "float32",
28
+ "transformers_version": "4.40.0"
29
+ }