{
  "architectures": [
    "DaisyForCausalLM"
  ],
  "attn_all_layers": true,
  "attn_impl": "standard",
  "bos_token_id": 50256,
  "dtype": "float32",
  "eos_token_id": 50256,
  "head_dim": 128,
  "kd_alpha": null,
  "kd_eps": null,
  "kd_temperature": null,
  "max_position_embeddings": 131072,
  "model_dim": 1280,
  "model_type": "daisy",
  "num_heads": 10,
  "num_layers": 20,
  "padded_embeddings": false,
  "skip_mix_mode": "linear",
  "tokenizer_name": "jonathanmiddleton/daisy",
  "transformers_version": "5.0.0rc1",
  "use_tied_embeddings": false,
  "use_value_embeddings": true,
  "vocab_size": 50259,
  "window_size": 2048
}
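
For reference, a minimal sketch of how this config might be loaded and sanity-checked with plain Python. The file name config.json and the specific consistency checks below are assumptions for illustration, not part of the file itself; the numeric relationships (128 * 10 == 1280, vocab_size 50259, window_size 2048 vs. max_position_embeddings 131072) come straight from the values above.

import json

# Load the raw config file (the path is an assumption for illustration).
with open("config.json") as f:
    cfg = json.load(f)

# Per-head width times head count should equal the model width:
# 128 * 10 == 1280, matching model_dim above.
assert cfg["head_dim"] * cfg["num_heads"] == cfg["model_dim"]

# bos/eos token ids (50256) must fall inside the vocabulary (50259 entries).
assert cfg["bos_token_id"] < cfg["vocab_size"]
assert cfg["eos_token_id"] < cfg["vocab_size"]

# The attention window (2048) is far smaller than the maximum context
# (131072), so long contexts would rely on windowed rather than full attention.
assert cfg["window_size"] <= cfg["max_position_embeddings"]

print(f"{cfg['num_layers']} layers, {cfg['num_heads']} heads, "
      f"model_dim {cfg['model_dim']}, vocab {cfg['vocab_size']}")

Note that model_type "daisy" is not a stock transformers architecture, so loading this config through AutoConfig.from_pretrained would presumably require trust_remote_code=True; that requirement is an inference from the custom model_type, not something stated in the file.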