File size: 510 Bytes
775225a
{
  "architectures": ["BVVForCausalLM"],
  "auto_map": {
    "AutoConfig": "model_256_float.BVVConfig",
    "AutoModel": "model_256_float.BVVForCausalLM",
    "AutoModelForCausalLM": "model_256_float.BVVForCausalLM"
  },
  "model_type": "model_256_float",
  "vocab_size": 65536,
  "block_size": 1024,
  "n_embd": 256,
  "d_model": 1024,
  "n_layer": 16,
  "n_head": 32,
  "pad_id": 57344,
  "bos_token": "<s>",
  "eos_token": "</s>",
  "unk_token": "<unk>",
  "pad_token": "<pad>",
  "torch_dtype": "float32"
}
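Because the config declares an `auto_map` pointing at custom classes in `model_256_float.py` (`BVVConfig`, `BVVForCausalLM`), the model must be loaded with `trust_remote_code=True` so Transformers can resolve those classes from the repository. A minimal loading sketch follows; the repo id `username/model_256_float` is a placeholder assumption, not the actual repository name.

```python
# Minimal sketch: loading a model whose config.json maps Auto classes to
# custom code via "auto_map". The repo id below is a hypothetical placeholder.
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "username/model_256_float"  # assumption: replace with the real repo id

# trust_remote_code=True is required so that auto_map can resolve
# model_256_float.BVVConfig / model_256_float.BVVForCausalLM from the repo.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

# Values taken from the config shown above.
print(config.vocab_size, config.n_layer, config.n_head)  # 65536 16 32
```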