Raziel1234 committed on
Commit
059cc4a
·
verified ·
1 Parent(s): 377a941

Upload 4 files

Browse files
config (10).json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "MCGPTForCausalLM"
4
+ ],
5
+ "dtype": "float32",
6
+ "hidden_size": 256,
7
+ "max_position_embeddings": 512,
8
+ "model_type": "mcgpt",
9
+ "nhead": 8,
10
+ "num_experts": 4,
11
+ "num_layers": 4,
12
+ "transformers_version": "5.2.0",
13
+ "vocab_size": 33152
14
+ }
model (9).safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c3441781a28ad9511c43f80a08f1383b166add316ec773ef71c2f84de09ccf98
3
+ size 89475280
tokenizer (5).json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config (11).json ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "backend": "tokenizers",
3
+ "bos_token": "<s>",
4
+ "clean_up_tokenization_spaces": false,
5
+ "eos_token": "</s>",
6
+ "is_local": false,
7
+ "legacy": true,
8
+ "model_max_length": 1000000000000000019884624838656,
9
+ "pad_token": "</s>",
10
+ "sp_model_kwargs": {},
11
+ "spaces_between_special_tokens": false,
12
+ "tokenizer_class": "TokenizersBackend",
13
+ "unk_token": "<unk>",
14
+ "use_default_system_prompt": false
15
+ }