AdamF92 committed on
Commit
50c7eab
·
verified ·
1 Parent(s): e1a2b5c

Epoch 0 - Val loss 0.2619

Browse files
Files changed (3) hide show
  1. README.md +12 -0
  2. config.json +44 -0
  3. model.safetensors +3 -0
README.md ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: apache-2.0
3
+ pipeline_tag: text-generation
4
+ tags:
5
+ - model_hub_mixin
6
+ - pytorch_model_hub_mixin
7
+ ---
8
+
9
+ This model has been pushed to the Hub using the [PytorchModelHubMixin](https://huggingface.co/docs/huggingface_hub/package_reference/mixins#huggingface_hub.PyTorchModelHubMixin) integration:
10
+ - Code: [More Information Needed]
11
+ - Paper: [More Information Needed]
12
+ - Docs: [More Information Needed]
config.json ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "att_groups": 4,
3
+ "att_heads": 16,
4
+ "att_query_groups": 8,
5
+ "cross_att_type": "sqa",
6
+ "dense_layer_dim": 1536,
7
+ "embed_dim": 512,
8
+ "ff_activation": "silu",
9
+ "ff_dim": 192,
10
+ "ff_dropout": 0.0,
11
+ "final_stateless_layers_config": [
12
+ "moe",
13
+ "moe"
14
+ ],
15
+ "head_norm_type": "rms_norm",
16
+ "moe_bias_mode": "global",
17
+ "moe_grouped_gemm": true,
18
+ "moe_shared_experts_bias_mode": "global",
19
+ "moe_top_k": 10,
20
+ "moe_use_cutlass_grouped_gemm": true,
21
+ "moe_use_weighted_shared_experts": false,
22
+ "num_experts": 384,
23
+ "num_layers": 21,
24
+ "num_shared_experts": 2,
25
+ "rope_base": 100000,
26
+ "router_amp": true,
27
+ "self_att_type": "sqa",
28
+ "seq_len": 8192,
29
+ "shared_expert_dim": 384,
30
+ "stateless_layers_config": [
31
+ "dense",
32
+ "moe"
33
+ ],
34
+ "stm_size": 4096,
35
+ "use_attention_output_bias": false,
36
+ "use_flash_attention": true,
37
+ "use_gated": true,
38
+ "use_gated_attention": true,
39
+ "use_gated_cross_attention": false,
40
+ "use_head_norm": true,
41
+ "use_moe": true,
42
+ "use_vectorized_moe": true,
43
+ "vocab_size": 65536
44
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0af3c8516bbd486450d409cd4fe041db5036e2ff9c554eb93b9122b5fef7f8e7
3
+ size 5772284720