FL33TW00D committed on
Commit
619b3fb
·
verified ·
1 Parent(s): e63a8b3

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. best-model.pt +3 -0
  2. collator_config.yml +18 -0
  3. model_config.yml +47 -0
  4. vocab.json +0 -0
best-model.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9fef62359c4bc06dc7af92eb99c4269e42fd662803bf6e3ff5dda99b7ded41f6
3
+ size 34080866159
collator_config.yml ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ do_padding: True
2
+ pad_value: -2
3
+ do_mlm: True
4
+ do_binning: True
5
+ mlm_probability: 0.5
6
+ mask_value: -1
7
+ max_length: 2048
8
+ sampling: True
9
+ data_style: "both"
10
+ num_bins: 51
11
+ right_binning: False
12
+ use_junk_tokens: False
13
+ use_gp_token: True
14
+ gp_to_id_path:
15
+ remote: "s3://fleetwood/orion/gp_to_id.json"
16
+ local: "gp_to_id.json"
17
+ keep_first_n_tokens: 2
18
+ pad_token_id: 0
model_config.yml ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: tahoe_x1
2
+ d_model: 2560
3
+ n_layers: 32
4
+ init_device: cpu
5
+ expansion_ratio: 4
6
+ standard_scale_outputs: False
7
+ transformer_activation: gelu
8
+ n_heads: 20
9
+ norm_scheme: "pre"
10
+ use_generative_training: True
11
+ use_cell_conditioned_generation: False
12
+ use_glu: False
13
+ cell_emb_style: cls
14
+ attn_config:
15
+ attn_impl: flash
16
+ use_attn_mask: False
17
+ attn_type: "grouped_query_attention"
18
+ kv_nheads: 20
19
+ attn_pdrop: 0.0
20
+ norm_config:
21
+ norm_type: "layernorm"
22
+ eps: 1.0e-5
23
+ expression_encoder:
24
+ input_emb_style: "continuous"
25
+ dropout: 0.1
26
+ max_value: 512
27
+ activation: gelu
28
+ use_norm: True
29
+ gene_encoder:
30
+ use_norm: True
31
+ mvc:
32
+ arch_style: "inner product"
33
+ query_activation: "sigmoid"
34
+ scaled_dot_product: True
35
+ expression_decoder:
36
+ n_outputs: 1
37
+ n_layers: 1
38
+ activation: "gelu"
39
+ gp_encoder:
40
+ gp_path:
41
+ remote: "s3://fleetwood/orion/gp_features.pt"
42
+ local: "gp_features.pt"
43
+ activation: "gelu"
44
+ padding_idx: 0
45
+ freeze: False
46
+ vocab_size: 62721
47
+ precision: amp_bf16
vocab.json ADDED
The diff for this file is too large to render. See raw diff