Taykhoom committed on
Commit
cecd1f2
·
verified ·
1 Parent(s): 93b0213

Upload folder using huggingface_hub

Browse files
Files changed (1) hide show
  1. README.md +8 -5
README.md CHANGED
@@ -2,6 +2,7 @@
2
  license: other
3
  ---
4
  [![License](https://img.shields.io/badge/license-GenBio_AI_Community_License-orange)](https://github.com/genbio-ai/ModelGenerator/blob/main/LICENSE)
 
5
  # How to use
6
  ```python
7
  from transformers import AutoModel, AutoTokenizer
@@ -16,6 +17,11 @@ model = AutoModel.from_pretrained(
16
  trust_remote_code=True,
17
  base_model="genbio-ai/AIDO.RNA-650M-CDS",
18
  )
 
 
 
 
 
19
  ```
20
 
21
  # Model Variants
@@ -70,12 +76,9 @@ model = AutoModel.from_pretrained(
70
  )
71
 
72
  dna = "ACGTAGCATCGGATCTATCTATCGACACTTGGTTATCGATCTACGAGCATCTCGTTAGC"
73
- inputs = tokenizer(dna, add_special_tokens=True, return_special_tokens_mask=True, return_tensors="pt")
74
 
75
- embedding = model(
76
- input_ids=inputs["input_ids"],
77
- attention_mask=inputs["attention_mask"],
78
- ).last_hidden_state # [1, sequence_length, 1280]
79
 
80
  embedding_mean = torch.mean(embedding, dim=1)
81
  print(torch.mean(embedding_mean)) # Outputs tensor(0.0005, grad_fn=<MeanBackward0>)
 
2
  license: other
3
  ---
4
  [![License](https://img.shields.io/badge/license-GenBio_AI_Community_License-orange)](https://github.com/genbio-ai/ModelGenerator/blob/main/LICENSE)
5
+
6
  # How to use
7
  ```python
8
  from transformers import AutoModel, AutoTokenizer
 
17
  trust_remote_code=True,
18
  base_model="genbio-ai/AIDO.RNA-650M-CDS",
19
  )
20
+
21
+ dna = "ACGTAGCATCGGATCTATCTATCGACACTTGGTTATCGATCTACGAGCATCTCGTTAGC"
22
+ inputs = tokenizer(dna, add_special_tokens=True, return_tensors="pt")
23
+
24
+ embedding = model(**inputs).last_hidden_state # [1, sequence_length, 1280]
25
  ```
26
 
27
  # Model Variants
 
76
  )
77
 
78
  dna = "ACGTAGCATCGGATCTATCTATCGACACTTGGTTATCGATCTACGAGCATCTCGTTAGC"
79
+ inputs = tokenizer(dna, add_special_tokens=True, return_tensors="pt")
80
 
81
+ embedding = model(**inputs).last_hidden_state # [1, sequence_length, 1280]
 
 
 
82
 
83
  embedding_mean = torch.mean(embedding, dim=1)
84
  print(torch.mean(embedding_mean)) # Outputs tensor(0.0005, grad_fn=<MeanBackward0>)