weicaijaden committed on
Commit 131ca4f · verified · 1 Parent(s): 7ee1858

Update README.md

Files changed (1)
  1. README.md +4 -4
README.md CHANGED
@@ -29,7 +29,7 @@ MutBERT-Multi is a transformer-based genome foundation model trained on 100 mult
 ```python
 from transformers import AutoTokenizer, AutoModel
 
-model_name = "JadenLong/MutBERT-Multi"
+model_name = "CompBioDSA/MutBERT-Multi"
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 model = AutoModel.from_pretrained(model_name, trust_remote_code=True)
 ```
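For a quick sanity check after the rename, the snippet from this hunk runs end to end as below. The DNA string is a made-up placeholder, and `trust_remote_code=True` is required because the model ships custom code, as the README's own snippet shows:

```python
from transformers import AutoTokenizer, AutoModel

model_name = "CompBioDSA/MutBERT-Multi"  # new repo id from this commit
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModel.from_pretrained(model_name, trust_remote_code=True)

# Placeholder DNA sequence, purely illustrative.
inputs = tokenizer("ATCGGCTA", return_tensors="pt")
print(inputs["input_ids"].shape)
```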
@@ -44,7 +44,7 @@ import torch.nn.functional as F
 
 from transformers import AutoTokenizer, AutoModel
 
-model_name = "JadenLong/MutBERT-Multi"
+model_name = "CompBioDSA/MutBERT-Multi"
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 model = AutoModel.from_pretrained(model_name, trust_remote_code=True)
 
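The surrounding README section (visible in the hunk context: `import torch.nn.functional as F` and `print(embedding_max.shape) # expect to be 768`) computes pooled sequence embeddings. A minimal sketch of that step with the new repo id, assuming the custom model returns a standard output object with `last_hidden_state` and a hidden size of 768 as the README's comment implies:

```python
import torch
from transformers import AutoTokenizer, AutoModel

model_name = "CompBioDSA/MutBERT-Multi"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModel.from_pretrained(model_name, trust_remote_code=True)

inputs = tokenizer("ATCGGCTA", return_tensors="pt")  # placeholder sequence
with torch.no_grad():
    hidden = model(**inputs).last_hidden_state  # (1, seq_len, hidden)

# Pool over the sequence dimension to get one fixed-size vector.
embedding_mean = hidden.mean(dim=1).squeeze(0)
embedding_max = hidden.max(dim=1).values.squeeze(0)
print(embedding_max.shape)  # expect to be 768, per the README's comment
```

Mean pooling averages token states across the sequence; max pooling keeps the strongest activation per dimension.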
@@ -70,7 +70,7 @@ print(embedding_max.shape) # expect to be 768
 ```python
 from transformers import AutoModelForSequenceClassification
 
-model_name = "JadenLong/MutBERT-Multi"
+model_name = "CompBioDSA/MutBERT-Multi"
 model = AutoModelForSequenceClassification.from_pretrained(model_name, trust_remote_code=True, num_labels=2)
 ```
 
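Note that `num_labels=2` attaches a freshly initialized classification head, so a forward pass works immediately but the logits are untrained until fine-tuning. A hedged sketch (the input sequence is again a placeholder):

```python
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_name = "CompBioDSA/MutBERT-Multi"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(
    model_name, trust_remote_code=True, num_labels=2
)

# Untrained head: logits are meaningful only after fine-tuning.
inputs = tokenizer("ATCGGCTA", return_tensors="pt")  # placeholder sequence
with torch.no_grad():
    logits = model(**inputs).logits
print(logits.shape)  # (1, 2) for two labels
```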
@@ -81,7 +81,7 @@ Allowed types for RoPE scaling are: `linear` and `dynamic`. To extend the model'
 If you want to scale your model context by 2x:
 
 ```python
-model_name = "JadenLong/MutBERT-Multi"
+model_name = "CompBioDSA/MutBERT-Multi"
 model = AutoModel.from_pretrained(model_name,
                                   trust_remote_code=True,
                                   rope_scaling={'type': 'dynamic','factor': 2.0}
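The hunk is clipped before the call is closed. Completing it, a sketch of the 2x dynamic-scaling call; the `rope_scaling` dict is taken verbatim from the diff, and the closing parenthesis is inferred:

```python
from transformers import AutoModel

model_name = "CompBioDSA/MutBERT-Multi"

# 'dynamic' (NTK-style) scaling with factor 2.0 doubles the usable context;
# per the README, 'linear' is the other allowed type.
model = AutoModel.from_pretrained(
    model_name,
    trust_remote_code=True,
    rope_scaling={"type": "dynamic", "factor": 2.0},
)
```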