ccmaymay committed
Commit da4b785 · verified · 1 parent: 699708f

Revert to original behavior, specifying no upstream revision by default

Files changed (3):
  1. config.json +1 -1
  2. config.py +3 -1
  3. model.py +2 -1
config.json CHANGED
@@ -8,7 +8,7 @@
   },
   "embedding_size": 512,
   "k_bucket_size": 1024,
-  "upstream_transformer_revision": "48bffbbd27bf028ecdd0cd55abb51236ec12ef1b",
+  "upstream_transformer_revision": null,
   "model_type": "LUAR",
   "q_bucket_size": 512,
   "torch_dtype": "float32",
config.py CHANGED
@@ -1,4 +1,6 @@
 
+from typing import Optional
+
 from transformers import PretrainedConfig
 
 class LUARConfig(PretrainedConfig):
@@ -9,7 +11,7 @@ class LUARConfig(PretrainedConfig):
         use_memory_efficient_attention=False,
         q_bucket_size=512,
         k_bucket_size=1024,
-        upstream_transformer_revision=None,
+        upstream_transformer_revision: Optional[str] = None,
         **kwargs,
     ):
         self.embedding_size = embedding_size
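A minimal usage sketch of the annotated constructor, assuming LUARConfig stores the argument as a same-named attribute (that assignment sits outside this hunk):

from config import LUARConfig

# New default: no upstream pin.
cfg = LUARConfig()

# Pinning stays opt-in via the now Optional[str]-annotated parameter:
cfg_pinned = LUARConfig(
    upstream_transformer_revision="48bffbbd27bf028ecdd0cd55abb51236ec12ef1b"
)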
model.py CHANGED
@@ -1,6 +1,7 @@
 
 import math
 from functools import partial
+from typing import Optional
 
 import torch
 import torch.nn as nn
@@ -147,7 +148,7 @@ class LUAR(PreTrainedModel):
         )
         self.linear = nn.Linear(self.hidden_size, config.embedding_size)
 
-    def create_transformer(self, revision=None):
+    def create_transformer(self, revision: Optional[str] = None):
         """Creates the Transformer backbone.
         """
         kwargs = {"revision": revision} if revision else {}
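The kwargs guard is what makes the None default a true revert: when no revision is set, no revision argument reaches the upstream loader at all. A standalone sketch of the same pattern (the backbone id and helper name are illustrative, not taken from this commit):

from typing import Optional

from transformers import AutoModel

def load_backbone(name: str, revision: Optional[str] = None):
    # Forward `revision` only when it is set, so revision=None behaves
    # exactly like never passing the argument.
    kwargs = {"revision": revision} if revision else {}
    return AutoModel.from_pretrained(name, **kwargs)

backbone = load_backbone("distilroberta-base")                          # default branch
backbone_pinned = load_backbone("distilroberta-base", revision="main")  # pinned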