row56 committed on
Commit
b367dc5
·
verified ·
1 Parent(s): 6f412c0

Upload proto_model/configuration_proto.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. proto_model/configuration_proto.py +53 -0
proto_model/configuration_proto.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from transformers import PretrainedConfig
2
+
class ProtoConfig(PretrainedConfig):
    """Configuration class for the Proto model.

    Stores the hyperparameters used to instantiate a Proto model: the
    backbone checkpoint, classification head size, optimizer learning
    rates, scheduler steps, and the various prototype/attention options.
    Extra keyword arguments are forwarded to ``PretrainedConfig``.
    """

    model_type = "proto"

    def __init__(
        self,
        pretrained_model_name_or_path="xlm-roberta-base",
        num_classes=10,
        label_order_path=None,
        use_sigmoid=False,
        use_cuda=True,
        lr_prototypes=5e-2,
        lr_features=2e-6,
        lr_others=2e-2,
        num_training_steps=5000,
        num_warmup_steps=1000,
        loss='BCE',
        save_dir='output',
        use_attention=True,
        dot_product=False,
        normalize=None,
        final_layer=False,
        reduce_hidden_size=None,
        use_prototype_loss=False,
        prototype_vector_path=None,
        attention_vector_path=None,
        eval_buckets=None,
        seed=7,
        **kwargs,
    ):
        # Let the Hugging Face base class consume its own kwargs first.
        super().__init__(**kwargs)

        # Backbone and classification head.
        self.pretrained_model_name_or_path = pretrained_model_name_or_path
        self.num_classes = num_classes
        self.label_order_path = label_order_path
        self.use_sigmoid = use_sigmoid
        self.use_cuda = use_cuda

        # Optimizer: separate learning rates for prototypes, the feature
        # encoder, and all remaining parameters.
        self.lr_prototypes = lr_prototypes
        self.lr_features = lr_features
        self.lr_others = lr_others

        # Scheduler and training loop settings.
        self.num_training_steps = num_training_steps
        self.num_warmup_steps = num_warmup_steps
        self.loss = loss
        self.save_dir = save_dir

        # Prototype / attention head options.
        self.use_attention = use_attention
        self.dot_product = dot_product
        self.normalize = normalize
        self.final_layer = final_layer
        self.reduce_hidden_size = reduce_hidden_size
        self.use_prototype_loss = use_prototype_loss
        self.prototype_vector_path = prototype_vector_path
        self.attention_vector_path = attention_vector_path

        # Evaluation and reproducibility.
        self.eval_buckets = eval_buckets
        self.seed = seed