beelzeebuub committed on
Commit
38cc7ca
·
1 Parent(s): 1ca266a

move custom ordinalRegressionMetric to space

Browse files
Files changed (1) hide show
  1. pipeline.py +0 -40
pipeline.py CHANGED
@@ -6,46 +6,6 @@ import json
6
  import numpy as np
7
  from fastai.learner import load_learner
8
 
9
class OrdinalRegressionMetric(Metric):
    """Normalized ordinal-distance metric for a fastai Learner.

    Accumulates the summed absolute distance between predicted and true
    class indices across batches, normalized by the per-batch spread of
    the targets, and reports the reciprocal — so larger values indicate
    predictions closer (in ordinal terms) to the targets.
    """

    def __init__(self):
        super().__init__()
        # Running numerator: summed |pred - target| over all batches seen.
        self.total = 0
        # Running denominator: summed per-batch target spread (max - min).
        self.count = 0

    def accumulate(self, learn):
        """Update the running sums from the learner's latest batch.

        Args:
            learn: fastai Learner. `learn.pred` is assumed to hold class
                scores of shape (batch, n_classes) — implied by the
                argmax over dim=1 below; TODO confirm — and `learn.y`
                the integer ordinal targets.
        """
        preds, targs = learn.pred, learn.y

        # Collapse class scores to predicted class indices.
        preds_numeric = torch.argmax(preds, dim=1)
        targs_numeric = targs

        # Summed ordinal error for the batch. The original computed
        # sqrt(x**2), which for real values is simply abs(x).
        abs_diff = torch.sum(torch.abs(preds_numeric - targs_numeric))

        # Per-batch normalizer: spread of the observed targets.
        max_diff = torch.abs(torch.max(targs_numeric) - torch.min(targs_numeric))

        self.total += abs_diff
        self.count += max_diff

    @property
    def value(self):
        """Return count/total, i.e. the reciprocal of the normalized error.

        Returns 0.0 when nothing has been accumulated. NOTE(review):
        when predictions are perfect (total == 0) this yields an
        infinite tensor, matching the original 1/(total/count) form.
        """
        if self.count == 0:
            return 0.0  # no batches seen (or zero target spread throughout)
        # Algebraically identical to the original 1 / (total / count).
        return self.count / self.total
49
  class PreTrainedPipeline():
50
  def __init__(self, path=""):
51
  # IMPLEMENT_THIS
 
6
  import numpy as np
7
  from fastai.learner import load_learner
8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
  class PreTrainedPipeline():
10
  def __init__(self, path=""):
11
  # IMPLEMENT_THIS