beelzeebuub committed on
Commit
173c796
·
1 Parent(s): a225e3d

put custom ordinalRegressionMetric function in space

Browse files
Files changed (1) hide show
  1. app.py +40 -0
app.py CHANGED
@@ -1,3 +1,43 @@
1
  import gradio as gr
2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3
  gr.load("models/beelzeebuub/FJModel").launch()
 
1
import gradio as gr
import torch

# NOTE(review): `Metric` is referenced below but never imported — this app
# needs e.g. `from fastai.metrics import Metric` to run. Confirm the exact
# import path against the training code before adding it.
2
 
3
+ class OrdinalRegressionMetric(Metric):
4
+ def __init__(self):
5
+ super().__init__()
6
+ self.total = 0
7
+ self.count = 0
8
+
9
+ def accumulate(self, learn):
10
+ # Get predictions and targets
11
+ preds, targs = learn.pred, learn.y
12
+
13
+ # Your custom logic to convert predictions and targets to numeric values
14
+ preds_numeric = torch.argmax(preds, dim=1)
15
+ targs_numeric = targs
16
+
17
+ #print("preds_numeric: ",preds_numeric)
18
+ #print("targs_numeric: ",targs_numeric)
19
+
20
+ # Calculate the metric (modify this based on your specific needs)
21
+ squared_diff = torch.sum(torch.sqrt((preds_numeric - targs_numeric)**2))
22
+
23
+ # Normalize by the maximum possible difference
24
+ max_diff = torch.sqrt((torch.max(targs_numeric) - torch.min(targs_numeric))**2)
25
+
26
+ #print("squared_diff: ",squared_diff)
27
+ #print("max_diff: ",max_diff)
28
+
29
+ # Update the metric value
30
+ self.total += squared_diff
31
+ #print("self.total: ",self.total)
32
+ self.count += max_diff
33
+ #print("self.count: ",self.count)
34
+ @property
35
+ def value(self):
36
+ if self.count == 0:
37
+ return 0.0 # or handle this case appropriately
38
+ #print("self.total / self.count: ", (self.total / self.count))
39
+ # Calculate the normalized metric value
40
+ metric_value = 1/(self.total / self.count)
41
+ return metric_value
42
+
43
  gr.load("models/beelzeebuub/FJModel").launch()