agkphysics committed
Commit 3db0544 · unverified · 1 Parent(s): ef85f09

Add app and module.

Files changed (5)
  1. README.md +64 -6
  2. app.py +6 -0
  3. ccc.py +107 -0
  4. requirements.txt +2 -0
  5. tests.py +5 -0
README.md CHANGED
@@ -1,12 +1,70 @@
  ---
- title: Ccc
- emoji: 🐠
- colorFrom: gray
- colorTo: indigo
+ title: ccc
+ tags:
+ - evaluate
+ - metric
+ description: "Concordance correlation coefficient"
  sdk: gradio
- sdk_version: 4.16.0
+ sdk_version: 3.19.1
  app_file: app.py
  pinned: false
  ---
 
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+ # Metric Card for CCC
+
+ ## Metric Description
+ The concordance correlation coefficient measures the agreement between two sets of
+ values. It is often used as a measure of inter-rater agreement when ratings have
+ continuous values.
+
+ ## How to Use
+ The inputs are two sequences of floating point values. For example:
+ ```python
+ ccc_metric = evaluate.load("agkphysics/ccc")
+ results = ccc_metric.compute(references=[0.2, 0.1], predictions=[0.1, 0.2])
+ ```
+
+ ### Inputs
+ - **predictions** (list of float): model predictions
+ - **references** (list of float): reference labels
+
+ ### Output Values
+ - `ccc`: the concordance correlation coefficient. This is a value between -1 (perfect
+ anti-agreement) and 1 (perfect agreement), with 0 indicating no agreement.
+
+ ### Examples
+ ```python
+ >>> ccc_metric = evaluate.load("agkphysics/ccc")
+ >>> results = ccc_metric.compute(references=[0.2, 0.1], predictions=[0.1, 0.2])
+ >>> print(results)
+ {'ccc': -1.0}
+ >>> results = ccc_metric.compute(references=[0.1, 0.2], predictions=[0.1, 0.2])
+ >>> print(results)
+ {'ccc': 1.0}
+ >>> results = ccc_metric.compute(references=[0.1, 0.3], predictions=[0.1, 0.2])
+ >>> print(results)
+ {'ccc': 0.666666641831399}
+ ```
+
+ ## Limitations and Bias
+ *Note any known limitations or biases that the metric has, with links and references if possible.*
+
+ ## Citation
+ ```bibtex
+ @article{linConcordanceCorrelationCoefficient1989,
+   title = {A {{Concordance Correlation Coefficient}} to {{Evaluate Reproducibility}}},
+   author = {Lin, Lawrence I-Kuei},
+   year = {1989},
+   journal = {Biometrics},
+   volume = {45},
+   number = {1},
+   pages = {255--268},
+   publisher = {{International Biometric Society}},
+   issn = {0006-341X},
+   url = {https://www.jstor.org/stable/2532051},
+   doi = {10.2307/2532051}
+ }
+ ```
+
+ ## Further References
+ Wikipedia: https://en.wikipedia.org/wiki/Concordance_correlation_coefficient
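
For reference, the quantity this metric card describes (and that `_compute` in `ccc.py` below evaluates) is Lin's concordance correlation coefficient. In the usual notation, with σ_xy the covariance, σ_x², σ_y² the variances, and μ_x, μ_y the means of the predictions and references, it is:

```latex
\rho_c = \frac{2\sigma_{xy}}{\sigma_x^2 + \sigma_y^2 + (\mu_x - \mu_y)^2}
```

The (μ_x − μ_y)² term penalises any systematic offset between the two sequences, which is what distinguishes the CCC from a plain Pearson correlation.
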
app.py ADDED
@@ -0,0 +1,6 @@
+ import evaluate
+ from evaluate.utils import launch_gradio_widget
+
+
+ module = evaluate.load("agkphysics/ccc")
+ launch_gradio_widget(module)
ccc.py ADDED
@@ -0,0 +1,107 @@
+ # Copyright (C) 2024 Aaron Keesing
+ #
+ # Permission is hereby granted, free of charge, to any person obtaining
+ # a copy of this software and associated documentation files (the
+ # “Software”), to deal in the Software without restriction, including
+ # without limitation the rights to use, copy, modify, merge, publish,
+ # distribute, sublicense, and/or sell copies of the Software, and to
+ # permit persons to whom the Software is furnished to do so, subject to
+ # the following conditions:
+ #
+ # The above copyright notice and this permission notice shall be
+ # included in all copies or substantial portions of the Software.
+ #
+ # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND,
+ # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+ """Concordance correlation coefficient"""
+
+ import datasets
+ import evaluate
+ import numpy as np
+
+ _CITATION = """\
+ @article{linConcordanceCorrelationCoefficient1989,
+   title = {A {{Concordance Correlation Coefficient}} to {{Evaluate Reproducibility}}},
+   author = {Lin, Lawrence I-Kuei},
+   year = {1989},
+   journal = {Biometrics},
+   volume = {45},
+   number = {1},
+   pages = {255--268},
+   publisher = {{International Biometric Society}},
+   issn = {0006-341X},
+   url = {https://www.jstor.org/stable/2532051},
+   doi = {10.2307/2532051}
+ }
+
+ """
+
+ _DESCRIPTION = """\
+ A metric to measure the degree of agreement between continuous-valued evaluations from two raters.
+ """
+
+
+ # TODO: Add description of the arguments of the module here
+ _KWARGS_DESCRIPTION = """
+ Calculates the CCC between predictions and references
+ Args:
+     predictions: list of predictions to score. Each prediction
+         should be a floating point value.
+     references: list of references, one for each prediction. Each
+         reference should be a floating point value.
+ Returns:
+     ccc: the concordance correlation coefficient, -1 <= ccc <= 1
+ Examples:
+     >>> ccc_metric = evaluate.load("agkphysics/ccc")
+     >>> results = ccc_metric.compute(references=[0.2, 0.1], predictions=[0.1, 0.2])
+     >>> print(results)
+     {'ccc': -1.0}
+
+     >>> results = ccc_metric.compute(references=[0.1, 0.2], predictions=[0.1, 0.2])
+     >>> print(results)
+     {'ccc': 1.0}
+
+     >>> results = ccc_metric.compute(references=[0.1, 0.3], predictions=[0.1, 0.2])
+     >>> print(results)
+     {'ccc': 0.666666641831399}
+ """
+
+
+ @evaluate.utils.file_utils.add_start_docstrings(_DESCRIPTION, _KWARGS_DESCRIPTION)
+ class CCC(evaluate.Metric):
+     """Computes the CCC, concordance correlation coefficient."""
+
+     def _info(self):
+         return evaluate.MetricInfo(
+             module_type="metric",
+             description=_DESCRIPTION,
+             citation=_CITATION,
+             inputs_description=_KWARGS_DESCRIPTION,
+             features=datasets.Features(
+                 {
+                     "predictions": datasets.Value("float32"),
+                     "references": datasets.Value("float32"),
+                 }
+             ),
+             homepage="https://en.wikipedia.org/wiki/Concordance_correlation_coefficient",
+             reference_urls=[
+                 "https://www.jstor.org/stable/2532051",
+                 "https://en.wikipedia.org/wiki/Concordance_correlation_coefficient",
+             ],
+         )
+
+     def _compute(self, predictions, references):
+         """Returns the CCC score"""
+         sxy = np.cov(predictions, references, ddof=0)[0, 1]
+         sxx = np.var(predictions, ddof=0)
+         syy = np.var(references, ddof=0)
+         mu_x = np.mean(predictions)
+         mu_y = np.mean(references)
+         ccc = 2 * sxy / (sxx + syy + (mu_x - mu_y) ** 2)
+         return {"ccc": ccc}
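
The `_compute` method uses the population (ddof=0) covariance and variances. A minimal sanity check of that closed form, assuming only NumPy and a hypothetical helper name `ccc_reference`, is the algebraically equivalent Pearson-based expression 2·r·σ_x·σ_y / (σ_x² + σ_y² + (μ_x − μ_y)²):

```python
# Sketch only (not part of the commit): cross-check the closed form in _compute
# against the equivalent Pearson-based expression for Lin's CCC.
import numpy as np


def ccc_reference(predictions, references):
    """Hypothetical reference implementation via the Pearson correlation."""
    x = np.asarray(predictions, dtype=float)
    y = np.asarray(references, dtype=float)
    r = np.corrcoef(x, y)[0, 1]      # Pearson correlation coefficient
    sx, sy = np.std(x), np.std(y)    # population standard deviations (ddof=0)
    return 2 * r * sx * sy / (sx**2 + sy**2 + (x.mean() - y.mean()) ** 2)


print(ccc_reference([0.1, 0.2], [0.1, 0.3]))  # ~0.6667, matching the docstring example
```
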
requirements.txt ADDED
@@ -0,0 +1,2 @@
+ git+https://github.com/huggingface/evaluate@main
+ numpy
tests.py ADDED
@@ -0,0 +1,5 @@
+ test_cases = [
+     {"predictions": [0.1, 0.2], "references": [0.2, 0.1], "result": {"ccc": -1}},
+     {"predictions": [0.1, 0.2], "references": [0.1, 0.2], "result": {"ccc": 1}},
+     {"predictions": [0.1, 0.2], "references": [0.1, 0.3], "result": {"ccc": 0.6666666}},
+ ]
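
`tests.py` only declares the expected cases. A hypothetical way to drive them (assuming `pytest` is installed, the repository requirements are available, the metric can be loaded from the Hub, and the names `test_ccc`/`case` are illustrative) could look like:

```python
# Sketch only (not part of the commit): run the test_cases above with pytest.
import evaluate
import pytest

from tests import test_cases

ccc_metric = evaluate.load("agkphysics/ccc")  # requires access to the Hub


@pytest.mark.parametrize("case", test_cases)
def test_ccc(case):
    result = ccc_metric.compute(
        predictions=case["predictions"], references=case["references"]
    )
    # Expected values in tests.py are rounded, so compare with a small tolerance.
    assert result["ccc"] == pytest.approx(case["result"]["ccc"], abs=1e-6)
```
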