add four swiss_criticality configurations #6
opened by Stern5497
lextreme.py +116 -0

lextreme.py CHANGED
@@ -4039,6 +4039,118 @@ _TURKISH_CONSTITUTIONAL_COURT_DECISIONS_JUDGMENT = {
     "label_classes": ["Violation", "No violation"],
 }
 
+_SWISS_CRITICALITY_PREDICTION_BGE_FACTS = {
+    "task_type": TaskType.SLTC,
+    "hf_hub_name": "legal_criticality_prediction",
+    "config_name": "full",
+    "input_col": "facts",
+    "label_col": "bge_label",
+    "url": "https://huggingface.co/datasets/rcds/legal_criticality_prediction",
+    "description": """
+        Legal Criticality Prediction (LCP) is a multilingual, diachronic dataset of 130K Swiss Federal Supreme Court (FSCS) cases annotated with two criticality labels. The bge_label is a binary label (critical, non-critical), while the citation_label has 5 classes (critical-1, critical-2, critical-3, critical-4, non-critical). The critical classes of the citation_label are distinct subsets of the critical class of the bge_label. This dataset creates a challenging text classification task. We also provide additional metadata such as the publication year, the law area and the canton of origin per case, to promote robustness and fairness studies in this critical area of legal NLP.
+    """,
+    "citation": """
+        @InProceedings{niklaus-etal-2023,
+            author = {Stern, Ronja
+                      and Niklaus, Joel
+                      and Stürmer, Matthias},
+            title = {Title: Subtitle},
+            booktitle = {booktitle},
+            year = {2023},
+            location = {Bern, Switzerland},
+        }
+    """,
+    "label_classes": ["critical", "non-critical"],
+}
+
+_SWISS_CRITICALITY_PREDICTION_BGE_CONSIDERATIONS = {
+    "task_type": TaskType.SLTC,
+    "hf_hub_name": "legal_criticality_prediction",
+    "config_name": "full",
+    "input_col": "considerations",
+    "label_col": "bge_label",
+    "url": "https://huggingface.co/datasets/rcds/legal_criticality_prediction",
+    "description": """
+        Legal Criticality Prediction (LCP) is a multilingual, diachronic dataset of 130K Swiss Federal Supreme Court (FSCS) cases annotated with two criticality labels. The bge_label is a binary label (critical, non-critical), while the citation_label has 5 classes (critical-1, critical-2, critical-3, critical-4, non-critical). The critical classes of the citation_label are distinct subsets of the critical class of the bge_label. This dataset creates a challenging text classification task. We also provide additional metadata such as the publication year, the law area and the canton of origin per case, to promote robustness and fairness studies in this critical area of legal NLP.
+    """,
+    "citation": """
+        @InProceedings{niklaus-etal-2023,
+            author = {Stern, Ronja
+                      and Niklaus, Joel
+                      and Stürmer, Matthias},
+            title = {Title: Subtitle},
+            booktitle = {booktitle},
+            year = {2023},
+            location = {Bern, Switzerland},
+        }
+    """,
+    "label_classes": ["critical", "non-critical"],
+}
+
+_SWISS_CRITICALITY_PREDICTION_CITATION_FACTS = {
+    "task_type": TaskType.SLTC,
+    "hf_hub_name": "legal_criticality_prediction",
+    "config_name": "full",
+    "input_col": "facts",
+    "label_col": "citation_label",
+    "url": "https://huggingface.co/datasets/rcds/legal_criticality_prediction",
+    "description": """
+        Legal Criticality Prediction (LCP) is a multilingual, diachronic dataset of 130K Swiss Federal Supreme Court (FSCS) cases annotated with two criticality labels. The bge_label is a binary label (critical, non-critical), while the citation_label has 5 classes (critical-1, critical-2, critical-3, critical-4, non-critical). The critical classes of the citation_label are distinct subsets of the critical class of the bge_label. This dataset creates a challenging text classification task. We also provide additional metadata such as the publication year, the law area and the canton of origin per case, to promote robustness and fairness studies in this critical area of legal NLP.
+    """,
+    "citation": """
+        @InProceedings{niklaus-etal-2023,
+            author = {Stern, Ronja
+                      and Niklaus, Joel
+                      and Stürmer, Matthias},
+            title = {Title: Subtitle},
+            booktitle = {booktitle},
+            year = {2023},
+            location = {Bern, Switzerland},
+        }
+    """,
+    "label_classes": ["critical-1", "critical-2", "critical-3", "critical-4", "non-critical"],
+}
+
+_SWISS_CRITICALITY_PREDICTION_CITATION_CONSIDERATIONS = {
+    "task_type": TaskType.SLTC,
+    "hf_hub_name": "legal_criticality_prediction",
+    "config_name": "full",
+    "input_col": "considerations",
+    "label_col": "citation_label",
+    "url": "https://huggingface.co/datasets/rcds/legal_criticality_prediction",
+    "description": """
+        Legal Criticality Prediction (LCP) is a multilingual, diachronic dataset of 130K Swiss Federal Supreme Court (FSCS) cases annotated with two criticality labels. The bge_label is a binary label (critical, non-critical), while the citation_label has 5 classes (critical-1, critical-2, critical-3, critical-4, non-critical). The critical classes of the citation_label are distinct subsets of the critical class of the bge_label. This dataset creates a challenging text classification task. We also provide additional metadata such as the publication year, the law area and the canton of origin per case, to promote robustness and fairness studies in this critical area of legal NLP.
+    """,
+    "citation": """
+        @InProceedings{niklaus-etal-2023,
+            author = {Stern, Ronja
+                      and Niklaus, Joel
+                      and Stürmer, Matthias},
+            title = {Title: Subtitle},
+            booktitle = {booktitle},
+            year = {2023},
+            location = {Bern, Switzerland},
+        }
+    """,
+    "label_classes": ["critical-1", "critical-2", "critical-3", "critical-4", "non-critical"],
+}
+
 
 class LEXTREME(datasets.GeneratorBasedBuilder):
     """LEXTREME: A Multilingual Legal Benchmark for Natural Language Understanding. Version 1.0"""
@@ -4053,6 +4165,10 @@ class LEXTREME(datasets.GeneratorBasedBuilder):
         LextremeConfig(name="greek_legal_code_subject", **_GREEK_LEGAL_CODE_SUBJECT),
         LextremeConfig(name="online_terms_of_service_unfairness_levels", **_ONLINE_TERMS_OF_SERVICE_UNFAIRNESS_LEVELS),
         LextremeConfig(name="turkish_constitutional_court_decisions_judgment", **_TURKISH_CONSTITUTIONAL_COURT_DECISIONS_JUDGMENT),
+        LextremeConfig(name="swiss_criticality_prediction_bge_facts", **_SWISS_CRITICALITY_PREDICTION_BGE_FACTS),
+        LextremeConfig(name="swiss_criticality_prediction_bge_considerations", **_SWISS_CRITICALITY_PREDICTION_BGE_CONSIDERATIONS),
+        LextremeConfig(name="swiss_criticality_prediction_citation_facts", **_SWISS_CRITICALITY_PREDICTION_CITATION_FACTS),
+        LextremeConfig(name="swiss_criticality_prediction_citation_considerations", **_SWISS_CRITICALITY_PREDICTION_CITATION_CONSIDERATIONS),
         # MLTC tasks
         LextremeConfig(name="online_terms_of_service_clause_topics", **_ONLINE_TERMS_OF_SERVICE_CLAUSE_TOPICS),
         LextremeConfig(name="covid19_emergency_event", **_COVID19_EMERGENCY_EVENT),