code
stringlengths
501
5.19M
package
stringlengths
2
81
path
stringlengths
9
304
filename
stringlengths
4
145
"""Auto-generated Django migration 0090: field + unique-together alterations."""

import django.contrib.postgres.fields.jsonb
from django.db import migrations, models


class Migration(migrations.Migration):
    """Alter JSON/date/numeric fields and tighten unique-together constraints."""

    dependencies = [
        ('IoT_DataMgmt', '0055_auto_20190224_0155'),
        ('IoT_MaintOps', '0089_auto_20190223_2312'),
    ]

    operations = [
        migrations.AlterField(
            model_name='blueprint',
            name='benchmark_metrics',
            field=django.contrib.postgres.fields.jsonb.JSONField(
                blank=True, null=True),
        ),

        migrations.AlterField(
            model_name='equipmentinstancedailyriskscore',
            name='date',
            field=models.DateField(db_index=True),
        ),
        migrations.AlterField(
            model_name='equipmentinstancedailyriskscore',
            name='risk_score_value',
            field=models.FloatField(),
        ),

        migrations.AlterField(
            model_name='equipmentinstanceproblemdiagnosis',
            name='from_date',
            field=models.DateField(db_index=True),
        ),
        migrations.AlterField(
            model_name='equipmentinstanceproblemdiagnosis',
            name='to_date',
            field=models.DateField(db_index=True),
        ),

        migrations.AlterField(
            model_name='equipmentproblemtype',
            name='name',
            field=models.CharField(db_index=True, max_length=255, unique=True,
                                   verbose_name='Equipment Problem Type'),
        ),

        migrations.AlterField(
            model_name='equipmentuniquetypegroupdatafieldblueprintbenchmarkmetricprofile',  # noqa: E501
            name='n',
            field=models.BigIntegerField(),
        ),
        migrations.AlterField(
            model_name='equipmentuniquetypegroupdatafieldblueprintbenchmarkmetricprofile',  # noqa: E501
            name='trained_to_date',
            field=models.DateField(db_index=True),
        ),

        migrations.AlterField(
            model_name='equipmentuniquetypegroupdatafieldprofile',
            name='distinct_values',
            field=django.contrib.postgres.fields.jsonb.JSONField(
                blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='equipmentuniquetypegroupdatafieldprofile',
            name='n_distinct_values',
            field=models.IntegerField(),
        ),
        migrations.AlterField(
            model_name='equipmentuniquetypegroupdatafieldprofile',
            name='valid_proportion',
            field=models.FloatField(),
        ),

        migrations.AlterField(
            model_name='equipmentuniquetypegroupserviceconfig',
            name='configs',
            field=django.contrib.postgres.fields.jsonb.JSONField(
                blank=True, null=True),
        ),

        migrations.AlterField(
            model_name='globalconfig',
            name='value',
            field=django.contrib.postgres.fields.jsonb.JSONField(
                blank=True, null=True),
        ),

        # one profile row per (group, field, trained-to-date) triple
        migrations.AlterUniqueTogether(
            name='equipmentuniquetypegroupdatafieldblueprintbenchmarkmetricprofile',  # noqa: E501
            unique_together={('equipment_unique_type_group',
                              'equipment_data_field',
                              'trained_to_date')},
        ),
        migrations.AlterUniqueTogether(
            name='equipmentuniquetypegroupdatafieldpairwisecorrelation',
            unique_together={('equipment_unique_type_group',
                              'equipment_data_field',
                              'equipment_data_field_2')},
        ),
        migrations.AlterUniqueTogether(
            name='equipmentuniquetypegroupdatafieldprofile',
            unique_together={('equipment_unique_type_group',
                              'equipment_data_field',
                              'to_date')},
        ),
    ]
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/maint_ops/migrations/0090_auto_20190224_0155.py
0090_auto_20190224_0155.py
"""Auto-generated Django migration 0072: index/default alterations."""

from django.db import migrations, models


class Migration(migrations.Migration):
    """Add db indexes and explicit defaults across MaintOps models."""

    dependencies = [
        ('IoT_MaintOps', '0071_auto_20181115_0357'),
    ]

    operations = [
        migrations.AlterField(
            model_name='alert',
            name='from_date',
            field=models.DateField(db_index=True, default=None),
        ),
        migrations.AlterField(
            model_name='alert',
            name='has_associated_equipment_problem_diagnoses',
            field=models.BooleanField(db_index=True, default=False),
        ),
        migrations.AlterField(
            model_name='alert',
            name='ongoing',
            field=models.BooleanField(db_index=True, default=False),
        ),
        migrations.AlterField(
            model_name='alert',
            name='risk_score_name',
            field=models.CharField(db_index=True, max_length=255),
        ),
        migrations.AlterField(
            model_name='alert',
            name='threshold',
            field=models.FloatField(db_index=True, default=0),
        ),
        migrations.AlterField(
            model_name='alert',
            name='to_date',
            field=models.DateField(db_index=True, default=None),
        ),

        migrations.AlterField(
            model_name='blueprint',
            name='trained_to_date',
            field=models.DateField(db_index=True, default=None),
        ),

        migrations.AlterField(
            model_name='equipmentinstancedailyriskscore',
            name='date',
            field=models.DateField(db_index=True, default=None),
        ),
        migrations.AlterField(
            model_name='equipmentinstancedailyriskscore',
            name='risk_score_name',
            field=models.CharField(db_index=True, max_length=255),
        ),

        migrations.AlterField(
            model_name='equipmentinstanceproblemdiagnosis',
            name='dismissed',
            field=models.BooleanField(db_index=True, default=False),
        ),
        migrations.AlterField(
            model_name='equipmentinstanceproblemdiagnosis',
            name='from_date',
            field=models.DateField(db_index=True, default=None),
        ),
        migrations.AlterField(
            model_name='equipmentinstanceproblemdiagnosis',
            name='has_associated_alerts',
            field=models.BooleanField(db_index=True, default=False),
        ),
        migrations.AlterField(
            model_name='equipmentinstanceproblemdiagnosis',
            name='has_equipment_problems',
            field=models.BooleanField(db_index=True, default=False),
        ),
        migrations.AlterField(
            model_name='equipmentinstanceproblemdiagnosis',
            name='to_date',
            field=models.DateField(db_index=True, default=None),
        ),

        migrations.AlterField(
            model_name='equipmentproblemtype',
            name='name',
            field=models.CharField(db_index=True, max_length=255, unique=True,
                                   verbose_name='Equipment Problem Type Name'),
        ),

        migrations.AlterField(
            model_name='equipmentuniquetypegroupdatafieldblueprintbenchmarkmetricprofile',  # noqa: E501
            name='trained_to_date',
            field=models.DateField(db_index=True, default=None),
        ),

        migrations.AlterField(
            model_name='equipmentuniquetypegroupdatafieldprofile',
            name='to_date',
            field=models.DateField(blank=True, db_index=True, null=True),
        ),

        migrations.AlterField(
            model_name='equipmentuniquetypegroupmonitoreddatafieldconfig',
            name='active',
            field=models.BooleanField(db_index=True, default=True),
        ),
        migrations.AlterField(
            model_name='equipmentuniquetypegroupserviceconfig',
            name='active',
            field=models.BooleanField(db_index=True, default=True),
        ),
    ]
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/maint_ops/migrations/0072_auto_20181115_0423.py
0072_auto_20181115_0423.py
"""Django migration 0118: create EquipmentInstanceDailyPredictedFault."""

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """EquipmentInstanceDailyPredictedFault."""

    dependencies = [
        ('IoT_DataMgmt', '0092_verbose_names'),
        ('IoT_MaintOps', '0117_verbose_names'),
    ]

    operations = [
        migrations.CreateModel(
            name='EquipmentInstanceDailyPredictedFault',
            fields=[
                ('id',
                 models.BigAutoField(primary_key=True, serialize=False)),

                ('equipment_unique_type_group',
                 models.ForeignKey(
                     help_text='Equipment Unique Type Group',
                     on_delete=django.db.models.deletion.PROTECT,
                     related_name='equipment_instance_daily_predicted_faults',
                     related_query_name=('equipment_instance_'
                                         'daily_predicted_fault'),
                     to='IoT_DataMgmt.equipmentuniquetypegroup',
                     verbose_name='Equipment Unique Type Group')),

                ('equipment_instance',
                 models.ForeignKey(
                     help_text='Equipment Instance',
                     on_delete=django.db.models.deletion.PROTECT,
                     related_name='equipment_instance_daily_predicted_faults',
                     related_query_name=('equipment_instance_'
                                         'daily_predicted_fault'),
                     to='IoT_DataMgmt.equipmentinstance',
                     verbose_name='Equipment Instance')),

                ('date',
                 models.DateField(
                     db_index=True,
                     help_text='Date',
                     verbose_name='Date')),

                ('fault_type',
                 models.ForeignKey(
                     help_text='Fault Type',
                     on_delete=django.db.models.deletion.PROTECT,
                     related_name='equipment_instance_daily_predicted_faults',
                     related_query_name=('equipment_instance_'
                                         'daily_predicted_fault'),
                     to='IoT_MaintOps.equipmentproblemtype',
                     verbose_name='Fault Type')),

                ('fault_predictor_name',
                 models.CharField(
                     db_index=True,
                     help_text='Fault Predictor Name',
                     max_length=255,
                     verbose_name='Fault Predictor Name')),

                ('predicted_fault_probability',
                 models.FloatField(
                     help_text='Predicted Fault Probability',
                     verbose_name='Predicted Fault Probability')),
            ],
            options={
                'verbose_name': 'Equipment Instance Daily Predicted Fault',
                'verbose_name_plural':
                    'Equipment Instance Daily Predicted Faults',
            },
        ),

        # enforce one prediction per (group, instance, date, fault, predictor)
        migrations.AddConstraint(
            model_name='equipmentinstancedailypredictedfault',
            constraint=models.UniqueConstraint(
                fields=('equipment_unique_type_group',
                        'equipment_instance',
                        'date',
                        'fault_type',
                        'fault_predictor_name'),
                name='EquipmentInstanceDailyPredictedFault_unique_together')),
    ]
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/maint_ops/migrations/0118_EquipmentInstanceDailyPredictedFault.py
0118_EquipmentInstanceDailyPredictedFault.py
"""REST API filter sets for the IoT data-management models."""

from django.db.models.fields.json import JSONField

from rest_framework_filters import CharFilter, FilterSet, RelatedFilter

from aito.iot_mgmt.data.models import (
    LogicalDataType,
    NumericMeasurementUnit,
    EquipmentDataFieldType,
    EquipmentGeneralType,
    EquipmentDataField,
    EquipmentUniqueTypeGroup,
    EquipmentUniqueType,
    EquipmentFacility,
    EquipmentInstance,
    EquipmentSystem,
)


# shared lookup lists, hoisted so each FilterSet's Meta stays declarative
_CHAR_LOOKUPS = [
    'exact', 'iexact',
    'in',
    'contains', 'icontains',
    'startswith', 'istartswith',
    'endswith', 'iendswith',
    'regex', 'iregex',
]

_NUM_LOOKUPS = [
    'exact',
    'gt', 'gte', 'lt', 'lte',
    'in',
    'contains',
    'startswith', 'endswith',
    'range',
]

_NULLABLE_NUM_LOOKUPS = _NUM_LOOKUPS + ['isnull']

_DATE_LOOKUPS = [
    'exact',
    'gt', 'gte', 'lt', 'lte',
    'in',
    'contains',
    'startswith', 'endswith',
    'range',
    'isnull',
    'year', 'year__gt', 'year__gte', 'year__lt', 'year__lte',
    'year__in', 'year__range',
    'month', 'month__gt', 'month__gte', 'month__lt', 'month__lte',
    'month__in', 'month__range',
]

# JSON columns cannot be filtered natively here; fall back to CharFilter
_JSON_FILTER_OVERRIDES = {
    JSONField: dict(
        filter_class=CharFilter
        # 'extra': lambda f: {'lookup_expr': 'icontains'}
    )
}


class DataTypeFilter(FilterSet):
    """DataTypeFilter."""

    class Meta:
        """DataTypeFilter metadata."""

        model = LogicalDataType

        fields = dict(name=['exact'])


class NumericMeasurementUnitFilter(FilterSet):
    """NumericMeasurementUnitFilter."""

    class Meta:
        """NumericMeasurementUnitFilter metadata."""

        model = NumericMeasurementUnit

        fields = dict(name=_CHAR_LOOKUPS)


class EquipmentDataFieldTypeFilter(FilterSet):
    """EquipmentDataFieldTypeFilter."""

    class Meta:
        """EquipmentDataFieldTypeFilter metadata."""

        model = EquipmentDataFieldType

        fields = dict(name=['exact'])


class EquipmentGeneralTypeFilter(FilterSet):
    """EquipmentGeneralTypeFilter."""

    class Meta:
        """EquipmentGeneralTypeFilter metadata."""

        model = EquipmentGeneralType

        fields = dict(name=_CHAR_LOOKUPS)


class EquipmentDataFieldFilter(FilterSet):
    """EquipmentDataFieldFilter."""

    equipment_general_type = RelatedFilter(
        queryset=EquipmentGeneralType.objects.all(),
        filterset=EquipmentGeneralTypeFilter)

    equipment_data_field_type = RelatedFilter(
        queryset=EquipmentDataFieldType.objects.all(),
        filterset=EquipmentDataFieldTypeFilter)

    data_type = RelatedFilter(
        queryset=LogicalDataType.objects.all(),
        filterset=DataTypeFilter)

    numeric_measurement_unit = RelatedFilter(
        queryset=NumericMeasurementUnit.objects.all(),
        filterset=NumericMeasurementUnitFilter)

    # forward reference: EquipmentUniqueTypeFilter is defined further below
    equipment_unique_types = RelatedFilter(
        queryset=EquipmentUniqueType.objects.all(),
        filterset='EquipmentUniqueTypeFilter')

    class Meta:
        """EquipmentDataFieldFilter metadata."""

        model = EquipmentDataField

        fields = dict(
            name=_CHAR_LOOKUPS,
            lower_numeric_null=_NUM_LOOKUPS,
            upper_numeric_null=_NUM_LOOKUPS,
            min_val=_NULLABLE_NUM_LOOKUPS,
            max_val=_NULLABLE_NUM_LOOKUPS,
            description='__all__')

        filter_overrides = _JSON_FILTER_OVERRIDES


class EquipmentUniqueTypeGroupFilter(FilterSet):
    """EquipmentUniqueTypeGroupFilter."""

    equipment_general_type = RelatedFilter(
        queryset=EquipmentGeneralType.objects.all(),
        filterset=EquipmentGeneralTypeFilter)

    # forward reference: EquipmentUniqueTypeFilter is defined further below
    equipment_unique_types = RelatedFilter(
        queryset=EquipmentUniqueType.objects.all(),
        filterset='EquipmentUniqueTypeFilter')

    equipment_data_fields = RelatedFilter(
        queryset=EquipmentDataField.objects.all(),
        filterset=EquipmentDataFieldFilter)

    class Meta:
        """EquipmentUniqueTypeGroupFilter metadata."""

        model = EquipmentUniqueTypeGroup

        fields = dict(
            name=_CHAR_LOOKUPS,
            description='__all__')

        filter_overrides = _JSON_FILTER_OVERRIDES


class EquipmentUniqueTypeFilter(FilterSet):
    """EquipmentUniqueTypeFilter."""

    equipment_general_type = RelatedFilter(
        queryset=EquipmentGeneralType.objects.all(),
        filterset=EquipmentGeneralTypeFilter)

    equipment_data_fields = RelatedFilter(
        queryset=EquipmentDataField.objects.all(),
        filterset=EquipmentDataFieldFilter)

    equipment_unique_type_groups = RelatedFilter(
        queryset=EquipmentUniqueTypeGroup.objects.all(),
        filterset=EquipmentUniqueTypeGroupFilter)

    class Meta:
        """EquipmentUniqueTypeFilter metadata."""

        model = EquipmentUniqueType

        fields = dict(
            name=_CHAR_LOOKUPS,
            description='__all__')

        filter_overrides = _JSON_FILTER_OVERRIDES


class EquipmentFacilityFilter(FilterSet):
    """EquipmentFacilityFilter."""

    class Meta:
        """EquipmentFacilityFilter metadata."""

        model = EquipmentFacility

        fields = dict(
            name=_CHAR_LOOKUPS,
            info='__all__')

        filter_overrides = _JSON_FILTER_OVERRIDES


class EquipmentInstanceFilter(FilterSet):
    """EquipmentInstanceFilter."""

    equipment_general_type = RelatedFilter(
        queryset=EquipmentGeneralType.objects.all(),
        filterset=EquipmentGeneralTypeFilter)

    equipment_unique_type = RelatedFilter(
        queryset=EquipmentUniqueType.objects.all(),
        filterset=EquipmentUniqueTypeFilter)

    equipment_facility = RelatedFilter(
        queryset=EquipmentFacility.objects.all(),
        filterset=EquipmentFacilityFilter)

    class Meta:
        """EquipmentInstanceFilter metadata."""

        model = EquipmentInstance

        fields = dict(
            name=_CHAR_LOOKUPS,
            info='__all__')

        filter_overrides = _JSON_FILTER_OVERRIDES


class EquipmentSystemFilter(FilterSet):
    """EquipmentSystemFilter."""

    equipment_facility = RelatedFilter(
        queryset=EquipmentFacility.objects.all(),
        filterset=EquipmentFacilityFilter)

    equipment_instances = RelatedFilter(
        queryset=EquipmentInstance.objects.all(),
        filterset=EquipmentInstanceFilter)

    class Meta:
        """EquipmentSystemFilter metadata."""

        model = EquipmentSystem

        fields = dict(
            name=_CHAR_LOOKUPS,
            date=_DATE_LOOKUPS)
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/data/filters.py
filters.py
"""Django admin registrations for the IoT data-management models.

Each ModelAdmin wraps its change-list / change-form views with silk
profiling and tunes ``get_queryset`` with ``select_related`` /
``prefetch_related`` to keep the admin pages from issuing N+1 queries.
"""

from django.contrib.admin.decorators import register
from django.contrib.admin.options import ModelAdmin, TabularInline
from django.db.models.query import Prefetch
from django.forms.models import BaseInlineFormSet

from silk.profiling.profiler import silk_profile

from aito.iot_mgmt.data.models import (
    NumericMeasurementUnit,
    EquipmentGeneralType,
    EquipmentDataField,
    EquipmentUniqueTypeGroup,
    EquipmentUniqueType,
    EquipmentFacility,
    EquipmentInstance,
    EquipmentSystem,
    EquipmentUniqueTypeGroupDataFieldProfile,
)
from aito.iot_mgmt.data.querysets import (
    EQUIPMENT_DATA_FIELD_ID_ONLY_UNORDERED_QUERYSET,
    EQUIPMENT_DATA_FIELD_STR_QUERYSET,
    EQUIPMENT_UNIQUE_TYPE_GROUP_ID_ONLY_UNORDERED_QUERYSET,
    EQUIPMENT_UNIQUE_TYPE_GROUP_NAME_ONLY_QUERYSET,
    EQUIPMENT_UNIQUE_TYPE_ID_ONLY_UNORDERED_QUERYSET,
    EQUIPMENT_UNIQUE_TYPE_NAME_ONLY_QUERYSET,
    EQUIPMENT_INSTANCE_ID_ONLY_UNORDERED_QUERYSET,
    EQUIPMENT_INSTANCE_RELATED_TO_EQUIPMENT_UNIQUE_TYPE_ID_ONLY_UNORDERED_QUERYSET,  # noqa: E501
    EQUIPMENT_INSTANCE_RELATED_TO_EQUIPMENT_FACILITY_ID_ONLY_UNORDERED_QUERYSET,  # noqa: E501
)


# pylint: disable=invalid-name,line-too-long


@register(NumericMeasurementUnit)
class NumericMeasurementUnitAdmin(ModelAdmin):
    """NumericMeasurementUnit admin."""

    list_display = ('name',)

    show_full_result_count = False

    @silk_profile(name='Admin: Numeric Measurement Units')
    def changelist_view(self, *args, **kwargs):
        """Change-list view."""
        return super().changelist_view(*args, **kwargs)

    @silk_profile(name='Admin: Numeric Measurement Unit')
    def changeform_view(self, *args, **kwargs):
        """Change-form view."""
        return super().changeform_view(*args, **kwargs)


@register(EquipmentGeneralType)
class EquipmentGeneralTypeAdmin(ModelAdmin):
    """EquipmentGeneralType admin."""

    list_display = ('name',)

    show_full_result_count = False

    @silk_profile(name='Admin: Equipment General Types')
    def changelist_view(self, *args, **kwargs):
        """Change-list view."""
        return super().changelist_view(*args, **kwargs)

    @silk_profile(name='Admin: Equipment General Type')
    def changeform_view(self, *args, **kwargs):
        """Change-form view."""
        return super().changeform_view(*args, **kwargs)


@register(EquipmentDataField)
class EquipmentDataFieldAdmin(ModelAdmin):
    """EquipmentDataField admin."""

    list_display = (
        'equipment_general_type',
        'name',
        'equipment_data_field_type',
        'logical_data_type',
        'numeric_measurement_unit',
        'lower_numeric_null',
        'upper_numeric_null',
        'min_val',
        'max_val',
        'n_equipment_unique_types',
    )

    list_filter = (
        'equipment_general_type__name',
        'equipment_data_field_type__name',
        'logical_data_type__name',
        'numeric_measurement_unit__name',
        'lower_numeric_null',
        'upper_numeric_null',
        'name',
        'min_val',
        'max_val',
    )

    search_fields = (
        'equipment_general_type__name',
        'equipment_data_field_type__name',
        'name',
        'logical_data_type__name',
        'numeric_measurement_unit__name',
    )

    show_full_result_count = False

    def n_equipment_unique_types(self, obj):   # pylint: disable=no-self-use
        """Extra displayed field."""
        return obj.equipment_unique_types.count()

    def get_queryset(self, request):
        """Get queryset."""
        return super().get_queryset(request=request) \
            .select_related(
                'equipment_general_type',
                'equipment_data_field_type',
                'logical_data_type',
                'numeric_measurement_unit') \
            .prefetch_related(
                Prefetch(
                    lookup='equipment_unique_types',
                    queryset=EQUIPMENT_UNIQUE_TYPE_ID_ONLY_UNORDERED_QUERYSET))

    @silk_profile(name='Admin: Equipment Data Fields')
    def changelist_view(self, *args, **kwargs):
        """Change-list view."""
        return super().changelist_view(*args, **kwargs)

    @silk_profile(name='Admin: Equipment Data Field')
    def changeform_view(self, *args, **kwargs):
        """Change-form view."""
        return super().changeform_view(*args, **kwargs)


@register(EquipmentUniqueTypeGroup)
class EquipmentUniqueTypeGroupAdmin(ModelAdmin):
    """EquipmentUniqueTypeGroup admin."""

    list_display = (
        'equipment_general_type',
        'name',
        'equipment_unique_type_list',
        'n_equipment_data_fields',
        'n_equipment_instances',
    )

    list_filter = ('equipment_general_type__name',)

    search_fields = ('equipment_general_type__name', 'name')

    show_full_result_count = False

    readonly_fields = ('equipment_data_fields',)

    def equipment_unique_type_list(self, obj):   # pylint: disable=no-self-use
        """Extra displayed field."""
        n = obj.equipment_unique_types.count()
        if not n:
            return ''
        names = ', '.join(equipment_unique_type.name
                          for equipment_unique_type in
                          obj.equipment_unique_types.all())
        return f'{n}: {names}'

    def n_equipment_data_fields(self, obj):   # pylint: disable=no-self-use
        """Extra displayed field."""
        return obj.equipment_data_fields.count()

    def n_equipment_instances(self, obj):   # pylint: disable=no-self-use
        """Extra displayed field."""
        return obj.equipment_instances.count()

    def get_queryset(self, request):
        """Get queryset."""
        qs = super().get_queryset(request=request) \
            .select_related('equipment_general_type')

        # change form shows full data-field strings; change list only counts
        if request.resolver_match.url_name.endswith('_change'):
            return qs.prefetch_related(
                Prefetch(
                    lookup='equipment_unique_types',
                    queryset=EQUIPMENT_UNIQUE_TYPE_ID_ONLY_UNORDERED_QUERYSET),
                Prefetch(
                    lookup='equipment_data_fields',
                    queryset=EQUIPMENT_DATA_FIELD_STR_QUERYSET))

        return qs.prefetch_related(
            Prefetch(
                lookup='equipment_unique_types',
                queryset=EQUIPMENT_UNIQUE_TYPE_NAME_ONLY_QUERYSET),
            Prefetch(
                lookup='equipment_data_fields',
                queryset=EQUIPMENT_DATA_FIELD_ID_ONLY_UNORDERED_QUERYSET),
            Prefetch(
                lookup='equipment_instances',
                queryset=EQUIPMENT_INSTANCE_ID_ONLY_UNORDERED_QUERYSET))

    @silk_profile(name='Admin: Equipment Unique Type Groups')
    def changelist_view(self, *args, **kwargs):
        """Change-list view."""
        return super().changelist_view(*args, **kwargs)

    @silk_profile(name='Admin: Equipment Unique Type Group')
    def changeform_view(self, *args, **kwargs):
        """Change-form view."""
        return super().changeform_view(*args, **kwargs)


@register(EquipmentUniqueType)
class EquipmentUniqueTypeAdmin(ModelAdmin):
    """EquipmentUniqueType admin."""

    list_display = (
        'equipment_general_type',
        'name',
        'n_equipment_data_fields',
        'equipment_unique_type_group_list',
        'n_equipment_instances',
    )

    list_filter = ('equipment_general_type__name',)

    show_full_result_count = False

    search_fields = ('equipment_general_type__name', 'name')

    def n_equipment_data_fields(self, obj):   # pylint: disable=no-self-use
        """Extra displayed field."""
        return obj.equipment_data_fields.count()

    def n_equipment_instances(self, obj):   # pylint: disable=no-self-use
        """Extra displayed field."""
        return obj.equipment_instances.count()

    def equipment_unique_type_group_list(self, obj):   # pylint: disable=no-self-use
        """Extra displayed field."""
        n = obj.equipment_unique_type_groups.count()
        if not n:
            return ''
        names = ', '.join(equipment_unique_type_group.name
                          for equipment_unique_type_group in
                          obj.equipment_unique_type_groups.all())
        return f'{n}: {names}'

    def get_queryset(self, request):
        """Get queryset."""
        qs = super().get_queryset(request=request) \
            .select_related('equipment_general_type') \
            .prefetch_related(
                Prefetch(
                    lookup='equipment_data_fields',
                    queryset=EQUIPMENT_DATA_FIELD_ID_ONLY_UNORDERED_QUERYSET))

        if request.resolver_match.url_name.endswith('_change'):
            return qs.prefetch_related(
                Prefetch(
                    lookup='equipment_unique_type_groups',
                    queryset=EQUIPMENT_UNIQUE_TYPE_GROUP_ID_ONLY_UNORDERED_QUERYSET))  # noqa: E501

        return qs.prefetch_related(
            Prefetch(
                lookup='equipment_instances',
                queryset=EQUIPMENT_INSTANCE_RELATED_TO_EQUIPMENT_UNIQUE_TYPE_ID_ONLY_UNORDERED_QUERYSET),  # noqa: E501
            Prefetch(
                lookup='equipment_unique_type_groups',
                queryset=EQUIPMENT_UNIQUE_TYPE_GROUP_NAME_ONLY_QUERYSET))

    @silk_profile(name='Admin: Equipment Unique Types')
    def changelist_view(self, *args, **kwargs):
        """Change-list view."""
        return super().changelist_view(*args, **kwargs)

    @silk_profile(name='Admin: Equipment Unique Type')
    def changeform_view(self, *args, **kwargs):
        """Change-form view."""
        return super().changeform_view(*args, **kwargs)


class EquipmentInstanceInLineFormSet(BaseInlineFormSet):
    """EquipmentInstanceInLineFormSet."""

    model = EquipmentInstance

    # def get_queryset(self):
    #     return super().get_queryset() \
    #         .select_related(
    #             'equipment_general_type',
    #             'equipment_unique_type',
    #             'equipment_unique_type__equipment_general_type')


class EquipmentInstanceTabularInline(TabularInline):
    """EquipmentInstanceTabularInline."""

    model = EquipmentInstance

    fields = ('equipment_general_type', 'equipment_unique_type', 'name')

    formset = EquipmentInstanceInLineFormSet

    extra = 0

    def get_queryset(self, request):
        """Get queryset."""
        return super().get_queryset(request=request) \
            .select_related(
                'equipment_general_type',
                'equipment_unique_type',
                'equipment_unique_type__equipment_general_type')


@register(EquipmentFacility)
class EquipmentFacilityAdmin(ModelAdmin):
    """EquipmentFacility admin."""

    list_display = (
        'name',
        'info',
        'n_equipment_instances',
    )

    search_fields = ('name', 'info')

    show_full_result_count = False

    # inlines = EquipmentInstanceTabularInline,

    def n_equipment_instances(self, obj):   # pylint: disable=no-self-use
        """Extra displayed field."""
        return obj.equipment_instances.count()

    def get_queryset(self, request):
        """Get queryset."""
        qs = super().get_queryset(request=request)

        if request.resolver_match.url_name.endswith('_change'):
            return qs

        return qs.prefetch_related(
            Prefetch(
                lookup='equipment_instances',
                queryset=EQUIPMENT_INSTANCE_RELATED_TO_EQUIPMENT_FACILITY_ID_ONLY_UNORDERED_QUERYSET))  # noqa: E501

    @silk_profile(name='Admin: Equipment Facilities')
    def changelist_view(self, *args, **kwargs):
        """Change-list view."""
        return super().changelist_view(*args, **kwargs)

    @silk_profile(name='Admin: Equipment Facility')
    def changeform_view(self, *args, **kwargs):
        """Change-form view."""
        return super().changeform_view(*args, **kwargs)


@register(EquipmentInstance)
class EquipmentInstanceAdmin(ModelAdmin):
    """EquipmentInstance admin."""

    list_display = (
        'equipment_general_type',
        'equipment_unique_type',
        'equipment_facility',
        'name',
        'info',
    )

    list_filter = (
        'equipment_general_type__name',
        'equipment_unique_type__name',
        'equipment_facility__name',
    )

    search_fields = (
        'equipment_general_type__name',
        'equipment_unique_type__name',
        'equipment_facility__name',
        'name',
        'info',
    )

    show_full_result_count = False

    def get_queryset(self, request):
        """Get queryset."""
        qs = super().get_queryset(request=request)

        if request.resolver_match.url_name.endswith('_change'):
            return qs.select_related(
                    'equipment_general_type',
                    'equipment_unique_type') \
                .defer('equipment_unique_type__equipment_general_type') \
                .prefetch_related(
                    Prefetch(
                        lookup='equipment_unique_type_groups',
                        queryset=EQUIPMENT_UNIQUE_TYPE_GROUP_ID_ONLY_UNORDERED_QUERYSET))  # noqa: E501

        return qs.select_related(
                'equipment_general_type',
                'equipment_unique_type',
                'equipment_unique_type__equipment_general_type',
                'equipment_facility') \
            .defer('equipment_facility__info')

    @silk_profile(name='Admin: Equipment Instances')
    def changelist_view(self, *args, **kwargs):
        """Change-list view."""
        return super().changelist_view(*args, **kwargs)

    @silk_profile(name='Admin: Equipment Instance')
    def changeform_view(self, *args, **kwargs):
        """Change-form view."""
        return super().changeform_view(*args, **kwargs)


@register(EquipmentSystem)
class EquipmentSystemAdmin(ModelAdmin):
    """EquipmentSystem admin."""

    list_display = (
        'equipment_facility',
        'name',
        'date',
        'n_equipment_instances',
    )

    list_filter = ('equipment_facility__name', 'date')

    search_fields = ('equipment_facility__name', 'name')

    show_full_result_count = False

    def n_equipment_instances(self, obj):   # pylint: disable=no-self-use
        """Extra displayed field."""
        return obj.equipment_instances.count()

    def get_queryset(self, request):
        """Get queryset."""
        return super().get_queryset(request=request) \
            .select_related('equipment_facility') \
            .defer('equipment_facility__info') \
            .prefetch_related(
                Prefetch(
                    lookup='equipment_instances',
                    queryset=EQUIPMENT_INSTANCE_ID_ONLY_UNORDERED_QUERYSET))

    @silk_profile(name='Admin: Equipment Systems')
    def changelist_view(self, *args, **kwargs):
        """Change-list view."""
        return super().changelist_view(*args, **kwargs)

    @silk_profile(name='Admin: Equipment System')
    def changeform_view(self, *args, **kwargs):
        """Change-form view."""
        return super().changeform_view(*args, **kwargs)


@register(EquipmentUniqueTypeGroupDataFieldProfile)
class EquipmentUniqueTypeGroupDataFieldProfileAdmin(ModelAdmin):
    """EquipmentUniqueTypeGroupDataFieldProfile admin."""

    list_display = (
        'equipment_unique_type_group',
        'equipment_data_field',
        'to_date',
        'valid_proportion',
        'n_distinct_values',
        'distinct_values',
        'sample_min',
        'outlier_rst_min',
        'sample_quartile',
        'sample_median',
        'sample_3rd_quartile',
        'outlier_rst_max',
        'sample_max',
    )

    list_filter = (
        'equipment_unique_type_group__equipment_general_type__name',
        'equipment_unique_type_group__name',
        'to_date',
        'equipment_data_field__name',
    )

    search_fields = (
        'equipment_unique_type_group__equipment_general_type__name',
        'equipment_unique_type_group__name',
        'equipment_data_field__name',
    )

    show_full_result_count = False

    ordering = (
        'equipment_unique_type_group',
        '-to_date',
        '-n_distinct_values',
    )

    # profiles are machine-generated; keep them read-only in the admin
    readonly_fields = (
        'equipment_unique_type_group',
        'equipment_data_field',
        'to_date',
        'valid_proportion',
        'n_distinct_values',
        'distinct_values',
        'sample_min',
        'outlier_rst_min',
        'sample_quartile',
        'sample_median',
        'sample_3rd_quartile',
        'outlier_rst_max',
        'sample_max',
    )

    def get_queryset(self, request):
        """Get queryset."""
        return super().get_queryset(request=request) \
            .select_related(
                'equipment_unique_type_group',
                'equipment_unique_type_group__equipment_general_type',
                'equipment_data_field',
                'equipment_data_field__equipment_general_type',
                'equipment_data_field__equipment_data_field_type',
                'equipment_data_field__logical_data_type',
                'equipment_data_field__numeric_measurement_unit')

    @silk_profile(
        name='Admin: Equipment Unique Type Group Data Field Profiles')
    def changelist_view(self, *args, **kwargs):
        """Change-list view."""
        return super().changelist_view(*args, **kwargs)

    @silk_profile(
        name='Admin: Equipment Unique Type Group Data Field Profile')
    def changeform_view(self, *args, **kwargs):
        """Change-form view."""
        return super().changeform_view(*args, **kwargs)
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/data/admin.py
admin.py
from rest_framework.authentication import (BasicAuthentication,
                                           RemoteUserAuthentication,
                                           SessionAuthentication,
                                           TokenAuthentication)
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.permissions import (IsAuthenticated,
                                        IsAuthenticatedOrReadOnly)
from rest_framework.renderers import CoreJSONRenderer, JSONRenderer
from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet

from silk.profiling.profiler import silk_profile

from aito.iot_mgmt.data.filters import (
    DataTypeFilter,
    NumericMeasurementUnitFilter,
    EquipmentDataFieldTypeFilter,
    EquipmentGeneralTypeFilter,
    EquipmentDataFieldFilter,
    EquipmentUniqueTypeGroupFilter,
    EquipmentUniqueTypeFilter,
    EquipmentFacilityFilter,
    EquipmentInstanceFilter,
    EquipmentSystemFilter,
)
from aito.iot_mgmt.data.querysets import (
    DATA_TYPE_QUERYSET,
    NUMERIC_MEASUREMENT_UNIT_QUERYSET,
    EQUIPMENT_DATA_FIELD_TYPE_QUERYSET,
    EQUIPMENT_GENERAL_TYPE_QUERYSET,
    EQUIPMENT_DATA_FIELD_REST_API_QUERYSET,
    EQUIPMENT_UNIQUE_TYPE_GROUP_REST_API_QUERYSET,
    EQUIPMENT_UNIQUE_TYPE_REST_API_QUERYSET,
    EQUIPMENT_FACILITY_REST_API_QUERYSET,
    EQUIPMENT_INSTANCE_REST_API_QUERYSET,
    EQUIPMENT_SYSTEM_REST_API_QUERYSET,
)
from aito.iot_mgmt.data.serializers import (
    DataTypeSerializer,
    NumericMeasurementUnitSerializer,
    EquipmentDataFieldTypeSerializer,
    EquipmentGeneralTypeSerializer,
    EquipmentDataFieldSerializer,
    EquipmentUniqueTypeGroupSerializer,
    EquipmentUniqueTypeSerializer,
    EquipmentFacilitySerializer,
    EquipmentInstanceSerializer,
    EquipmentSystemSerializer,
)


class DataTypeViewSet(ReadOnlyModelViewSet):
    """DataTypeViewSet.

    list: `GET` a filterable, unpaginated list of 2 Data Types
    named "cat" and "num"

    retrieve: `GET` the Data Type specified by `name` "cat" or "num"
    """

    queryset = DATA_TYPE_QUERYSET

    serializer_class = DataTypeSerializer

    authentication_classes = (
        BasicAuthentication,
        RemoteUserAuthentication,
        SessionAuthentication,
        TokenAuthentication,
    )

    permission_classes = (IsAuthenticatedOrReadOnly,)

    filter_class = DataTypeFilter

    ordering_fields = ('name',)

    ordering = ('name',)

    pagination_class = None

    lookup_field = 'name'

    lookup_url_kwarg = 'data_type_name___cat_or_num'

    renderer_classes = CoreJSONRenderer, JSONRenderer

    @silk_profile(name='API: Data Types')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Data Type')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve 1 item."""
        return super().retrieve(request, *args, **kwargs)


class NumericMeasurementUnitViewSet(ModelViewSet):
    """NumericMeasurementUnitViewSet.

    list: `GET` a filterable, unpaginated list of Numeric Measurement Units

    retrieve: `GET` the Numeric Measurement Unit specified by `name`

    create: `POST` a new Numeric Measurement Unit by `name`

    update: `PUT` updated data for the Numeric Measurement Unit specified
    by `name`

    partial_update: `PATCH` the Numeric Measurement Unit specified by `name`

    destroy: `DELETE` the Numeric Measurement Unit specified by `name`
    """

    queryset = NUMERIC_MEASUREMENT_UNIT_QUERYSET

    serializer_class = NumericMeasurementUnitSerializer

    authentication_classes = (
        BasicAuthentication,
        RemoteUserAuthentication,
        SessionAuthentication,
        TokenAuthentication,
    )

    permission_classes = (IsAuthenticatedOrReadOnly,)

    filter_class = NumericMeasurementUnitFilter

    ordering_fields = ('name',)

    ordering = ('name',)

    pagination_class = None

    lookup_field = 'name'

    lookup_url_kwarg = 'numeric_measurement_unit_name'

    renderer_classes = CoreJSONRenderer, JSONRenderer

    @silk_profile(name='API: Numeric Measurement Units')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Numeric Measurement Unit')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve 1 item."""
        return super().retrieve(request, *args, **kwargs)


class EquipmentDataFieldTypeViewSet(ReadOnlyModelViewSet):
    """EquipmentDataFieldTypeViewSet.

    list: `GET` an unfiltered, unpaginated list of Equipment Data Field Types

    retrieve: `GET` the Equipment Data Field Type specified by `name`
    """

    queryset = EQUIPMENT_DATA_FIELD_TYPE_QUERYSET

    serializer_class = EquipmentDataFieldTypeSerializer

    authentication_classes = (
        BasicAuthentication,
        RemoteUserAuthentication,
        SessionAuthentication,
        TokenAuthentication,
    )

    permission_classes = (IsAuthenticatedOrReadOnly,)

    filter_class = EquipmentDataFieldTypeFilter

    ordering_fields = ('name',)

    ordering = ('name',)

    pagination_class = None

    lookup_field = 'name'

    lookup_url_kwarg = 'equipment_data_field_type_name'

    renderer_classes = CoreJSONRenderer, JSONRenderer

    @silk_profile(name='API: Equipment Data Field Types')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Equipment Data Field Type')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve 1 item."""
        return super().retrieve(request, *args, **kwargs)


class EquipmentGeneralTypeViewSet(ModelViewSet):
    """EquipmentGeneralTypeViewSet.

    list: `GET` a filterable, unpaginated list of Equipment General Types

    retrieve: `GET` the Equipment General Type specified by `name`

    create: `POST` a new Equipment General Type by `name`

    update: `PUT` updated data for the Equipment General Type specified
    by `name`

    partial_update: `PATCH` the Equipment General Type specified by `name`

    destroy: `DELETE` the Equipment General Type specified by `name`
    """

    queryset = EQUIPMENT_GENERAL_TYPE_QUERYSET

    serializer_class = EquipmentGeneralTypeSerializer

    authentication_classes = (
        BasicAuthentication,
        RemoteUserAuthentication,
        SessionAuthentication,
        TokenAuthentication,
    )

    permission_classes = (IsAuthenticated,)

    filter_class = EquipmentGeneralTypeFilter

    ordering_fields = ('name',)

    ordering = ('name',)

    pagination_class = None

    lookup_field = 'name'

    lookup_url_kwarg = 'equipment_general_type_name'

    renderer_classes = CoreJSONRenderer, JSONRenderer

    @silk_profile(name='API: Equipment General Types')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Equipment General Type')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve 1 item."""
        return super().retrieve(request, *args, **kwargs)


class EquipmentDataFieldViewSet(ModelViewSet):
    """EquipmentDataFieldViewSet.

    list: `GET` a filterable, unpaginated list of Equipment Data Fields

    retrieve: `GET` the Equipment Data Field specified by `id`

    create: `POST` a new Equipment Data Field

    update: `PUT` updated data for the Equipment Data Field specified by `id`

    partial_update: `PATCH` the Equipment Data Field specified by `id`

    destroy: `DELETE` the Equipment Data Field specified by `id`
    """

    queryset = EQUIPMENT_DATA_FIELD_REST_API_QUERYSET

    serializer_class = EquipmentDataFieldSerializer

    authentication_classes = (
        BasicAuthentication,
        RemoteUserAuthentication,
        SessionAuthentication,
        TokenAuthentication,
    )

    permission_classes = (IsAuthenticated,)

    filter_class = EquipmentDataFieldFilter

    ordering_fields = (
        'equipment_general_type',
        'name',
        'equipment_data_field_type',
        'logical_data_type',
        'numeric_measurement_unit',
    )

    ordering = 'equipment_general_type', 'name'

    pagination_class = None

    # no lookup_field override: items are addressed by default `id`

    renderer_classes = CoreJSONRenderer, JSONRenderer

    @silk_profile(name='API: Equipment Data Fields')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Equipment Data Field')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve 1 item."""
        return super().retrieve(request, *args, **kwargs)


class EquipmentUniqueTypeGroupViewSet(ModelViewSet):
    """EquipmentUniqueTypeGroupViewSet.

    list: `GET` a filterable, unpaginated list of Equipment Unique Type
    Groups

    retrieve: `GET` the Equipment Unique Type Group specified by `name`

    create: `POST` a new Equipment Unique Type Group

    update: `PUT` updated data for the Equipment Unique Type Group
    specified by `name`

    partial_update: `PATCH` the Equipment Unique Type Group specified
    by `name`

    destroy: `DELETE` the Equipment Unique Type Group specified by `name`
    """

    queryset = EQUIPMENT_UNIQUE_TYPE_GROUP_REST_API_QUERYSET

    serializer_class = EquipmentUniqueTypeGroupSerializer

    authentication_classes = (
        BasicAuthentication,
        RemoteUserAuthentication,
        SessionAuthentication,
        TokenAuthentication,
    )

    permission_classes = (IsAuthenticated,)

    filter_class = EquipmentUniqueTypeGroupFilter

    ordering_fields = 'equipment_general_type', 'name'

    ordering = 'equipment_general_type', 'name'

    pagination_class = None

    lookup_field = 'name'

    lookup_url_kwarg = 'equipment_unique_type_group_name'

    renderer_classes = CoreJSONRenderer, JSONRenderer

    @silk_profile(name='API: Equipment Unique Type Groups')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Equipment Unique Type Group')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve 1 item."""
        return super().retrieve(request, *args, **kwargs)


class EquipmentUniqueTypeViewSet(ModelViewSet):
    """EquipmentUniqueTypeViewSet.

    list: `GET` a filterable, unpaginated list of Equipment Unique Types

    retrieve: `GET` the Equipment Unique Type specified by `name`

    create: `POST` a new Equipment Unique Type

    update: `PUT` updated data for the Equipment Unique Type specified
    by `name`

    partial_update: `PATCH` the Equipment Unique Type specified by `name`

    destroy: `DELETE` the Equipment Unique Type specified by `name`
    """

    queryset = EQUIPMENT_UNIQUE_TYPE_REST_API_QUERYSET

    serializer_class = EquipmentUniqueTypeSerializer

    authentication_classes = (
        BasicAuthentication,
        RemoteUserAuthentication,
        SessionAuthentication,
        TokenAuthentication,
    )

    permission_classes = (IsAuthenticated,)

    filter_class = EquipmentUniqueTypeFilter

    ordering_fields = 'equipment_general_type', 'name'

    ordering = 'equipment_general_type', 'name'

    pagination_class = None

    lookup_field = 'name'

    lookup_url_kwarg = 'equipment_unique_type_name'

    renderer_classes = CoreJSONRenderer, JSONRenderer

    @silk_profile(name='API: Equipment Unique Types')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Equipment Unique Type')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve 1 item."""
        return super().retrieve(request, *args, **kwargs)


class EquipmentFacilityViewSet(ModelViewSet):
    """EquipmentFacilityViewSet.

    list: `GET` a filterable, paginated list of Equipment Facilities

    retrieve: `GET` the Equipment Facility specified by `name`

    create: `POST` a new Equipment Facility

    update: `PUT` updated data for the Equipment Facility specified
    by `name`

    partial_update: `PATCH` the Equipment Facility specified by `name`

    destroy: `DELETE` the Equipment Facility specified by `name`
    """

    queryset = EQUIPMENT_FACILITY_REST_API_QUERYSET

    serializer_class = EquipmentFacilitySerializer

    authentication_classes = (
        BasicAuthentication,
        RemoteUserAuthentication,
        SessionAuthentication,
        TokenAuthentication,
    )

    permission_classes = (IsAuthenticated,)

    filter_class = EquipmentFacilityFilter

    ordering_fields = ('name',)

    ordering = ('name',)

    # facilities can be numerous: paginate, unlike the type endpoints
    pagination_class = LimitOffsetPagination

    lookup_field = 'name'

    lookup_url_kwarg = 'equipment_facility_name'

    renderer_classes = CoreJSONRenderer, JSONRenderer

    @silk_profile(name='API: Equipment Facilities')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Equipment Facility')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve 1 item."""
        return super().retrieve(request, *args, **kwargs)


class EquipmentInstanceViewSet(ModelViewSet):
    """EquipmentInstanceViewSet.

    list: `GET` a filterable, paginated list of Equipment Instances

    retrieve: `GET` the Equipment Instance specified by `name`

    create: `POST` a new Equipment Instance

    update: `PUT` updated data for the Equipment Instance specified
    by `name`

    partial_update: `PATCH` the Equipment Instance specified by `name`

    destroy: `DELETE` the Equipment Instance specified by `name`
    """

    queryset = EQUIPMENT_INSTANCE_REST_API_QUERYSET

    serializer_class = EquipmentInstanceSerializer

    authentication_classes = (
        BasicAuthentication,
        RemoteUserAuthentication,
        SessionAuthentication,
        TokenAuthentication,
    )

    permission_classes = (IsAuthenticated,)

    filter_class = EquipmentInstanceFilter

    ordering_fields = (
        'equipment_general_type',
        'equipment_unique_type',
        'name',
        'equipment_facility',
    )

    ordering = 'equipment_general_type', 'equipment_unique_type', 'name'

    lookup_field = 'name'

    lookup_url_kwarg = 'equipment_instance_name'

    pagination_class = LimitOffsetPagination

    renderer_classes = CoreJSONRenderer, JSONRenderer

    @silk_profile(name='API: Equipment Instances')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Equipment Instance')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve 1 item."""
        return super().retrieve(request, *args, **kwargs)


class EquipmentSystemViewSet(ModelViewSet):
    """EquipmentSystemViewSet.

    list: `GET` a filterable, paginated list of Equipment Systems

    retrieve: `GET` the Equipment System specified by `id`

    create: `POST` a new Equipment System

    update: `PUT` updated data for the Equipment System specified by `id`

    partial_update: `PATCH` the Equipment System specified by `id`

    destroy: `DELETE` the Equipment System specified by `id`
    """

    queryset = EQUIPMENT_SYSTEM_REST_API_QUERYSET

    serializer_class = EquipmentSystemSerializer

    authentication_classes = (
        BasicAuthentication,
        RemoteUserAuthentication,
        SessionAuthentication,
        TokenAuthentication,
    )

    permission_classes = (IsAuthenticated,)

    filter_class = EquipmentSystemFilter

    ordering_fields = 'equipment_facility', 'name', 'date'

    ordering = 'equipment_facility', 'name', 'date'

    pagination_class = LimitOffsetPagination

    renderer_classes = CoreJSONRenderer, JSONRenderer

    @silk_profile(name='API: Equipment Systems')
    def list(self, request, *args, **kwargs):
        """List items."""
        return super().list(request, *args, **kwargs)

    @silk_profile(name='API: Equipment System')
    def retrieve(self, request, *args, **kwargs):
        """Retrieve 1 item."""
        return super().retrieve(request, *args, **kwargs)
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/data/views.py
views.py
from django.db.models import Prefetch

from aito.iot_mgmt.data.models import (
    LogicalDataType,
    NumericMeasurementUnit,
    EquipmentDataFieldType,
    EquipmentGeneralType,
    EquipmentDataField,
    EquipmentUniqueTypeGroup,
    EquipmentUniqueType,
    EquipmentFacility,
    EquipmentInstance,
    EquipmentSystem,
    EquipmentUniqueTypeGroupDataFieldProfile,
)


# Shared, pre-optimized querysets for admin pages and REST API endpoints.
# Naming convention:
#   *_NAME_ONLY_* / *_ID_ONLY_*  -> .only(...) column-pruned variants
#   *_UNORDERED_*                -> .order_by() strips default ordering
#   *_INCL_DESCRIPTION_*         -> select_related() FKs used by __str__
#   *_REST_API_*                 -> select_related + prefetch_related for
#                                   serializer nesting


DATA_TYPE_QUERYSET = \
    LogicalDataType.objects.all()


NUMERIC_MEASUREMENT_UNIT_NAME_ONLY_UNORDERED_QUERYSET = \
    NumericMeasurementUnit.objects \
    .only('name') \
    .order_by()

NUMERIC_MEASUREMENT_UNIT_QUERYSET = \
    NumericMeasurementUnit.objects.all()


EQUIPMENT_DATA_FIELD_TYPE_QUERYSET = \
    EquipmentDataFieldType.objects.all()


EQUIPMENT_GENERAL_TYPE_UNORDERED_QUERYSET = \
    EquipmentGeneralType.objects \
    .order_by()

EQUIPMENT_GENERAL_TYPE_QUERYSET = \
    EquipmentGeneralType.objects.all()


EQUIPMENT_DATA_FIELD_ID_ONLY_UNORDERED_QUERYSET = \
    EquipmentDataField.objects \
    .only('id') \
    .order_by()

EQUIPMENT_DATA_FIELD_NAME_ONLY_QUERYSET = \
    EquipmentDataField.objects \
    .only('name') \
    .order_by('name')

EQUIPMENT_DATA_FIELD_INCL_DESCRIPTION_QUERYSET = \
    EquipmentDataField.objects \
    .select_related(
        'equipment_general_type',
        'equipment_data_field_type',
        'logical_data_type',
        'numeric_measurement_unit')

EQUIPMENT_DATA_FIELD_STR_QUERYSET = \
    EQUIPMENT_DATA_FIELD_INCL_DESCRIPTION_QUERYSET

EQUIPMENT_DATA_FIELD_STR_UNORDERED_QUERYSET = \
    EQUIPMENT_DATA_FIELD_STR_QUERYSET \
    .order_by()


EQUIPMENT_UNIQUE_TYPE_GROUP_ID_ONLY_UNORDERED_QUERYSET = \
    EquipmentUniqueTypeGroup.objects \
    .only('id') \
    .order_by()

EQUIPMENT_UNIQUE_TYPE_GROUP_NAME_ONLY_QUERYSET = \
    EquipmentUniqueTypeGroup.objects \
    .only('name') \
    .order_by('name')

EQUIPMENT_UNIQUE_TYPE_GROUP_INCL_DESCRIPTION_QUERYSET = \
    EquipmentUniqueTypeGroup.objects \
    .select_related(
        'equipment_general_type')

EQUIPMENT_UNIQUE_TYPE_GROUP_STR_QUERYSET = \
    EQUIPMENT_UNIQUE_TYPE_GROUP_INCL_DESCRIPTION_QUERYSET

EQUIPMENT_UNIQUE_TYPE_GROUP_STR_UNORDERED_QUERYSET = \
    EQUIPMENT_UNIQUE_TYPE_GROUP_STR_QUERYSET \
    .order_by()


EQUIPMENT_UNIQUE_TYPE_ID_ONLY_UNORDERED_QUERYSET = \
    EquipmentUniqueType.objects \
    .only('id') \
    .order_by()

EQUIPMENT_UNIQUE_TYPE_NAME_ONLY_QUERYSET = \
    EquipmentUniqueType.objects \
    .defer(
        'equipment_general_type') \
    .order_by(
        'name')

EQUIPMENT_UNIQUE_TYPE_INCL_DESCRIPTION_QUERYSET = \
    EquipmentUniqueType.objects \
    .select_related(
        'equipment_general_type')

EQUIPMENT_UNIQUE_TYPE_STR_QUERYSET = \
    EQUIPMENT_UNIQUE_TYPE_INCL_DESCRIPTION_QUERYSET

EQUIPMENT_UNIQUE_TYPE_STR_UNORDERED_QUERYSET = \
    EQUIPMENT_UNIQUE_TYPE_STR_QUERYSET \
    .order_by()


EQUIPMENT_DATA_FIELD_REST_API_QUERYSET = \
    EQUIPMENT_DATA_FIELD_INCL_DESCRIPTION_QUERYSET \
    .prefetch_related(
        Prefetch(
            lookup='equipment_unique_types',
            queryset=EQUIPMENT_UNIQUE_TYPE_INCL_DESCRIPTION_QUERYSET))


EQUIPMENT_UNIQUE_TYPE_GROUP_REST_API_QUERYSET = \
    EQUIPMENT_UNIQUE_TYPE_GROUP_INCL_DESCRIPTION_QUERYSET \
    .prefetch_related(
        Prefetch(
            lookup='equipment_unique_types',
            queryset=EQUIPMENT_UNIQUE_TYPE_INCL_DESCRIPTION_QUERYSET),
        Prefetch(
            lookup='equipment_data_fields',
            queryset=EQUIPMENT_DATA_FIELD_INCL_DESCRIPTION_QUERYSET))


EQUIPMENT_UNIQUE_TYPE_REST_API_QUERYSET = \
    EQUIPMENT_UNIQUE_TYPE_INCL_DESCRIPTION_QUERYSET \
    .prefetch_related(
        Prefetch(
            lookup='equipment_data_fields',
            queryset=EQUIPMENT_DATA_FIELD_INCL_DESCRIPTION_QUERYSET),
        Prefetch(
            lookup='equipment_unique_type_groups',
            queryset=EQUIPMENT_UNIQUE_TYPE_GROUP_INCL_DESCRIPTION_QUERYSET))


EQUIPMENT_INSTANCE_ID_ONLY_UNORDERED_QUERYSET = \
    EquipmentInstance.objects \
    .only('id') \
    .order_by()

EQUIPMENT_INSTANCE_RELATED_TO_EQUIPMENT_UNIQUE_TYPE_ID_ONLY_UNORDERED_QUERYSET = (  # noqa: E501
    EquipmentInstance.objects
    .only('id', 'equipment_unique_type')
    .order_by())

EQUIPMENT_INSTANCE_RELATED_TO_EQUIPMENT_FACILITY_ID_ONLY_UNORDERED_QUERYSET = \
    EquipmentInstance.objects \
    .only(
        'id',
        'equipment_facility') \
    .order_by()

EQUIPMENT_INSTANCE_RELATED_TO_EQUIPMENT_FACILITY_STR_QUERYSET = \
    EquipmentInstance.objects \
    .only(
        'name',
        'equipment_facility') \
    .order_by(
        'name')

EQUIPMENT_INSTANCE_NAME_ONLY_QUERYSET = \
    EquipmentInstance.objects \
    .only(
        'name') \
    .order_by(
        'name')

EQUIPMENT_INSTANCE_STR_QUERYSET = \
    EquipmentInstance.objects \
    .defer(
        'equipment_facility',
        'info') \
    .select_related(
        'equipment_general_type',
        'equipment_unique_type') \
    .defer(
        'equipment_unique_type__equipment_general_type')

EQUIPMENT_INSTANCE_REST_API_QUERYSET = \
    EquipmentInstance.objects \
    .select_related(
        'equipment_general_type',
        'equipment_unique_type',
        'equipment_unique_type__equipment_general_type',
        'equipment_facility') \
    .defer(
        'equipment_facility__info') \
    .prefetch_related(
        Prefetch(
            lookup='equipment_unique_type_groups',
            queryset=EQUIPMENT_UNIQUE_TYPE_GROUP_INCL_DESCRIPTION_QUERYSET))


EQUIPMENT_FACILITY_NAME_ONLY_UNORDERED_QUERYSET = \
    EquipmentFacility.objects \
    .only('name') \
    .order_by()

EQUIPMENT_FACILITY_STR_QUERYSET = \
    EquipmentFacility.objects

EQUIPMENT_FACILITY_REST_API_QUERYSET = \
    EQUIPMENT_FACILITY_STR_QUERYSET \
    .prefetch_related(
        Prefetch(
            lookup='equipment_instances',
            queryset=EQUIPMENT_INSTANCE_RELATED_TO_EQUIPMENT_FACILITY_STR_QUERYSET))  # noqa: E501


EQUIPMENT_SYSTEM_REST_API_QUERYSET = \
    EquipmentSystem.objects \
    .select_related(
        'equipment_facility') \
    .defer(
        'equipment_facility__info') \
    .prefetch_related(
        Prefetch(
            lookup='equipment_instances',
            queryset=EQUIPMENT_INSTANCE_NAME_ONLY_QUERYSET))


EQUIPMENT_UNIQUE_TYPE_GROUP_DATA_FIELD_PROFILE_REST_API_QUERYSET = \
    EquipmentUniqueTypeGroupDataFieldProfile.objects \
    .select_related(
        'equipment_unique_type_group',
        'equipment_data_field',
        'equipment_data_field__equipment_general_type',
        'equipment_data_field__equipment_data_field_type',
        # BUGFIX: the FK on EquipmentDataField is named `logical_data_type`
        # (there is no `data_type` field); the stale name raised FieldError
        # as soon as this queryset was evaluated
        'equipment_data_field__logical_data_type',
        'equipment_data_field__numeric_measurement_unit') \
    .defer(
        'equipment_unique_type_group__equipment_general_type')
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/data/querysets.py
querysets.py
from rest_framework.serializers import (ModelSerializer,
                                        RelatedField,
                                        SlugRelatedField)

from drf_writable_nested.serializers import WritableNestedModelSerializer

from aito.iot_mgmt.data.models import (
    LogicalDataType,
    NumericMeasurementUnit,
    EquipmentDataFieldType,
    EquipmentGeneralType,
    EquipmentDataField,
    EquipmentUniqueTypeGroup,
    EquipmentUniqueType,
    EquipmentFacility,
    EquipmentInstance,
    EquipmentSystem,
)
from aito.iot_mgmt.utils import clean_lower_str


class DataTypeSerializer(ModelSerializer):
    """DataTypeSerializer."""

    class Meta:
        """Metadata."""

        model = LogicalDataType
        fields = ('name',)


class NumericMeasurementUnitSerializer(ModelSerializer):
    """NumericMeasurementUnitSerializer."""

    class Meta:
        """Metadata."""

        model = NumericMeasurementUnit
        fields = ('name',)


class EquipmentDataFieldTypeSerializer(ModelSerializer):
    """EquipmentDataFieldTypeSerializer."""

    class Meta:
        """Metadata."""

        model = EquipmentDataFieldType
        fields = ('name',)


class EquipmentGeneralTypeSerializer(ModelSerializer):
    """EquipmentGeneralTypeSerializer."""

    class Meta:
        """Metadata."""

        model = EquipmentGeneralType
        fields = ('name',)


class EquipmentDataFieldRelatedField(RelatedField):
    """EquipmentDataFieldRelatedField.

    Writable nested representation of an EquipmentDataField: incoming dicts
    upsert the data field (keyed on general type + name); outgoing values are
    rendered as plain dicts.
    """

    def to_internal_value(self, data):
        """Get internal value.

        NOTE(review): assumes all keys ('equipment_data_field_type',
        'logical_data_type', etc.) are present in `data` — absent keys raise
        KeyError rather than a validation error; confirm against callers.
        """
        return EquipmentDataField.objects.update_or_create(
            equipment_general_type=(
                EquipmentGeneralType.objects
                .get_or_create(
                    name=clean_lower_str(data['equipment_general_type']))[0]),
            name=clean_lower_str(data['name']),
            defaults=dict(
                equipment_data_field_type=(
                    EquipmentDataFieldType.objects
                    .get(name=clean_lower_str(
                        data['equipment_data_field_type']))),
                logical_data_type=(
                    LogicalDataType.objects
                    .get(name=clean_lower_str(data['logical_data_type']))),
                numeric_measurement_unit=(
                    NumericMeasurementUnit.objects
                    .get_or_create(
                        name=data['numeric_measurement_unit'].strip())[0]),
                lower_numeric_null=data['lower_numeric_null'],
                upper_numeric_null=data['upper_numeric_null'],
                min_val=data['min_val'],
                max_val=data['max_val']))[0]

    def to_representation(self, value):
        """Get representation."""
        return dict(
            id=value.id,
            equipment_general_type=value.equipment_general_type.name,
            name=value.name,
            description=value.description,
            equipment_data_field_type=value.equipment_data_field_type.name,
            logical_data_type=(value.logical_data_type.name
                               if value.logical_data_type
                               else None),
            numeric_measurement_unit=(value.numeric_measurement_unit.name
                                      if value.numeric_measurement_unit
                                      else None),
            lower_numeric_null=value.lower_numeric_null,
            upper_numeric_null=value.upper_numeric_null,
            min_val=value.min_val,
            max_val=value.max_val)


class EquipmentUniqueTypeRelatedField(RelatedField):
    """EquipmentUniqueTypeRelatedField."""

    def to_internal_value(self, data):
        """Get internal value (upsert by general type + name)."""
        return EquipmentUniqueType.objects.update_or_create(
            equipment_general_type=(
                EquipmentGeneralType.objects
                .get_or_create(
                    name=clean_lower_str(data['equipment_general_type']))[0]),
            name=clean_lower_str(data['name']))[0]

    def to_representation(self, value):
        """Get representation."""
        return dict(
            equipment_general_type=value.equipment_general_type.name,
            name=value.name)


class EquipmentUniqueTypeGroupRelatedField(RelatedField):
    """EquipmentUniqueTypeGroupRelatedField."""

    def to_internal_value(self, data):
        """Get internal value (upsert by general type + name)."""
        return EquipmentUniqueTypeGroup.objects.update_or_create(
            equipment_general_type=(
                EquipmentGeneralType.objects
                .get_or_create(
                    name=clean_lower_str(data['equipment_general_type']))[0]),
            name=clean_lower_str(data['name']))[0]

    def to_representation(self, value):
        """Get representation."""
        return dict(
            equipment_general_type=value.equipment_general_type.name,
            name=value.name)


class EquipmentDataFieldSerializer(WritableNestedModelSerializer):
    """EquipmentDataFieldSerializer."""

    equipment_general_type = \
        SlugRelatedField(
            queryset=EquipmentGeneralType.objects.all(),
            read_only=False,
            slug_field='name',
            many=False,
            required=True)

    equipment_data_field_type = \
        SlugRelatedField(
            queryset=EquipmentDataFieldType.objects.all(),
            read_only=False,
            slug_field='name',
            many=False,
            required=True)

    # BUGFIX: this declared field was previously named `data_type`, which
    # does not appear in Meta.fields (DRF then raises an assertion error at
    # serializer construction) and left `logical_data_type` without its
    # intended slug representation; the model FK is `logical_data_type`
    logical_data_type = \
        SlugRelatedField(
            queryset=LogicalDataType.objects.all(),
            read_only=False,
            slug_field='name',
            many=False,
            required=False)

    numeric_measurement_unit = \
        SlugRelatedField(
            queryset=NumericMeasurementUnit.objects.all(),
            read_only=False,
            slug_field='name',
            many=False,
            required=False)

    equipment_unique_types = \
        EquipmentUniqueTypeRelatedField(
            queryset=EquipmentUniqueType.objects.all(),
            read_only=False,
            many=True,
            required=False)

    class Meta:
        """Metadata."""

        model = EquipmentDataField

        fields = (
            'id',
            'equipment_general_type',
            'name',
            'equipment_data_field_type',
            'logical_data_type',
            'numeric_measurement_unit',
            'lower_numeric_null',
            'upper_numeric_null',
            'min_val',
            'max_val',
            'equipment_unique_types',
        )


class EquipmentUniqueTypeGroupSerializer(WritableNestedModelSerializer):
    """EquipmentUniqueTypeGroupSerializer."""

    equipment_general_type = \
        SlugRelatedField(
            queryset=EquipmentGeneralType.objects.all(),
            read_only=False,
            slug_field='name',
            many=False,
            required=True)

    equipment_unique_types = \
        EquipmentUniqueTypeRelatedField(
            queryset=EquipmentUniqueType.objects.all(),
            read_only=False,
            many=True,
            required=False)

    equipment_data_fields = \
        EquipmentDataFieldRelatedField(
            queryset=EquipmentDataField.objects.all(),
            read_only=False,
            many=True,
            required=False)

    class Meta:
        """Metadata."""

        model = EquipmentUniqueTypeGroup

        fields = (
            'equipment_general_type',
            'name',
            'equipment_unique_types',
            'equipment_data_fields',
        )


class EquipmentUniqueTypeSerializer(WritableNestedModelSerializer):
    """EquipmentUniqueTypeSerializer."""

    equipment_general_type = \
        SlugRelatedField(
            queryset=EquipmentGeneralType.objects.all(),
            read_only=False,
            slug_field='name',
            many=False,
            required=True)

    equipment_data_fields = \
        EquipmentDataFieldRelatedField(
            queryset=EquipmentDataField.objects.all(),
            read_only=False,
            many=True,
            required=False)

    equipment_unique_type_groups = \
        EquipmentUniqueTypeGroupRelatedField(
            queryset=EquipmentUniqueTypeGroup.objects.all(),
            read_only=False,
            many=True,
            required=False)

    class Meta:
        """Metadata."""

        model = EquipmentUniqueType

        fields = (
            'equipment_general_type',
            'name',
            'equipment_data_fields',
            'equipment_unique_type_groups',
        )


class EquipmentFacilitySerializer(ModelSerializer):
    """EquipmentFacilitySerializer."""

    equipment_instances = \
        SlugRelatedField(
            queryset=EquipmentInstance.objects.all(),
            read_only=False,
            slug_field='name',
            many=True,
            required=False)

    class Meta:
        """Metadata."""

        model = EquipmentFacility

        fields = (
            'name',
            'info',
            'equipment_instances',
        )


class EquipmentInstanceSerializer(WritableNestedModelSerializer):
    """EquipmentInstanceSerializer."""

    equipment_general_type = \
        SlugRelatedField(
            queryset=EquipmentGeneralType.objects.all(),
            read_only=False,
            slug_field='name',
            many=False,
            required=True)

    equipment_unique_type = \
        EquipmentUniqueTypeRelatedField(
            queryset=EquipmentUniqueType.objects.all(),
            read_only=False,
            many=False,
            required=False)

    equipment_facility = \
        SlugRelatedField(
            queryset=EquipmentFacility.objects.all(),
            read_only=False,
            slug_field='name',
            many=False,
            required=False)

    equipment_unique_type_groups = \
        EquipmentUniqueTypeGroupRelatedField(
            queryset=EquipmentUniqueTypeGroup.objects.all(),
            read_only=False,
            many=True,
            required=False)

    class Meta:
        """Metadata."""

        model = EquipmentInstance

        fields = (
            'equipment_general_type',
            'equipment_unique_type',
            'equipment_facility',
            'name',
            'info',
            'equipment_unique_type_groups',
        )


class EquipmentSystemSerializer(ModelSerializer):
    """EquipmentSystemSerializer."""

    equipment_facility = \
        SlugRelatedField(
            queryset=EquipmentFacility.objects.all(),
            read_only=False,
            slug_field='name',
            many=False,
            required=False)

    equipment_instances = \
        SlugRelatedField(
            queryset=EquipmentInstance.objects.all(),
            read_only=False,
            slug_field='name',
            many=True,
            required=False)

    class Meta:
        """Metadata."""

        model = EquipmentSystem

        fields = (
            'id',
            'equipment_facility',
            'name',
            'date',
            'equipment_instances',
        )
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/data/serializers.py
serializers.py
"""IoT Equipment Data Management models.

Defines the equipment taxonomy (general types, unique types, unique-type
groups, data fields) plus facilities, instances, systems, and data-field
profiles.  Signal receivers keep each EquipmentUniqueTypeGroup's
``equipment_data_fields`` M2M in sync with the union of the data fields of
its member EquipmentUniqueTypes.
"""


import warnings

from django.db.models import (Model,
                              CharField, DateField, FloatField, IntegerField,
                              JSONField,
                              ForeignKey, ManyToManyField,
                              PROTECT)
from django.db.models.signals import m2m_changed, pre_delete

from aito.iot_mgmt.utils import MAX_CHAR_LEN, clean_lower_str, clean_upper_str   # noqa: E501   # pylint: disable=line-too-long


class LogicalDataType(Model):
    """Logical Data Type."""

    # unique, lower-cased (see save()) logical type name, e.g. "cat"/"num"
    name = \
        CharField(
            verbose_name='Logical Data Type',
            blank=False,
            null=False,
            unique=True,
            db_index=True,
            max_length=MAX_CHAR_LEN)

    class Meta:
        """Metadata."""

        verbose_name = 'Logical Data Type'
        verbose_name_plural = 'Logical Data Types'

        ordering = ('name',)

    def __str__(self):
        """Return string repr."""
        return f'LogicalDataTp {self.name.upper()}'

    def save(self, *args, **kwargs):
        """Save, normalizing the name to lower case."""
        self.name = clean_lower_str(self.name)
        super().save(*args, **kwargs)


class NumericMeasurementUnit(Model):
    """Numeric Measurement Unit."""

    # unit names keep their case (e.g. "kWh"); only surrounding
    # whitespace is stripped in save()
    name = \
        CharField(
            verbose_name='Numeric Measurement Unit',
            blank=False,
            null=False,
            unique=True,
            db_index=True,
            max_length=MAX_CHAR_LEN)

    class Meta:
        """Metadata."""

        verbose_name = 'Numeric Measurement Unit'
        verbose_name_plural = 'Numeric Measurement Units'

        ordering = ('name',)

    def __str__(self):
        """Return string repr."""
        return f'NumMeasureUnit "{self.name}"'

    def save(self, *args, **kwargs):
        """Save, stripping surrounding whitespace from the name."""
        self.name = self.name.strip()
        super().save(*args, **kwargs)


class EquipmentDataFieldType(Model):
    """Equipment Data Field Type."""

    name = \
        CharField(
            verbose_name='Equipment Data Field Type',
            blank=False,
            null=False,
            unique=True,
            db_index=True,
            max_length=MAX_CHAR_LEN)

    class Meta:
        """Metadata."""

        verbose_name = 'Equipment Data Field Type'
        verbose_name_plural = 'Equipment Data Field Types'

        ordering = ('name',)

    def __str__(self):
        """Return string repr."""
        return f'EqDataFldTp {self.name.upper()}'

    def save(self, *args, **kwargs):
        """Save, normalizing the name to lower case."""
        self.name = clean_lower_str(self.name)
        super().save(*args, **kwargs)


class EquipmentGeneralType(Model):
    """Equipment General Type."""

    name = \
        CharField(
            verbose_name='Equipment General Type',
            blank=False,
            null=False,
            unique=True,
            db_index=True,
            max_length=MAX_CHAR_LEN)

    class Meta:
        """Metadata."""

        verbose_name = 'Equipment General Type'
        verbose_name_plural = 'Equipment General Types'

        ordering = ('name',)

    def __str__(self):
        """Return string repr."""
        return f'EqGenTp {self.name.upper()}'

    def save(self, *args, **kwargs):
        """Save, normalizing the name to lower case."""
        self.name = clean_lower_str(self.name)
        super().save(*args, **kwargs)


class EquipmentDataField(Model):
    """Equipment Data Field."""

    RELATED_NAME = 'equipment_data_fields'
    RELATED_QUERY_NAME = 'equipment_data_field'

    # sentinel bounds used to flag out-of-range sensor readings as null;
    # chosen well inside the 32-bit int range (MaxInt = 2 ** 31 - 1)
    DEFAULT_UPPER_NUMERIC_NULL = 2 ** 30   # << MaxInt = 2 ** 31 - 1
    DEFAULT_LOWER_NUMERIC_NULL = -DEFAULT_UPPER_NUMERIC_NULL

    equipment_general_type = \
        ForeignKey(
            to=EquipmentGeneralType,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)

    # not globally unique: unique per equipment_general_type (see Meta)
    name = \
        CharField(
            verbose_name='Equipment Data Field',
            blank=False,
            null=False,
            db_index=True,
            max_length=MAX_CHAR_LEN)

    equipment_data_field_type = \
        ForeignKey(
            to=EquipmentDataFieldType,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)

    logical_data_type = \
        ForeignKey(
            to=LogicalDataType,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=True,
            null=True,
            on_delete=PROTECT)

    numeric_measurement_unit = \
        ForeignKey(
            to=NumericMeasurementUnit,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=True,
            null=True,
            on_delete=PROTECT)

    lower_numeric_null = \
        FloatField(
            blank=False,
            null=False,
            default=DEFAULT_LOWER_NUMERIC_NULL)

    upper_numeric_null = \
        FloatField(
            blank=False,
            null=False,
            default=DEFAULT_UPPER_NUMERIC_NULL)

    min_val = \
        FloatField(
            blank=True,
            null=True)

    max_val = \
        FloatField(
            blank=True,
            null=True)

    equipment_unique_types = \
        ManyToManyField(
            to='EquipmentUniqueType',
            related_name=RELATED_NAME + '_reverse',
            related_query_name=RELATED_QUERY_NAME,
            blank=True)

    class Meta:
        """Metadata."""

        verbose_name = 'Equipment Data Field'
        verbose_name_plural = 'Equipment Data Fields'

        unique_together = 'equipment_general_type', 'name'

        ordering = 'equipment_general_type', 'name'

    def __str__(self):
        """Return string repr."""
        return ((f'{self.equipment_general_type.name.upper()} '
                 f'[{self.equipment_data_field_type.name}] '
                 f'{self.name} [') +
                (self.logical_data_type.name
                 if self.logical_data_type
                 else 'UNTYPED') +
                (f', unit {self.numeric_measurement_unit.name.upper()}'
                 if self.numeric_measurement_unit and self.numeric_measurement_unit.name   # noqa: E501
                 else '') +
                f', nulls ({self.lower_numeric_null}, {self.upper_numeric_null})' +   # noqa: E501
                ('' if self.min_val is None
                 else f', min {self.min_val}') +
                ('' if self.max_val is None
                 else f', max {self.max_val}') +
                ']')

    def save(self, *args, **kwargs):
        """Save, normalizing the name to lower case."""
        self.name = clean_lower_str(self.name)
        super().save(*args, **kwargs)


class EquipmentUniqueTypeGroup(Model):
    """Equipment Unique Type Group."""

    RELATED_NAME = 'equipment_unique_type_groups'
    RELATED_QUERY_NAME = 'equipment_unique_type_group'

    equipment_general_type = \
        ForeignKey(
            to=EquipmentGeneralType,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)

    name = \
        CharField(
            verbose_name='Equipment Unique Type Group',
            blank=False,
            null=False,
            unique=True,
            db_index=True,
            max_length=MAX_CHAR_LEN)

    equipment_unique_types = \
        ManyToManyField(
            to='EquipmentUniqueType',
            related_name=RELATED_NAME + '_reverse',
            related_query_name=RELATED_QUERY_NAME,
            blank=True)

    # denormalized union of member unique types' data fields,
    # maintained by the m2m_changed / pre_delete receivers below
    equipment_data_fields = \
        ManyToManyField(
            to=EquipmentDataField,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=True)

    class Meta:
        """Metadata."""

        verbose_name = 'Equipment Unique Type Group'
        verbose_name_plural = 'Equipment Unique Type Groups'

        ordering = 'equipment_general_type', 'name'

    def __str__(self):
        """Return string repr."""
        return (f'{self.equipment_general_type.name.upper()} '
                f'UnqTpGrp {self.name.upper()}')

    def save(self, *args, **kwargs):
        """Save, normalizing the name to lower case."""
        self.name = clean_lower_str(self.name)
        super().save(*args, **kwargs)


class EquipmentUniqueType(Model):
    """Equipment Unique Type."""

    RELATED_NAME = 'equipment_unique_types'
    RELATED_QUERY_NAME = 'equipment_unique_type'

    equipment_general_type = \
        ForeignKey(
            to=EquipmentGeneralType,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)

    name = \
        CharField(
            verbose_name='Equipment Unique Type',
            blank=False,
            null=False,
            unique=True,
            db_index=True,
            max_length=MAX_CHAR_LEN)

    # shares the through table declared on EquipmentDataField, so both
    # sides expose a symmetric forward M2M
    equipment_data_fields = \
        ManyToManyField(
            to=EquipmentDataField,
            through=EquipmentDataField.equipment_unique_types.through,
            related_name=RELATED_NAME + '_reverse',
            related_query_name=RELATED_QUERY_NAME,
            blank=True)

    equipment_unique_type_groups = \
        ManyToManyField(
            to=EquipmentUniqueTypeGroup,
            through=EquipmentUniqueTypeGroup.equipment_unique_types.through,
            related_name=RELATED_NAME + '_reverse',
            related_query_name=RELATED_QUERY_NAME,
            blank=True)

    class Meta:
        """Metadata."""

        verbose_name = 'Equipment Unique Type'
        verbose_name_plural = 'Equipment Unique Types'

        ordering = 'equipment_general_type', 'name'

    def __str__(self):
        """Return string repr."""
        return (f'{self.equipment_general_type.name.upper()} '
                f'UnqTp {self.name.upper()}')

    def save(self, *args, **kwargs):
        """Save, normalizing the name to lower case."""
        self.name = clean_lower_str(self.name)
        super().save(*args, **kwargs)


def equipment_unique_types_equipment_data_fields_m2m_changed(
        sender, instance, action, reverse, model, pk_set, using,
        *args, **kwargs):
    """M2M-changed signal.

    Fired on changes to the EquipmentUniqueType <-> EquipmentDataField
    M2M.  Rejects additions whose equipment_general_type differs from the
    instance's, and re-derives the affected groups' equipment_data_fields
    as the union of their member unique types' data fields.
    """
    # pylint: disable=too-many-arguments,too-many-branches,too-many-locals
    # pylint: disable=unused-argument

    if action == 'pre_add':
        # drop PKs whose general type mismatches before Django applies them
        invalid_objs = \
            model.objects \
            .filter(pk__in=pk_set) \
            .exclude(equipment_general_type=instance.equipment_general_type)

        if invalid_objs:
            warnings.warn(
                message=(f'*** {instance}: CANNOT ADD INVALID {invalid_objs} '
                         'WITH DIFFERENT EQUIPMENT GENERAL TYPE(S) ***'))

            pk_set.difference_update(
                i['pk'] for i in invalid_objs.values('pk'))

    elif action in ('post_add', 'post_remove') and pk_set:
        if (model is EquipmentDataField) and \
                instance.equipment_unique_type_groups.count():
            # forward direction: instance is an EquipmentUniqueType
            equipment_unique_type_groups_to_update = \
                instance.equipment_unique_type_groups.all()

            print(
                f'{instance}: Changed Equipment Data Fields: {action.upper()}:'
                f' Updating Equipment Data Fields of {equipment_unique_type_groups_to_update}...'   # noqa: E501
            )

            for equipment_unique_type_group_to_update in \
                    equipment_unique_type_groups_to_update:
                equipment_unique_type_group_to_update.equipment_data_fields.set(   # noqa: E501
                    equipment_unique_type_group_to_update.equipment_unique_types.all()[0].equipment_data_fields.all().union(   # noqa: E501
                        *(equipment_unique_type.equipment_data_fields.all()
                          for equipment_unique_type in equipment_unique_type_group_to_update.equipment_unique_types.all()[1:]),   # noqa: E501
                        all=False),
                    clear=False)

        elif model is EquipmentUniqueType:
            # reverse direction: instance is an EquipmentDataField
            changed_equipment_unique_types = \
                model.objects.filter(pk__in=pk_set)

            equipment_unique_type_groups_to_update = \
                changed_equipment_unique_types[0].equipment_unique_type_groups.all().union(   # noqa: E501
                    *(equipment_unique_type.equipment_unique_type_groups.all()
                      for equipment_unique_type in changed_equipment_unique_types[1:]),   # noqa: E501
                    all=False)

            if equipment_unique_type_groups_to_update:
                print(
                    f'{instance}: Changed Equipment Unique Types: '
                    f'{action.upper()}: Updating Equipment Data Fields of '
                    f'{equipment_unique_type_groups_to_update} Related to '
                    f'Added/Removed {changed_equipment_unique_types}...')

                for equipment_unique_type_group_to_update in \
                        equipment_unique_type_groups_to_update:
                    equipment_unique_type_group_to_update.equipment_data_fields.set(   # noqa: E501
                        equipment_unique_type_group_to_update.equipment_unique_types.all()[0].equipment_data_fields.all().union(   # noqa: E501
                            *(equipment_unique_type.equipment_data_fields.all()
                              for equipment_unique_type in
                              equipment_unique_type_group_to_update.equipment_unique_types.all()[1:]),   # noqa: E501
                            all=False),
                        clear=False)

    elif action == 'pre_clear':
        if (model is EquipmentDataField) and \
                instance.equipment_unique_type_groups.count():
            # instance (an EquipmentUniqueType) is about to clear its fields
            equipment_unique_type_groups_to_update = \
                instance.equipment_unique_type_groups.all()

            print(
                f'*** {instance}: CLEARING Equipment Data Fields: '
                f'{action.upper()}: Updating Equipment Data Fields of '
                f'{equipment_unique_type_groups_to_update}... ***')

            for equipment_unique_type_group_to_update in \
                    equipment_unique_type_groups_to_update:
                remaining_equipment_unique_types = (
                    equipment_unique_type_group_to_update
                    .equipment_unique_types.exclude(pk=instance.pk))

                if remaining_equipment_unique_types.count():
                    equipment_unique_type_group_to_update.equipment_data_fields.set(   # noqa: E501
                        remaining_equipment_unique_types[0].equipment_data_fields.all().union(   # noqa: E501
                            *(remaining_equipment_unique_type.equipment_data_fields.all()   # noqa: E501
                              for remaining_equipment_unique_type in
                              remaining_equipment_unique_types[1:]),
                            all=False),
                        clear=False)

                else:
                    print(
                        f'*** {instance}: CLEARING Equipment Data Fields: '
                        f'{action.upper()}: CLEARING Equipment Data Fields '
                        f'of {equipment_unique_type_groups_to_update}... ***')

                    equipment_unique_type_group_to_update.equipment_data_fields.clear()   # noqa: E501

        elif (model is EquipmentUniqueType) and \
                instance.equipment_unique_types.count():
            # instance (an EquipmentDataField) is about to clear its
            # unique types: exclude it from each affected group's union
            equipment_unique_types_to_clear = \
                instance.equipment_unique_types.all()

            equipment_unique_type_groups_to_update = \
                equipment_unique_types_to_clear[0].equipment_unique_type_groups.all().union(   # noqa: E501
                    *(equipment_unique_type_to_clear.equipment_unique_type_groups.all()   # noqa: E501
                      for equipment_unique_type_to_clear in
                      equipment_unique_types_to_clear[1:]),
                    all=False)

            if equipment_unique_type_groups_to_update:
                print(
                    f'*** {instance}: CLEARING Equipment Unique Types: '
                    f'{action.upper()}: Updating Equipment Data Fields of '
                    f'{equipment_unique_type_groups_to_update} Related to '
                    f'{equipment_unique_types_to_clear} to Clear...')

                for equipment_unique_type_group_to_update in \
                        equipment_unique_type_groups_to_update:
                    first_equipment_unique_type = (
                        equipment_unique_type_group_to_update
                        .equipment_unique_types.all()[0])

                    equipment_unique_type_group_to_update.equipment_data_fields.set(   # noqa: E501
                        (first_equipment_unique_type.equipment_data_fields.exclude(pk=instance.pk)   # noqa: E501
                         if first_equipment_unique_type in equipment_unique_types_to_clear   # noqa: E501
                         else first_equipment_unique_type.equipment_data_fields.all()).union(   # noqa: E501
                            *((equipment_unique_type_group_equipment_unique_type.equipment_data_fields.exclude(pk=instance.pk)   # noqa: E501
                               if equipment_unique_type_group_equipment_unique_type in equipment_unique_types_to_clear   # noqa: E501
                               else equipment_unique_type_group_equipment_unique_type.equipment_data_fields.all())   # noqa: E501
                              for equipment_unique_type_group_equipment_unique_type in   # noqa: E501
                              equipment_unique_type_group_to_update.equipment_unique_types.all()[1:]),   # noqa: E501
                            all=False),
                        clear=False)


m2m_changed.connect(
    receiver=equipment_unique_types_equipment_data_fields_m2m_changed,
    sender=EquipmentUniqueType.equipment_data_fields.through,
    weak=True,
    dispatch_uid=None,
    apps=None)


def equipment_unique_type_groups_equipment_unique_types_m2m_changed(
        sender, instance, action, reverse, model, pk_set, using,
        *args, **kwargs):
    """M2M-changed signal.

    Fired on changes to the EquipmentUniqueTypeGroup <->
    EquipmentUniqueType M2M.  Rejects additions whose
    equipment_general_type differs, and re-derives affected groups'
    equipment_data_fields from their (remaining) member unique types.
    """
    # pylint: disable=too-many-arguments,too-many-branches,unused-argument

    if action == 'pre_add':
        invalid_objs = (
            model.objects
            .filter(pk__in=pk_set)
            .exclude(equipment_general_type=instance.equipment_general_type))

        if invalid_objs:
            warnings.warn(
                message=(f'*** {instance}: CANNOT ADD INVALID {invalid_objs} '
                         'WITH DIFFERENT EQUIPMENT GENERAL TYPE(S) ***'))

            pk_set.difference_update(
                i['pk'] for i in invalid_objs.values('pk'))

    elif action in ('post_add', 'post_remove') and pk_set:
        if model is EquipmentUniqueType:
            # forward direction: instance is an EquipmentUniqueTypeGroup
            if instance.equipment_unique_types.count():
                print(f'{instance}: Changed Equipment Unique Types: '
                      f'{action.upper()}: Updating Data Fields...')

                instance.equipment_data_fields.set(
                    instance.equipment_unique_types.all()[0].equipment_data_fields.all().union(   # noqa: E501
                        *(equipment_unique_type.equipment_data_fields.all()
                          for equipment_unique_type in
                          instance.equipment_unique_types.all()[1:]),
                        all=False),
                    clear=False)

            else:
                print(f'*** {instance}: REMOVED Equipment Unique Types: '
                      f'{action.upper()}: CLEARING Data Fields... ***')

                instance.equipment_data_fields.clear()

        elif model is EquipmentUniqueTypeGroup:
            # reverse direction: instance is an EquipmentUniqueType
            equipment_unique_type_groups_to_update = \
                model.objects.filter(pk__in=pk_set)

            print(f'{instance}: Changed Equipment Unique Type Groups: '
                  f'{action.upper()}: Updating Data Fields of Added/Removed '
                  f'{equipment_unique_type_groups_to_update}...')

            for equipment_unique_type_group_to_update in \
                    equipment_unique_type_groups_to_update:
                if equipment_unique_type_group_to_update.equipment_unique_types.count():   # noqa: E501
                    equipment_unique_type_group_to_update.equipment_data_fields.set(   # noqa: E501
                        equipment_unique_type_group_to_update.equipment_unique_types.all()[0].equipment_data_fields.all().union(   # noqa: E501
                            *(equipment_unique_type.equipment_data_fields.all()
                              for equipment_unique_type in
                              equipment_unique_type_group_to_update.equipment_unique_types.all()[1:]),   # noqa: E501
                            all=False),
                        clear=False)

                else:
                    print(f'*** {equipment_unique_type_group_to_update}: '
                          f'REMOVED Equipment Unique Types: {action.upper()}: '
                          'CLEARING Data Fields... ***')

                    equipment_unique_type_group_to_update.equipment_data_fields.clear()   # noqa: E501

    elif action == 'pre_clear':
        if model is EquipmentUniqueType:
            # group is clearing its member unique types: no members remain
            print(f'*** {instance}: CLEARING Equipment Unique Types: '
                  f'{action.upper()}: CLEARING Data Fields... ***')

            instance.equipment_data_fields.clear()

        elif (model is EquipmentUniqueTypeGroup) and \
                instance.equipment_unique_type_groups.count():
            # unique type is leaving all its groups: recompute each
            # group's fields from the remaining member unique types
            equipment_unique_type_groups_to_update = \
                instance.equipment_unique_type_groups.all()

            print(f'{instance}: CLEARING Equipment Unique Type Groups: '
                  f'{action.upper()}: Updating Data Fields of '
                  f'{equipment_unique_type_groups_to_update} to Clear...')

            for equipment_unique_type_group_to_update in \
                    equipment_unique_type_groups_to_update:
                remaining_equipment_unique_types = (
                    equipment_unique_type_group_to_update
                    .equipment_unique_types.exclude(pk=instance.pk))

                if remaining_equipment_unique_types.count():
                    equipment_unique_type_group_to_update.equipment_data_fields.set(   # noqa: E501
                        remaining_equipment_unique_types.all()[0].equipment_data_fields.all().union(   # noqa: E501
                            *(equipment_unique_type.equipment_data_fields.all()
                              for equipment_unique_type in
                              remaining_equipment_unique_types[1:]),
                            all=False),
                        clear=False)

                else:
                    print(f'*** {equipment_unique_type_group_to_update}: '
                          f'REMOVING Equipment Unique Types: {action.upper()}:'
                          f' CLEARING Data Fields... ***')

                    equipment_unique_type_group_to_update.equipment_data_fields.clear()   # noqa: E501


m2m_changed.connect(
    receiver=equipment_unique_type_groups_equipment_unique_types_m2m_changed,
    sender=EquipmentUniqueTypeGroup.equipment_unique_types.through,
    weak=True,
    dispatch_uid=None,
    apps=None)


def equipment_unique_type_pre_delete(sender, instance, using, *args, **kwargs):
    """Pre-Delete signal.

    Before an EquipmentUniqueType is deleted, recompute (or clear) the
    equipment_data_fields of every group it belongs to, based on the
    groups' remaining member unique types.
    """
    # pylint: disable=unused-argument

    if instance.equipment_unique_type_groups.count():
        equipment_unique_type_groups_to_update = \
            instance.equipment_unique_type_groups.all()

        print(f'*** DELETING {instance}: '
              'Updating Data Streams of '
              f'{equipment_unique_type_groups_to_update}... ***'
              )

        for equipment_unique_type_group_to_update in \
                equipment_unique_type_groups_to_update:
            # BUG FIX: was equipment_unique_type_groups_to_update (the
            # QuerySet, which has no .equipment_unique_types attribute and
            # raised AttributeError); must use the per-group loop variable
            remaining_equipment_unique_types = (
                equipment_unique_type_group_to_update.equipment_unique_types
                .exclude(pk=instance.pk))

            if remaining_equipment_unique_types.count():
                equipment_unique_type_group_to_update.equipment_data_fields.set(   # noqa: E501
                    remaining_equipment_unique_types.all()[0].equipment_data_fields.all().union(   # noqa: E501
                        *(equipment_unique_type.equipment_data_fields.all()
                          for equipment_unique_type in
                          remaining_equipment_unique_types[1:]),
                        all=False),
                    clear=False)

            else:
                print(f'*** DELETING {instance}: '
                      f'CLEARING Data Streams of {equipment_unique_type_group_to_update}... ***'   # noqa: E501
                      )

                equipment_unique_type_group_to_update.equipment_data_fields.clear()   # noqa: E501


pre_delete.connect(
    receiver=equipment_unique_type_pre_delete,
    sender=EquipmentUniqueType,
    weak=True,
    dispatch_uid=None,
    apps=None)


class EquipmentFacility(Model):
    """Equipment Facility."""

    RELATED_NAME = 'equipment_facilities'
    RELATED_QUERY_NAME = 'equipment_facility'

    name = \
        CharField(
            verbose_name='Equipment Facility',
            blank=False,
            null=False,
            unique=True,
            db_index=True,
            max_length=MAX_CHAR_LEN)

    # free-form facility metadata
    info = \
        JSONField(
            blank=True,
            null=True)

    class Meta:
        """Metadata."""

        verbose_name = 'Equipment Facility'
        verbose_name_plural = 'Equipment Facilities'

        ordering = ('name',)

    def __str__(self):
        """Return string repr."""
        return f'EqFacility "{self.name}"'

    def save(self, *args, **kwargs):
        """Save, normalizing the name to lower case."""
        self.name = clean_lower_str(self.name)
        super().save(*args, **kwargs)


class EquipmentInstance(Model):
    """Equipment Instance."""

    RELATED_NAME = 'equipment_instances'
    RELATED_QUERY_NAME = 'equipment_instance'

    equipment_general_type = \
        ForeignKey(
            to=EquipmentGeneralType,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)

    equipment_unique_type = \
        ForeignKey(
            to=EquipmentUniqueType,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=True,
            null=True,
            on_delete=PROTECT)

    equipment_facility = \
        ForeignKey(
            to=EquipmentFacility,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=True,
            null=True,
            on_delete=PROTECT)

    name = \
        CharField(
            verbose_name='Equipment Instance',
            blank=False,
            null=False,
            unique=True,
            db_index=True,
            max_length=MAX_CHAR_LEN)

    info = \
        JSONField(
            blank=True,
            null=True)

    equipment_unique_type_groups = \
        ManyToManyField(
            to=EquipmentUniqueTypeGroup,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=True)

    class Meta:
        """Metadata."""

        verbose_name = 'Equipment Instance'
        verbose_name_plural = 'Equipment Instances'

        ordering = 'equipment_general_type', 'equipment_unique_type', 'name'

    def __str__(self):
        """Return string repr."""
        return (self.equipment_general_type.name.upper() +
                (f' UnqTp {self.equipment_unique_type.name}'
                 if self.equipment_unique_type
                 else '') +
                f' #{self.name}')

    def save(self, *args, **kwargs):
        """Save, normalizing the name and validating the unique type.

        If the assigned equipment_unique_type's general type does not
        match this instance's, warn and drop the unique-type assignment
        rather than persist an inconsistent pair.
        """
        self.name = clean_lower_str(self.name)

        if self.equipment_unique_type and (
                self.equipment_unique_type.equipment_general_type !=
                self.equipment_general_type):
            warnings.warn(
                message=(f'*** EQUIPMENT INSTANCE #{self.name}: '
                         f'EQUIPMENT UNIQUE TYPE {self.equipment_unique_type} '
                         'NOT OF EQUIPMENT GENERAL TYPE '
                         f'{self.equipment_general_type} ***'))

            self.equipment_unique_type = None

        super().save(*args, **kwargs)


class EquipmentSystem(Model):
    """Equipment System."""

    RELATED_NAME = 'equipment_systems'
    RELATED_QUERY_NAME = 'equipment_system'

    equipment_facility = \
        ForeignKey(
            to=EquipmentFacility,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=True,
            null=True,
            on_delete=PROTECT)

    # unique per date (see Meta.unique_together)
    name = \
        CharField(
            verbose_name='Equipment System',
            blank=False,
            null=False,
            default=None,
            db_index=True,
            max_length=MAX_CHAR_LEN)

    date = \
        DateField(
            blank=False,
            null=False,
            db_index=True)

    equipment_instances = \
        ManyToManyField(
            to=EquipmentInstance,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=True)

    class Meta:
        """Metadata."""

        verbose_name = 'Equipment System'
        verbose_name_plural = 'Equipment Systems'

        unique_together = 'name', 'date'

        ordering = 'equipment_facility', 'name', 'date'

    def __str__(self):
        """Return string repr."""
        return (self.name +
                (f' @ EqFacility "{self.equipment_facility.name}"'
                 if self.equipment_facility
                 else '') +
                f' on {self.date}')

    def save(self, *args, **kwargs):
        """Save, normalizing the name to lower case."""
        self.name = clean_lower_str(self.name)
        super().save(*args, **kwargs)


class EquipmentUniqueTypeGroupDataFieldProfile(Model):
    """Equipment Unique Type Group Data Field Profile."""

    RELATED_NAME = 'equipment_unique_type_group_data_field_profiles'
    RELATED_QUERY_NAME = 'equipment_unique_type_group_data_field_profile'

    equipment_unique_type_group = \
        ForeignKey(
            to=EquipmentUniqueTypeGroup,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)

    equipment_data_field = \
        ForeignKey(
            to=EquipmentDataField,
            related_name=RELATED_NAME,
            related_query_name=RELATED_QUERY_NAME,
            blank=False,
            null=False,
            on_delete=PROTECT)

    # end of the data window profiled; null = up to latest
    to_date = \
        DateField(
            blank=True,
            null=True,
            db_index=True)

    valid_proportion = \
        FloatField(
            blank=False,
            null=False)

    n_distinct_values = \
        IntegerField(
            blank=False,
            null=False)

    distinct_values = \
        JSONField(
            blank=True,
            null=True)

    # sample statistics (null for non-numeric fields)
    sample_min = \
        FloatField(
            blank=True,
            null=True)

    outlier_rst_min = \
        FloatField(
            blank=True,
            null=True)

    sample_quartile = \
        FloatField(
            blank=True,
            null=True)

    sample_median = \
        FloatField(
            blank=True,
            null=True)

    sample_3rd_quartile = \
        FloatField(
            blank=True,
            null=True)

    outlier_rst_max = \
        FloatField(
            blank=True,
            null=True)

    sample_max = \
        FloatField(
            blank=True,
            null=True)

    class Meta:
        """Metadata."""

        verbose_name = 'Equipment Unique Type Group Data Field Profile'
        verbose_name_plural = 'Equipment Unique Type Group Data Field Profiles'

        unique_together = \
            'equipment_unique_type_group', \
            'equipment_data_field', \
            'to_date'

        ordering = \
            'equipment_unique_type_group', \
            'equipment_data_field', \
            '-to_date'
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/data/models.py
models.py
"""Verbose-name tweaks for taxonomy and instance models (auto-generated)."""

from django.db import migrations, models


class Migration(migrations.Migration):
    """Rename fields' verbose names across the IoT_DataMgmt app."""

    dependencies = [
        ('IoT_DataMgmt', '0020_remove_equipmentdatafield_nullable'),
    ]

    # (model_name, unique?, verbose_name, default sentinel or no-default)
    _NO_DEFAULT = object()

    _NAME_FIELD_SPECS = (
        ('datatype', True, 'Data Type Name', _NO_DEFAULT),
        ('equipmentdatafield', False, 'Equipment Data Field Name',
         _NO_DEFAULT),
        ('equipmentdatafieldtype', True, 'Equipment Data Field Type Name',
         _NO_DEFAULT),
        ('equipmentfacility', True, 'Equipment Facility Name', _NO_DEFAULT),
        ('equipmentgeneraltype', True, 'Equipment General Type Name',
         _NO_DEFAULT),
        ('equipmentinstance', True, 'Equipment Instance Name', _NO_DEFAULT),
        ('equipmentsystem', False, 'Equipment System Name', None),
        ('equipmentuniquetype', False, 'Equipment Unique Type Name',
         _NO_DEFAULT),
        ('equipmentuniquetypegroup', True,
         'Equipment Unique Type Group Name', _NO_DEFAULT),
        ('numericmeasurementunit', True,
         'Numeric Measurement Unit Name', _NO_DEFAULT),
    )

    @staticmethod
    def _alter_name_field(model_name, unique, verbose_name, default):
        """Build one AlterField op for a model's `name` CharField."""
        kwargs = dict(max_length=255, verbose_name=verbose_name)
        if unique:
            kwargs['unique'] = True
        if default is not Migration._NO_DEFAULT:
            kwargs['default'] = default
        return migrations.AlterField(
            model_name=model_name,
            name='name',
            field=models.CharField(**kwargs))

    operations = [
        _alter_name_field.__func__(model_name, unique, verbose_name, default)
        for model_name, unique, verbose_name, default in _NAME_FIELD_SPECS
    ]
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/data/migrations/0021_auto_20181025_0733.py
0021_auto_20181025_0733.py
"""Verbose names."""

from django.db import migrations, models


# (model name, ordering tuple, singular verbose name, plural verbose name)
_MODEL_OPTION_SPECS = (
    ('equipmentdatafield',
     ('equipment_general_type', 'name'),
     'Equipment Data Field', 'Equipment Data Fields'),

    ('equipmentdatafieldtype',
     ('name',),
     'Equipment Data Field Type', 'Equipment Data Field Types'),

    ('equipmentfacility',
     ('name',),
     'Equipment Facility', 'Equipment Facilities'),

    ('equipmentgeneraltype',
     ('name',),
     'Equipment General Type', 'Equipment General Types'),

    ('equipmentinstance',
     ('equipment_general_type', 'equipment_unique_type', 'name'),
     'Equipment Instance', 'Equipment Instances'),

    ('equipmentsystem',
     ('equipment_facility', 'name', 'date'),
     'Equipment System', 'Equipment Systems'),

    ('equipmentuniquetype',
     ('equipment_general_type', 'name'),
     'Equipment Unique Type', 'Equipment Unique Types'),

    ('equipmentuniquetypegroup',
     ('equipment_general_type', 'name'),
     'Equipment Unique Type Group', 'Equipment Unique Type Groups'),

    ('equipmentuniquetypegroupdatafieldprofile',
     ('equipment_unique_type_group', 'equipment_data_field', '-to_date'),
     'Equipment Unique Type Group Data Field Profile',
     'Equipment Unique Type Group Data Field Profiles'),

    ('globalconfig',
     ('key',),
     'Global Config', 'Global Configs'),

    ('logicaldatatype',
     ('name',),
     'Logical Data Type', 'Logical Data Types'),

    ('numericmeasurementunit',
     ('name',),
     'Numeric Measurement Unit', 'Numeric Measurement Units'),
)


class Migration(migrations.Migration):
    """Verbose names."""

    dependencies = [
        ('IoT_DataMgmt', '0091_rename_logical_data_type')
    ]

    operations = [
        migrations.AlterModelOptions(
            name=model_name,
            options={'ordering': ordering,
                     'verbose_name': singular,
                     'verbose_name_plural': plural})
        for model_name, ordering, singular, plural in _MODEL_OPTION_SPECS
    ] + [
        migrations.AlterField(
            model_name='logicaldatatype',
            name='name',
            field=models.CharField(
                db_index=True,
                max_length=255,
                unique=True,
                verbose_name='Logical Data Type'))
    ]
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/data/migrations/0092_verbose_names.py
0092_verbose_names.py
"""Move Data Field Profile models to Base module."""

from django.db import migrations, models
import django.db.models.deletion


def _profile_fk(related_name, related_query_name, to):
    """Build a PROTECT-ed ForeignKey used by both profile models."""
    return models.ForeignKey(
        on_delete=django.db.models.deletion.PROTECT,
        related_name=related_name,
        related_query_name=related_query_name,
        to=to)


class Migration(migrations.Migration):
    """Move Data Field Profile models to Base module."""

    dependencies = [
        ('IoT_DataMgmt', '0082_auto_20200916_2306'),
    ]

    operations = [
        migrations.CreateModel(
            name='EquipmentUniqueTypeGroupDataFieldProfile',

            fields=[
                ('id',
                 models.AutoField(
                     auto_created=True,
                     primary_key=True,
                     serialize=False,
                     verbose_name='ID')),

                ('to_date',
                 models.DateField(blank=True, db_index=True, null=True)),

                ('valid_proportion', models.FloatField()),
                ('n_distinct_values', models.IntegerField()),
                ('distinct_values', models.JSONField(blank=True, null=True)),

                # sample statistics: all optional
                ('sample_min', models.FloatField(blank=True, null=True)),
                ('outlier_rst_min', models.FloatField(blank=True, null=True)),
                ('sample_quartile', models.FloatField(blank=True, null=True)),
                ('sample_median', models.FloatField(blank=True, null=True)),
                ('sample_3rd_quartile',
                 models.FloatField(blank=True, null=True)),
                ('outlier_rst_max', models.FloatField(blank=True, null=True)),
                ('sample_max', models.FloatField(blank=True, null=True)),

                ('last_updated', models.DateTimeField(auto_now=True)),

                ('equipment_data_field',
                 _profile_fk(
                     ('equipment_unique_type_group_'
                      'data_field_profiles'),
                     ('equipment_unique_type_group_'
                      'data_field_profile'),
                     'IoT_DataMgmt.equipmentdatafield')),

                ('equipment_unique_type_group',
                 _profile_fk(
                     ('equipment_unique_type_group_'
                      'data_field_profiles'),
                     ('equipment_unique_type_group_'
                      'data_field_profile'),
                     'IoT_DataMgmt.equipmentuniquetypegroup'))
            ],

            options={
                'ordering': ('equipment_unique_type_group',
                             'equipment_data_field',
                             '-to_date'),
                'unique_together': {
                    ('equipment_unique_type_group',
                     'equipment_data_field',
                     'to_date')
                }
            }
        ),

        migrations.CreateModel(
            name='EquipmentUniqueTypeGroupDataFieldPairwiseCorrelation',

            fields=[
                ('id',
                 models.AutoField(
                     auto_created=True,
                     primary_key=True,
                     serialize=False,
                     verbose_name='ID')),

                ('sample_correlation', models.FloatField()),

                ('last_updated', models.DateTimeField(auto_now=True)),

                ('equipment_data_field',
                 _profile_fk(
                     ('equipment_unique_type_group_'
                      'data_field_pairwise_correlations'),
                     ('equipment_unique_type_group_'
                      'data_field_pairwise_correlation'),
                     'IoT_DataMgmt.equipmentdatafield')),

                # second field of the pair: plain FK with default
                # reverse accessors (no related_name)
                ('equipment_data_field_2',
                 models.ForeignKey(
                     on_delete=django.db.models.deletion.PROTECT,
                     to='IoT_DataMgmt.equipmentdatafield')),

                ('equipment_unique_type_group',
                 _profile_fk(
                     ('equipment_unique_type_group_'
                      'data_field_pairwise_correlations'),
                     ('equipment_unique_type_group_'
                      'data_field_pairwise_correlation'),
                     'IoT_DataMgmt.equipmentuniquetypegroup'))
            ],

            options={
                'ordering': ('equipment_unique_type_group',
                             'equipment_data_field',
                             'equipment_data_field_2'),
                'unique_together': {
                    ('equipment_unique_type_group',
                     'equipment_data_field',
                     'equipment_data_field_2')
                }
            }
        )
    ]
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/data/migrations/0083_move_data_field_profile_models_to_base.py
0083_move_data_field_profile_models_to_base.py
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Replace EquipmentInstanceDataFieldDailyAggregate with ...DailyAgg."""

    dependencies = [
        ('IoT_DataMgmt', '0022_equipmentinstancedatafielddailyaggregate'),
    ]

    operations = [
        # New daily-aggregate model; note BigAutoField PK (high row volume
        # expected) and PROTECT FKs to data field & instance.
        migrations.CreateModel(
            name='EquipmentInstanceDataFieldDailyAgg',
            fields=[
                ('id', models.BigAutoField(primary_key=True,
                                           serialize=False)),
                ('date', models.DateField()),
                ('daily_count', models.IntegerField(default=0)),
                # per-value counts for categorical fields
                ('daily_distinct_value_counts',
                 django.contrib.postgres.fields.jsonb.JSONField(default=dict)),
                # numeric summary stats (nullable for non-numeric fields)
                ('daily_min', models.FloatField(blank=True, null=True)),
                ('daily_quartile', models.FloatField(blank=True, null=True)),
                ('daily_median', models.FloatField(blank=True, null=True)),
                ('daily_mean', models.FloatField(blank=True, null=True)),
                ('daily_3rd_quartile', models.FloatField(blank=True,
                                                         null=True)),
                ('daily_max', models.FloatField(blank=True, null=True)),
                ('last_updated', models.DateTimeField(auto_now=True)),
                ('equipment_data_field',
                 models.ForeignKey(
                     on_delete=django.db.models.deletion.PROTECT,
                     related_name='equipment_instance_data_field_daily_aggs',
                     related_query_name=('equipment_instance_'
                                         'data_field_daily_agg'),
                     to='IoT_DataMgmt.EquipmentDataField')),
                ('equipment_instance',
                 models.ForeignKey(
                     on_delete=django.db.models.deletion.PROTECT,
                     related_name='equipment_instance_data_field_daily_aggs',
                     related_query_name=('equipment_instance_'
                                         'data_field_daily_agg'),
                     to='IoT_DataMgmt.EquipmentInstance')),
            ],
            options={
                'ordering': ('equipment_instance', 'equipment_data_field',
                             'date'),
            },
        ),

        # Drop the old model: FKs must be removed before DeleteModel.
        migrations.RemoveField(
            model_name='equipmentinstancedatafielddailyaggregate',
            name='equipment_data_field',
        ),
        migrations.RemoveField(
            model_name='equipmentinstancedatafielddailyaggregate',
            name='equipment_instance',
        ),
        migrations.DeleteModel(
            name='EquipmentInstanceDataFieldDailyAggregate',
        ),
    ]
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/data/migrations/0023_auto_20181110_0727.py
0023_auto_20181110_0727.py
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models


def _name_field(verbose_name, unique=True):
    """Return the standard indexed 255-char ``name`` column.

    Deduplicates the ten identical ``CharField(db_index=True,
    max_length=255, ...)`` literals this migration alters.
    ``unique=False`` deconstructs identically to omitting the kwarg,
    so the resulting migration state is unchanged.
    """
    return models.CharField(db_index=True, max_length=255, unique=unique,
                            verbose_name=verbose_name)


class Migration(migrations.Migration):
    """Index/uniquify ``name`` columns and tighten field nullability."""

    dependencies = [
        ('IoT_DataMgmt', '0053_auto_20190223_0816'),
    ]

    # Operation order is preserved exactly from the auto-generated original.
    operations = [
        migrations.AlterModelOptions(
            name='equipmentdatafield',
            options={'ordering': ('equipment_general_type', 'name')},
        ),
        migrations.AlterField(
            model_name='datatype',
            name='name',
            field=_name_field('Data Type'),
        ),
        # unique=False: the original AlterField carried no unique constraint
        # on EquipmentDataField.name.
        migrations.AlterField(
            model_name='equipmentdatafield',
            name='name',
            field=_name_field('Equipment Data Field', unique=False),
        ),
        migrations.AlterField(
            model_name='equipmentdatafieldtype',
            name='name',
            field=_name_field('Equipment Data Field Type'),
        ),
        migrations.AlterField(
            model_name='equipmentfacility',
            name='name',
            field=_name_field('Equipment Facility'),
        ),
        migrations.AlterField(
            model_name='equipmentgeneraltype',
            name='name',
            field=_name_field('Equipment General Type'),
        ),
        migrations.AlterField(
            model_name='equipmentinstance',
            name='name',
            field=_name_field('Equipment Instance'),
        ),
        # Daily-metadata counters become non-nullable.
        migrations.AlterField(
            model_name='equipmentinstancedailymetadata',
            name='n_columns',
            field=models.IntegerField(),
        ),
        migrations.AlterField(
            model_name='equipmentinstancedailymetadata',
            name='n_rows',
            field=models.IntegerField(),
        ),
        migrations.AlterField(
            model_name='equipmentinstancedailymetadata',
            name='schema',
            field=django.contrib.postgres.fields.jsonb.JSONField(),
        ),
        migrations.AlterField(
            model_name='equipmentinstancedatafielddailyagg',
            name='daily_count',
            field=models.IntegerField(),
        ),
        migrations.AlterField(
            model_name='equipmentinstancedatafielddailyagg',
            name='daily_distinct_value_counts',
            field=django.contrib.postgres.fields.jsonb.JSONField(blank=True,
                                                                 null=True),
        ),
        migrations.AlterField(
            model_name='equipmentuniquetype',
            name='name',
            field=_name_field('Equipment Unique Type'),
        ),
        migrations.AlterField(
            model_name='equipmentuniquetypegroup',
            name='name',
            field=_name_field('Equipment Unique Type Group'),
        ),
        migrations.AlterField(
            model_name='globalconfig',
            name='value',
            field=django.contrib.postgres.fields.jsonb.JSONField(blank=True,
                                                                 null=True),
        ),
        migrations.AlterField(
            model_name='numericmeasurementunit',
            name='name',
            field=_name_field('Numeric Measurement Unit'),
        ),
    ]
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/data/migrations/0054_auto_20190223_2312.py
0054_auto_20190223_2312.py
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Initial schema: type taxonomy + equipment data fields."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='DataType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, unique=True)),
            ],
            options={
                'ordering': ('name',),
            },
        ),
        migrations.CreateModel(
            name='EquipmentDataField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                # NOT unique: same field name may recur across general types
                ('name', models.CharField(max_length=255)),
                ('nullable', models.BooleanField(default=False)),
                # sentinel bounds marking out-of-range readings as null
                ('lower_numeric_null', models.FloatField(blank=True,
                                                         null=True)),
                ('upper_numeric_null', models.FloatField(blank=True,
                                                         null=True)),
                ('default_val', models.FloatField(blank=True, null=True)),
                ('min_val', models.FloatField(blank=True, null=True)),
                ('max_val', models.FloatField(blank=True, null=True)),
                ('data_type',
                 models.ForeignKey(
                     blank=True, null=True,
                     on_delete=django.db.models.deletion.PROTECT,
                     related_name='equipment_data_fields',
                     related_query_name='equipment_data_field',
                     to='IoT_DataMgmt.DataType')),
            ],
            options={
                # references FKs added later in this same migration
                'ordering': ('equipment_general_type',
                             'equipment_data_field_type', 'name'),
            },
        ),
        migrations.CreateModel(
            name='EquipmentDataFieldType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, unique=True)),
            ],
            options={
                'ordering': ('name',),
            },
        ),
        migrations.CreateModel(
            name='EquipmentGeneralType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, unique=True)),
            ],
            options={
                'ordering': ('name',),
            },
        ),
        migrations.CreateModel(
            name='EquipmentUniqueType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True,
                                        serialize=False, verbose_name='ID')),
                # unique only within a general type, not globally
                ('name', models.CharField(max_length=255)),
                ('equipment_general_type',
                 models.ForeignKey(
                     on_delete=django.db.models.deletion.PROTECT,
                     related_name='equipment_unique_types',
                     related_query_name='equipment_unique_type',
                     to='IoT_DataMgmt.EquipmentGeneralType')),
            ],
            options={
                'ordering': ('equipment_general_type', 'name'),
            },
        ),
        # FKs added after both sides of each relation exist.
        migrations.AddField(
            model_name='equipmentdatafield',
            name='equipment_data_field_type',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.PROTECT,
                related_name='equipment_data_fields',
                related_query_name='equipment_data_field',
                to='IoT_DataMgmt.EquipmentDataFieldType'),
        ),
        migrations.AddField(
            model_name='equipmentdatafield',
            name='equipment_general_type',
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.PROTECT,
                related_name='equipment_data_fields',
                related_query_name='equipment_data_field',
                to='IoT_DataMgmt.EquipmentGeneralType'),
        ),
        migrations.AddField(
            model_name='equipmentdatafield',
            name='equipment_unique_types',
            field=models.ManyToManyField(
                blank=True,
                related_name='equipment_data_fields',
                related_query_name='equipment_data_field',
                to='IoT_DataMgmt.EquipmentUniqueType'),
        ),
    ]
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/data/migrations/0001_initial.py
0001_initial.py
from __future__ import unicode_literals

from django.db import migrations, models


# (column name, field class) for the nine optional data-location columns
# added to EquipmentInstance. All nine share the exact same options
# (blank=True, default=None, max_length=255, null=True); only the name and
# the Char-vs-URL field class differ, so the operations are generated from
# this spec table instead of nine copy-pasted AddField literals.
_DATA_LOCATION_FIELDS = (
    ('control_data_db_tbl', models.CharField),
    ('control_data_db_url', models.URLField),
    ('control_data_file_url', models.URLField),
    ('data_db_tbl', models.CharField),
    ('data_db_url', models.URLField),
    ('data_file_url', models.URLField),
    ('measure_data_db_tbl', models.CharField),
    ('measure_data_db_url', models.URLField),
    ('measure_data_file_url', models.URLField),
)


class Migration(migrations.Migration):
    """Add optional data-location columns to EquipmentInstance."""

    dependencies = [
        ('IoT_DataMgmt', '0003_equipmentinstance'),
    ]

    # Generated in the same (alphabetical) order as the original migration.
    operations = [
        migrations.AddField(
            model_name='equipmentinstance',
            name=_field_name,
            field=_field_cls(blank=True, default=None, max_length=255,
                             null=True),
        )
        for _field_name, _field_cls in _DATA_LOCATION_FIELDS
    ]
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/data/migrations/0004_auto_20180103_1007.py
0004_auto_20180103_1007.py
from pandas._libs.missing import NA   # pylint: disable=no-name-in-module
from tqdm import tqdm

from aito.pmfp.data_mgmt import EquipmentParquetDataSet
from aito.util.data_proc import ParquetDataset

from aito.iot_mgmt.api import (EquipmentUniqueTypeGroup,
                               EquipmentUniqueTypeGroupDataFieldProfile)


# fields with more distinct values than this get no `distinct_values` payload
MAX_N_DISTINCT_VALUES_TO_PROFILE: int = 30


def run(general_type: str, unique_type_group: str):
    """Run this script to profile Equipment Unique Type Group's data fields.

    Deletes all previously stored profiles for the group, then recomputes
    and saves one EquipmentUniqueTypeGroupDataFieldProfile row per data
    field present in the group's Parquet dataset.
    """
    # get Equipment Unique Type Group and corresponding Parquet Data Set
    eq_unq_tp_grp: EquipmentUniqueTypeGroup = \
        EquipmentUniqueTypeGroup.objects.get(name=unique_type_group)

    eq_unq_tp_grp_parquet_data_set: EquipmentParquetDataSet = \
        EquipmentParquetDataSet(general_type=general_type,
                                unique_type_group=unique_type_group)
    eq_unq_tp_grp_parquet_ds: ParquetDataset = \
        eq_unq_tp_grp_parquet_data_set.load()

    # delete previously stored Data Field profiles
    EquipmentUniqueTypeGroupDataFieldProfile.objects.filter(
        equipment_unique_type_group=eq_unq_tp_grp).delete()

    # profile Data Fields and save profiles into DB
    for equipment_data_field in tqdm(eq_unq_tp_grp.equipment_data_fields.all()):   # noqa: E501
        eq_data_field_name: str = equipment_data_field.name

        if eq_data_field_name in eq_unq_tp_grp_parquet_ds.possibleFeatureCols:
            # pylint: disable=protected-access

            # register the field's per-field null sentinels with the dataset
            # (reaches into the private `_nulls` map deliberately)
            if eq_unq_tp_grp_parquet_ds.typeIsNum(eq_data_field_name):
                eq_unq_tp_grp_parquet_ds._nulls[eq_data_field_name] = \
                    equipment_data_field.lower_numeric_null, \
                    equipment_data_field.upper_numeric_null

            # stringify pandas NA keys so the mapping is JSON-serializable
            _distinct_values_proportions: dict = {
                (str(NA) if k is NA else k): v
                for k, v in
                eq_unq_tp_grp_parquet_ds.distinct(eq_data_field_name).items()}
            _n_distinct_values: int = len(_distinct_values_proportions)

            eq_unq_tp_grp_data_field_profile: \
                EquipmentUniqueTypeGroupDataFieldProfile = \
                EquipmentUniqueTypeGroupDataFieldProfile.objects.create(
                    equipment_unique_type_group=eq_unq_tp_grp,
                    equipment_data_field=equipment_data_field,
                    valid_proportion=(eq_unq_tp_grp_parquet_ds
                                      .nonNullProportion(eq_data_field_name)),
                    n_distinct_values=_n_distinct_values)

            # only store the full value/proportion map for low-cardinality
            # fields
            if _n_distinct_values <= MAX_N_DISTINCT_VALUES_TO_PROFILE:
                eq_unq_tp_grp_data_field_profile.distinct_values = \
                    _distinct_values_proportions

            if eq_unq_tp_grp_parquet_ds.typeIsNum(eq_data_field_name):
                # quartiles from the dataset's representative sample;
                # drop the 'count' row so only min/quartiles/max remain
                quartiles: dict = (eq_unq_tp_grp_parquet_ds
                                   .reprSample[eq_data_field_name]
                                   .describe(percentiles=(.25, .5, .75))
                                   .drop(index='count',
                                         level=None,
                                         inplace=False,
                                         errors='raise')
                                   .to_dict())

                eq_unq_tp_grp_data_field_profile.sample_min = \
                    quartiles['min']
                eq_unq_tp_grp_data_field_profile.outlier_rst_min = \
                    eq_unq_tp_grp_parquet_ds.outlierRstMin(eq_data_field_name)
                eq_unq_tp_grp_data_field_profile.sample_quartile = \
                    quartiles['25%']
                eq_unq_tp_grp_data_field_profile.sample_median = \
                    quartiles['50%']
                eq_unq_tp_grp_data_field_profile.sample_3rd_quartile = \
                    quartiles['75%']
                eq_unq_tp_grp_data_field_profile.outlier_rst_max = \
                    eq_unq_tp_grp_parquet_ds.outlierRstMax(eq_data_field_name)
                eq_unq_tp_grp_data_field_profile.sample_max = \
                    quartiles['max']

            # second save persists the optional attributes set above
            eq_unq_tp_grp_data_field_profile.save()
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/iot_mgmt/data/scripts/profile_equipment_data_fields.py
profile_equipment_data_fields.py
"""ImageNet classification & ImageNet-similarity-based classification."""


from collections import OrderedDict
from collections.abc import Sequence
import json
from pathlib import Path
from typing import Union

from transformers.pipelines import pipeline
from transformers.pipelines.image_classification import ImageClassificationPipeline   # noqa: E501

from ..util.prob import OrderedClassifProbSet, normalize, order

from .util import ImgInput


__all__: Sequence[str] = (
    'IMAGENET_CLASSES',
    'imagenet_classify',
    'profile_imagenet_similarity',
    'ImageNetSimilarityBasedClassifier',
)


_IMAGENET_CLASSES_FILE_NAME: str = 'ImageNet-Classes.json'

# 1000 labels minus 3 lower-cased duplicates: cardigan, crane, maillot
_IMAGENET_N_CLASSES: int = 10 ** 3 - 3

with open(file=Path(__file__).parent / _IMAGENET_CLASSES_FILE_NAME,
          mode='rt', encoding='utf8') as f:
    IMAGENET_CLASSES: set[str] = {v[1].lower() for v in json.load(f).values()}

assert len(IMAGENET_CLASSES) == _IMAGENET_N_CLASSES

# all-default pipeline: Hugging Face picks its default image-classification
# checkpoint
_IMAGENET_CLASSIFIER: ImageClassificationPipeline = \
    pipeline(task='image-classification',
             model=None,
             config=None,
             tokenizer=None,
             feature_extractor=None,
             framework=None,
             revision=None,
             use_fast=True,
             use_auth_token=None,
             device=None,
             device_map=None,
             torch_dtype=None,
             trust_remote_code=None,
             model_kwargs={},
             pipeline_class=None)

# raw pipeline output: [{'label': ..., 'score': ...}, ...]
_HuggingFaceClassifOutput: type = list[dict[str, Union[float, str]]]


def _convert_hugging_face_classif_output(output: _HuggingFaceClassifOutput) \
        -> OrderedClassifProbSet:
    """Convert Hugging Face classification output.

    Keeps only the first comma-separated synonym of each label and
    normalizes it to lower snake case, preserving the pipeline's
    descending-score ordering.
    """
    converted_output: OrderedClassifProbSet = OrderedDict(
        (i['label'].split(',')[0].replace(' ', '_').lower(), i['score'])
        for i in output)

    # NOTE: assert is stripped under `python -O`; kept as in the original
    # to preserve behavior
    assert IMAGENET_CLASSES.issuperset(converted_output), \
        KeyError('*** INVALID OUTPUT CLASSES '
                 f'{set(converted_output).difference(IMAGENET_CLASSES)} ***')

    return converted_output


def imagenet_classify(img_input: Union[ImgInput, Sequence[ImgInput]], /) \
        -> Union[OrderedClassifProbSet, Sequence[OrderedClassifProbSet]]:
    """Classify image(s) according to ImageNet.

    Returns one ordered probability set per input image; a single
    (non-list/tuple) input yields a single probability set.
    """
    output: Union[_HuggingFaceClassifOutput,
                  list[_HuggingFaceClassifOutput]] = \
        _IMAGENET_CLASSIFIER(img_input, top_k=_IMAGENET_N_CLASSES)

    return ([_convert_hugging_face_classif_output(i) for i in output]
            if isinstance(img_input, (list, tuple))
            else _convert_hugging_face_classif_output(output))


def profile_imagenet_similarity(imgs: Sequence[ImgInput], /, *,
                                labels: Sequence[str]) \
        -> dict[str, OrderedClassifProbSet]:
    """Profile similarity between ImageNet classes and a set of labels.

    Sums ImageNet class probabilities per label across the given images,
    then normalizes each label's profile into an ordered probability set.
    """
    imagenet_classifs: Sequence[OrderedClassifProbSet] = imagenet_classify(imgs)   # noqa: E501

    d: dict[str, dict[str, float]] = {}

    for imagenet_classif, label in zip(imagenet_classifs, labels):
        profile: dict[str, float] = d.setdefault(label, {})

        for imagenet_class_name, prob in imagenet_classif.items():
            # accumulate via dict.get: the original's
            # `profile[k]: float = prob` annotated a subscript target,
            # which Python evaluates but never records
            profile[imagenet_class_name] = \
                profile.get(imagenet_class_name, 0) + prob

    return {k: order(normalize(v)) for k, v in d.items()}


class ImageNetSimilarityBasedClassifier:
    # pylint: disable=too-few-public-methods
    """Classify target classes based on mapping from such classes to ImageNet."""   # noqa: E501

    def __init__(
            self,
            classes_mapped_to_similar_imagenet_classes: dict[str, list[str]],
            /, *, prob_threshold: float = 3e-6):
        """Initialize.

        `classes_mapped_to_similar_imagenet_classes` maps each target class
        to the ImageNet classes considered similar to it; ImageNet
        probabilities at or below `prob_threshold` are ignored.
        """
        self.classes_mapped_to_imagenet_classes: dict[str, list[str]] = \
            classes_mapped_to_similar_imagenet_classes

        self.prob_threshold: float = prob_threshold

    def _map_probs(self, imagenet_classif: OrderedClassifProbSet, /) \
            -> OrderedClassifProbSet:
        """Fold one ImageNet classification into target-class probabilities."""
        return order(normalize({
            target_class: sum((p
                               if (p := imagenet_classif.get(imagenet_class,
                                                             0))
                               > self.prob_threshold
                               else 0)
                              for imagenet_class in imagenet_classes)
            for target_class, imagenet_classes
            in self.classes_mapped_to_imagenet_classes.items()}))

    def __call__(self, img_input: Union[ImgInput, Sequence[ImgInput]]) \
            -> Union[OrderedClassifProbSet, Sequence[OrderedClassifProbSet]]:
        """Classify image(s) into the mapped target classes."""
        imagenet_classif = imagenet_classify(img_input)

        if isinstance(imagenet_classif, (list, tuple)):
            return [self._map_probs(i) for i in imagenet_classif]

        return self._map_probs(imagenet_classif)
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/img_classif/imagenet.py
imagenet.py
"""Namespace with dotted-path access to nested keys and per-key metadata."""


from __future__ import annotations

from argparse import Namespace as ArgParseNamespace
import copy
import datetime
import json
from pathlib import Path
from types import ModuleType, SimpleNamespace
from typing import Any, Optional, Union
from typing import Collection, List, Tuple   # Py3.9+: use built-ins

from ruamel import yaml

from aito.util.data_types.numpy_pandas import NUMPY_INT_TYPES
from aito.util.fs import PathType, mkdir


__all__ = 'Namespace', 'DICT_OR_NAMESPACE_TYPES'


class Namespace(ArgParseNamespace):
    """Namespace with support for nested keys.

    Nested access uses dotted key strings, e.g. ``ns['a.b.c']``.
    Per-key metadata (label, description, choices, default) lives in the
    reserved ``__metadata__`` dict and is excluded from iteration,
    ``keys()``/``values()``/``items()`` and serialization helpers.
    """

    @staticmethod
    def _as_namespace_if_applicable(obj: Any, /) -> Union[Namespace, Any]:
        """Try to create namespace from object.

        Converts str-keyed dicts, argparse/Simple namespaces and modules;
        returns any other object unchanged.
        """
        if isinstance(obj, dict) and all(isinstance(k, str) for k in obj):
            obj: Namespace = Namespace(**obj)

        elif isinstance(obj, (ArgParseNamespace, SimpleNamespace)):
            obj: Namespace = Namespace(**obj.__dict__)

        elif isinstance(obj, ModuleType):
            # then get module's non-special, non-module-typed members only
            obj: Namespace = Namespace(**{
                k: v
                for k, v in obj.__dict__.items()
                if not (k.startswith('__') or isinstance(v, ModuleType))})

        return obj

    def __init__(self, **kwargs: Any):
        """Init Namespace."""
        # write straight into __dict__ to bypass our custom __setattr__
        self.__dict__['__metadata__'] = kwargs.pop('__metadata__', {})

        super().__init__(**{k: self._as_namespace_if_applicable(v)
                            for k, v in kwargs.items()})

        # move any nested metadata to corresponding nested child Namespace
        # pylint: disable=invalid-name
        for k, v in self.__metadata__.copy().items():
            nested_attr_names: List[str] = k.split(sep='.', maxsplit=-1)

            if len(nested_attr_names) > 1:
                del self.__metadata__[k]
                self._get_nested_attr(nested_attr_names[:-1]) \
                    .__metadata__[nested_attr_names[-1]] = v

    @staticmethod
    def pprint(namespace_or_dict: Union[Namespace, dict], /, *,
               indent: int = 0, addl_indent: int = 2):
        # pylint: disable=too-many-locals
        """Pretty-print namespace or dict.

        Renders a brace-delimited tree, interleaving each key's metadata
        (label, description, choices, default) when present.
        """
        indent_str: str = indent * ' '
        single_addl_indent_str: str = (indent + addl_indent) * ' '
        double_addl_indent_str: str = (indent + 2 * addl_indent) * ' '

        s: str = indent_str + '{'   # pylint: disable=invalid-name

        d: dict = (namespace_or_dict.__dict__   # pylint: disable=invalid-name
                   if isinstance(namespace_or_dict, Namespace)
                   else namespace_or_dict)

        if d:   # pylint: disable=too-many-nested-blocks
            s += '\n'   # pylint: disable=invalid-name

            for k, v in d.items():   # pylint: disable=invalid-name
                if k != '__metadata__':
                    v_metadata_str: str = ''

                    if isinstance(namespace_or_dict, Namespace):
                        v_metadata: Union[Namespace,
                                          ArgParseNamespace, SimpleNamespace,
                                          dict] = \
                            namespace_or_dict.__metadata__.get(k)

                        if v_metadata:
                            if isinstance(v_metadata, (Namespace,
                                                       ArgParseNamespace,
                                                       SimpleNamespace)):
                                v_metadata: dict = v_metadata.__dict__

                            label: Optional[str] = v_metadata.get('label')
                            if label:
                                v_metadata_str += (
                                    double_addl_indent_str + f'{label}\n'
                                )

                            description: Optional[str] = \
                                v_metadata.get('description')
                            if description:
                                v_metadata_str += (
                                    double_addl_indent_str +
                                    f'({description})\n'
                                )

                            choices: Optional[Collection[Any]] = \
                                v_metadata.get('choices')
                            if choices:
                                v_metadata_str += (
                                    double_addl_indent_str + 'choices:\n' +
                                    '\n'.join((double_addl_indent_str +
                                               f'  - {choice}')
                                              for choice in choices) +
                                    '\n'
                                )

                            default: Optional[Any] = v_metadata.get('default')
                            # NOTE(review): falsy defaults (0, '', False) are
                            # not printed by this truthiness test
                            if default:
                                v_metadata_str += (
                                    double_addl_indent_str +
                                    f'default: {default}\n'
                                )

                    s += (   # pylint: disable=invalid-name
                        single_addl_indent_str + f'{k} = '
                    ) + (
                        ('\n' +
                         v_metadata_str +
                         Namespace.pprint(v,
                                          indent=indent + 2 * addl_indent))
                        if isinstance(v, (Namespace, dict))
                        else (
                            f'{v}\n' +
                            (f'{v_metadata_str}\n' if v_metadata_str else '')
                        )
                    )

        s += (indent_str + '}\n')   # pylint: disable=invalid-name

        return s

    def __repr__(self) -> str:
        """Return string repr."""
        return self.pprint(self)

    def __str__(self) -> str:
        """Return string repr."""
        return repr(self)

    @classmethod
    def create(cls, obj: Any) -> Union[Namespace, Any]:
        """(Try to) Create namespace from object."""
        return cls._as_namespace_if_applicable(obj)

    def _get_nested_attr(self, nested_attr_names: List[str], /) -> Any:
        # walk the attribute chain; an empty list returns self
        nested_attr = self

        for nested_attr_name in nested_attr_names:
            nested_attr = nested_attr.__getattribute__(nested_attr_name)

        return nested_attr

    def __getattr__(self, attr: str, /) -> Any:
        """Get (nested) attribute value by (nested) attribute name."""
        return self._get_nested_attr(attr.split(sep='.', maxsplit=-1))

    def __getitem__(self, item: str, /) -> Any:
        """Get (nested) item value by (nested) item name."""
        return getattr(self, item)

    def __setattr__(self, attr: str, value: Any, /):
        """Set (nested) attribute value by (nested) attribute name."""
        nested_attr_names: List[str] = attr.split(sep='.', maxsplit=-1)

        value = self._as_namespace_if_applicable(value)

        if len(nested_attr_names) > 1:
            nested_ns: Namespace = self._get_nested_attr(nested_attr_names[:-1])   # noqa: E501
            setattr(nested_ns, nested_attr_names[-1], value)

        else:
            self.__dict__[attr] = value

    def __setitem__(self, item: str, value: Any, /):
        """Set (nested) item value by (nested) item name."""
        setattr(self, item, value)

    def __delattr__(self, attr: str, /):
        """Delete (nested) attr."""
        nested_attr_names: List[str] = attr.split(sep='.', maxsplit=-1)

        del self._get_nested_attr(nested_attr_names[:-1]) \
            .__dict__[nested_attr_names[-1]]

    def __delitem__(self, item: str, /):
        """Delete (nested) item."""
        delattr(self, item)

    def __iter__(self):
        """Iterate through content."""
        return (k for k in self.__dict__ if k != '__metadata__')

    def update(self,   # noqa: MC0001
               other: Union[ArgParseNamespace, SimpleNamespace,
                            dict, ModuleType] = {}, /,
               **kwargs: Any):
        # pylint: disable=dangerous-default-value,too-many-branches
        """Update content.

        Recursively merges `other` (then `kwargs`) into self; the
        `__modules_first__` kwarg applies module-sourced values before
        other mappings. The mutable `{}` default is safe here because
        `other` is deep-copied, never mutated in place.
        """
        if isinstance(other, (ArgParseNamespace, SimpleNamespace)):
            other = copy.deepcopy(other.__dict__)

        elif isinstance(other, dict):
            other = copy.deepcopy(other)

        elif isinstance(other, ModuleType):
            other = {k: v
                     for k, v in other.__dict__.items()
                     if not (k.startswith('__') or isinstance(v, ModuleType))}

        else:
            raise ValueError('*** `other` must be Namespace, Dict or Module ***')   # noqa: E501

        __modules_first__ = kwargs.pop('__modules_first__', False)

        other.update(copy.deepcopy(kwargs))

        __metadata__ = other.get('__metadata__', {})

        if __modules_first__:
            # first pass: merge module values into existing child Namespaces
            for k, v in other.items():   # pylint: disable=invalid-name
                if k != '__metadata__':
                    n = getattr(self, k, None)   # pylint: disable=invalid-name
                    if isinstance(n, Namespace) and isinstance(v, ModuleType):
                        n.update(v, __modules_first__=True)

            # second pass: merge everything else; module values are skipped
            for k, v in other.items():   # pylint: disable=invalid-name
                if k != '__metadata__':
                    n = getattr(self, k, None)   # pylint: disable=invalid-name
                    if isinstance(n, Namespace) and \
                            isinstance(v, (dict,
                                           ArgParseNamespace,
                                           SimpleNamespace)):
                        n.update(v, __modules_first__=True)
                    elif not isinstance(v, ModuleType):
                        setattr(self, k, v)

        else:
            for k, v in other.items():   # pylint: disable=invalid-name
                if k != '__metadata__':
                    n = getattr(self, k, None)   # pylint: disable=invalid-name
                    if isinstance(n, Namespace) and \
                            isinstance(v, (dict,
                                           ArgParseNamespace,
                                           SimpleNamespace,
                                           ModuleType)):
                        n.update(v)
                    else:
                        setattr(self, k, v)

        # route dotted metadata keys to the matching nested child Namespace
        for k, v in __metadata__.items():   # pylint: disable=invalid-name
            nested_attr_names: List[str] = k.split(sep='.', maxsplit=-1)
            self._get_nested_attr(nested_attr_names[:-1]) \
                .__metadata__[nested_attr_names[-1]] = v

    def keys(self, all_nested: bool = False) -> List[str]:
        """Get (nested) keys.

        With ``all_nested=True``, includes dotted paths of every nested
        child key in addition to the top-level keys.
        """
        if all_nested:
            keys: List[str] = []

            for k, v in self.__dict__.items():   # pylint: disable=invalid-name
                if k != '__metadata__':
                    keys.append(k)

                    if isinstance(v, Namespace):
                        keys.extend(f'{k}.{sub_k}'
                                    for sub_k in v.keys(all_nested=True))

            return keys

        return [k for k in self.__dict__ if k != '__metadata__']

    def values(self) -> List[Any]:
        """Get top-level values."""
        return [v for k, v in self.__dict__.items() if k != '__metadata__']

    def items(self) -> List[Tuple[str, Any]]:
        """Get top-level items."""
        return [i for i in self.__dict__.items() if i[0] != '__metadata__']

    def get(self, key: str, default: Optional[Any] = None):
        """Get top-level item by key string, with a default fall-back value."""   # noqa: E501
        return self.__dict__.get(key, default)

    def __len__(self):
        """Count number of top-level items."""
        return len(self.keys())

    def __call__(self, key: str, /):
        """Get metadata of a certain (nested) key."""
        nested_attr_names: List[str] = key.split(sep='.', maxsplit=-1)

        return (self._get_nested_attr(nested_attr_names[:-1])
                .__metadata__.get(nested_attr_names[-1], Namespace()))

    def to_dict(self):
        """Convert content to dict.

        Values whose string form ends in 'inf' (infinities) are replaced
        with None so the result survives JSON/YAML round-trips.
        """
        def _dict_no_inf(d: dict, /) -> dict:
            # pylint: disable=invalid-name
            return {k: (_dict_no_inf(v)
                        if isinstance(v, dict)
                        else (None
                              if str(v)[-3:] == 'inf'
                              else v))
                    for k, v in d.items()}

        return {k: (v.to_dict()
                    if isinstance(v, Namespace)
                    else (_dict_no_inf(v)
                          if isinstance(v, dict)
                          else (None
                                if str(v)[-3:] == 'inf'
                                else v)))
                for k, v in self.items()}

    @staticmethod
    def _serializable(x: Any, /):
        # recursively reduce to JSON/YAML-safe primitives:
        # sequences -> lists, mappings/namespaces -> dicts,
        # datetimes -> strings, numpy ints -> int, infinities -> None
        if isinstance(x, (list, set, tuple)):
            return [Namespace._serializable(i) for i in x]

        if isinstance(x, (dict, Namespace)):
            return {k: Namespace._serializable(v) for k, v in x.items()}

        if isinstance(x, (datetime.datetime, datetime.time)):
            return str(x)

        if isinstance(x, NUMPY_INT_TYPES):
            return int(x)

        if str(x)[-3:] == 'inf':
            return None

        return x

    class _JSONEncoder(json.JSONEncoder):
        # JSON encoder that understands containers and Namespaces
        def default(self, obj):
            # pylint: disable=arguments-renamed,protected-access
            return (Namespace._serializable(obj)
                    if isinstance(obj, (list, set, tuple, dict, Namespace))
                    else json.JSONEncoder.default(self, obj))

    @classmethod
    def from_json(cls, path: PathType) -> Namespace:
        """Load content from JSON file."""
        with open(file=path,
                  mode='rt',
                  buffering=-1,
                  encoding='utf-8',
                  errors='strict',
                  newline=None,
                  closefd=True,
                  opener=None) as json_file:
            return cls(**json.load(fp=json_file,
                                   cls=None,
                                   object_hook=None,
                                   parse_float=None,
                                   parse_int=None,
                                   parse_constant=None,
                                   object_pairs_hook=None))

    def to_json(self, path: PathType):
        """Dump content to JSON file."""
        # ensure the parent directory exists before writing
        mkdir(dir_path=Path(path).resolve(strict=False).parent, hdfs=False)

        with open(file=path,
                  mode='wt',
                  buffering=-1,
                  encoding='utf-8',
                  errors='strict',
                  newline=None,
                  closefd=True,
                  opener=None) as json_file:
            json.dump(obj=self.__dict__, fp=json_file,
                      skipkeys=False,
                      ensure_ascii=False,
                      check_circular=True,
                      allow_nan=True,
                      cls=self._JSONEncoder,
                      indent=2,
                      separators=None,
                      default=None,
                      sort_keys=False)

    @classmethod
    def from_yaml(cls, path: PathType) -> Namespace:
        """Load content from YAML file."""
        with open(file=path,
                  mode='rt',
                  buffering=-1,
                  encoding='utf-8',
                  errors='strict',
                  newline=None,
                  closefd=True,
                  opener=None) as yaml_file:
            return cls(**yaml.safe_load(stream=yaml_file, version=None))

    def to_yaml(self, path: PathType):
        """Dump content to YAML file."""
        # ensure the parent directory exists before writing
        mkdir(dir_path=Path(path).resolve(strict=False).parent, hdfs=False)

        with open(file=path,
                  mode='wt',
                  buffering=-1,
                  encoding='utf-8',
                  errors='strict',
                  newline=None,
                  closefd=True,
                  opener=None) as yaml_file:
            yaml.safe_dump(data=self._serializable(self),
                           stream=yaml_file,
                           # Dumper=yaml.dumper.SafeDumper,
                           default_style=None,
                           default_flow_style=False,
                           encoding='utf-8',
                           explicit_start=None,
                           explicit_end=None,
                           version=None,
                           tags=None,
                           canonical=False,
                           indent=2,
                           width=100,
                           allow_unicode=True,
                           line_break=None)


# types accepted wherever a dict-like mapping is expected
DICT_OR_NAMESPACE_TYPES: Tuple[type] = (dict,
                                        ArgParseNamespace, SimpleNamespace,
                                        Namespace)
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/util/namespace.py
namespace.py
import pygame
import random
import time
import turtle


# A Question object holds one quiz item:
# - question: the problem statement shown to the player
# - answer: the expected answer (stored as a string)
class Question:
    def __init__(self, question, answer):
        self.question = question
        self.answer = answer


# Holds the mutable state of the current game session.
class GameState:
    # Current score.
    score = 0
    # Current round number (1-based), shown on screen.
    roundnum = 1

    # Restart the stopwatch: remember the current wall-clock time.
    def reset_timer(self):
        self.start_time = time.time()

    # Seconds elapsed since the last reset_timer() call.
    def get_timer(self):
        return time.time() - self.start_time


# The single game-state instance used by the whole script.
state = GameState()


# Play background music with pygame (fire-and-forget).
def play_music(file):
    pygame.mixer.init()
    pygame.mixer.music.load(file)
    pygame.mixer.music.play()


# Play a one-shot sound effect with pygame.
def play_sound(file):
    pygame.mixer.init()
    sound = pygame.mixer.Sound(file)
    sound.play()


# Turtle dedicated to drawing the avatar image.
avatar = turtle.Turtle()


def draw_avatar(image):
    # turtle.addshape must be called before an image can be used as a shape.
    # NOTE: turtle can only display .gif images.
    turtle.addshape(image)
    avatar.clear()
    avatar.penup()
    avatar.setposition(350, -100)
    avatar.shape(image)


# Turtle dedicated to drawing the elapsed-time display.
pen_timer = turtle.Turtle()


def draw_timer():
    # Hide the turtle cursor and lift the pen so only text is drawn.
    pen_timer.hideturtle()
    pen_timer.penup()
    # Clear the previous value so digits do not overlap.
    pen_timer.clear()
    pen_timer.color('green')
    pen_timer.setposition(-240, 170)
    pen_timer.write(round(state.get_timer()), font=get_font(20))
    # Schedule a redraw in 1000 ms (1 second).
    turtle.Screen().ontimer(draw_timer, 1000)


# Load question/answer pairs for a round from text files.
# Question files are named r<round>q<i>.txt; answer files r<round>a<i>.txt.
def read_data(round_num):
    # Number of file-based questions per round.
    num_questions = 3
    data = []
    # range(1, x + 1) iterates over 1, 2, ..., x.
    for i in range(1, num_questions + 1):
        # Read the question; encoding='utf-8' handles Vietnamese text.
        # FIX: use context managers instead of manual open()/close().
        filename = 'r' + str(round_num) + 'q' + str(i) + '.txt'
        with open(filename, 'r', encoding='utf-8') as f:
            question = f.read()
        # Read the matching answer.
        filename = 'r' + str(round_num) + 'a' + str(i) + '.txt'
        with open(filename, 'r', encoding='utf-8') as f:
            answer = f.read()
        data.append(Question(question, answer))
    return data


# Generate random mental-arithmetic questions.
def generate_math_questions(round_num):
    data = []
    # Number of generated questions per round.
    num_questions = 3
    # Two operations: addition and multiplication.
    operators = ["+", "x"]
    # Operand magnitude grows with the round number.
    if round_num == 1:
        max_digits = 9
        min_digits = 1
    elif round_num == 2:
        max_digits = 99
        min_digits = 10
    else:
        max_digits = 999
        min_digits = 100
    for _ in range(num_questions):
        # Pick two random operands in the round's range.
        a = random.randint(min_digits, max_digits)
        b = random.randint(min_digits, max_digits)
        # Pick a random operation.
        op = random.choice(operators)
        # Build the problem statement.
        question = str(a) + " " + op + " " + str(b) + " = ?"
        # Compute the expected answer.
        if op == "+":
            answer = a + b
        else:  # op == "x"
            answer = a * b
        data.append(Question(question, str(answer)))
    return data


# Return a turtle font tuple at the given size.
def get_font(font_size):
    return ("Arial", font_size, "normal")


# Turtle dedicated to drawing the score display.
pen_score = turtle.Turtle()


def draw_score():
    pen_score.hideturtle()
    pen_score.penup()
    # Clear so successive scores do not overlap.
    pen_score.clear()
    pen_score.color('red')
    pen_score.setposition(300, 175)
    temp = "ROUND: " + str(state.roundnum)
    pen_score.write(temp, font=get_font(30))
    pen_score.color('white')
    pen_score.setposition(340, 110)
    pen_score.write(state.score, font=get_font(40))


# Turtle dedicated to drawing the round number.
pen_round = turtle.Turtle()


def draw_round_number(round_num):
    pen_round.hideturtle()
    pen_round.penup()
    pen_round.clear()
    pen_round.color('red')
    pen_round.setposition(300, 175)
    # FIX: use the round_num parameter; the original ignored it and read
    # state.roundnum instead, making the parameter dead.
    temp = "ROUND: " + str(round_num)
    pen_round.write(temp, font=get_font(30))


# Show one question, collect the player's answer and score it.
def ask_question(question):
    print("***************************")
    print(question.question)
    turtle.clear()
    turtle.hideturtle()
    turtle.penup()
    turtle.setposition(-240, 20)
    turtle.write(question.question, font=get_font(15))
    draw_score()
    draw_avatar('KimNguu-normal.gif')
    # Start timing just before the input dialog opens.
    state.reset_timer()
    result = turtle.textinput("Siêu Lập Trình", "Câu trả lời của bạn là gì?\n")
    check_result(result, question.answer)


# Compare the player's answer with the expected one and update the score.
def check_result(result, answer):
    time_taken = state.get_timer()
    # 5 bonus points for answering within 5 seconds.
    if time_taken < 5:
        bonus = 5
    else:
        bonus = 0
    # FIX: turtle.textinput returns None when the dialog is cancelled, and
    # answers read from files keep their trailing newline; normalise both
    # sides so a correct answer is not rejected.
    if result is not None and result.strip() == answer.strip():
        state.score += 10 + bonus
        play_sound("correct_answer.wav")
        draw_avatar('KimNguu-correct.gif')
        print("Đúng rồi")
    else:
        play_sound("wrong_answer.wav")
        draw_avatar('KimNguu-wrong.gif')
        print("Sai rồi")
    time.sleep(0.5)
    print("Thời gian trả lời câu hỏi là:", round(time_taken), "giây")
    if bonus > 0:
        print("Bạn nhận được điểm thưởng là", bonus, "vì trả lời nhanh")
    print("Điểm hiện tại của bạn là: ", state.score)


# Create and configure the game window.
def setup_turtle():
    screen = turtle.Screen()
    screen.setup(1200, 600)
    screen.bgpic('background.gif')
    turtle.title("Siêu lập trình")


# Set up the screen, start the music and the on-screen timer.
setup_turtle()
play_music("music.wav")
state.reset_timer()
draw_timer()

# Play three rounds; each round mixes file-based and generated questions.
round_number = 1
while round_number < 4:
    # FIX: keep the displayed round number in sync at the start of each
    # round; the original updated state.roundnum as a side effect inside
    # check_result by reading this module-level global.
    state.roundnum = round_number
    # draw_round_number(round_number)
    data = read_data(round_number) + generate_math_questions(round_number)
    for question in data:
        ask_question(question)
    round_number += 1
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/util/finalproject.py
finalproject.py
from logging import getLogger, Logger, INFO
import os
from shlex import quote
import time
from typing import Optional

import botocore
import boto3

from aito.util.fs import PathType
from aito.util.iter import to_iterable
from aito.util.log import STDOUT_HANDLER


__all__ = 'client', 'cp', 'mv', 'rm', 'sync'


_LOGGER: Logger = getLogger(name=__name__)
_LOGGER.setLevel(level=INFO)
_LOGGER.addHandler(hdlr=STDOUT_HANDLER)


# Lazily-created module-wide Boto3 S3 client singleton.
_CLIENT = None


def client(region: Optional[str] = None,
           access_key: Optional[str] = None,
           secret_key: Optional[str] = None):
    """Get Boto3 S3 Client (created once, then reused)."""
    global _CLIENT  # pylint: disable=global-statement

    if _CLIENT is None:
        _CLIENT = boto3.client(service_name='s3',
                               region_name=region,
                               api_version=None,
                               use_ssl=True,
                               verify=None,
                               endpoint_url=None,
                               aws_access_key_id=access_key,
                               aws_secret_access_key=secret_key,
                               aws_session_token=None,
                               config=botocore.client.Config(connect_timeout=9,
                                                             read_timeout=9))

    return _CLIENT


def _run_and_log(s3_command: str, msg: str, verbose: bool):
    """Run an `aws s3` shell command, optionally logging elapsed time.

    Factored out of cp/mv/rm/sync, which previously each duplicated this
    run-and-time boilerplate.
    """
    if verbose:
        _LOGGER.info(msg=msg)
        _LOGGER.debug(msg=f'Running: {s3_command}...')
        tic: float = time.time()

    os.system(command=s3_command)

    if verbose:
        toc: float = time.time()
        _LOGGER.info(msg=f'{msg} done!   <{toc - tic:,.1f} s>')


def cp(from_path: PathType, to_path: PathType, *,
       is_dir: bool = True,
       quiet: bool = True,
       verbose: bool = True):
    # pylint: disable=invalid-name,too-many-arguments
    """Copy a directory or a file between S3 paths or between S3 and local."""
    # FIX: shell-quote paths so names with spaces/metacharacters work.
    s3_command: str = (f'aws s3 cp {quote(str(from_path))} {quote(str(to_path))}' +  # noqa: E501
                       (' --recursive' if is_dir else '') +
                       (' --quiet' if quiet else ''))

    _run_and_log(s3_command,
                 msg=f'Copying "{from_path}" to "{to_path}"...',
                 verbose=verbose)


def mv(from_path: PathType, to_path: PathType, *,
       is_dir: bool = True,
       quiet: bool = True,
       verbose: bool = True):
    # pylint: disable=invalid-name,too-many-arguments
    """Move a directory or a file between S3 paths or between S3 and local."""
    # FIX: shell-quote paths so names with spaces/metacharacters work.
    s3_command: str = (f'aws s3 mv {quote(str(from_path))} {quote(str(to_path))}' +  # noqa: E501
                       (' --recursive' if is_dir else '') +
                       (' --quiet' if quiet else ''))

    _run_and_log(s3_command,
                 msg=f'Moving "{from_path}" to "{to_path}"...',
                 verbose=verbose)


def rm(path: PathType, *,
       is_dir: bool = True,
       globs: Optional[str] = None,
       quiet: bool = True,
       verbose: bool = True):
    # pylint: disable=invalid-name,too-many-arguments
    """Remove a directory, a file, or glob-pattern-matched items from S3."""
    # FIX: shell-quote the path so names with spaces/metacharacters work.
    s3_command: str = (f'aws s3 rm {quote(str(path))}' +
                       ((' --recursive' +
                         ((' --exclude "*" ' +
                           ' '.join(f'--include "{glob}"'
                                    for glob in to_iterable(globs)))
                          if globs else ''))
                        if is_dir else '') +
                       (' --quiet' if quiet else ''))

    _run_and_log(s3_command,
                 msg=('Deleting ' +
                      ((f'Globs "{globs}" @ ' if globs else 'Directory ')
                       if is_dir else '') +
                      f'"{path}"...'),
                 verbose=verbose)


def sync(from_dir_path: PathType, to_dir_path: PathType, *,
         delete: bool = True,
         quiet: bool = True,
         verbose: bool = True):
    # pylint: disable=too-many-arguments
    """Sync a directory between S3 paths or between S3 and local."""
    # FIX: shell-quote paths so names with spaces/metacharacters work.
    s3_command: str = (f'aws s3 sync {quote(str(from_dir_path))} {quote(str(to_dir_path))}' +  # noqa: E501
                       (' --delete' if delete else '') +
                       (' --quiet' if quiet else ''))

    _run_and_log(s3_command,
                 msg=f'Syncing "{from_dir_path}" to "{to_dir_path}"...',
                 verbose=verbose)
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/util/s3.py
s3.py
from logging import getLogger, Logger, DEBUG
import os
from pathlib import Path
import shutil
import subprocess
import sys
from typing import Union

from pyarrow.hdfs import HadoopFileSystem

from aito.util.log import STDOUT_HANDLER


__all__ = (
    'PathType',
    '_HADOOP_HOME_ENV_VAR_NAME', '_HADOOP_HOME',
    '_HADOOP_CONF_DIR_ENV_VAR_NAME',
    '_ON_LINUX_CLUSTER',
    'HDFS_CLIENT',
    'exist',
    'mkdir',
    'rm',
    'empty',
    'cp', 'mv',
    'get', 'put',
)


_LOGGER: Logger = getLogger(name=__name__)
_LOGGER.setLevel(level=DEBUG)
_LOGGER.addHandler(hdlr=STDOUT_HANDLER)


# Type accepted for all path arguments in this module.
PathType = Union[str, Path]


# Hadoop configuration directory
_HADOOP_HOME_ENV_VAR_NAME: str = 'HADOOP_HOME'
_HADOOP_HOME: PathType = os.environ.get(_HADOOP_HOME_ENV_VAR_NAME)


def _hdfs_cmd(hadoop_home: PathType = _HADOOP_HOME) -> str:
    """Return the `hdfs` executable path, else the bare command name."""
    if hadoop_home:
        cmd: str = f'{hadoop_home}/bin/hdfs'

        # FIX: resolve with strict=False -- the original resolve(strict=True)
        # raises FileNotFoundError when the binary is absent, instead of
        # falling back to the plain 'hdfs' command as intended.
        if Path(cmd).resolve(strict=False).is_file():
            return cmd

    return 'hdfs'


_HADOOP_CONF_DIR_ENV_VAR_NAME: str = 'HADOOP_CONF_DIR'

# check if running on Linux cluster or local Mac
_ON_LINUX_CLUSTER: bool = sys.platform.startswith('linux')

# detect & set up HDFS client
if _HADOOP_HOME:
    os.environ['ARROW_LIBHDFS_DIR'] = \
        str(Path(_HADOOP_HOME).resolve(strict=True) / 'lib' / 'native')

    try:
        HDFS_CLIENT = HadoopFileSystem()

        try:
            _LOGGER.debug(msg=(msg := 'Testing HDFS...'))

            if HDFS_CLIENT.isdir(path='/'):
                _ON_LINUX_CLUSTER_WITH_HDFS: bool = True
                _LOGGER.debug(msg=f'{msg} done!')

            else:
                _ON_LINUX_CLUSTER_WITH_HDFS: bool = False
                _LOGGER.debug(msg=f'{msg} UNAVAILABLE')

        except Exception:   # pylint: disable=broad-except
            HDFS_CLIENT = None
            _ON_LINUX_CLUSTER_WITH_HDFS: bool = False
            _LOGGER.debug(msg=f'{msg} UNAVAILABLE')

    except Exception:   # pylint: disable=broad-except
        HDFS_CLIENT = None
        _ON_LINUX_CLUSTER_WITH_HDFS: bool = False
        _LOGGER.debug(msg='*** HDFS UNAVAILABLE ***')

else:
    HDFS_CLIENT = None
    _ON_LINUX_CLUSTER_WITH_HDFS: bool = False


def _exec(cmd: str, must_succeed: bool = False):
    """Run a shell command; optionally raise on a non-zero exit code."""
    with subprocess.Popen(cmd,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          shell=True) as proc:
        out, err = proc.communicate()

        if must_succeed and proc.returncode:
            raise RuntimeError(f'*** COMMAND ERROR: {cmd} ***\n{out}\n{err}\n')


def command_prefix(hdfs: bool = True, hadoop_home: PathType = '/opt/hadoop') -> str:  # noqa: E501
    """Get command prefix."""
    return f'{_hdfs_cmd(hadoop_home=hadoop_home)} dfs -' if hdfs else ''


def exist(path: PathType, *, hdfs: bool = False, is_dir: bool = False) -> bool:
    """Check whether path exists."""
    if hdfs and _ON_LINUX_CLUSTER_WITH_HDFS:
        return (HDFS_CLIENT.isdir(path=path)
                if is_dir
                else HDFS_CLIENT.isfile(path=path))

    path: Path = Path(path).resolve(strict=False)
    return (path.is_dir()
            if is_dir
            else (path.is_file() or path.is_symlink()))


def mkdir(dir_path: PathType, *,
          hdfs: bool = True, hadoop_home: PathType = '/opt/hadoop'):
    """Make directory."""
    cmd_prefix: str = command_prefix(hdfs=hdfs, hadoop_home=hadoop_home)

    # On a Linux cluster, open up local-dir permissions (GNU mkdir accepts
    # the -m option after the operand).
    command: str = (f'{cmd_prefix}mkdir -p "{dir_path}"' +
                    (' -m 0777'
                     if _ON_LINUX_CLUSTER and (not hdfs)
                     else ''))

    _ = os.system(command=command)
    assert _ <= 0, OSError(f'*** FAILED: {command} (EXIT CODE: {_}) ***')


def rm(path: PathType, *,
       hdfs: bool = True, is_dir: bool = True,
       hadoop_home: PathType = '/opt/hadoop'):
    # pylint: disable=invalid-name
    """Remove directory or file."""
    # Without an HDFS cluster, always treat the path as local.
    if not _ON_LINUX_CLUSTER_WITH_HDFS:
        hdfs: bool = False

    if hdfs:
        os.system(
            command=f'{command_prefix(hdfs=True, hadoop_home=hadoop_home)}rm' +
                    (' -r' if is_dir else '') +
                    f' -skipTrash "{path}"')

    else:
        path: Path = Path(path).resolve(strict=False)

        if is_dir and path.is_dir():
            try:
                shutil.rmtree(path=path, ignore_errors=False, onerror=None)

            except Exception:   # pylint: disable=broad-except
                # FIX: fall back with `rm -rf` -- the original used `rm -f`,
                # which cannot delete a directory, so the fallback could
                # never succeed.
                os.system(command=f'rm -rf "{path}"')

            assert not path.is_dir(), \
                OSError(f'*** CANNOT REMOVE LOCAL DIR "{path}" ***')

        elif path.is_file() or path.is_symlink():
            os.remove(path=path)

            assert not (path.is_file() or path.is_symlink()), \
                OSError(f'*** CANNOT REMOVE LOCAL FILE/SYMLINK "{path}" ***')


def empty(dir_path: PathType, *,
          hdfs: bool = True, hadoop_home: PathType = '/opt/hadoop'):
    """Empty directory."""
    if exist(path=dir_path, hdfs=hdfs, is_dir=True):
        rm(path=dir_path, hdfs=hdfs, is_dir=True, hadoop_home=hadoop_home)

    mkdir(dir_path=dir_path, hdfs=hdfs, hadoop_home=hadoop_home)


def cp(from_path: PathType, to_path: PathType, *,
       hdfs: bool = True, is_dir: bool = True,
       hadoop_home: PathType = '/opt/hadoop'):
    # pylint: disable=invalid-name
    """Copy directory or file."""
    # Clear the destination, then make sure its parent dir exists.
    rm(path=to_path, hdfs=hdfs, is_dir=is_dir, hadoop_home=hadoop_home)

    # NOTE(review): resolving an HDFS destination against the local FS looks
    # questionable, but it is the original behavior -- confirm before changing.
    to_path = Path(to_path).resolve(strict=False)
    mkdir(dir_path=to_path.parent, hdfs=hdfs, hadoop_home=hadoop_home)

    if hdfs:
        os.system(
            command=(f'{command_prefix(hdfs=True, hadoop_home=hadoop_home)}cp '
                     f'"{from_path}" "{to_path}"'))

    elif is_dir:
        shutil.copytree(src=from_path,
                        dst=to_path,
                        symlinks=False,
                        ignore=None,
                        ignore_dangling_symlinks=False,
                        dirs_exist_ok=False)

    else:
        shutil.copyfile(src=from_path, dst=to_path, follow_symlinks=True)


def mv(from_path: PathType, to_path: PathType, *,
       hdfs: bool = True, is_dir: bool = True,
       hadoop_home: PathType = '/opt/hadoop'):
    # pylint: disable=invalid-name
    """Move directory or file."""
    # Clear the destination, then make sure its parent dir exists.
    rm(path=to_path, hdfs=hdfs, is_dir=is_dir, hadoop_home=hadoop_home)

    to_path = Path(to_path).resolve(strict=False)
    mkdir(dir_path=to_path.parent, hdfs=hdfs, hadoop_home=hadoop_home)

    if hdfs:
        os.system(
            command=(f'{command_prefix(hdfs=hdfs, hadoop_home=hadoop_home)}mv '
                     f'"{from_path}" "{to_path}"'))

    else:
        try:
            shutil.move(src=from_path, dst=to_path)

        except Exception:   # pylint: disable=broad-except
            # Fall back to the shell for cross-device or odd-permission moves.
            os.system(command=f'mv "{from_path}" "{to_path}"')


def get(from_hdfs: PathType, to_local: PathType, *,
        is_dir: bool = False, overwrite: bool = True, _mv: bool = False,
        hadoop_home: PathType = '/opt/hadoop',
        must_succeed: bool = False,
        _on_linux_cluster_with_hdfs: bool = _ON_LINUX_CLUSTER_WITH_HDFS):
    """Get directory or file from HDFS to local."""
    if _on_linux_cluster_with_hdfs:
        if overwrite:
            rm(path=to_local, hdfs=False, is_dir=is_dir)

        to_local = Path(to_local).resolve(strict=False)

        # Download only when overwriting or the local target is missing.
        if overwrite or \
                (is_dir and (not to_local.is_dir())) or \
                ((not is_dir) and (not to_local.is_file())):
            mkdir(dir_path=to_local.parent, hdfs=False)

            _exec(cmd=f'{_hdfs_cmd(hadoop_home=hadoop_home)} dfs -get '
                      f'"{from_hdfs}" "{to_local}"')

            if _mv:
                rm(path=from_hdfs, hdfs=True, is_dir=is_dir, hadoop_home=hadoop_home)  # noqa: E501

    elif from_hdfs != to_local:
        # No HDFS: plain local move/copy.
        if _mv:
            mv(from_path=from_hdfs, to_path=to_local, hdfs=False, is_dir=is_dir)  # noqa: E501
        else:
            cp(from_path=from_hdfs, to_path=to_local, hdfs=False, is_dir=is_dir)  # noqa: E501

    if must_succeed:
        if isinstance(to_local, str):
            to_local = Path(to_local).resolve(strict=False)

        assert to_local.is_dir() if is_dir else to_local.is_file(), \
            OSError(f'*** FS.GET({from_hdfs} -> {to_local}) FAILED! ***')


def put(from_local: PathType, to_hdfs: PathType, *,
        is_dir: bool = True, _mv: bool = True,
        hadoop_home: PathType = '/opt/hadoop'):
    """Put directory or file from local to HDFS."""
    if _ON_LINUX_CLUSTER_WITH_HDFS:
        rm(path=to_hdfs, hdfs=True, is_dir=is_dir, hadoop_home=hadoop_home)

        to_hdfs = Path(to_hdfs).resolve(strict=False)
        mkdir(dir_path=to_hdfs.parent, hdfs=True, hadoop_home=hadoop_home)

        os.system(command=f'{_hdfs_cmd(hadoop_home=hadoop_home)} dfs -put '
                          f'"{from_local}" "{to_hdfs}"')

        if _mv:
            rm(path=from_local, hdfs=False, is_dir=is_dir)

    elif from_local != to_hdfs:
        # No HDFS: plain local move/copy.
        if _mv:
            mv(from_path=from_local, to_path=to_hdfs, hdfs=False, is_dir=is_dir)  # noqa: E501
        else:
            cp(from_path=from_local, to_path=to_hdfs, hdfs=False, is_dir=is_dir)  # noqa: E501
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/util/fs.py
fs.py
from __future__ import annotations from collections.abc import Callable, Collection, Sequence import datetime from functools import lru_cache, partial from itertools import chain from logging import Logger import math from pathlib import Path import os import random import re import time from typing import Any, LiteralString, Optional, Union from urllib.parse import ParseResult, urlparse from uuid import uuid4 # from warnings import simplefilter from numpy import isfinite, ndarray, vstack from pandas import DataFrame, Series, concat, isnull, notnull, read_parquet # from pandas.errors import PerformanceWarning from pandas._libs.missing import NAType # pylint: disable=no-name-in-module from tqdm import tqdm from pyarrow.dataset import dataset from pyarrow.fs import LocalFileSystem, S3FileSystem from pyarrow.lib import RecordBatch, Schema, Table # pylint: disable=no-name-in-module from pyarrow.parquet import FileMetaData, read_metadata, read_schema, read_table from aito.util import debug, fs, s3 from aito.util.data_types.arrow import ( DataType, _ARROW_STR_TYPE, _ARROW_DATE_TYPE, is_binary, is_boolean, is_num, is_possible_cat, is_possible_feature, is_string) from aito.util.data_types.numpy_pandas import NUMPY_FLOAT_TYPES, NUMPY_INT_TYPES from aito.util.data_types.python import (PY_NUM_TYPES, PyNumType, PyPossibleFeatureType, PY_LIST_OR_TUPLE) from aito.util.default_dict import DefaultDict from aito.util.iter import to_iterable from aito.util.namespace import Namespace, DICT_OR_NAMESPACE_TYPES from ._abstract import (AbstractDataHandler, AbstractFileDataHandler, AbstractS3FileDataHandler, ColsType, ReducedDataSetType) from .pandas import PandasMLPreprocessor __all__: Sequence[LiteralString] = ('ParquetDataset',) # flake8: noqa # (too many camelCase names) # pylint: disable=c-extension-no-member # e.g., `math.` # pylint: disable=invalid-name # e.g., camelCase names # pylint: disable=no-member # e.g., `._cache` # pylint: disable=protected-access # e.g., `._cache` # 
pylint: disable=too-many-lines # (this whole module) # TODO: revisit Pandas PeformanceWarning # github.com/twopirllc/pandas-ta/issues/340#issuecomment-879450854 # simplefilter(action='ignore', category=PerformanceWarning) def randomSample(population: Sequence[Any], sampleSize: int, returnCollectionType=set) -> Collection[Any]: """Draw random sample from population.""" return returnCollectionType(random.sample(population=list(population), k=sampleSize) if len(population) > sampleSize else population) class ParquetDataset(AbstractS3FileDataHandler): # pylint: disable=too-many-instance-attributes,too-many-public-methods """S3 Parquet Data Feeder.""" # caches _CACHE: dict[str, Namespace] = {} _FILE_CACHES: dict[str, Namespace] = {} # default arguments dict # (cannot be aito.util.namespace.Namespace # because that makes nested dicts into normal dicts) _DEFAULT_KWARGS: dict[str, Optional[Union[str, DefaultDict]]] = dict( iCol=None, tCol=None, reprSampleMinNFiles=AbstractFileDataHandler._REPR_SAMPLE_MIN_N_FILES, reprSampleSize=AbstractDataHandler._DEFAULT_REPR_SAMPLE_SIZE, nulls=DefaultDict((None, None)), minNonNullProportion=DefaultDict(AbstractDataHandler._DEFAULT_MIN_NON_NULL_PROPORTION), outlierTailProportion=DefaultDict(AbstractDataHandler._DEFAULT_OUTLIER_TAIL_PROPORTION), maxNCats=DefaultDict(AbstractDataHandler._DEFAULT_MAX_N_CATS), minProportionByMaxNCats=DefaultDict( AbstractDataHandler._DEFAULT_MIN_PROPORTION_BY_MAX_N_CATS)) def __init__(self, path: str, *, awsRegion: Optional[str] = None, accessKey: Optional[str] = None, secretKey: Optional[str] = None, # noqa: E501 _mappers: Optional[Callable] = None, _reduceMustInclCols: Optional[ColsType] = None, verbose: bool = True, **kwargs: Any): # pylint: disable=too-many-branches,too-many-locals,too-many-statements """Init S3 Parquet Data Feeder.""" if verbose or debug.ON: logger: Logger = self.classStdOutLogger() self.awsRegion: Optional[str] = awsRegion self.accessKey: Optional[str] = accessKey self.secretKey: 
Optional[str] = secretKey self.onS3: bool = path.startswith('s3://') if self.onS3: self.s3Client = s3.client(region=awsRegion, access_key=accessKey, secret_key=secretKey) self.path: str = path if self.onS3 else os.path.expanduser(path) if self.path in self._CACHE: _cache: Namespace = self._CACHE[self.path] else: self._CACHE[self.path] = _cache = Namespace() if _cache: if debug.ON: logger.debug(msg=f'*** RETRIEVING CACHE FOR "{self.path}" ***') else: if self.onS3: _parsedURL: ParseResult = urlparse(url=path, scheme='', allow_fragments=True) _cache.s3Bucket = _parsedURL.netloc _cache.pathS3Key = _parsedURL.path[1:] if self.path in self._FILE_CACHES: _cache.nFiles = 1 _cache.filePaths = {self.path} else: if verbose: logger.info(msg=(msg := f'Loading "{self.path}" by Arrow...')) tic: float = time.time() if self.onS3: s3.rm(path=path, is_dir=True, globs='*_$folder$', # redundant AWS EMR-generated files quiet=True, verbose=False) _cache._srcArrowDS = dataset(source=(path.replace('s3://', '') if self.onS3 else self.path), schema=None, format='parquet', filesystem=(S3FileSystem(region=awsRegion, access_key=accessKey, secret_key=secretKey) if self.onS3 else LocalFileSystem()), partitioning=None, partition_base_dir=None, exclude_invalid_files=None, ignore_prefixes=None) if verbose: toc: float = time.time() logger.info(msg=f'{msg} done! 
<{toc - tic:,.1f} s>') if _filePaths := _cache._srcArrowDS.files: _cache.filePaths = {(f's3://{filePath}' if self.onS3 else filePath) for filePath in _filePaths if not filePath.endswith('_$folder$')} _cache.nFiles = len(_cache.filePaths) else: _cache.nFiles = 1 _cache.filePaths = {self.path} _cache.srcColsInclPartitionKVs = set() _cache.srcTypesInclPartitionKVs = Namespace() for i, filePath in enumerate(_cache.filePaths): if filePath in self._FILE_CACHES: fileCache: Namespace = self._FILE_CACHES[filePath] if (fileCache.nRows is None) and (i < self._SCHEMA_MIN_N_FILES): fileCache.localPath = self.fileLocalPath(filePath=filePath) schema: Schema = read_schema(where=fileCache.localPath) fileCache.srcColsExclPartitionKVs = (set(schema.names) - {'__index_level_0__'}) fileCache.srcColsInclPartitionKVs.update(fileCache.srcColsExclPartitionKVs) for col in (fileCache.srcColsExclPartitionKVs .difference(fileCache.partitionKVs)): fileCache.srcTypesExclPartitionKVs[col] = \ fileCache.srcTypesInclPartitionKVs[col] = \ schema.field(col).type metadata: FileMetaData = read_metadata(where=fileCache.localPath) fileCache.nCols = metadata.num_columns fileCache.nRows = metadata.num_rows else: srcColsInclPartitionKVs: set[str] = set() srcTypesExclPartitionKVs: Namespace = Namespace() srcTypesInclPartitionKVs: Namespace = Namespace() partitionKVs: dict[str, Union[datetime.date, str]] = {} for partitionKV in re.findall(pattern='[^/]+=[^/]+/', string=filePath): k, v = partitionKV.split(sep='=', maxsplit=1) srcColsInclPartitionKVs.add(k) if k == self._DATE_COL: srcTypesInclPartitionKVs[k] = _ARROW_DATE_TYPE partitionKVs[k] = datetime.datetime.strptime(v[:-1], '%Y-%m-%d').date() else: srcTypesInclPartitionKVs[k] = _ARROW_STR_TYPE partitionKVs[k] = v[:-1] if i < self._SCHEMA_MIN_N_FILES: localPath: Path = self.fileLocalPath(filePath=filePath) schema: Schema = read_schema(where=localPath) srcColsExclPartitionKVs: set[str] = (set(schema.names) - {'__index_level_0__'}) 
                        srcColsInclPartitionKVs.update(srcColsExclPartitionKVs)

                        # record each non-partition column's Arrow type in both the
                        # excl- and incl-partition-key type registries
                        for col in srcColsExclPartitionKVs.difference(partitionKVs):
                            srcTypesExclPartitionKVs[col] = \
                                srcTypesInclPartitionKVs[col] = \
                                schema.field(col).type

                        # row/column counts come from the Parquet footer metadata
                        metadata: FileMetaData = read_metadata(where=localPath)
                        nCols: int = metadata.num_columns
                        nRows: int = metadata.num_rows

                    else:
                        # metadata not read eagerly here; counts/cols left unresolved
                        # (to be filled in later by cacheFileMetadataAndSchema(...))
                        localPath: Optional[Path] = None if self.onS3 else filePath

                        srcColsExclPartitionKVs: Optional[set[str]] = None

                        nCols: Optional[int] = None
                        nRows: Optional[int] = None

                    # memoize this file's metadata in the class-level file cache
                    self._FILE_CACHES[filePath] = fileCache = \
                        Namespace(localPath=localPath,
                                  partitionKVs=partitionKVs,
                                  srcColsExclPartitionKVs=srcColsExclPartitionKVs,
                                  srcColsInclPartitionKVs=srcColsInclPartitionKVs,
                                  srcTypesExclPartitionKVs=srcTypesExclPartitionKVs,
                                  srcTypesInclPartitionKVs=srcTypesInclPartitionKVs,
                                  nCols=nCols, nRows=nRows)

                # merge this file's columns into the dataset-level cache
                _cache.srcColsInclPartitionKVs |= fileCache.srcColsInclPartitionKVs

                # merge per-file Arrow types into the dataset-level registry,
                # asserting no column has conflicting types across files
                for col, arrowType in fileCache.srcTypesInclPartitionKVs.items():
                    if col in _cache.srcTypesInclPartitionKVs:
                        assert arrowType == _cache.srcTypesInclPartitionKVs[col], \
                            TypeError(f'*** {filePath} COLUMN {col}: '
                                      f'DETECTED TYPE {arrowType} != '
                                      f'{_cache.srcTypesInclPartitionKVs[col]} ***')
                    else:
                        _cache.srcTypesInclPartitionKVs[col] = arrowType

            _cache.cachedLocally = False

        # expose all cached attributes directly on this instance
        self.__dict__.update(_cache)

        # mapper pipeline, applied lazily at reduce(...) time
        self._mappers: tuple[Callable] = (() if _mappers is None
                                          else to_iterable(_mappers, iterable_type=tuple))

        # columns that must always be loaded so the mappers can run
        self._reduceMustInclCols: set[str] = (set() if _reduceMustInclCols is None
                                              else to_iterable(_reduceMustInclCols,
                                                               iterable_type=set))

        # extract standard keyword arguments
        self._extractStdKwArgs(kwargs, resetToClassDefaults=True, inplace=True)

        # organize time series if applicable
        self._organizeIndexCols()

        # set profiling settings and create empty profiling cache
        self._emptyCache()

    # ===========
    # STRING REPR
    # -----------
    # __repr__
    # __shortRepr__

    def __repr__(self) -> str:
        """Return string repr."""
        colAndTypeStrs: list[str] = []

        if self._iCol:
            colAndTypeStrs.append(f'(iCol) {self._iCol}: {self.type(self._iCol)}')

        if self._dCol:
            colAndTypeStrs.append(f'(dCol) {self._dCol}: {self.type(self._dCol)}')

        if self._tCol:
            colAndTypeStrs.append(f'(tCol) {self._tCol}: {self.type(self._tCol)}')

        colAndTypeStrs.extend(f'{col}: {self.type(col)}'
                              for col in self.contentCols)

        # prefer the exact row count when cached; fall back to the approximation
        return (f'{self.nFiles:,}-file ' +
                (f'{self._cache.nRows:,}-row '
                 if self._cache.nRows
                 else (f'approx-{self._cache.approxNRows:,.0f}-row '
                       if self._cache.approxNRows
                       else '')) +
                type(self).__name__ +
                (f'[{self.path} + {len(self._mappers):,} transform(s)]'
                 f"[{', '.join(colAndTypeStrs)}]"))

    @property
    def __shortRepr__(self) -> str:
        """Short string repr."""
        colsDescStr: list[str] = []

        if self._iCol:
            colsDescStr.append(f'iCol: {self._iCol}')

        if self._dCol:
            colsDescStr.append(f'dCol: {self._dCol}')

        if self._tCol:
            colsDescStr.append(f'tCol: {self._tCol}')

        colsDescStr.append(f'{len(self.contentCols)} content col(s)')

        return (f'{self.nFiles:,}-file ' +
                (f'{self._cache.nRows:,}-row '
                 if self._cache.nRows
                 else (f'approx-{self._cache.approxNRows:,.0f}-row '
                       if self._cache.approxNRows
                       else '')) +
                type(self).__name__ +
                (f'[{self.path} + {len(self._mappers):,} transform(s)]'
                 f"[{', '.join(colsDescStr)}]"))

    # ================================
    # "INTERNAL / DON'T TOUCH" METHODS
    # --------------------------------
    # _extractStdKwArgs

    # pylint: disable=inconsistent-return-statements
    def _extractStdKwArgs(self, kwargs: dict[str, Any], /, *,
                          resetToClassDefaults: bool = False,
                          inplace: bool = False) -> Optional[Namespace]:
        """Pop standard keyword args from `kwargs` onto self or a new Namespace.

        When `inplace`, attributes are set on self (returns None);
        otherwise a Namespace holding the extracted values is returned.
        """
        namespace: Union[ParquetDataset, Namespace] = self if inplace else Namespace()

        for k, classDefaultV in self._DEFAULT_KWARGS.items():
            _privateK: str = f'_{k}'

            if not resetToClassDefaults:
                existingInstanceV: Any = getattr(self, _privateK, None)

            # NOTE: when resetToClassDefaults, the condition below short-circuits
            # before referencing existingInstanceV, so it is never unbound here
            v: Any = kwargs.pop(k,
                                existingInstanceV
                                if (not resetToClassDefaults) and existingInstanceV
                                else classDefaultV)

            # cannot sample more files than the dataset has
            if (k == 'reprSampleMinNFiles') and (v > self.nFiles):
                v: int = self.nFiles

            setattr(namespace,
                    _privateK   # use _k to not invoke @k.setter right away
                    if inplace
                    else k, v)

        if inplace:
            # drop index columns that do not exist in this dataset
            if self._iCol not in self.columns:
                self._iCol: Optional[str] = None

            if self._tCol not in self.columns:
                self._tCol: Optional[str] = None

        else:
            return namespace

    # =======
    # CACHING
    # -------
    # _emptyCache
    # _inheritCache
    # cacheLocally
    # fileLocalPath
    # cacheFileMetadataAndSchema

    def _emptyCache(self):
        """Reset the per-instance profiling cache to an all-empty state."""
        self._cache: Namespace = \
            Namespace(prelimReprSampleFilePaths=None,
                      reprSampleFilePaths=None,
                      reprSample=None,

                      approxNRows=None, nRows=None,

                      count={}, distinct={},

                      nonNullProportion={},
                      suffNonNullProportionThreshold={},
                      suffNonNull={},

                      sampleMin={}, sampleMax={},
                      sampleMean={}, sampleMedian={},

                      outlierRstMin={}, outlierRstMax={},
                      outlierRstMean={}, outlierRstMedian={})

    def _inheritCache(self, oldS3ParquetDF: ParquetDataset, /,
                      *sameCols: str, **newColToOldColMap: str):
        # pylint: disable=arguments-differ
        """Copy cached profiling stats over from an older dataset instance."""
        if oldS3ParquetDF._cache.nRows:
            if self._cache.nRows is None:
                self._cache.nRows = oldS3ParquetDF._cache.nRows
            else:
                assert self._cache.nRows == oldS3ParquetDF._cache.nRows

        if oldS3ParquetDF._cache.approxNRows and (self._cache.approxNRows is None):
            self._cache.approxNRows = oldS3ParquetDF._cache.approxNRows

        commonCols: set[str] = self.columns.intersection(oldS3ParquetDF.columns)

        if sameCols or newColToOldColMap:
            # validate the explicit new-to-old column mapping
            for newCol, oldCol in newColToOldColMap.items():
                assert newCol in self.columns
                assert oldCol in oldS3ParquetDF.columns

            for sameCol in commonCols.difference(newColToOldColMap).intersection(sameCols):
                newColToOldColMap[sameCol] = sameCol

        else:
            # no explicit mapping: inherit all common columns one-to-one
            newColToOldColMap: dict[str, str] = {col: col for col in commonCols}

        for cacheCategory in (
                'count', 'distinct',
                'nonNullProportion',
                'suffNonNullProportionThreshold',
                'suffNonNull',
                'sampleMin', 'sampleMax', 'sampleMean', 'sampleMedian',
                'outlierRstMin', 'outlierRstMax',
                'outlierRstMean', 'outlierRstMedian'):
            for newCol, oldCol in newColToOldColMap.items():
                if oldCol in oldS3ParquetDF._cache.__dict__[cacheCategory]:
                    self._cache.__dict__[cacheCategory][newCol] = \
                        oldS3ParquetDF._cache.__dict__[cacheCategory][oldCol]

    def cacheLocally(self, verbose: bool = True):
        """Cache files to local disk."""
        if self.onS3 and (not (_cache := self._CACHE[self.path]).cachedLocally):
            if verbose:
                self.stdOutLogger.info(msg=(msg := 'Caching Files to Local Disk...'))
                tic: float = time.time()

            parsedURL: ParseResult = urlparse(url=self.path, scheme='', allow_fragments=True)

            localPath: str = str(self._LOCAL_CACHE_DIR_PATH /
                                 parsedURL.netloc / parsedURL.path[1:])

            # mirror the whole S3 prefix into the local cache directory
            s3.sync(from_dir_path=self.path, to_dir_path=localPath,
                    delete=True, quiet=True, verbose=True)

            # point every file's cached localPath at its local mirror
            for filePath in self.filePaths:
                self._FILE_CACHES[filePath].localPath = filePath.replace(self.path, localPath)

            _cache.cachedLocally = True

            if verbose:
                toc: float = time.time()
                self.stdOutLogger.info(msg=f'{msg} done! <{toc - tic:,.1f} s>')

    def fileLocalPath(self, filePath: str) -> Path:
        """Get local cache file path, downloading from S3 on first access."""
        if self.onS3:
            if (filePath in self._FILE_CACHES) and self._FILE_CACHES[filePath].localPath:
                return self._FILE_CACHES[filePath].localPath

            parsedURL: ParseResult = urlparse(url=filePath, scheme='', allow_fragments=True)

            localPath: Path = self._LOCAL_CACHE_DIR_PATH / parsedURL.netloc / parsedURL.path[1:]

            localDirPath: Path = localPath.parent
            fs.mkdir(dir_path=localDirPath, hdfs=False)
            # make sure the dir has been created
            while not localDirPath.is_dir():
                time.sleep(1)

            self.s3Client.download_file(Bucket=parsedURL.netloc,
                                        Key=parsedURL.path[1:],
                                        Filename=str(localPath))
            # make sure AWS S3's asynchronous process has finished
            # downloading a potentially large file
            while not localPath.is_file():
                time.sleep(1)

            if filePath in self._FILE_CACHES:
                self._FILE_CACHES[filePath].localPath = localPath

            return localPath

        # NOTE(review): on the non-S3 path this returns the original `str`,
        # not a Path, despite the annotation — confirm callers tolerate both
        return filePath

    def cacheFileMetadataAndSchema(self, filePath: str) -> Namespace:
        """Cache file metadata and schema; return the file's cache Namespace."""
        fileLocalPath: Path = self.fileLocalPath(filePath=filePath)

        fileCache: Namespace = self._FILE_CACHES[filePath]

        # nRows is None iff metadata has not yet been read for this file
        if fileCache.nRows is None:
            schema: Schema = read_schema(where=fileLocalPath)

            # drop pandas' synthetic index column from the source-column set
            fileCache.srcColsExclPartitionKVs = set(schema.names) - {'__index_level_0__'}

            fileCache.srcColsInclPartitionKVs.update(fileCache.srcColsExclPartitionKVs)

            self.srcColsInclPartitionKVs.update(fileCache.srcColsExclPartitionKVs)

            for col in fileCache.srcColsExclPartitionKVs.difference(fileCache.partitionKVs):
                fileCache.srcTypesExclPartitionKVs[col] = \
                    fileCache.srcTypesInclPartitionKVs[col] = \
                    _arrowType = schema.field(col).type

                # binary-typed columns are not supported by downstream profiling
                assert not is_binary(_arrowType), \
                    TypeError(f'*** {filePath}: {col} IS OF BINARY TYPE ***')

                # enforce type consistency across the whole dataset
                if col in self.srcTypesInclPartitionKVs:
                    assert _arrowType == self.srcTypesInclPartitionKVs[col], \
                        TypeError(f'*** {filePath} COLUMN {col}: '
                                  f'DETECTED TYPE {_arrowType} != '
                                  f'{self.srcTypesInclPartitionKVs[col]} ***')
                else:
                    self.srcTypesInclPartitionKVs[col] = _arrowType

            metadata: FileMetaData = read_metadata(where=fileCache.localPath)
            fileCache.nCols = metadata.num_columns
            fileCache.nRows = metadata.num_rows

        return fileCache

    # =====================
    # ROWS, COLUMNS & TYPES
    # ---------------------
    # approxNRows / nRows / __len__
    # columns
    # indexCols
    # types
    # type / typeIsNum
    # possibleFeatureCols
    # possibleCatCols

    @property
    def approxNRows(self) -> int:
        """Approximate number of rows."""
        if self._cache.approxNRows is None:
            self.stdOutLogger.info(msg='Counting Approx. No. of Rows...')

            # extrapolate: mean rows/file over the prelim sample, times nFiles
            self._cache.approxNRows = (
                self.nFiles
                * sum(self.cacheFileMetadataAndSchema(filePath=filePath).nRows
                      for filePath in (tqdm(self.prelimReprSampleFilePaths)
                                       if len(self.prelimReprSampleFilePaths) > 1
                                       else self.prelimReprSampleFilePaths))
                / self._reprSampleMinNFiles)

        return self._cache.approxNRows

    @property
    def nRows(self) -> int:
        """Return number of rows (exact; scans every file's metadata once)."""
        if self._cache.nRows is None:
            self.stdOutLogger.info(msg='Counting No. of Rows...')

            self._cache.nRows = \
                sum(self.cacheFileMetadataAndSchema(filePath=filePath).nRows
                    for filePath in (tqdm(self.filePaths)
                                     if self.nFiles > 1
                                     else self.filePaths))

        return self._cache.nRows

    def __len__(self) -> int:
        """Return (approximate) number of rows."""
        return self._cache.nRows if self._cache.nRows else self.approxNRows

    @property
    def columns(self) -> set[str]:
        """Column names."""
        return self.srcColsInclPartitionKVs

    @property
    def indexCols(self) -> set[str]:
        """Return index columns."""
        s: set[str] = set()

        if self._iCol:
            s.add(self._iCol)

        if self._dCol:
            s.add(self._dCol)

        if self._tCol:
            s.add(self._tCol)

        return s

    @property
    def types(self) -> Namespace:
        """Return column data types."""
        return self.srcTypesInclPartitionKVs

    @lru_cache(maxsize=None, typed=False)
    def type(self, col: str) -> DataType:
        """Return data type of specified column."""
        return self.types[col]

    @lru_cache(maxsize=None, typed=False)
    def typeIsNum(self, col: str) -> bool:
        """Check whether specified column's data type is numerical."""
        return is_num(self.type(col))

    @property
    def possibleFeatureCols(self) -> set[str]:
        """Possible feature columns for ML modeling."""
        return {col for col in self.contentCols if is_possible_feature(self.type(col))}

    @property
    def possibleCatCols(self) -> set[str]:
        """Possible categorical content columns."""
        return {col for col in self.contentCols if is_possible_cat(self.type(col))}

    # ====================
    # MAP/REDUCE & related
    # --------------------
    # map
    # reduce
    # __getitem__
    # castType
    # collect

    def map(self, *mappers: callable,
            reduceMustInclCols: Optional[ColsType] = None,
            **kwargs: Any) -> ParquetDataset:
        """Apply mapper function(s) to files.

        Returns a NEW dataset with the mappers appended to the lazy pipeline;
        mappers only run when the result is reduced/collected.
        """
        if reduceMustInclCols is None:
            reduceMustInclCols: set[str] = set()

        inheritCache: bool = kwargs.pop('inheritCache', False)
        inheritNRows: bool = kwargs.pop('inheritNRows', inheritCache)

        s3ParquetDF: ParquetDataset = \
            ParquetDataset(
                path=self.path,
                awsRegion=self.awsRegion,
                accessKey=self.accessKey,
                secretKey=self.secretKey,

                _mappers=self._mappers + mappers,
                _reduceMustInclCols=(self._reduceMustInclCols |
                                     to_iterable(reduceMustInclCols, iterable_type=set)),

                iCol=self._iCol, tCol=self._tCol,

                reprSampleMinNFiles=self._reprSampleMinNFiles,
                reprSampleSize=self._reprSampleSize,

                nulls=self._nulls,
                minNonNullProportion=self._minNonNullProportion,
                outlierTailProportion=self._outlierTailProportion,
                maxNCats=self._maxNCats,
                minProportionByMaxNCats=self._minProportionByMaxNCats,

                **kwargs)

        if inheritCache:
            s3ParquetDF._inheritCache(self)

        if inheritNRows:
            s3ParquetDF._cache.approxNRows = self._cache.approxNRows
            s3ParquetDF._cache.nRows = self._cache.nRows

        return s3ParquetDF

    def reduce(self, *filePaths: str, **kwargs: Any) -> ReducedDataSetType:
        # pylint: disable=too-many-branches,too-many-locals,too-many-statements
        """Reduce from mapped content.

        Reads each file (optionally sub-sampling rows), applies the lazy
        mapper pipeline per file, and folds the per-file results with
        `reducer` (default: vstack for ndarrays, pandas concat otherwise).
        """
        _CHUNK_SIZE: int = 10 ** 5

        cols: Optional[Collection[str]] = kwargs.get('cols')
        cols: set[str] = to_iterable(cols, iterable_type=set) if cols else set()

        nSamplesPerFile: int = kwargs.get('nSamplesPerFile')

        reducer: callable = kwargs.get(
            'reducer',
            lambda results: vstack(tup=results)
            if isinstance(results[0], ndarray)
            else concat(objs=results,
                        axis='index', join='outer',
                        ignore_index=False,
                        keys=None, levels=None, names=None,
                        verify_integrity=False,
                        sort=False, copy=False))

        verbose: bool = kwargs.pop('verbose', True)

        if not filePaths:
            filePaths: set[str] = self.filePaths

        results: list[ReducedDataSetType] = []

        # pylint: disable=too-many-nested-blocks
        for filePath in (tqdm(filePaths)
                         if verbose and (len(filePaths) > 1)
                         else filePaths):
            fileLocalPath: Path = self.fileLocalPath(filePath=filePath)

            fileCache: Namespace = self.cacheFileMetadataAndSchema(filePath=filePath)

            # requested columns, forced to include mapper-required columns
            colsForFile: set[str] = (
                cols if cols else fileCache.srcColsInclPartitionKVs
            ) | self._reduceMustInclCols

            # columns physically stored in the file vs. encoded in the path
            srcCols: set[str] = colsForFile & fileCache.srcColsExclPartitionKVs

            partitionKeyCols: set[str] = colsForFile.intersection(fileCache.partitionKVs)

            if srcCols:
                pandasDFConstructed: bool = False

                if toSubSample := nSamplesPerFile and (nSamplesPerFile < fileCache.nRows):
                    # two-stage sampling: pick a subset of record-batch chunks,
                    # then sample rows within each picked chunk
                    intermediateN: float = (nSamplesPerFile * fileCache.nRows) ** .5

                    if ((nChunksForIntermediateN :=
                            int(math.ceil(intermediateN / _CHUNK_SIZE)))
                            < (approxNChunks :=
                               int(math.ceil(fileCache.nRows / _CHUNK_SIZE)))):
                        # arrow.apache.org/docs/python/generated/pyarrow.parquet.read_table
                        fileArrowTable: Table = read_table(
                            source=fileLocalPath,
                            columns=list(srcCols),
                            use_threads=True,
                            metadata=None,
                            use_pandas_metadata=True,
                            memory_map=False,
                            read_dictionary=None,
                            filesystem=None,
                            filters=None,
                            buffer_size=0,
                            partitioning='hive',
                            use_legacy_dataset=False,
                            ignore_prefixes=None,
                            pre_buffer=True,
                            coerce_int96_timestamp_unit=None)

                        chunkRecordBatches: list[RecordBatch] = \
                            fileArrowTable.to_batches(max_chunksize=_CHUNK_SIZE)

                        nChunks: int = len(chunkRecordBatches)

                        # TODO: CHECK
                        # assert nChunks in (approxNChunks - 1, approxNChunks), \
                        #     ValueError(f'*** {filePath}: {nChunks} vs. '
                        #                f'{approxNChunks} Record Batches ***')

                        assert nChunksForIntermediateN <= nChunks, \
                            ValueError(f'*** {filePath}: {nChunksForIntermediateN} vs. '
                                       f'{nChunks} Record Batches ***')

                        chunkPandasDFs: list[DataFrame] = []

                        nSamplesPerChunk: int = int(math.ceil(nSamplesPerFile /
                                                              nChunksForIntermediateN))

                        for chunkRecordBatch in randomSample(
                                population=chunkRecordBatches,
                                sampleSize=nChunksForIntermediateN,
                                returnCollectionType=tuple):
                            # arrow.apache.org/docs/python/generated/pyarrow.RecordBatch.html
                            # #pyarrow.RecordBatch.to_pandas
                            chunkPandasDF: DataFrame = \
                                chunkRecordBatch.to_pandas(
                                    memory_pool=None,
                                    categories=None,
                                    strings_to_categorical=False,
                                    zero_copy_only=False,

                                    integer_object_nulls=False,
                                    # TODO: check
                                    # (bool, default False) –
                                    # Cast integers with nulls to objects

                                    date_as_object=True,
                                    # TODO: check
                                    # (bool, default True) –
                                    # Cast dates to objects.
                                    # If False, convert to datetime64[ns] dtype.

                                    timestamp_as_object=False,

                                    use_threads=True,

                                    deduplicate_objects=True,
                                    # TODO: check
                                    # (bool, default False) –
                                    # Do not create multiple copies Python objects when created,
                                    # to save on memory use. Conversion will be slower.

                                    ignore_metadata=False,
                                    safe=True,

                                    split_blocks=True,
                                    # TODO: check
                                    # (bool, default False) –
                                    # If True, generate one internal “block”
                                    # for each column when creating a pandas.DataFrame
                                    # from a RecordBatch or Table.
                                    # While this can temporarily reduce memory
                                    # note that various pandas operations can
                                    # trigger “consolidation” which may balloon memory use.

                                    self_destruct=True,
                                    # TODO: check
                                    # EXPERIMENTAL: If True, attempt to deallocate
                                    # the originating Arrow memory while
                                    # converting the Arrow object to pandas.
                                    # If you use the object after calling to_pandas
                                    # with this option it will crash your program.
                                    # Note that you may not see always memory usage improvements.
                                    # For example, if multiple columns share
                                    # an underlying allocation, memory can’t be freed
                                    # until all columns are converted.

                                    types_mapper=None)

                            # materialize hive-partition key values as columns
                            for k in partitionKeyCols:
                                chunkPandasDF[k] = fileCache.partitionKVs[k]

                            if nSamplesPerChunk < len(chunkPandasDF):
                                chunkPandasDF: DataFrame = \
                                    chunkPandasDF.sample(n=nSamplesPerChunk,
                                                         # frac=None,
                                                         replace=False,
                                                         weights=None,
                                                         random_state=None,
                                                         axis='index',
                                                         ignore_index=False)

                            chunkPandasDFs.append(chunkPandasDF)

                        filePandasDF: DataFrame = concat(objs=chunkPandasDFs,
                                                         axis='index', join='outer',
                                                         ignore_index=False,
                                                         keys=None, levels=None, names=None,
                                                         verify_integrity=False,
                                                         sort=False, copy=False)

                        pandasDFConstructed: bool = True

                if not pandasDFConstructed:
                    # fall back to a whole-file read
                    # pandas.pydata.org/docs/reference/api/pandas.read_parquet
                    filePandasDF: DataFrame = read_parquet(
                        path=fileLocalPath,
                        engine='pyarrow',
                        columns=list(srcCols),
                        storage_options=None,
                        use_nullable_dtypes=True,

                        # arrow.apache.org/docs/python/generated/pyarrow.parquet.read_table:
                        use_threads=True,
                        metadata=None,
                        use_pandas_metadata=True,
                        memory_map=False,
                        read_dictionary=None,
                        filesystem=None,
                        filters=None,
                        buffer_size=0,
                        partitioning='hive',
                        use_legacy_dataset=False,
                        ignore_prefixes=None,
                        pre_buffer=True,
                        coerce_int96_timestamp_unit=None,

                        # arrow.apache.org/docs/python/generated/pyarrow.Table.html
                        # #pyarrow.Table.to_pandas:
                        # memory_pool=None,   # (default)
                        # categories=None,   # (default)
                        # strings_to_categorical=False,   # (default)
                        # zero_copy_only=False,   # (default)

                        # integer_object_nulls=False,   # (default)
                        # TODO: check
                        # (bool, default False) –
                        # Cast integers with nulls to objects

                        # date_as_object=True,   # (default)
                        # TODO: check
                        # (bool, default True) –
                        # Cast dates to objects.
                        # If False, convert to datetime64[ns] dtype.

                        # timestamp_as_object=False,   # (default)

                        # use_threads=True,   # (default)

                        # deduplicate_objects=True,   # (default: *** False ***)
                        # TODO: check
                        # (bool, default False) –
                        # Do not create multiple copies Python objects when created,
                        # to save on memory use. Conversion will be slower.

                        # ignore_metadata=False,   # (default)
                        # safe=True,   # (default)

                        # split_blocks=True,   # (default: *** False ***)
                        # TODO: check
                        # (bool, default False) –
                        # If True, generate one internal “block” for each column
                        # when creating a pandas.DataFrame from a RecordBatch or Table.
                        # While this can temporarily reduce memory note that
                        # various pandas operations can trigger “consolidation”
                        # which may balloon memory use.

                        # self_destruct=True,   # (default: *** False ***)
                        # TODO: check
                        # EXPERIMENTAL: If True, attempt to deallocate the originating
                        # Arrow memory while converting the Arrow object to pandas.
                        # If you use the object after calling to_pandas with this option
                        # it will crash your program.
                        # Note that you may not see always memory usage improvements.
                        # For example, if multiple columns share an underlying allocation,
                        # memory can’t be freed until all columns are converted.

                        # types_mapper=None,   # (default)
                    )

                    for k in partitionKeyCols:
                        filePandasDF[k] = fileCache.partitionKVs[k]

                    if toSubSample:
                        filePandasDF: DataFrame = \
                            filePandasDF.sample(n=nSamplesPerFile,
                                                # frac=None,
                                                replace=False,
                                                weights=None,
                                                random_state=None,
                                                axis='index',
                                                ignore_index=False)

            else:
                # only partition-key columns requested: synthesize a frame of
                # the right length without touching the file's column data
                filePandasDF: DataFrame = DataFrame(
                    index=range(nSamplesPerFile
                                if nSamplesPerFile and (nSamplesPerFile < fileCache.nRows)
                                else fileCache.nRows))

                for k in partitionKeyCols:
                    filePandasDF[k] = fileCache.partitionKVs[k]

            # apply the lazy mapper pipeline to this file's frame
            result: ReducedDataSetType = filePandasDF
            for mapper in self._mappers:
                result: ReducedDataSetType = mapper(result)

            results.append(result)

        return reducer(results)

    @staticmethod
    def _getCols(pandasDF: DataFrame, cols: Union[str, tuple[str]]) -> DataFrame:
        """Select specified column(s), filling any missing ones with None."""
        for missingCol in to_iterable(cols, iterable_type=set).difference(pandasDF.columns):
            pandasDF.loc[:, missingCol] = None

        return pandasDF[cols if isinstance(cols, str) else list(cols)]

    @lru_cache(maxsize=None, typed=False)
    def __getitem__(self, cols: Union[str, tuple[str]], /) -> ParquetDataset:
        """Get column(s)."""
        return self.map(partial(self._getCols,
                        cols=cols),
                        reduceMustInclCols=cols,
                        inheritNRows=True)

    @lru_cache(maxsize=None, typed=False)
    def castType(self, **colsToTypes: dict[str, Any]) -> ParquetDataset:
        """Cast data type(s) of column(s)."""
        return self.map(lambda df: df.astype(colsToTypes, copy=False, errors='raise'),
                        reduceMustInclCols=set(colsToTypes),
                        inheritNRows=True)

    def collect(self, *cols: str, **kwargs: Any) -> ReducedDataSetType:
        """Collect content."""
        return self.reduce(cols=cols if cols else None, **kwargs)

    # =========
    # FILTERING
    # ---------
    # _subset
    # filterByPartitionKeys
    # filter

    @lru_cache(maxsize=None, typed=False)   # computationally expensive, so cached
    def _subset(self, *filePaths: str, **kwargs: Any) -> ParquetDataset:
        # pylint: disable=too-many-locals
        """Build a dataset over a subset of file paths (copied to a temp prefix)."""
        if filePaths:
            assert self.filePaths.issuperset(filePaths)

            nFilePaths: int = len(filePaths)

            if nFilePaths == self.nFiles:
                return self

            if nFilePaths > 1:
                verbose: bool = kwargs.pop('verbose', True)

                _pathPlusSepLen: int = len(self.path) + 1

                fileSubPaths: list[str] = [filePath[_pathPlusSepLen:]
                                           for filePath in filePaths]

                _uuid = uuid4()

                subsetPath: str = (
                    f"s3://{self.s3Bucket}/{(subsetDirS3Key := f'{self._TMP_DIR_S3_KEY}/{_uuid}')}"
                    if self.onS3
                    else f'{self._LOCAL_CACHE_DIR_PATH}/{_uuid}')

                if verbose:
                    self.stdOutLogger.info(
                        msg=(msg := f'Subsetting {len(filePaths):,} Files to "{subsetPath}"...'))
                    tic: float = time.time()

                # copy each selected file into the fresh subset prefix
                for fileSubPath in (tqdm(fileSubPaths) if verbose else fileSubPaths):
                    if self.onS3:
                        self.s3Client.copy(CopySource=dict(Bucket=self.s3Bucket,
                                                           Key=f'{self.pathS3Key}/{fileSubPath}'),
                                           Bucket=self.s3Bucket,
                                           Key=f'{subsetDirS3Key}/{fileSubPath}')
                    else:
                        fs.cp(from_path=f'{self.path}/{fileSubPath}',
                              to_path=f'{subsetPath}/{fileSubPath}',
                              hdfs=False, is_dir=False)

                if verbose:
                    toc: float = time.time()
                    self.stdOutLogger.info(msg=f'{msg} done! <{toc-tic:.1f} s>')

            else:
                # single file: point the new dataset straight at it, no copying
                subsetPath: str = filePaths[0]

            return ParquetDataset(
                path=subsetPath,
                awsRegion=self.awsRegion,
                accessKey=self.accessKey, secretKey=self.secretKey,

                _mappers=self._mappers,
                _reduceMustInclCols=self._reduceMustInclCols,

                iCol=self._iCol, tCol=self._tCol,

                reprSampleMinNFiles=self._reprSampleMinNFiles,
                reprSampleSize=self._reprSampleSize,

                nulls=self._nulls,
                minNonNullProportion=self._minNonNullProportion,
                outlierTailProportion=self._outlierTailProportion,
                maxNCats=self._maxNCats,
                minProportionByMaxNCats=self._minProportionByMaxNCats,

                **kwargs)

        return self

    @lru_cache(maxsize=None, typed=False)
    def filterByPartitionKeys(self,
                              *filterCriteriaTuples: Union[tuple[str, str],
                                                           tuple[str, str, str]],
                              **kwargs: Any) -> ParquetDataset:
        # pylint: disable=too-many-branches
        """Filter by partition keys.

        Each criterion is (col, inValsSet) or (col, fromVal, toVal);
        values are compared as strings against the hive path segments.
        """
        filterCriteria: dict[str, tuple[Optional[str], Optional[str], Optional[set[str]]]] = {}

        _samplePath: str = next(iter(self.filePaths))

        for filterCriteriaTuple in filterCriteriaTuples:
            assert isinstance(filterCriteriaTuple, PY_LIST_OR_TUPLE)

            filterCriteriaTupleLen: int = len(filterCriteriaTuple)

            col: str = filterCriteriaTuple[0]

            # only apply criteria for keys actually present in the paths
            if f'{col}=' in _samplePath:
                if filterCriteriaTupleLen == 2:
                    fromVal: Optional[str] = None
                    toVal: Optional[str] = None
                    inSet: set[str] = {str(v) for v in to_iterable(filterCriteriaTuple[1])}

                elif filterCriteriaTupleLen == 3:
                    fromVal: Optional[str] = filterCriteriaTuple[1]
                    if fromVal is not None:
                        fromVal: str = str(fromVal)

                    toVal: Optional[str] = filterCriteriaTuple[2]
                    if toVal is not None:
                        toVal: str = str(toVal)

                    inSet: Optional[set[str]] = None

                else:
                    raise ValueError(f'*** {type(self)} FILTER CRITERIA MUST BE EITHER '
                                     '(<colName>, <fromVal>, <toVal>) OR '
                                     '(<colName>, <inValsSet>) ***')

                filterCriteria[col] = fromVal, toVal, inSet

        if filterCriteria:
            filePaths: set[str] = set()

            for filePath in self.filePaths:
                filePandasDFSatisfiesCriteria: bool = True

                for col, (fromVal, toVal, inSet) in filterCriteria.items():
                    v: str = re.search(f'{col}=(.*?)/', filePath).group(1)

                    # pylint: disable=too-many-boolean-expressions
                    if ((fromVal is not None) and (v < fromVal)) or \
                            ((toVal is not None) and (v > toVal)) or \
                            ((inSet is not None) and (v not in inSet)):
                        filePandasDFSatisfiesCriteria: bool = False
                        break

                if filePandasDFSatisfiesCriteria:
                    filePaths.add(filePath)

            assert filePaths, FileNotFoundError(f'*** {self}: NO PATHS SATISFYING '
                                                f'FILTER CRITERIA {filterCriteria} ***')

            return self._subset(*filePaths, **kwargs)

        return self

    @lru_cache(maxsize=None, typed=False)
    def filter(self, *conditions: str, **kwargs: Any) -> ParquetDataset:
        """Apply filtering mapper (pandas `DataFrame.query` per condition)."""
        s3ParquetDF: ParquetDataset = self

        for condition in conditions:
            # pylint: disable=cell-var-from-loop
            s3ParquetDF: ParquetDataset = \
                s3ParquetDF.map(lambda df: df.query(expr=condition, inplace=False),
                                **kwargs)

        return s3ParquetDF

    # ========
    # SAMPLING
    # --------
    # prelimReprSampleFilePaths
    # reprSampleFilePaths
    # sample
    # _assignReprSample

    @property
    def prelimReprSampleFilePaths(self) -> set[str]:
        """Prelim representative sample file paths."""
        if self._cache.prelimReprSampleFilePaths is None:
            self._cache.prelimReprSampleFilePaths = \
                randomSample(population=self.filePaths,
                             sampleSize=self._reprSampleMinNFiles)

        return self._cache.prelimReprSampleFilePaths

    @property
    def reprSampleFilePaths(self) -> set[str]:
        """Return representative sample file paths."""
        if self._cache.reprSampleFilePaths is None:
            # scale file count by sqrt of the sampled-row fraction
            reprSampleNFiles: int = \
                int(math.ceil(
                    ((min(self._reprSampleSize, self.approxNRows) /
                      self.approxNRows) ** .5)
                    * self.nFiles))

            self._cache.reprSampleFilePaths = (
                self._cache.prelimReprSampleFilePaths |
                (randomSample(
                    population=self.filePaths - self._cache.prelimReprSampleFilePaths,
                    sampleSize=reprSampleNFiles - self._reprSampleMinNFiles)
                 if reprSampleNFiles > self._reprSampleMinNFiles
                 else set()))

        return self._cache.reprSampleFilePaths

    def sample(self, *cols: str, **kwargs: Any) -> ReducedDataSetType:
        """Sample ~`n` rows of the specified column(s) across sampled files."""
        n: int = kwargs.pop('n', self._DEFAULT_REPR_SAMPLE_SIZE)

        filePaths: Optional[Collection[str]] = kwargs.pop('filePaths', None)

        verbose: bool = kwargs.pop('verbose', True)

        if filePaths:
            nFiles: int = len(filePaths)

        else:
            minNFiles: int = kwargs.pop('minNFiles', self._reprSampleMinNFiles)
            maxNFiles: Optional[int] = kwargs.pop('maxNFiles', None)

            nFiles: int = (max(int(math.ceil(((min(n, self.approxNRows) /
                                               self.approxNRows) ** .5)
                                             * self.nFiles)),
                               minNFiles)
                           if (self.nFiles > 1) and ((maxNFiles is None) or (maxNFiles > 1))
                           else 1)

            if maxNFiles:
                nFiles: int = min(nFiles, maxNFiles)

            if nFiles < self.nFiles:
                filePaths: set[str] = randomSample(population=self.filePaths,
                                                   sampleSize=nFiles)
            else:
                nFiles: int = self.nFiles
                filePaths: set[str] = self.filePaths

        if verbose or debug.ON:
            self.stdOutLogger.info(
                msg=f"Sampling {n:,} Rows{f' of Columns {cols}' if cols else ''} "
                    f'from {nFiles:,} Files...')

        return self.reduce(*filePaths,
                           cols=cols,
                           nSamplesPerFile=int(math.ceil(n / nFiles)),
                           verbose=verbose,
                           **kwargs)

    def _assignReprSample(self):
        """Draw and cache the representative sample; reset dependent stats."""
        self._cache.reprSample = self.sample(n=self._reprSampleSize,
                                             filePaths=self.reprSampleFilePaths,
                                             verbose=True)

        # pylint: disable=attribute-defined-outside-init
        self._reprSampleSize: int = len(self._cache.reprSample)

        self._cache.nonNullProportion = {}
        self._cache.suffNonNull = {}

    # ================
    # COLUMN PROFILING
    # ----------------
    # count
    # nonNullProportion
    # distinct
    # distinctPartitions
    # quantile
    # sampleStat
    # outlierRstStat / outlierRstMin / outlierRstMax
    # profile

    def count(self, *cols: str, **kwargs: Any) -> Union[int, Namespace]:
        """Count non-NULL values in specified column(s).

        Return:
            - If 1 column name is given,
            return its corresponding non-``NULL`` count

            - If multiple column names are given,
            return a {``col``: corresponding non-``NULL`` count} *dict*

            - If no column names are given,
            return a {``col``: corresponding non-``NULL`` count} *dict* for all columns
        """
        if not cols:
            cols: set[str] = self.contentCols

        if len(cols) > 1:
            return Namespace(**{col: self.count(col, **kwargs) for col in cols})

        col: str = cols[0]

        pandasDF: Optional[DataFrame] = kwargs.get('pandasDF')

        # sentinel numeric values treated as NULL for this column
        lowerNumericNull, upperNumericNull = self._nulls[col]

        if pandasDF is None:
            if col not in self._cache.count:
                verbose: Optional[bool] = True if debug.ON else kwargs.get('verbose')

                if verbose:
                    tic: float = time.time()

                # pick the counting lambda matching which null sentinels exist
                self._cache.count[col] = result = int(
                    self[col]
                    .map(((lambda series: (series.notnull()
                                           .sum(axis='index',
                                                skipna=True,
                                                level=None,
                                                # numeric_only=True,
                                                min_count=0)))
                          if isnull(upperNumericNull)
                          else (lambda series: ((series < upperNumericNull)
                                                .sum(axis='index',
                                                     skipna=True,
                                                     level=None,
                                                     # numeric_only=True,
                                                     min_count=0))))
                         if isnull(lowerNumericNull)
                         else ((lambda series: ((series > lowerNumericNull)
                                                .sum(axis='index',
                                                     skipna=True,
                                                     level=None,
                                                     # numeric_only=True,
                                                     min_count=0)))
                               if isnull(upperNumericNull)
                               else (lambda series: (series
                                                     .between(left=lowerNumericNull,
                                                              right=upperNumericNull,
                                                              inclusive='neither')
                                                     .sum(axis='index',
                                                          skipna=True,
                                                          level=None,
                                                          # numeric_only=True,
                                                          min_count=0)))),
                         reduceMustInclCols=col)
                    .reduce(cols=col, reducer=sum))

                if verbose:
                    toc: float = time.time()
                    self.stdOutLogger.info(msg=f'No. of Non-NULLs of Column "{col}" = '
                                               f'{result:,} <{toc - tic:,.1f} s>')

            return self._cache.count[col]

        # pandasDF given: count directly on that frame (no caching)
        series: Series = pandasDF[col]

        series: Series = ((series.notnull()
                           if isnull(upperNumericNull)
                           else (series < upperNumericNull))
                          if isnull(lowerNumericNull)
                          else ((series > lowerNumericNull)
                                if isnull(upperNumericNull)
                                else series.between(left=lowerNumericNull,
                                                    right=upperNumericNull,
                                                    inclusive='neither')))

        return series.sum(axis='index',
                          skipna=True,
                          level=None,
                          # numeric_only=True,
                          min_count=0)

    def nonNullProportion(self, *cols: str, **kwargs: Any) -> Union[float, Namespace]:
        """Calculate non-NULL data proportion(s) of specified column(s).

        Return:
            - If 1 column name is given,
            return its *approximate* non-``NULL`` proportion

            - If multiple column names are given,
            return {``col``: approximate non-``NULL`` proportion} *dict*

            - If no column names are given,
            return {``col``: approximate non-``NULL`` proportion} *dict* for all columns
        """
        if not cols:
            cols: set[str] = self.contentCols

        if len(cols) > 1:
            return Namespace(**{col: self.nonNullProportion(col, **kwargs)
                                for col in cols})

        col: str = cols[0]

        if col not in self._cache.nonNullProportion:
            # approximate: computed over the representative sample only
            self._cache.nonNullProportion[col] = (
                self.count(col, pandasDF=self.reprSample, **kwargs) /
                self.reprSampleSize)

        return self._cache.nonNullProportion[col]

    def distinct(self, *cols: str, **kwargs: Any) -> Union[Series, Namespace]:
        """Return distinct values in specified column(s).

        Return:
            *Approximate* list of distinct values of ``ADF``'s column ``col``,
            with optional descending-sorted counts for those values

        Args:
            col (str): name of a column

            count (bool): whether to count the number of appearances
            of each distinct value of the specified ``col``
        """
        if not cols:
            cols: set[str] = self.contentCols

        # NOTE(review): asDict is popped here, so it is NOT propagated into the
        # per-column recursive calls below — confirm that is intended
        asDict: bool = kwargs.pop('asDict', False)

        if len(cols) > 1:
            return Namespace(**{col: self.distinct(col, **kwargs) for col in cols})

        col: str = cols[0]

        if col not in self._cache.distinct:
            # normalized value frequencies from the representative sample
            self._cache.distinct[col] = \
                self.reprSample[col].value_counts(normalize=True,
                                                  sort=True,
                                                  ascending=False,
                                                  bins=None,
                                                  dropna=False)

        return (Namespace(**{col: self._cache.distinct[col]})
                if asDict
                else self._cache.distinct[col])

    def distinctPartitions(self, col: str, /) -> set[str]:
        """Return distinct values of a certain partition key."""
        return {re.search(f'{col}=(.*?)/', filePath).group(1)
                for filePath in self.filePaths}

    @lru_cache(maxsize=None, typed=False)   # computationally expensive, so cached
    def quantile(self, *cols: str, **kwargs: Any) -> Union[float, int, Series, Namespace]:
        """Return quantile values in specified column(s)."""
        if len(cols) > 1:
            return Namespace(**{col: self.quantile(col, **kwargs) for col in cols})

        col: str = cols[0]

        # for precision, calc from whole data set instead of from reprSample
        return self[col].reduce(cols=col).quantile(q=kwargs.get('q', .5),
                                                   interpolation='linear')

    def sampleStat(self, *cols: str, **kwargs: Any) -> Union[float, int, Namespace]:
        """Approximate measurements of a certain stat on numerical columns.

        Args:
            *cols (str): column name(s)

            **kwargs:
                - **stat**: one of the following:
                    - ``avg``/``mean`` (default)
                    - ``median``
                    - ``min``
                    - ``max``
        """
        if not cols:
            cols: set[str] = self.possibleNumCols

        if len(cols) > 1:
            return Namespace(**{col: self.sampleStat(col, **kwargs) for col in cols})

        col: str = cols[0]

        if self.typeIsNum(col):
            stat: str = kwargs.pop('stat', 'mean').lower()
            if stat == 'avg':
                stat: str = 'mean'
            capitalizedStatName: str = stat.capitalize()
            s: str = f'sample{capitalizedStatName}'

            # per-stat cache dict, created lazily
            if s not in self._cache:
                setattr(self._cache, s, {})
            _cache: dict[str, PyNumType] = getattr(self._cache, s)

            if col not in _cache:
                verbose: Optional[bool] = True if debug.ON else kwargs.get('verbose')

                if verbose:
                    tic: float = time.time()

                result = getattr(self.reprSample[col], stat)(axis='index',
                                                             skipna=True,
                                                             level=None)

                # normalize NumPy scalars to plain Python numbers
                if isinstance(result, NUMPY_FLOAT_TYPES):
                    result: float = float(result)
                elif isinstance(result, NUMPY_INT_TYPES):
                    result: int = int(result)

                assert isinstance(result, PY_NUM_TYPES + (NAType,)), \
                    TypeError(f'*** "{col}" SAMPLE '
                              f'{capitalizedStatName.upper()} = '
                              f'{result} ({type(result)}) ***')

                if verbose:
                    toc: float = time.time()
                    self.stdOutLogger.info(msg=f'Sample {capitalizedStatName} for Column "{col}" = '
                                               f'{result:,.3g} <{toc - tic:,.1f} s>')

                _cache[col] = result

            return _cache[col]

        raise ValueError(f'*** {self}.sampleStat({col}, ...): COLUMN "{col}" NOT NUMERICAL ***')

    def outlierRstStat(self, *cols: str, **kwargs: Any) -> Union[float, int, Namespace]:
        # pylint: disable=too-many-branches
        """Return outlier-resistant stat for specified column(s)."""
        if not cols:
            cols: set[str] = self.possibleNumCols

        if len(cols) > 1:
            return Namespace(**{col: self.outlierRstStat(col, **kwargs) for col in cols})

        col: str = cols[0]

        if self.typeIsNum(col):
            stat: str = kwargs.pop('stat', 'mean').lower()
            if stat == 'avg':
                stat: str = 'mean'
            capitalizedStatName: str = stat.capitalize()
            s: str = f'outlierRst{capitalizedStatName}'

            # delegate to a dedicated method if one exists (e.g. outlierRstMin/Max)
            if hasattr(self, s):
                return getattr(self, s)(col,
                                        **kwargs)

            if s not in self._cache:
                setattr(self._cache, s, {})
            _cache: dict[str, PyNumType] = getattr(self._cache, s)

            if col not in _cache:
                verbose: Optional[bool] = True if debug.ON else kwargs.get('verbose')

                if verbose:
                    tic: float = time.time()

                series: Series = self.reprSample[col]

                outlierTail: Optional[str] = kwargs.pop('outlierTail', 'both')

                # clip the sample to the outlier-resistant range first
                if outlierTail == 'both':
                    series: Series = series.loc[
                        series.between(left=self.outlierRstMin(col),
                                       right=self.outlierRstMax(col),
                                       inclusive='both')]

                elif outlierTail == 'lower':
                    series: Series = series.loc[series >= self.outlierRstMin(col)]

                elif outlierTail == 'upper':
                    series: Series = series.loc[series <= self.outlierRstMax(col)]

                result = getattr(series, stat)(axis='index', skipna=True, level=None)

                if isnull(result):
                    self.stdOutLogger.warning(
                        msg=f'*** "{col}" OUTLIER-RESISTANT '
                            f'{capitalizedStatName.upper()} = '
                            f'{result} ***')

                    # fall back to the outlier-resistant minimum on a null stat
                    result: PyNumType = self.outlierRstMin(col)

                # normalize NumPy scalars to plain Python numbers
                if isinstance(result, NUMPY_FLOAT_TYPES):
                    result: float = float(result)
                elif isinstance(result, NUMPY_INT_TYPES):
                    result: int = int(result)

                assert isinstance(result, PY_NUM_TYPES + (NAType,)), \
                    TypeError(f'*** "{col}" '
                              f'OUTLIER-RESISTANT {capitalizedStatName.upper()}'
                              f' = {result} ({type(result)}) ***')

                if verbose:
                    toc: float = time.time()
                    self.stdOutLogger.info(msg=f'Outlier-Resistant {capitalizedStatName}'
                                               f' for Column "{col}" = '
                                               f'{result:,.3g} <{toc - tic:,.1f} s>')

                _cache[col] = result

            return _cache[col]

        raise ValueError(f'*** {self}.outlierRstStat({col}, ...): '
                         f'COLUMN "{col}" NOT NUMERICAL ***')

    def outlierRstMin(self, *cols: str, **kwargs: Any) -> Union[float, int, Namespace]:
        """Return outlier-resistant minimum for specified column(s)."""
        if not cols:
            cols: set[str] = self.possibleNumCols

        if len(cols) > 1:
            return Namespace(**{col: self.outlierRstMin(col, **kwargs) for col in cols})

        col: str = cols[0]

        if self.typeIsNum(col):
            if 'outlierRstMin' not in self._cache:
                self._cache.outlierRstMin = {}

            if col not in self._cache.outlierRstMin:
                verbose: Optional[bool] = True if debug.ON else kwargs.get('verbose')

                if verbose:
                    tic: float = time.time()

                series: Series = self.reprSample[col]

                # lower-tail quantile of the representative sample
                outlierRstMin: PyNumType = \
                    series.quantile(q=self._outlierTailProportion[col],
                                    interpolation='linear')

                sampleMin: PyNumType = self.sampleStat(col, stat='min')
                sampleMedian: PyNumType = self.sampleStat(col, stat='median')

                # if the quantile collapses onto the sample minimum (value spike),
                # step up to the smallest value strictly above the minimum
                result = (series.loc[series > sampleMin].min(axis='index',
                                                             skipna=True,
                                                             level=None)
                          if (outlierRstMin == sampleMin) and (outlierRstMin < sampleMedian)
                          else outlierRstMin)

                if isinstance(result, NUMPY_FLOAT_TYPES):
                    result: float = float(result)
                elif isinstance(result, NUMPY_INT_TYPES):
                    result: int = int(result)

                assert isinstance(result, PY_NUM_TYPES + (NAType,)), \
                    TypeError(f'*** "{col}" OUTLIER-RESISTANT MIN = '
                              f'{result} ({type(result)}) ***')

                if verbose:
                    toc: float = time.time()
                    self.stdOutLogger.info(msg=f'Outlier-Resistant Min of Column "{col}" = '
                                               f'{result:,.3g} <{toc - tic:,.1f} s>')

                self._cache.outlierRstMin[col] = result

            return self._cache.outlierRstMin[col]

        raise ValueError(f'*** {self}.outlierRstMin({col}, ...): '
                         f'COLUMN "{col}" NOT NUMERICAL ***')

    def outlierRstMax(self, *cols: str, **kwargs: Any) -> Union[float, int, Namespace]:
        """Return outlier-resistant maximum for specified column(s)."""
        if not cols:
            cols: set[str] = self.possibleNumCols

        if len(cols) > 1:
            return Namespace(**{col: self.outlierRstMax(col, **kwargs) for col in cols})

        col: str = cols[0]

        if self.typeIsNum(col):
            if 'outlierRstMax' not in self._cache:
                self._cache.outlierRstMax = {}

            if col not in self._cache.outlierRstMax:
                verbose: Optional[bool] = True if debug.ON else kwargs.get('verbose')

                if verbose:
                    tic: float = time.time()

                series: Series = self.reprSample[col]

                # upper-tail quantile of the representative sample
                outlierRstMax: PyNumType = \
                    series.quantile(q=1 - self._outlierTailProportion[col],
                                    interpolation='linear')

                sampleMax: PyNumType = self.sampleStat(col, stat='max')
                sampleMedian: PyNumType = self.sampleStat(col, stat='median')

                # if the quantile collapses onto the sample maximum (value spike),
                # step down to the largest value strictly below the maximum
                result = (series.loc[series < sampleMax].max(axis='index',
                                                             skipna=True,
                                                             level=None)
                          if (outlierRstMax == sampleMax) and (outlierRstMax > sampleMedian)
                          else outlierRstMax)

                if isinstance(result, NUMPY_FLOAT_TYPES):
                    result: float = float(result)
                elif isinstance(result, NUMPY_INT_TYPES):
                    result: int = int(result)

                assert isinstance(result, PY_NUM_TYPES + (NAType,)), \
                    TypeError(f'*** "{col}" OUTLIER-RESISTANT MAX = {result} '
                              f'({type(result)}) ***')

                if verbose:
                    toc: float = time.time()
                    self.stdOutLogger.info(msg=f'Outlier-Resistant Max of Column "{col}" = '
                                               f'{result:,.3g} <{toc - tic:,.1f} s>')

                self._cache.outlierRstMax[col] = result

            return self._cache.outlierRstMax[col]

        raise ValueError(f'*** {self}.outlierRstMax({col}, ...): '
                         f'COLUMN "{col}" NOT NUMERICAL ***')

    def profile(self, *cols: str, **kwargs: Any) -> Namespace:
        # pylint: disable=too-many-branches,too-many-locals,too-many-statements
        """Profile specified column(s).

        Return:
            *dict* of profile of salient statistics on
            specified columns of ``ADF``

        Args:
            *cols (str): names of column(s) to profile

            **kwargs:
                - **profileCat** *(bool, default = True)*:
                whether to profile possible categorical columns

                - **profileNum** *(bool, default = True)*:
                whether to profile numerical columns

                - **skipIfInsuffNonNull** *(bool, default = False)*:
                whether to skip profiling if column does not have enough non-NULLs
        """
        if not cols:
            cols: set[str] = self.contentCols

        asDict: bool = kwargs.pop('asDict', False)

        if len(cols) > 1:
            return Namespace(**{col: self.profile(col, **kwargs) for col in cols})

        col: str = cols[0]

        verbose: Optional[Union[bool, int]] = True if debug.ON else kwargs.get('verbose')

        if verbose:
            self.stdOutLogger.info(msg=(msg := f'Profiling Column "{col}"...'))
            tic: float = time.time()

        colType: DataType = self.type(col)
        profile: Namespace = Namespace(type=colType)

        # non-NULL Proportions
        profile.nonNullProportion = self.nonNullProportion(col, verbose=verbose > 1)

        if self.suffNonNull(col) or (not kwargs.get('skipIfInsuffNonNull', False)):
            # profile
categorical column if kwargs.get('profileCat', True) and is_possible_cat(colType): profile.distinctProportions = self.distinct(col, verbose=verbose > 1) # profile numerical column if kwargs.get('profileNum', True) and self.typeIsNum(col): outlierTailProportion: float = self._outlierTailProportion[col] quantilesOfInterest: Series = Series(index=(0, outlierTailProportion, .5, 1 - outlierTailProportion, 1)) quantileProbsToQuery: list[float] = [] sampleMin: Optional[PyNumType] = self._cache.sampleMin.get(col) if calcAndCacheSampleMin := (sampleMin is None): quantileProbsToQuery.append(0.) else: quantilesOfInterest[0] = sampleMin outlierRstMin: Optional[PyNumType] = self._cache.outlierRstMin.get(col) if calcAndCacheOutlierRstMin := (outlierRstMin is None): quantileProbsToQuery.append(outlierTailProportion) else: quantilesOfInterest[outlierTailProportion] = outlierRstMin sampleMedian: Optional[PyNumType] = self._cache.sampleMedian.get(col) if calcAndCacheSampleMedian := (sampleMedian is None): quantileProbsToQuery.append(.5) else: quantilesOfInterest[.5] = sampleMedian outlierRstMax: Optional[PyNumType] = self._cache.outlierRstMax.get(col) if calcAndCacheOutlierRstMax := (outlierRstMax is None): quantileProbsToQuery.append(1 - outlierTailProportion) else: quantilesOfInterest[1 - outlierTailProportion] = outlierRstMax sampleMax: Optional[PyNumType] = self._cache.sampleMax.get(col) if calcAndCacheSampleMax := (sampleMax is None): quantileProbsToQuery.append(1.) 
else: quantilesOfInterest[1] = sampleMax series: Series = self.reprSample[col] if quantileProbsToQuery: quantilesOfInterest.mask( cond=quantilesOfInterest.isnull(), other=series.quantile(q=quantileProbsToQuery, interpolation='linear'), inplace=True, axis=None, level=None) (sampleMin, outlierRstMin, sampleMedian, outlierRstMax, sampleMax) = quantilesOfInterest if calcAndCacheSampleMin: self._cache.sampleMin[col] = sampleMin if calcAndCacheOutlierRstMin: if (outlierRstMin == sampleMin) and (outlierRstMin < sampleMedian): outlierRstMin: PyNumType = ( series.loc[series > sampleMin] .min(axis='index', skipna=True, level=None)) self._cache.outlierRstMin[col] = outlierRstMin if calcAndCacheSampleMedian: self._cache.sampleMedian[col] = sampleMedian if calcAndCacheOutlierRstMax: if (outlierRstMax == sampleMax) and (outlierRstMax > sampleMedian): outlierRstMax: PyNumType = ( series.loc[series < sampleMax] .max(axis='index', skipna=True, level=None)) self._cache.outlierRstMax[col] = outlierRstMax if calcAndCacheSampleMax: self._cache.sampleMax[col] = sampleMax profile.sampleRange = sampleMin, sampleMax profile.outlierRstRange = outlierRstMin, outlierRstMax profile.sampleMean = self.sampleStat(col, stat='mean', verbose=verbose) profile.outlierRstMean = \ self._cache.outlierRstMean.get( col, self.outlierRstStat(col, stat='mean', verbose=verbose)) profile.outlierRstMedian = \ self._cache.outlierRstMedian.get( col, self.outlierRstStat(col, stat='median', verbose=verbose)) if verbose: toc: float = time.time() self.stdOutLogger.info(msg=f'{msg} done! <{toc - tic:,.1f} s>') return Namespace(**{col: profile}) if asDict else profile # ==================== # PREPROCESSING FOR ML # -------------------- # preprocForML def preprocForML(self, *cols: str, **kwargs: Any) -> ParquetDataset: # pylint: disable=too-many-branches,too-many-locals,too-many-statements """Preprocess column(s) for ML training/inferencing. Return: Preprocessed (incl. 
numerical-``NULL``-filled) ``ParquetDataset`` Args: *cols: column(s) to preprocess **kwargs: - **forceCat** / **forceCatIncl** / **forceCatExcl** *(str or list/set/tuple of str, default = None)*: column(s) to force/include/exclude as categorical variable(s) - **catIdxScaled**: whether to scale categorical indices *(bool, default = True)* - **forceNum** / **forceNumIncl** / **forceNumExcl** *(str or list/set/tuple of str, default = None)*: column(s) to force/include/exclude as numerical variable(s) - **numNulls**: *(dict of column names mapping to (numerical, numerical) tuples, default = None)*: pairs of lower & upper numerical nulls of certain columns - **numOutlierTail**: *(str or dict of column names mapping str, default = 'both')*: string indicating outlier tails of certain columns. One of: - 'both' - 'lower' - 'upper' - None - **numNullFill**: - *dict* ( ``method`` = ... *(default: 'mean')*, ``value`` = ... *(default: None)*, ``outlierTail`` = ... *(default: False)* ) - *OR* ``None`` to not apply any ``NULL``/``NaN``-filling - **numScaler** *(str)*: one of the following methods to use on numerical columns (*ignored* if loading existing ``prep`` pipeline from ``loadPath``): - ``standard`` (default) - ``maxabs`` - ``minmax`` - ``None`` *(do not apply any scaling)* - **loadPath** *(str)*: path to load existing data transformations - **savePath** *(str)*: path to save new fitted data transformations - **method** *(str)*: one of the following methods to fill ``NULL`` values in **numerical** columns, or *dict* of such method specifications by column name - ``avg``/``mean`` (default) - ``min`` - ``max`` - ``None`` (do nothing) - **value**: single value, or *dict* of values by column name, to use if ``method`` is ``None`` or not applicable - **outlierTail** *(str or dict of str, default = 'both')*: specification of in which distribution tail (``None``, ``lower``, ``upper`` and ``both`` (default)) of each numerical column out-lying values may exist """ returnNumPy: bool 
= kwargs.pop('returnNumPy', False) returnPreproc: bool = kwargs.pop('returnPreproc', False) verbose: Union[bool, int] = kwargs.pop('verbose', True) if debug.ON: verbose: bool = True if loadPath := kwargs.pop('loadPath', None): # pylint: disable=too-many-nested-blocks if verbose: self.stdOutLogger.info(msg=(msg := ('Loading & Applying Data Transformations ' f'from "{loadPath}"...'))) tic: float = time.time() pandasMLPreproc: PandasMLPreprocessor = PandasMLPreprocessor.from_yaml(path=loadPath) else: cols: set[str] = {col for col in ((set(cols) & self.possibleFeatureCols) if cols else self.possibleFeatureCols) if self.suffNonNull(col)} assert cols, ValueError(f'*** {self}: NO COLS WITH SUFFICIENT NON-NULLS ***') profile: Namespace = self.profile(*cols, profileCat=True, profileNum=False, skipIfInsuffNonNull=True, asDict=True, verbose=verbose) cols: set[str] = {col for col in cols if ((distinctProportionsIndex := profile[col].distinctProportions.index).notnull() & (distinctProportionsIndex != '')).sum() > 1} assert cols, ValueError(f'*** {self}: NO COLS WITH SUFFICIENT DISTINCT VALUES ***') forceCat: set[str] = (((to_iterable(forceCat, iterable_type=set) if (forceCat := kwargs.pop('forceCat', None)) else set()) | (to_iterable(forceCatIncl, iterable_type=set) if (forceCatIncl := kwargs.pop('forceCatIncl', None)) else set())) - (to_iterable(forceCatExcl, iterable_type=set) if (forceCatExcl := kwargs.pop('forceCatExcl', None)) else set())) forceNum: set[str] = (((to_iterable(forceNum, iterable_type=set) if (forceNum := kwargs.pop('forceNum', None)) else set()) | (to_iterable(forceNumIncl, iterable_type=set) if (forceNumIncl := kwargs.pop('forceNumIncl', None)) else set())) - (to_iterable(forceNumExcl, iterable_type=set) if (forceNumExcl := kwargs.pop('forceNumExcl', None)) else set())) catCols: set[str] = {col for col in ((cols & self.possibleCatCols) - forceNum) if (col in forceCat) or (profile[col].distinctProportions .iloc[:self._maxNCats[col]].sum() >= 
self._minProportionByMaxNCats[col])} numCols: set[str] = {col for col in (cols - catCols) if self.typeIsNum(col)} cols: set[str] = catCols | numCols if verbose: self.stdOutLogger.info(msg=(msg := ('Preprocessing Columns ' + ', '.join(f'"{col}"' for col in cols) + '...'))) tic: float = time.time() origToPreprocColMap: Namespace = Namespace() if catCols: if verbose: self.stdOutLogger.info( msg=(cat_prep_msg := ('Transforming Categorical Columns ' + ', '.join(f'"{catCol}"' for catCol in catCols) + '...'))) cat_prep_tic: float = time.time() origToPreprocColMap[PandasMLPreprocessor._CAT_INDEX_SCALED_FIELD_NAME] = \ (catIdxScaled := kwargs.pop('catIdxScaled', True)) catIdxCols: set[str] = set() if catIdxScaled: catScaledIdxCols: set[str] = set() for catCol in catCols: catIdxCol: str = self._CAT_IDX_PREFIX + catCol catColType: DataType = self.type(catCol) if is_boolean(catColType): sortedCats: tuple[bool] = False, True nCats: int = 2 else: isStr: bool = is_string(catColType) sortedCats: tuple[PyPossibleFeatureType] = tuple( cat for cat in (profile[catCol].distinctProportions.index if catCol in forceCat else (profile[catCol].distinctProportions .index[:self._maxNCats[catCol]])) if notnull(cat) and ((cat != '') if isStr else isfinite(cat))) nCats: int = len(sortedCats) if catIdxScaled: catPrepCol: str = self._MIN_MAX_SCL_PREFIX + catIdxCol catScaledIdxCols.add(catPrepCol) else: catPrepCol: str = catIdxCol catIdxCols.add(catPrepCol) origToPreprocColMap[catCol] = {'logical-type': 'cat', 'physical-type': str(catColType), 'n-cats': nCats, 'sorted-cats': sortedCats, 'transform-to': catPrepCol} if verbose: cat_prep_toc: float = time.time() self.stdOutLogger.info( msg=f'{cat_prep_msg} done! 
<{cat_prep_toc - cat_prep_tic:,.1f} s>') if numCols: origToPreprocColMap[PandasMLPreprocessor._NUM_SCALER_FIELD_NAME] = \ (numScaler := kwargs.pop('numScaler', 'standard')) numNulls: dict[str, tuple[Optional[PyNumType], Optional[PyNumType]]] = \ kwargs.pop('numNulls', {}) numOutlierTail: Optional[Union[str, dict[str, Optional[str]]]] = \ kwargs.pop('numOutlierTail', 'both') if not isinstance(numOutlierTail, DICT_OR_NAMESPACE_TYPES): numOutlierTail: dict[str, Optional[str]] = \ {col: numOutlierTail for col in numCols} numNullFillMethod: Union[str, dict[str, str]] = \ kwargs.pop('numNullFillMethod', 'mean') if not isinstance(numNullFillMethod, DICT_OR_NAMESPACE_TYPES): numNullFillMethod: dict[str, str] = \ {col: numNullFillMethod for col in numCols} numNullFillValue: dict[str, Optional[PyNumType]] = \ kwargs.pop('numNullFillValue', {}) numScaledCols: set[str] = set() if verbose: self.stdOutLogger.info( msg=(num_prep_msg := ( 'Transforming (incl. NULL-Filling) Numerical Columns ' + ', '.join(f'"{numCol}"' for numCol in numCols) + '...'))) num_prep_tic: float = time.time() for numCol in numCols: if numCol in numNulls: numColNulls: tuple[Optional[PyNumType], Optional[PyNumType]] = \ numNulls[numCol] assert (isinstance(numColNulls, PY_LIST_OR_TUPLE) and (len(numColNulls) == 2) and ((numColNulls[0] is None) or isinstance(numColNulls[0], PY_NUM_TYPES)) and ((numColNulls[1] is None) or isinstance(numColNulls[1], PY_NUM_TYPES))) else: numColNulls: tuple[Optional[PyNumType], Optional[PyNumType]] = None, None numColOutlierTail: Optional[str] = numOutlierTail.get(numCol, 'both') numColMin: PyNumType = (self.outlierRstMin(numCol) if numColOutlierTail in ('lower', 'both') else self.sampleStat(numCol, stat='min')) numColMax: PyNumType = (self.outlierRstMax(numCol) if numColOutlierTail in ('upper', 'both') else self.sampleStat(numCol, stat='max')) if numColMin < numColMax: numColType: DataType = self.type(numCol) if numColNullFillMethod := numNullFillMethod.get(numCol, 'mean'): 
numColNullFillValue: PyNumType = \ self.outlierRstStat(numCol, stat=numColNullFillMethod, outlierTail=numColOutlierTail, verbose=verbose > 1) else: numColNullFillValue: Optional[PyNumType] = numNullFillValue.get(numCol) if not isinstance(numColNullFillValue, PY_NUM_TYPES): numColNullFillValue: Optional[PyNumType] = None if numScaler: if numScaler == 'standard': scaledCol: str = self._STD_SCL_PREFIX + numCol series: Series = self.reprSample[numCol] if numColOutlierTail == 'both': series: Series = series.loc[series.between(left=numColMin, right=numColMax, inclusive='both')] elif numColOutlierTail == 'lower': series: Series = series.loc[series > numColMin] elif numColOutlierTail == 'upper': series: Series = series.loc[series < numColMax] stdDev: float = float(series.std(axis='index', skipna=True, level=None, ddof=1)) origToPreprocColMap[numCol] = { 'logical-type': 'num', 'physical-type': str(numColType), 'nulls': numColNulls, 'null-fill-method': numColNullFillMethod, 'null-fill-value': numColNullFillValue, 'mean': numColNullFillValue, 'std': stdDev, 'transform-to': scaledCol} elif numScaler == 'maxabs': scaledCol: str = self._MAX_ABS_SCL_PREFIX + numCol maxAbs: PyNumType = max(abs(numColMin), abs(numColMax)) origToPreprocColMap[numCol] = { 'logical-type': 'num', 'physical-type': str(numColType), 'nulls': numColNulls, 'null-fill-method': numColNullFillMethod, 'null-fill-value': numColNullFillValue, 'max-abs': maxAbs, 'transform-to': scaledCol} elif numScaler == 'minmax': scaledCol: str = self._MIN_MAX_SCL_PREFIX + numCol origToPreprocColMap[numCol] = { 'logical-type': 'num', 'physical-type': str(numColType), 'nulls': numColNulls, 'null-fill-method': numColNullFillMethod, 'null-fill-value': numColNullFillValue, 'orig-min': numColMin, 'orig-max': numColMax, 'target-min': -1, 'target-max': 1, 'transform-to': scaledCol} else: raise ValueError('*** Scaler must be one of ' '"standard", "maxabs", "minmax" ' 'and None ***') else: scaledCol: str = self._NULL_FILL_PREFIX + 
numCol origToPreprocColMap[numCol] = { 'logical-type': 'num', 'physical-type': str(numColType), 'nulls': numColNulls, 'null-fill-method': numColNullFillMethod, 'null-fill-value': numColNullFillValue, 'transform-to': scaledCol} numScaledCols.add(scaledCol) if verbose: num_prep_toc: float = time.time() self.stdOutLogger.info( msg=f'{num_prep_msg} done! <{num_prep_toc - num_prep_tic:,.1f} s>') pandasMLPreproc: PandasMLPreprocessor = \ PandasMLPreprocessor(origToPreprocColMap=origToPreprocColMap) if savePath := kwargs.pop('savePath', None): if verbose: self.stdOutLogger.info( msg=(prep_save_msg := ('Saving Data Transformations ' f'to Local Path "{savePath}"...'))) prep_save_tic: float = time.time() pandasMLPreproc.to_yaml(path=savePath) if verbose: prep_save_toc: float = time.time() self.stdOutLogger.info( msg=f'{prep_save_msg} done! <{prep_save_toc - prep_save_tic:,.1f} s>') if returnNumPy: s3ParquetDF: ParquetDataset = \ self.map(partial(pandasMLPreproc.__call__, returnNumPy=True), inheritNRows=True, **kwargs) else: colsToKeep: set[str] = self.indexCols | ( set(chain.from_iterable( (catCol, catPrepColDetails['transform-to']) for catCol, catPrepColDetails in origToPreprocColMap.items() if (catCol not in (PandasMLPreprocessor._CAT_INDEX_SCALED_FIELD_NAME, PandasMLPreprocessor._NUM_SCALER_FIELD_NAME)) and (catPrepColDetails['logical-type'] == 'cat'))) | set(chain.from_iterable( (numCol, numPrepColDetails['transform-to']) for numCol, numPrepColDetails in origToPreprocColMap.items() if (numCol not in (PandasMLPreprocessor._CAT_INDEX_SCALED_FIELD_NAME, PandasMLPreprocessor._NUM_SCALER_FIELD_NAME)) and (numPrepColDetails['logical-type'] == 'num')))) s3ParquetDF: ParquetDataset = \ self.map(pandasMLPreproc, inheritNRows=True, **kwargs)[tuple(colsToKeep)] s3ParquetDF._inheritCache(self, *colsToKeep) s3ParquetDF._cache.reprSample = self._cache.reprSample if verbose: toc: float = time.time() self.stdOutLogger.info(msg=f'{msg} done! 
<{(toc - tic) / 60:,.1f} m>') return (s3ParquetDF, pandasMLPreproc) if returnPreproc else s3ParquetDF
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/util/data_proc/parquet.py
parquet.py
from __future__ import annotations

from dataclasses import dataclass
from itertools import chain
from pathlib import Path
from typing import Optional, Union
from typing import Dict, List, Sequence, Tuple   # Py3.9+: use built-ins

from numpy import array, full, ndarray, tile
from pandas import concat, DataFrame, Series
from pandas._libs.missing import NA   # pylint: disable=no-name-in-module
from sklearn.preprocessing import MaxAbsScaler, MinMaxScaler, StandardScaler

from aito.util.data_types.python import PyPossibleFeatureType
from aito.util.data_types.spark_sql import _STR_TYPE
from aito.util.fs import PathType
from aito.util.iter import to_iterable
from aito.util.namespace import Namespace, DICT_OR_NAMESPACE_TYPES


__all__ = (
    'PandasFlatteningSubsampler',
    'PandasMLPreprocessor',
)


# flake8: noqa
# (too many camelCase names)

# pylint: disable=invalid-name
# e.g., camelCase names


@dataclass(init=True, repr=True, eq=True, order=False, unsafe_hash=False, frozen=True)
class PandasFlatteningSubsampler:
    """Flattening Subsampler for Pandas Data Frames."""

    # column name(s) to subsample & flatten
    columns: Union[str, Tuple[str]]
    # subsampling stride (keep every N-th row)
    everyNRows: int
    # expected total number of rows (frames shorter than this get padded)
    totalNRows: int

    @property
    def rowIndexRange(self):
        """Integer row index range."""
        return range(0, self.totalNRows, self.everyNRows)

    @property
    def transformedCols(self) -> List[str]:
        """Flattened column names."""
        r: range = self.rowIndexRange
        return list(chain.from_iterable((f'{col}__{i}' for i in r)
                                        for col in self.columns))

    def __call__(self, pandasDF: DataFrame, /, *, padWithLastRow: bool = False) -> Series:
        """Subsample a Pandas Data Frame's certain columns and flatten them."""
        cols: List[str] = to_iterable(self.columns, iterable_type=list)
        nCols: int = len(cols)

        df: DataFrame = pandasDF[cols]

        # pad short frames up to totalNRows, either repeating the last row
        # or filling with NA
        if (nMissingRows := self.totalNRows - len(df)) > 0:
            df: DataFrame = concat(
                objs=(df,
                      DataFrame(data=(tile(A=df.iloc[-1], reps=(nMissingRows, 1))
                                      if padWithLastRow
                                      else full(shape=(nMissingRows, nCols),
                                                fill_value=NA,
                                                dtype=None,
                                                order='C',
                                                like=None)),
                                columns=cols)),
                axis='index',
                join='outer',
                ignore_index=True,
                keys=None,
                levels=None,
                names=None,
                verify_integrity=False,
                sort=False,
                copy=False)

        # column-major ('F') flattening matches transformedCols' ordering
        # (all indices of col 1, then all indices of col 2, ...)
        # NOTE(review): the Series constructor's `fastpath` parameter is
        # deprecated/removed in recent pandas — confirm pinned pandas version
        return Series(data=df.iloc[self.rowIndexRange].values.flatten(order='F'),
                      index=self.transformedCols,
                      dtype=None, name=None, copy=False, fastpath=False)


class PandasMLPreprocessor:
    # pylint: disable=too-many-instance-attributes,too-few-public-methods
    """ML Preprocessor for Pandas Data Frames."""

    # reserved field names in origToPreprocColMap (not column names)
    _CAT_INDEX_SCALED_FIELD_NAME: str = '__CAT_IDX_SCALED__'
    _NUM_SCALER_FIELD_NAME: str = '__NUM_SCALER__'

    # class-level cache of preprocessors loaded from / saved to files
    _PREPROC_CACHE: Dict[Path, Namespace] = {}

    def __init__(self, origToPreprocColMap: Namespace):
        """Init ML Preprocessor."""
        self.origToPreprocColMap: Namespace = origToPreprocColMap

        # per-column details whose 'logical-type' is 'cat'
        self.catOrigToPreprocColMap: Namespace = Namespace(**{
            catCol: catPreprocDetails
            for catCol, catPreprocDetails in origToPreprocColMap.items()
            if isinstance(catPreprocDetails, DICT_OR_NAMESPACE_TYPES) and
               (catPreprocDetails['logical-type'] == 'cat')})

        self.sortedCatCols: List[str] = sorted(self.catOrigToPreprocColMap)

        self.sortedCatPreprocCols: List[str] = \
            [self.catOrigToPreprocColMap[catCol]['transform-to']
             for catCol in self.sortedCatCols]

        if self.sortedCatCols:
            self.catIdxScaled: bool = \
                origToPreprocColMap[self._CAT_INDEX_SCALED_FIELD_NAME]

        # per-column details whose 'logical-type' is 'num'
        self.numOrigToPreprocColMap: Namespace = Namespace(**{
            numCol: numPreprocDetails
            for numCol, numPreprocDetails in origToPreprocColMap.items()
            if isinstance(numPreprocDetails, DICT_OR_NAMESPACE_TYPES) and
               (numPreprocDetails['logical-type'] == 'num')})

        self.sortedNumCols: List[str] = sorted(self.numOrigToPreprocColMap)

        self.sortedNumPreprocCols: List[str] = \
            [self.numOrigToPreprocColMap[numCol]['transform-to']
             for numCol in self.sortedNumCols]

        if self.sortedNumCols:
            self.numScaler: Optional[str] = \
                origToPreprocColMap[self._NUM_SCALER_FIELD_NAME]

            # rebuild a fitted scikit-learn scaler from the saved per-column
            # statistics (no .fit() call: fitted attributes are set directly)
            if self.numScaler == 'standard':
                self.numScaler: StandardScaler = StandardScaler(copy=True,
                                                                with_mean=True,
                                                                with_std=True)

                # mean value for each feature in the training set
                self.numScaler.mean_ = \
                    array([self.numOrigToPreprocColMap[numCol]['mean']
                           for numCol in self.sortedNumCols])

                # per-feature relative scaling of the data
                self.numScaler.scale_ = \
                    array([self.numOrigToPreprocColMap[numCol]['std']
                           for numCol in self.sortedNumCols])

            elif self.numScaler == 'maxabs':
                self.numScaler: MaxAbsScaler = MaxAbsScaler(copy=True)

                # per-feature maximum absolute value /
                # per-feature relative scaling of the data
                self.numScaler.max_abs_ = self.numScaler.scale_ = \
                    array([self.numOrigToPreprocColMap[numCol]['max-abs']
                           for numCol in self.sortedNumCols])

            elif self.numScaler == 'minmax':
                self.numScaler: MinMaxScaler = MinMaxScaler(feature_range=(-1, 1),
                                                            copy=True,
                                                            clip=False)

                # per-feature minimum seen in the data
                self.numScaler.data_min_ = \
                    array([self.numOrigToPreprocColMap[numCol]['orig-min']
                           for numCol in self.sortedNumCols])

                # per-feature maximum seen in the data
                self.numScaler.data_max_ = \
                    array([self.numOrigToPreprocColMap[numCol]['orig-max']
                           for numCol in self.sortedNumCols])

                # per-feature range (data_max_ - data_min_) seen in the data
                self.numScaler.data_range_ = \
                    self.numScaler.data_max_ - self.numScaler.data_min_

                # per-feature relative scaling of the data
                self.numScaler.scale_ = 2 / self.numScaler.data_range_

                # per-feature adjustment for minimum
                self.numScaler.min_ = \
                    -1 - (self.numScaler.scale_ * self.numScaler.data_min_)

            else:
                assert self.numScaler is None

            if self.numScaler is not None:
                self.numScaler.feature_names_in_ = self.sortedNumPreprocCols
                self.numScaler.n_features_in_ = len(self.sortedNumCols)

        self.sortedPreprocCols: List[str] = (self.sortedCatPreprocCols +
                                             self.sortedNumPreprocCols)

    def __call__(self, pandasDF: DataFrame, /, *, returnNumPy: bool = False) \
            -> Union[DataFrame, ndarray]:
        # pylint: disable=too-many-locals
        """Preprocess a Pandas Data Frame."""
        # tolerance for matching float-typed categories by value
        _FLOAT_ABS_TOL: float = 1e-9

        if self.sortedCatCols:
            # preprocess categorical columns
            for catCol, catPreprocDetails in self.catOrigToPreprocColMap.items():
                nCats: int = catPreprocDetails['n-cats']
                sortedCats: Sequence[PyPossibleFeatureType] = \
                    catPreprocDetails['sorted-cats']

                # extract categorical data column
                series: Series = pandasDF[catCol]

                # transform categorical data column into integer indices;
                # unrecognized categories map to the out-of-vocabulary
                # index nCats
                pandasDF.loc[:, (catPreprocCol :=
                                 catPreprocDetails['transform-to'])] = (
                    (sum(((series == cat) * i)
                         for i, cat in enumerate(sortedCats)) +
                     ((~series.isin(sortedCats)) * nCats))

                    if catPreprocDetails['physical-type'] in (bool.__name__, _STR_TYPE)

                    else (sum(((series - cat).abs().between(left=0,
                                                            right=_FLOAT_ABS_TOL) * i)
                              for i, cat in enumerate(sortedCats)) +
                          ((1 -
                            sum(((series - cat).abs().between(left=0,
                                                              right=_FLOAT_ABS_TOL) * 1)
                                for cat in sortedCats)) *
                           nCats)))

                # *** NOTE: NumPy BUG ***
                # abs(...) of a data type most negative value equals to
                # the same most negative value
                # github.com/numpy/numpy/issues/5657
                # github.com/numpy/numpy/issues/9463

                if self.catIdxScaled:
                    # MinMax-scale categorical data integer indices
                    pandasDF.loc[:, catPreprocCol] = minMaxScaledIndices = \
                        2 * pandasDF[catPreprocCol] / nCats - 1

                    assert minMaxScaledIndices.between(
                        left=-1, right=1, inclusive='both').all(), \
                        ValueError('CERTAIN MIN-MAX SCALED INT INDICES '
                                   f'NOT BETWEEN -1 AND 1: {catPreprocCol}: '
                                   f'({minMaxScaledIndices.min()}, '
                                   f'{minMaxScaledIndices.max()})')

        if self.sortedNumCols:
            # NULL-fill numerical columns
            for numCol, numPreprocDetails in self.numOrigToPreprocColMap.items():
                lowerNull, upperNull = numPreprocDetails['nulls']

                # extract column data series as float type
                series: Series = pandasDF[numCol].astype(dtype=float,
                                                         copy=True,
                                                         errors='raise')

                # check numerical data validity: non-null and strictly inside
                # the (lowerNull, upperNull) sentinel bounds if given
                isValid: Series = series.notnull()
                if lowerNull is not None:
                    isValid &= (series > lowerNull)
                if upperNull is not None:
                    isValid &= (series < upperNull)

                # NULL-fill numerical data column: fill with the configured
                # method computed over the valid values, else the saved value
                # NOTE(review): the `level=` argument of Series reductions /
                # Series.where was removed in pandas 2.0 — confirm pinned version
                pandasDF.loc[:, numPreprocDetails['transform-to']] = (
                    series.where(cond=isValid,
                                 other=(getattr(series.loc[isValid], nullFillMethod)
                                        (axis='index', skipna=True, level=None)
                                        if (nullFillMethod :=
                                            numPreprocDetails['null-fill-method'])
                                        else numPreprocDetails['null-fill-value']),
                                 inplace=False, axis=None, level=None)
                    if isValid.any(axis='index', skipna=True, level=None)
                    else numPreprocDetails['null-fill-value'])

            # scale numerical columns
            if self.numScaler:
                pandasDF.loc[:, self.sortedNumPreprocCols] = \
                    self.numScaler.transform(X=pandasDF[self.sortedNumPreprocCols])

        return pandasDF[self.sortedPreprocCols].values if returnNumPy else pandasDF

    @classmethod
    def from_json(cls, path: PathType) -> PandasMLPreprocessor:
        """Load from JSON file."""
        path: Path = Path(path).resolve(strict=True)

        if path not in cls._PREPROC_CACHE:
            cls._PREPROC_CACHE[path] = \
                cls(origToPreprocColMap=Namespace.from_json(path=path))

        return cls._PREPROC_CACHE[path]

    def to_json(self, path: PathType):
        """Save to JSON file."""
        # NOTE(review): resolve(strict=True) raises FileNotFoundError for a
        # not-yet-existing path — confirm callers only save to existing files
        path: Path = Path(path).resolve(strict=True)

        self.origToPreprocColMap.to_json(path=path)

        self._PREPROC_CACHE[path] = self

    @classmethod
    def from_yaml(cls, path: PathType) -> PandasMLPreprocessor:
        """Load from YAML file."""
        path: Path = Path(path).resolve(strict=True)

        if path not in cls._PREPROC_CACHE:
            cls._PREPROC_CACHE[path] = \
                cls(origToPreprocColMap=Namespace.from_yaml(path=path))

        return cls._PREPROC_CACHE[path]

    def to_yaml(self, path: PathType):
        """Save to YAML file."""
        # NOTE(review): resolve(strict=True) raises FileNotFoundError for a
        # not-yet-existing path — confirm callers only save to existing files
        path: Path = Path(path).resolve(strict=True)

        self.origToPreprocColMap.to_yaml(path=path)

        self._PREPROC_CACHE[path] = self
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/util/data_proc/pandas.py
pandas.py
"""Abstract data-handling interfaces shared by concrete (e.g. file/S3) handlers."""

from __future__ import annotations

from collections.abc import Collection
from logging import getLogger, Logger, Handler, DEBUG, INFO
from pathlib import Path
import tempfile
from typing import Any, Optional, Union

from numpy import ndarray
from pandas import DataFrame, Series

from aito.util import debug
from aito.util.log import STDOUT_HANDLER
from aito.util.namespace import Namespace


__all__ = (
    'AbstractDataHandler',
    'AbstractFileDataHandler', 'AbstractS3FileDataHandler',  # noqa: E501
    'ColsType', 'ReducedDataSetType',
)


# one column name or a collection of column names
ColsType = Union[str, Collection[str]]
# anything a sampling/reduction operation may return
ReducedDataSetType = Union[Any, Collection, ndarray, DataFrame, Series]


class AbstractDataHandler:   # pylint: disable=no-member,too-many-public-methods
    """Abstract Data Handler."""

    # ===========
    # CLASS ATTRS
    # -----------

    # date column name
    _DATE_COL: str = 'date'

    # default representative sample size
    _DEFAULT_REPR_SAMPLE_SIZE: int = 10 ** 6

    # default column profiling settings
    _DEFAULT_MIN_NON_NULL_PROPORTION: float = .32
    _DEFAULT_OUTLIER_TAIL_PROPORTION: float = 1e-3   # 0.1% each tail
    _DEFAULT_MAX_N_CATS: int = 12   # MoY is likely most numerous-category var
    _DEFAULT_MIN_PROPORTION_BY_MAX_N_CATS: float = .9

    # preprocessing for ML: prefixes for derived/transformed column names
    _CAT_IDX_PREFIX: str = 'CAT_INDEX__'
    _NULL_FILL_PREFIX: str = 'NULL_FILLED__'

    _STD_SCL_PREFIX: str = 'STD_SCALED__'
    _MAX_ABS_SCL_PREFIX: str = 'MAXABS_SCALED__'
    _MIN_MAX_SCL_PREFIX: str = 'MINMAX_SCALED__'

    # ===========
    # STRING REPR
    # -----------
    # __repr__
    # __shortRepr__
    # __str__

    def __repr__(self) -> str:
        """Return string repr."""
        raise NotImplementedError

    @property
    def __shortRepr__(self) -> str:   # noqa: N802   # pylint: disable=invalid-name
        """Return short string repr."""
        raise NotImplementedError

    def __str__(self) -> str:
        """Return string repr."""
        return repr(self)

    # =======
    # LOGGING
    # -------
    # classLogger
    # classStdOutLogger
    # logger
    # stdOutLogger

    @classmethod
    def classLogger(cls, *handlers: Handler, **kwargs: Any) -> Logger:   # noqa: E501,N802   # pylint: disable=invalid-name
        """Get Class Logger."""
        logger: Logger = getLogger(name=cls.__name__)

        # level kwarg wins; otherwise DEBUG when global debug flag is on
        level: Optional[int] = kwargs.get('level')
        if not level:
            level: int = DEBUG if debug.ON else INFO
        logger.setLevel(level=level)

        for handler in handlers:
            logger.addHandler(hdlr=handler)
        if kwargs.get('verbose'):
            logger.addHandler(hdlr=STDOUT_HANDLER)

        return logger

    @classmethod
    def classStdOutLogger(cls) -> Logger:   # noqa: N802   # pylint: disable=invalid-name
        """Get Class StdOut Logger."""
        return cls.classLogger(level=DEBUG, verbose=True)

    def logger(self, *handlers: Handler, **kwargs: Any) -> Logger:
        """Get Logger (named after this instance's short repr)."""
        logger: Logger = getLogger(name=self.__shortRepr__)

        level: Optional[int] = kwargs.get('level')
        if not level:
            level: int = DEBUG if debug.ON else INFO
        logger.setLevel(level=level)

        for handler in handlers:
            logger.addHandler(hdlr=handler)
        if kwargs.get('verbose'):
            logger.addHandler(hdlr=STDOUT_HANDLER)

        return logger

    @property
    def stdOutLogger(self) -> Logger:   # noqa: N802   # pylint: disable=invalid-name
        """Get StdOut Logger."""
        return self.logger(level=DEBUG, verbose=True)

    # ===================
    # SETTABLE PROPERTIES
    # -------------------
    # iCol
    # tCol

    @property
    def iCol(self) -> Optional[str]:   # noqa: N802   # pylint: disable=invalid-name
        """Entity/Identity column."""
        return self._iCol

    @iCol.setter
    def iCol(self, iCol: str):   # noqa: N802,N803   # pylint: disable=invalid-name
        if iCol != self._iCol:
            self._iCol: Optional[str] = iCol

            # NOTE(review): validation happens AFTER assignment, and asserts
            # are stripped under `python -O` -- confirm this is intentional
            if iCol is not None:
                assert iCol, ValueError(f'*** iCol {iCol} INVALID ***')

    @iCol.deleter
    def iCol(self):   # noqa: N802   # pylint: disable=invalid-name
        self._iCol: Optional[str] = None

    @property
    def tCol(self) -> Optional[str]:   # noqa: N802   # pylint: disable=invalid-name
        """Date-Time column."""
        return self._tCol

    @tCol.setter
    def tCol(self, tCol: str):   # noqa: N802,N803   # pylint: disable=invalid-name
        if tCol != self._tCol:
            self._tCol: Optional[str] = tCol

            if tCol is not None:
                assert tCol, ValueError(f'*** tCol {tCol} INVALID ***')

    @tCol.deleter
    def tCol(self):   # noqa: N802   # pylint: disable=invalid-name
        self._tCol: Optional[str] = None

    # =======
    # CACHING
    # -------
    # _emptyCache
    # _inheritCache

    def _emptyCache(self):   # noqa: N802   # pylint: disable=invalid-name
        """Empty cache."""
        raise NotImplementedError

    def _inheritCache(self, *args: Any, **kwargs: Any):   # noqa: N802   # pylint: disable=invalid-name
        """Inherit existing cache."""
        raise NotImplementedError

    # =====================
    # ROWS, COLUMNS & TYPES
    # ---------------------
    # __len__ / nRows
    # columns
    # _organizeIndexCols
    # indexCols
    # contentCols
    # types / type / typeIsNum
    # possibleFeatureCols
    # possibleCatCols
    # possibleNumCols

    def __len__(self) -> int:
        """Return number of rows."""
        return self.nRows

    @property
    def nRows(self) -> int:   # noqa: N802   # pylint: disable=invalid-name
        """Return number of rows."""
        raise NotImplementedError

    @nRows.deleter
    def nRows(self):   # noqa: N802   # pylint: disable=invalid-name
        # deleting only invalidates the cached count
        self._cache.nRows = None

    @property
    def columns(self) -> set[str]:
        """Return columns."""
        raise NotImplementedError

    def _organizeIndexCols(self):   # noqa: N802   # pylint: disable=attribute-defined-outside-init,invalid-name
        # record the date column only if it is actually present
        self._dCol: Optional[str] = (self._DATE_COL
                                     if self._DATE_COL in self.columns
                                     else None)

    @property
    def indexCols(self) -> set[str]:   # noqa: N802   # pylint: disable=invalid-name
        """Return index columns."""
        raise NotImplementedError

    @property
    def contentCols(self) -> set[str]:   # noqa: N802   # pylint: disable=invalid-name
        """Return content columns."""
        return self.columns - self.indexCols

    @property
    def types(self) -> Namespace:
        """Return column data types."""
        raise NotImplementedError

    def type(self, col: str) -> type:
        """Return data type of specified column."""
        raise NotImplementedError

    def typeIsNum(self, col: str) -> bool:   # noqa: N802   # pylint: disable=invalid-name
        """Check whether specified column's data type is numerical."""
        raise NotImplementedError

    @property
    def possibleFeatureCols(self) -> set[str]:   # noqa: N802   # pylint: disable=invalid-name
        """Return possible feature content columns."""
        raise NotImplementedError

    @property
    def possibleCatCols(self) -> set[str]:   # noqa: N802   # pylint: disable=invalid-name
        """Return possible categorical content columns."""
        raise NotImplementedError

    @property
    def possibleNumCols(self) -> set[str]:   # noqa: N802   # pylint: disable=invalid-name
        """Return possible numerical content columns."""
        return {col for col in self.contentCols if self.typeIsNum(col)}

    # =========
    # FILTERING
    # ---------
    # filter

    def filter(self, *conditions: str, **kwargs: Any) -> AbstractDataHandler:
        """Apply filtering conditions."""
        raise NotImplementedError

    # ========
    # SAMPLING
    # --------
    # sample
    # _assignReprSample
    # reprSampleSize
    # reprSample

    def sample(self, *cols: str, **kwargs: Any) -> Union[ReducedDataSetType, Any]:  # noqa: E501
        """Sample from data set."""
        raise NotImplementedError

    def _assignReprSample(self):   # noqa: N802   # pylint: disable=invalid-name
        """Assign representative sample."""
        raise NotImplementedError

    @property
    def reprSampleSize(self) -> int:   # noqa: N802   # pylint: disable=invalid-name
        """Return approx number of rows to sample for profiling purposes.

        (default = 1,000,000)
        """
        # reading the size also materializes the sample itself if absent
        if self._cache.reprSample is None:
            self._assignReprSample()

        return self._reprSampleSize

    @reprSampleSize.setter
    def reprSampleSize(self, n: int, /):   # noqa: N802   # pylint: disable=invalid-name
        self._reprSampleSize: int = n
        # eagerly re-sample at the new size
        self._assignReprSample()

    @property
    def reprSample(self):   # noqa: N802   # pylint: disable=invalid-name
        """Sub-sampled data set according to ``.reprSampleSize`` attribute."""
        if self._cache.reprSample is None:
            self._assignReprSample()

        return self._cache.reprSample

    # ================
    # COLUMN PROFILING
    # ----------------
    # minNonNullProportion
    # outlierTailProportion
    # maxNCats
    # minProportionByMaxNCats
    # count
    # nonNullProportion
    # suffNonNull
    # distinct
    # quantile
    # sampleStat
    # outlierRstStat
    # profile

    @property
    def minNonNullProportion(self) -> float:   # noqa: N802   # pylint: disable=invalid-name
        """Return min proportion of non-NULL values in each column.

        (to qualify it as a valid feature to use in downstream modeling)

        (default = .32)
        """
        return self._minNonNullProportion.default

    @minNonNullProportion.setter
    def minNonNullProportion(self, proportion: float, /):   # noqa: N802   # pylint: disable=invalid-name
        if proportion != self._minNonNullProportion.default:
            self._minNonNullProportion.default = proportion
            # changing the threshold invalidates all cached sufficiency flags
            self._cache.suffNonNull = {}

    @property
    def outlierTailProportion(self) -> float:   # noqa: N802   # pylint: disable=invalid-name
        """Return proportion in each tail of each numerical column's distrib.

        (to exclude when computing outlier-resistant statistics)

        (default = .001)
        """
        return self._outlierTailProportion.default

    @outlierTailProportion.setter
    def outlierTailProportion(self, proportion: float, /):   # noqa: N802   # pylint: disable=invalid-name
        self._outlierTailProportion.default = proportion

    @property
    def maxNCats(self) -> int:   # noqa: N802   # pylint: disable=invalid-name
        """Return max number of categorical levels for possible cat. columns.

        (default = 12)
        """
        return self._maxNCats.default

    @maxNCats.setter
    def maxNCats(self, n: int, /):   # noqa: N802   # pylint: disable=invalid-name
        self._maxNCats.default = n

    @property
    def minProportionByMaxNCats(self) -> float:   # noqa: N802   # pylint: disable=invalid-name
        """Return min total proportion accounted for by top ``maxNCats``.

        (to consider the column truly categorical)

        (default = .9)
        """
        return self._minProportionByMaxNCats.default

    @minProportionByMaxNCats.setter
    def minProportionByMaxNCats(self, proportion: float, /):   # noqa: N802   # pylint: disable=invalid-name
        self._minProportionByMaxNCats.default = proportion

    def count(self, *cols: str, **kwargs: Any) -> Union[int, Namespace]:
        """Count non-NULL data values in specified column(s)."""
        raise NotImplementedError

    def nonNullProportion(self, *cols: str, **kwargs: Any) \
            -> Union[float, Namespace]:   # noqa: N802   # pylint: disable=invalid-name
        """Count non-NULL data proportion(s) in specified column(s)."""
        raise NotImplementedError

    def suffNonNull(self, *cols: str, **kwargs: Any) -> Union[bool, Namespace]:  # noqa: E501,N802   # pylint: disable=invalid-name
        """Check whether columns have sufficient non-NULL values.

        (at least ``.minNonNullProportion`` of values being non-``NULL``)

        Return:
            - If 1 column name is given, return ``True``/``False``

            - If multiple column names are given,
            return a {``col``: ``True`` or ``False``} *dict*

            - If no column names are given,
            return a {``col``: ``True`` or ``False``} *dict* for all columns
        """
        # no cols given: evaluate every content column
        if not cols:
            cols: tuple[str] = tuple(self.contentCols)

        # multiple cols: recurse per column and aggregate
        if len(cols) > 1:
            return Namespace(**{col: self.suffNonNull(col, **kwargs)
                                for col in cols})

        col: str = cols[0]

        minNonNullProportion: float = self._minNonNullProportion[col]  # noqa: E501,N806

        # detect whether the cached per-column threshold is stale;
        # if so, recompute the sufficiency flag below
        outdatedSuffNonNullProportionThreshold: bool = False  # noqa: N806
        if col in self._cache.suffNonNullProportionThreshold:
            if self._cache.suffNonNullProportionThreshold[col] != \
                    minNonNullProportion:
                outdatedSuffNonNullProportionThreshold: bool = True  # noqa: E501,N806
                self._cache.suffNonNullProportionThreshold[col] = \
                    minNonNullProportion
        else:
            self._cache.suffNonNullProportionThreshold[col] = \
                minNonNullProportion

        if (col not in self._cache.suffNonNull) or \
                outdatedSuffNonNullProportionThreshold:
            self._cache.suffNonNull[col] = (
                self.nonNullProportion(col) >=
                self._cache.suffNonNullProportionThreshold[col])

        return self._cache.suffNonNull[col]

    def distinct(self, *cols: str, **kwargs: Any) -> Union[dict[str, float], Series, Namespace]:
        """Return distinct values for specified column(s)."""
        raise NotImplementedError

    def quantile(self, *cols: str, **kwargs: Any) -> Union[float, int, Series, Namespace]:
        """Return quantile values for specified column(s)."""
        raise NotImplementedError

    def sampleStat(self, *cols: str, **kwargs: Any) \
            -> Union[float, int, Namespace]:   # noqa: N802   # pylint: disable=invalid-name
        """Return certain sample statistics for specified columns."""
        raise NotImplementedError

    def outlierRstStat(self, *cols: str, **kwargs: Any) \
            -> Union[float, int, Namespace]:   # noqa: N802   # pylint: disable=invalid-name
        """Return outlier-resistant statistics for specified columns."""
        raise NotImplementedError

    def profile(self, *cols: str, **kwargs: Any) -> Namespace:
        """Profile specified column(s)."""
        raise NotImplementedError

    # ====================
    # PREPROCESSING FOR ML
    # --------------------
    # preprocForML

    def preprocForML(self, *cols: str, **kwargs: Any) -> AbstractDataHandler:  # noqa: E501,N802   # pylint: disable=invalid-name
        """Pre-process specified column(s) for ML model training/inference."""
        raise NotImplementedError


class AbstractFileDataHandler(AbstractDataHandler):   # pylint: disable=abstract-method
    """Abstract File Data Handler."""

    # minimum number of files for schema management & representative sampling
    _SCHEMA_MIN_N_FILES: int = 10
    _REPR_SAMPLE_MIN_N_FILES: int = 100

    # local file cache dir
    _LOCAL_CACHE_DIR_PATH: Path = (Path(tempfile.gettempdir()).resolve(strict=True) /  # noqa: E501
                                   '.aito/data-proc-cache')

    # ====================================
    # MIN. NO. OF FILES FOR REPR. SAMPLING
    # ------------------------------------
    # reprSampleMinNFiles

    @property
    def reprSampleMinNFiles(self) -> int:   # noqa: N802   # pylint: disable=invalid-name
        """Minimum number of pieces for representative sample."""
        return self._reprSampleMinNFiles

    @reprSampleMinNFiles.setter
    def reprSampleMinNFiles(self, n: int, /):   # noqa: N802   # pylint: disable=invalid-name,no-member
        # silently ignored when n exceeds the number of available files
        if (n <= self.nFiles) and (n != self._reprSampleMinNFiles):
            self._reprSampleMinNFiles: int = n

    @reprSampleMinNFiles.deleter
    def reprSampleMinNFiles(self):   # noqa: N802   # pylint: disable=invalid-name,no-member
        # reset to the class default, capped by the number of available files
        self._reprSampleMinNFiles: int = min(self._REPR_SAMPLE_MIN_N_FILES,
                                             self.nFiles)


class AbstractS3FileDataHandler(AbstractFileDataHandler):   # pylint: disable=abstract-method
    """Abstract S3 File Data Handler."""

    # temporary dir key
    _TMP_DIR_S3_KEY: str = 'tmp'
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/util/data_proc/_abstract.py
_abstract.py
from pyarrow.lib import ( # noqa: F401 DataType, NA, Type_NA, _NULL, null, bool_, Type_BOOL, string, Type_STRING, binary, Type_BINARY, FixedSizeBinaryType, Type_FIXED_SIZE_BINARY, uint8, Type_UINT8, int8, Type_INT8, uint16, Type_UINT16, int16, Type_INT16, uint32, Type_UINT32, int32, Type_INT32, uint64, Type_UINT64, int64, Type_INT64, float16, Type_HALF_FLOAT, float32, Type_FLOAT, float64, Type_DOUBLE, decimal128, Decimal128Type, Type_DECIMAL128, decimal256, Decimal256Type, Type_DECIMAL256, date32, Type_DATE32, date64, Type_DATE64, timestamp, TimestampType, Type_TIMESTAMP, time32, Time32Type, Type_TIME32, time64, Time64Type, Type_TIME64, list_, ListType, Type_LIST, struct, StructType, Type_STRUCT, union, UnionType, dictionary, DictionaryType, Type_DICTIONARY, Type_MAP, is_boolean_value, is_float_value, is_integer_value, ) from pyarrow.types import ( # noqa: F401 is_null, is_boolean, is_string, is_unicode, is_binary, is_fixed_size_binary, _SIGNED_INTEGER_TYPES, _UNSIGNED_INTEGER_TYPES, _INTEGER_TYPES, is_signed_integer, is_unsigned_integer, is_integer, is_int8, is_int16, is_int32, is_int64, is_uint8, is_uint16, is_uint32, is_uint64, _FLOATING_TYPES, is_floating, is_float16, is_float32, is_float64, is_decimal, _DATE_TYPES, is_date, is_date32, is_date64, _TIME_TYPES, is_time, is_time32, is_time64, is_timestamp, _TEMPORAL_TYPES, is_temporal, _NESTED_TYPES, is_list, is_struct, is_union, is_map, is_nested, is_dictionary, ) from aito.util.data_types.spark_sql import ( # noqa: F401 _NULL_TYPE, _BOOL_TYPE, _STR_TYPE, _BINARY_TYPE, _INT_TYPE, _BIGINT_TYPE, _FLOAT_TYPE, _DOUBLE_TYPE, _DATE_TYPE, _TIMESTAMP_TYPE, _VECTOR_TYPE, _DECIMAL_TYPE_PREFIX, _ARRAY_TYPE_PREFIX, _MAP_TYPE_PREFIX, _STRUCT_TYPE_PREFIX, ) __all__ = ( '_ARROW_NULL_TYPE', '_ARROW_BOOL_TYPE', '_ARROW_STR_TYPE', '_ARROW_BINARY_TYPE', '_ARROW_INT_TYPE', '_ARROW_DOUBLE_TYPE', '_ARROW_DATE_TYPE', '_ARROW_TIMESTAMP_TYPE', 'is_float', 'is_num', 'is_possible_cat', 'is_possible_feature', 'is_complex', ) 
# Canonical Arrow data-type singletons; the asserts pin down where the Arrow
# string representations line up with the Spark SQL type-name constants.
_ARROW_NULL_TYPE: DataType = null()

_ARROW_BOOL_TYPE: DataType = bool_()
assert (__arrow_bool_type_str := str(_ARROW_BOOL_TYPE)) == bool.__name__
assert _BOOL_TYPE.startswith(__arrow_bool_type_str)

_ARROW_STR_TYPE: DataType = string()
assert str(_ARROW_STR_TYPE) == _STR_TYPE

# binary(-1) == variable-length binary, whose repr matches Spark's 'binary'
_ARROW_BINARY_TYPE: DataType = binary(-1)
assert str(_ARROW_BINARY_TYPE) == _BINARY_TYPE

_ARROW_INT_TYPE: DataType = int64()
_ARROW_DOUBLE_TYPE: DataType = float64()

_ARROW_DATE_TYPE: DataType = date32()
_ARROW_TIMESTAMP_TYPE: DataType = timestamp(unit='ns', tz=None)


def is_float(arrow_type: DataType, /) -> bool:
    """Check if data type is float (floating-point or decimal)."""
    return is_floating(arrow_type) or is_decimal(arrow_type)


def is_num(arrow_type: DataType, /) -> bool:
    """Check if data type is numerical (integer or float)."""
    return is_integer(arrow_type) or is_float(arrow_type)


def is_possible_cat(arrow_type: DataType, /) -> bool:
    """Check if data type is possibly categorical (bool, string or numerical)."""
    return (is_boolean(arrow_type) or
            is_string(arrow_type) or
            is_num(arrow_type))


def is_possible_feature(arrow_type: DataType) -> bool:
    """Check if data type is possibly an ML feature.

    Same criterion as ``is_possible_cat``; delegate instead of duplicating
    the boolean expression so the two predicates cannot drift apart.
    (Signature kept keyword-capable, as in the original.)
    """
    return is_possible_cat(arrow_type)


def is_complex(arrow_type: DataType, /) -> bool:
    """Check if data type is complex (dictionary-encoded or nested)."""
    return is_dictionary(arrow_type) or is_nested(arrow_type)
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/util/data_types/arrow.py
arrow.py
"""Spark SQL type-name strings, precomputed once as module-level constants."""

from typing import Tuple   # Py3.9+: use built-ins

# pylint: disable=unused-import
from pyspark.sql.types import (  # noqa: F401
    DataType,
    NullType,
    AtomicType,
    BooleanType,
    StringType,
    BinaryType,
    NumericType,
    IntegralType,
    ByteType,
    ShortType,
    IntegerType,
    LongType,
    FractionalType,
    FloatType,
    DoubleType,
    DecimalType,
    DateType,
    TimestampType,

    # Complex Types
    ArrayType,
    MapType,
    StructField,
    StructType,

    _atomic_types, _all_atomic_types, _all_complex_types,
    _type_mappings,
    _array_signed_int_typecode_ctype_mappings,
    _array_unsigned_int_typecode_ctype_mappings,
    _array_type_mappings,
    _acceptable_types,
)


__all__ = (
    '_NULL_TYPE',
    '_BOOL_TYPE',
    '_STR_TYPE',
    '_BINARY_TYPE',
    '_TINYINT_TYPE', '_SMALLINT_TYPE', '_INT_TYPE', '_BIGINT_TYPE',
    '_INT_TYPES',
    '_FLOAT_TYPE', '_DOUBLE_TYPE', '_FLOAT_TYPES',
    '_NUM_TYPES',
    '_POSSIBLE_CAT_TYPES',
    '_POSSIBLE_FEATURE_TYPES',
    '_DATE_TYPE', '_TIMESTAMP_TYPE', '_DATETIME_TYPES',
    '_DECIMAL_10_0_TYPE', '_DECIMAL_38_18_TYPE',
    '_DECIMAL_TYPE_PREFIX',
    '_ARRAY_TYPE_PREFIX', '_MAP_TYPE_PREFIX', '_STRUCT_TYPE_PREFIX',
    '_VECTOR_TYPE',
)


# Each constant below is DataType().simpleString(); the asserts document
# exactly where simpleString() and typeName() agree -- or deliberately
# differ -- so a PySpark upgrade that changes them fails fast at import.

__null_type: NullType = NullType()
_NULL_TYPE: str = __null_type.simpleString()
assert _NULL_TYPE == __null_type.typeName()

__bool_type: BooleanType = BooleanType()
_BOOL_TYPE: str = __bool_type.simpleString()
assert _BOOL_TYPE == __bool_type.typeName()

__str_type: StringType = StringType()
_STR_TYPE: str = __str_type.simpleString()
assert _STR_TYPE == __str_type.typeName()

__binary_type: BinaryType = BinaryType()
_BINARY_TYPE: str = __binary_type.simpleString()
assert _BINARY_TYPE == __binary_type.typeName()

__byte_type: ByteType = ByteType()
_TINYINT_TYPE: str = __byte_type.simpleString()   # 'tinyint'

__short_type: ShortType = ShortType()
_SMALLINT_TYPE: str = __short_type.simpleString()   # 'smallint'

__int_type: IntegerType = IntegerType()
_INT_TYPE: str = __int_type.simpleString()   # 'int'
assert _INT_TYPE == int.__name__
# typeName() is 'integer', which only starts with simpleString()'s 'int'
assert __int_type.typeName().startswith(_INT_TYPE)

__long_type: LongType = LongType()
_BIGINT_TYPE: str = __long_type.simpleString()   # 'bigint'; typeName() is 'long'
assert __long_type.typeName() == 'long'

_INT_TYPES: Tuple[str] = _TINYINT_TYPE, _SMALLINT_TYPE, _INT_TYPE, _BIGINT_TYPE

__float_type: FloatType = FloatType()
_FLOAT_TYPE: str = __float_type.simpleString()
assert _FLOAT_TYPE == __float_type.typeName()

__double_type: DoubleType = DoubleType()
_DOUBLE_TYPE: str = __double_type.simpleString()
assert _DOUBLE_TYPE == __double_type.typeName()

_FLOAT_TYPES: Tuple[str] = _FLOAT_TYPE, _DOUBLE_TYPE

_NUM_TYPES: Tuple[str] = _INT_TYPES + _FLOAT_TYPES

_POSSIBLE_CAT_TYPES: Tuple[str] = (_BOOL_TYPE, _STR_TYPE) + _NUM_TYPES
# NOTE(review): _POSSIBLE_CAT_TYPES already ends with _NUM_TYPES, so the
# concatenation below repeats every numerical type name. Harmless for `in`
# membership tests, but redundant on iteration -- confirm before relying on
# this tuple's length or order.
_POSSIBLE_FEATURE_TYPES: Tuple[str] = _POSSIBLE_CAT_TYPES + _NUM_TYPES

__date_type: DateType = DateType()
_DATE_TYPE: str = __date_type.simpleString()
assert _DATE_TYPE == __date_type.typeName()

__timestamp_type: TimestampType = TimestampType()
_TIMESTAMP_TYPE: str = __timestamp_type.simpleString()
assert _TIMESTAMP_TYPE == __timestamp_type.typeName()

_DATETIME_TYPES: Tuple[str] = _DATE_TYPE, _TIMESTAMP_TYPE

__decimal_10_0_type: DecimalType = DecimalType(precision=10, scale=0)
_DECIMAL_10_0_TYPE: str = __decimal_10_0_type.simpleString()   # 'decimal(10,0)'

__decimal_38_18_type: DecimalType = DecimalType(precision=38, scale=18)
_DECIMAL_38_18_TYPE: str = __decimal_38_18_type.simpleString()   # 'decimal(38,18)'

_DECIMAL_TYPE_PREFIX: str = f'{DecimalType.typeName()}('

_ARRAY_TYPE_PREFIX: str = f'{ArrayType.typeName()}<'
_MAP_TYPE_PREFIX: str = f'{MapType.typeName()}<'
_STRUCT_TYPE_PREFIX: str = f'{StructType.typeName()}<'

# Spark ML vector type has no DataType class in pyspark.sql.types,
# so its name is hard-coded
_VECTOR_TYPE: str = 'vector'
Aitomatic-Contrib
/Aitomatic-Contrib-23.8.10.3.tar.gz/Aitomatic-Contrib-23.8.10.3/src/aito/util/data_types/spark_sql.py
spark_sql.py
from nlu.spellbook import Spellbook
import nlu
from nlu.info import AllComponentsInfo

all_components_info = AllComponentsInfo()


class Discoverer:
    """Various methods that help discover nlu_refs and functionality."""

    def __init__(self):
        """Initialize every NLU component_to_resolve info object and provide access to them."""
        self.nlu_info = {}

    @staticmethod
    def get_components(m_type='', include_pipes=False, lang='', licensed=False,
                       get_all=False, include_aliases=True):
        """Filter all NLU components.

        m_type : component/model type substring to filter for
        include_pipes : whether to include pipelines in the result or not
        lang : which languages to include; by default lang='' gets every lang
        licensed : whether to include licensed models or not
        get_all : if True, ignore the other filters and return EVERY NLU ref
            from the defined name spaces
        """
        matches = []

        def _collect(universe):
            # universe shape: {lang: {nlu_ref: nlp_ref}}; honor the lang filter
            for lang_, models in universe.items():
                if lang != '' and lang_ != lang:
                    continue
                for nlu_ref in models:
                    if m_type in nlu_ref or get_all:
                        matches.append(nlu_ref)

        _collect(nlu.Spellbook.pretrained_models_references)

        if include_pipes:
            _collect(nlu.Spellbook.pretrained_pipe_references)

        if include_aliases:
            # alias space is flat ({nlu_ref: nlp_ref}), not keyed by language
            for nlu_ref in nlu.Spellbook.component_alias_references:
                if m_type in nlu_ref or get_all:
                    matches.append(nlu_ref)

        if licensed:
            _collect(nlu.Spellbook.pretrained_healthcare_model_references)

        # de-duplicate before returning
        return list(set(matches))

    @staticmethod
    def print_all_languages():
        """Print all languages which are available in NLU Spark NLP pointer."""
        print('Languages available in NLU : \n ')
        for lang in all_components_info.all_languages:
            print(lang)

    @staticmethod
    def print_all_nlu_components_for_lang(lang='en', c_type='classifier'):
        """Print all NLU components available for a language Spark NLP pointer."""
        # TODO: parse for lang (c_type is currently unused)
        if lang in all_components_info.all_languages:
            for nlu_reference in Spellbook.pretrained_pipe_references[lang]:
                print("nlu.load('" + nlu_reference + "') returns Spark NLP Pipeline:" +
                      Spellbook.pretrained_pipe_references[lang][nlu_reference])

            for nlu_reference in Spellbook.pretrained_models_references[lang]:
                print("nlu.load('" + nlu_reference + "') returns Spark NLP Model: " +
                      Spellbook.pretrained_models_references[lang][nlu_reference])
        else:
            # fixed typo in user-facing message: "Does not exsist" -> "does not exist"
            print("Language " + lang + " does not exist in NLU. Please check the docs or "
                  "nlu.print_all_languages() for supported language references")

    @staticmethod
    def print_components(lang='', action=''):
        """Print every single NLU reference for models and pipelines and their Spark NLP pointer.

        :param lang: language filter for the components printed; see
            nlu.languages() for supported languages
        :param action: action filter for the components printed
        :return: None; this method prints its results
        """
        # dispatch to the more specific printers when filters are given
        if lang != '' and action == '':
            nlu.Discoverer().print_all_nlu_components_for_lang(lang)
            return
        if lang != '' and action != '':
            nlu.Discoverer().print_all_model_kinds_for_action_and_lang(lang, action)
            return
        if lang == '' and action != '':
            nlu.Discoverer().print_all_model_kinds_for_action(action)
            return

        # no filters: print the entire namespace
        for nlu_reference in nlu.Spellbook.component_alias_references.keys():
            # NOTE(review): the trailing comma makes component_type a 1-tuple,
            # so str(component_type) prints tuple syntax -- looks accidental,
            # but kept to preserve the original output; confirm before fixing
            component_type = nlu.Spellbook.component_alias_references[nlu_reference][1][0],  # component_list or model_anno_obj
            print("nlu.load('" + nlu_reference + "') ' returns Spark NLP " +
                  str(component_type) + ': ' +
                  nlu.Spellbook.component_alias_references[nlu_reference][0])

        for lang_key in nlu.Spellbook.pretrained_pipe_references.keys():
            for nlu_reference in nlu.Spellbook.pretrained_pipe_references[lang_key]:
                print("nlu.load('" + nlu_reference + "') for lang" + lang_key +
                      " returns model_anno_obj Spark NLP model_anno_obj:" +
                      nlu.Spellbook.pretrained_pipe_references[lang_key][nlu_reference])

        for lang_key in nlu.Spellbook.pretrained_models_references.keys():
            for nlu_reference in nlu.Spellbook.pretrained_models_references[lang_key]:
                print("nlu.load('" + nlu_reference + "')' for lang" + lang_key +
                      " returns model_anno_obj Spark NLP model_anno_obj: " +
                      nlu.Spellbook.pretrained_models_references[lang_key][nlu_reference])

    @staticmethod
    def print_component_types():
        """Print all unique component_to_resolve types in NLU."""
        # renamed loop variable so the builtin `type` is not shadowed
        unique_types = {val.type for val in nlu.all_components_info.all_components.values()}

        print("Provided component_to_resolve types in this NLU version are : ")
        for i, t in enumerate(unique_types):
            print(i, '. ', t)

    @staticmethod
    def print_all_model_kinds_for_action(action):
        """Print, per language, every model whose nlu_ref action segment matches."""
        for lang, lang_models in nlu.Spellbook.pretrained_models_references.items():
            lang_printed = False
            for nlu_reference, nlp_reference in lang_models.items():
                # action is the 2nd dot-separated segment of the nlu_ref
                ref_action = nlu_reference.split('.')
                if len(ref_action) > 1:
                    ref_action = ref_action[1]

                if ref_action == action:
                    if not lang_printed:
                        print('For language <' + lang + '> NLU provides the following Models : ')
                        lang_printed = True
                    print("nlu.load('" + nlu_reference + "') returns Spark NLP model_anno_obj " + nlp_reference)

    @staticmethod
    def print_all_model_kinds_for_action_and_lang(lang, action):
        """Print every model for one language whose nlu_ref action segment matches."""
        lang_candidates = nlu.Spellbook.pretrained_models_references[lang]
        # fixed typo in user-facing message: "peform" -> "perform"
        print("All NLU components for lang ", lang, " that perform action ", action)
        for nlu_reference, nlp_reference in lang_candidates.items():
            ref_action = nlu_reference.split('.')
            if len(ref_action) > 1:
                ref_action = ref_action[1]
            if ref_action == action:
                print("nlu.load('" + nlu_reference + "') returns Spark NLP model_anno_obj " + nlp_reference)

    @staticmethod
    def print_trainable_components():
        """Print every trainable Algorithm/Model.

        :return: None
        """
        # TODO: update
        i = 1
        print('The following models can be trained with a dataset that provides a label column and matching dataset')
        for name, infos in nlu.all_components_info.all_components.items():
            if infos.trainable:   # was `== True`; truthiness is equivalent for a bool flag
                print(f' {i}. {name}')
                i += 1
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/discovery.py
discovery.py
from sparknlp.common import AnnotatorModel
import sparknlp.annotator
from sparknlp.annotator import *
from sparknlp.internal import ExtendedJavaWrapper


class _FinanceNerModelLoader(ExtendedJavaWrapper):
    """JVM bridge calling MedicalNerModel.loadSavedModel on the Java side."""

    def __init__(self, ner_model_path, path, jspark):
        super(_FinanceNerModelLoader, self).__init__(
            "com.johnsnowlabs.nlp.annotators.ner.MedicalNerModel.loadSavedModel",
            ner_model_path, path, jspark)


class FinanceNer(AnnotatorModel, HasStorageRef, HasBatchedAnnotate):
    """Finance NER annotator, backed by the JVM MedicalNerModel class.

    Fixes vs. the original: the duplicate ``setIncludeConfidence`` definition
    (identical body, second silently replaced the first) and the dead first
    ``pretrained`` definition (``name="ner_dl"``; class-body rebinding meant
    only the clinical-defaults version ever took effect) have been removed.
    Runtime behavior is unchanged.
    """

    name = "MedicalNerModel"

    def __init__(self, classname="com.johnsnowlabs.nlp.annotators.ner.MedicalNerModel",
                 java_model=None):
        super(FinanceNer, self).__init__(
            classname=classname,
            java_model=java_model
        )
        self._setDefault(
            includeConfidence=False,
            includeAllConfidenceScores=False,
            batchSize=8,
            inferenceBatchSize=1
        )

    configProtoBytes = Param(Params._dummy(), "configProtoBytes",
                             "ConfigProto from tensorflow, serialized into byte array. Get with config_proto.SerializeToString()",
                             TypeConverters.toListString)

    includeConfidence = Param(Params._dummy(), "includeConfidence",
                              "whether to include confidence scores in annotation metadata",
                              TypeConverters.toBoolean)

    includeAllConfidenceScores = Param(Params._dummy(), "includeAllConfidenceScores",
                                       "whether to include all confidence scores in annotation metadata or just the score of the predicted tag",
                                       TypeConverters.toBoolean)

    inferenceBatchSize = Param(Params._dummy(), "inferenceBatchSize",
                               "number of sentences to process in a single batch during inference",
                               TypeConverters.toInt)

    classes = Param(Params._dummy(), "classes",
                    "get the tags used to trained this MedicalNerModel",
                    TypeConverters.toListString)

    trainingClassDistribution = Param(Params._dummy(), "trainingClassDistribution",
                                      "class counts for each of the classes during training",
                                      typeConverter=TypeConverters.identity)

    labelCasing = Param(Params._dummy(), "labelCasing",
                        "Setting all labels of the NER models upper/lower case. values upper|lower",
                        TypeConverters.toString)

    def setConfigProtoBytes(self, b):
        """Sets configProto from tensorflow, serialized into byte array.

        Parameters
        ----------
        b : List[str]
            ConfigProto from tensorflow, serialized into byte array
        """
        return self._set(configProtoBytes=b)

    def setIncludeConfidence(self, value):
        """Sets whether to include confidence scores in annotation metadata,
        by default False.

        Parameters
        ----------
        value : bool
            Whether to include the confidence value in the output.
        """
        return self._set(includeConfidence=value)

    def setInferenceBatchSize(self, value):
        """Sets number of sentences to process in a single batch during inference.

        Parameters
        ----------
        value : int
            number of sentences to process in a single batch during inference
        """
        return self._set(inferenceBatchSize=value)

    def setLabelCasing(self, value):
        """Setting all labels of the NER models upper/lower case. values upper|lower

        Parameters
        ----------
        value : str
            Setting all labels of the NER models upper/lower case. values upper|lower
        """
        return self._set(labelCasing=value)

    def getTrainingClassDistribution(self):
        """Return the per-class training counts from the Java side."""
        return self._call_java('getTrainingClassDistributionJava')

    @staticmethod
    def loadSavedModel(ner_model_path, folder, spark_session):
        """Load a saved TF model from disk and wrap it as a FinanceNer."""
        jModel = _FinanceNerModelLoader(ner_model_path, folder,
                                        spark_session._jsparkSession)._java_obj
        return FinanceNer(java_model=jModel)

    @staticmethod
    def pretrained(name="ner_clinical", lang="en", remote_loc="clinical/models"):
        """Download and return a pretrained FinanceNer model."""
        from sparknlp.pretrained import ResourceDownloader
        return ResourceDownloader.downloadModel(FinanceNer, name, lang, remote_loc,
                                                j_dwn='InternalsPythonResourceDownloader')

# from nlu import finance
# finance.FinanceNER
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/finance.py
finance.py
from nlu.universe.feature_node_ids import NLP_NODE_IDS, NLP_HC_NODE_IDS, OCR_NODE_IDS class Spellbook: trainable_models = { # map NLU references to NLP approaches 'train.deep_sentence_detector': NLP_NODE_IDS.TRAINABLE_SENTENCE_DETECTOR_DL, 'train.classifier_dl': NLP_NODE_IDS.TRAINABLE_CLASSIFIER_DL, 'train.classifier': NLP_NODE_IDS.TRAINABLE_CLASSIFIER_DL, # classifier DL alias 'train.named_entity_recognizer_dl': NLP_NODE_IDS.TRAINABLE_NER_DL, 'train.ner': NLP_NODE_IDS.TRAINABLE_NER_DL, # ner DL alias 'train.sentiment_dl': NLP_NODE_IDS.TRAINABLE_SENTIMENT_DL, 'train.sentiment': NLP_NODE_IDS.TRAINABLE_SENTIMENT_DL, # sent DL alias 'train.pos': NLP_NODE_IDS.TRAINABLE_POS, 'train.multi_classifier': NLP_NODE_IDS.TRAINABLE_MULTI_CLASSIFIER_DL, 'train.generic_classifier': NLP_HC_NODE_IDS.TRAINABLE_GENERIC_CLASSIFIER, 'train.resolve_sentence': NLP_HC_NODE_IDS.TRAINABLE_SENTENCE_ENTITY_RESOLVER, 'train.resolve': NLP_HC_NODE_IDS.TRAINABLE_SENTENCE_ENTITY_RESOLVER, 'train.assertion': NLP_HC_NODE_IDS.TRAINABLE_ASSERTION_DL, 'train.assert': NLP_HC_NODE_IDS.TRAINABLE_ASSERTION_DL, 'train.assertion_dl': NLP_HC_NODE_IDS.TRAINABLE_ASSERTION_DL, # 'train.sentence_detector': '', # deep sentence detector alias # 'train.symmetric_spell': '', # 'train.context_spell': '', # 'train.spell': '', ## context spell alias # 'train.norvig_spell': '', # 'train.unlabeled_dependency_parser': '', # 'train.labeled_dependency_parser': '', # 'train.vivekn_sentiment': '', } # Reference to all datasets for which we have pretrained models datasets = [] chunk_embeddings = ['embed_sentence'] # The vocabulary of the nlu Namespace. Any of this references give you a model_anno_obj # keys inside a language dict are NLU references and value is the name in SparkNLP component_alias_references = { # references for SparkNLPAnnotators without pretrained models. 
# These are names for NLU components that can be created withouth a language prefix # multi lang pipes 'lang': ('detect_language_375', 'component_list'), # multi lang alias # Aliases 'classify.lang': ('detect_language_375', 'component_list'), # multi lang detector default # eng pipes 'classify': ('analyze_sentiment', 'component_list'), # default classifier 'explain': ('explain_document_ml', 'component_list'), # default explain 'explain.ml': ('explain_document_ml', 'component_list'), 'explain.dl': ('explain_document_dl', 'component_list'), 'ner.conll': ('recognize_entities_dl', 'component_list'), # default ner 'ner.dl': ('recognize_entities_dl', 'component_list'), 'ner.bert': ('recognize_entities_bert', 'component_list'), 'ner': ('onto_recognize_entities_sm', 'component_list'), # default ner.onto 'ner.onto': ('onto_recognize_entities_sm', 'component_list'), # default ner.onto 'ner.onto.sm': ('onto_recognize_entities_sm', 'component_list'), # 'match.text': ('text_matcher', 'model_anno_obj'), # 'match.regex': ('regex_matcher', 'model_anno_obj'), # 'match.date': ('date_matcher', 'model_anno_obj'), # 'match.context': ('context_parser', 'model_anno_obj'), # 'context_parser': ('context_parser', 'model_anno_obj'), 'match.chunks': ('match_chunks', 'component_list'), 'clean.stop': ('clean_stop', 'component_list'), # 'clean.pattern': ('clean_pattern', 'component_list'), 'clean.slang': ('clean_slang', 'component_list'), # 'spell': ('check_spelling','component_list'), # bad spell_checker, 'spell': ('spellcheck_dl', 'model_anno_obj'), # default spell 'sentiment': ('sentimentdl_glove_imdb', 'model_anno_obj'), 'emotion': ('classifierdl_use_emotion', 'model_anno_obj'), # default emotion model_anno_obj 'sentiment.imdb': ('analyze_sentimentdl_use_imdb', 'component_list'), 'sentiment.imdb.use': ('analyze_sentimentdl_use_imdb', 'component_list'), 'sentiment.twitter.use': ('analyze_sentimentdl_use_twitter', 'component_list'), 'sentiment.twitter': ('analyze_sentimentdl_use_twitter', 
'component_list'), 'dependency': ('dependency_conllu', 'model_anno_obj'), 'tokenize': ('spark_nlp_tokenizer', 'model_anno_obj'), # tokenizer rule based model_anno_obj 'stem': ('stemmer', 'model_anno_obj'), # stem rule based model_anno_obj 'norm': ('normalizer', 'model_anno_obj'), # rule based model_anno_obj 'norm_document': ('document_normalizer', 'model_anno_obj'), # rule based model_anno_obj 'chunk': ('default_chunker', 'model_anno_obj'), # rule based model_anno_obj 'embed_chunk': ('chunk_embeddings', 'model_anno_obj'), # rule based model_anno_obj 'ngram': ('ngram', 'model_anno_obj'), # rule based model_anno_obj 'lemma': ('lemma_antbnc', 'model_anno_obj'), # lemma default en 'lemma.antbnc': ('lemma_antbnc', 'model_anno_obj'), 'pos': ('pos_anc', 'model_anno_obj'), # pos default en 'pos.anc': ('pos_anc', 'model_anno_obj'), 'pos.ud_ewt': ('pos_ud_ewt', 'model_anno_obj'), # 'ner.crf' :'ner_crf', # crf not supported in NLU 'ner.dl.glove.6B_100d': ('ner_dl', 'model_anno_obj'), 'ner.dl.bert': ('ner_dl_bert', 'model_anno_obj'), # points ner bert 'ner.onto.glove.6B_100d': ('onto_100', 'model_anno_obj'), 'ner.onto.glove.6B_300d': ('onto_300', 'model_anno_obj'), # this uses multi lang embeds! 
'sentence_detector': ('sentence_detector_dl', 'model_anno_obj'), 'sentence_detector.deep': ('sentence_detector_dl', 'model_anno_obj'), # ALIAS 'sentence_detector.pragmatic': ('pragmatic_sentence_detector', 'model_anno_obj'), 'spell.symmetric': ('spellcheck_sd', 'model_anno_obj'), # TODO erronous 'spell.norivg': ('spellcheck_norvig', 'model_anno_obj'), 'sentiment.vivekn': ('sentiment_vivekn', 'model_anno_obj'), 'dep.untyped.conllu': ('dependency_conllu', 'model_anno_obj'), 'dep.untyped': ('dependency_conllu', 'model_anno_obj'), # default untyped dependency 'dep': ('dependency_typed_conllu', 'model_anno_obj'), # default typed dependency 'dep.typed': ('dependency_typed_conllu', 'model_anno_obj'), # default typed dependency dataset 'dep.typed.conllu': ('dependency_typed_conllu', 'model_anno_obj'), 'stopwords': ('stopwords_en', 'model_anno_obj'), # embeddings models 'embed': ('glove_100d', 'model_anno_obj'), # default overall embed 'glove': ('glove_100d', 'model_anno_obj'), # default glove 'embed.glove': ('glove_100d', 'model_anno_obj'), # default glove en 'embed.glove.100d': ('glove_100d', 'model_anno_obj'), 'bert': ('small_bert_L2_128', 'model_anno_obj'), # default bert 'covidbert': ('covidbert_large_uncased', 'model_anno_obj'), 'embed.bert': ('bert_base_uncased', 'model_anno_obj'), # default bert 'embed.bert_base_uncased': ('bert_base_uncased', 'model_anno_obj'), 'embed.bert_base_cased': ('bert_base_cased', 'model_anno_obj'), 'embed.bert_large_uncased': ('bert_large_uncased', 'model_anno_obj'), 'embed.bert_large_cased': ('bert_large_cased', 'model_anno_obj'), 'biobert': ('biobert_pubmed_base_cased', 'model_anno_obj'), # alias 'embed.biobert': ('biobert_pubmed_base_cased', 'model_anno_obj'), # default bio bert 'embed.biobert_pubmed_base_cased': ('biobert_pubmed_base_cased', 'model_anno_obj'), 'embed.biobert_pubmed_large_cased': ('biobert_pubmed_large_cased', 'model_anno_obj'), 'embed.biobert_pmc_base_cased': ('biobert_pmc_base_cased', 'model_anno_obj'), 
'embed.biobert_pubmed_pmc_base_cased': ('biobert_pubmed_pmc_base_cased', 'model_anno_obj'), 'embed.biobert_clinical_base_cased': ('biobert_clinical_base_cased', 'model_anno_obj'), 'embed.biobert_discharge_base_cased': ('biobert_discharge_base_cased', 'model_anno_obj'), 'elmo': ('elmo', 'model_anno_obj'), 'embed.electra': ('electra_small_uncased', 'model_anno_obj'), 'electra': ('electra_small_uncased', 'model_anno_obj'), 'e2e': ('multiclassifierdl_use_e2e', 'model_anno_obj'), 'embed.elmo': ('elmo', 'model_anno_obj'), 'embed_sentence': ('tfhub_use', 'model_anno_obj'), # default use 'embed_sentence.small_bert_L2_128': ('sent_small_bert_L2_128', 'model_anno_obj'), 'embed_sentence.bert': ('sent_small_bert_L2_128', 'model_anno_obj'), 'embed_sentence.electra': ('sent_electra_small_uncased', 'model_anno_obj'), 'embed_sentence.use': ('tfhub_use', 'model_anno_obj'), # default use 'use': ('tfhub_use', 'model_anno_obj'), # alias 'embed_sentence.tfhub_use': ('tfhub_use', 'model_anno_obj'), 'embed_sentence.use_lg': ('tfhub_use_lg', 'model_anno_obj'), # alias 'embed_sentence.tfhub_use_lg': ('tfhub_use_lg', 'model_anno_obj'), 'albert': ('albert_base_uncased', 'model_anno_obj'), # albert alias en 'embed.albert_base_uncased': ('albert_base_uncased', 'model_anno_obj'), 'embed.albert_large_uncased': ('albert_large_uncased', 'model_anno_obj'), 'embed.albert_xlarge_uncased': ('albert_xlarge_uncased', 'model_anno_obj'), 'embed.albert_xxlarge_uncased': ('albert_xxlarge_uncased', 'model_anno_obj'), 'embed.xlnet': ('xlnet_base_cased', 'model_anno_obj'), # xlnet default en 'xlnet': ('xlnet_base_cased', 'model_anno_obj'), # xlnet alias 'embed.xlnet_base_cased': ('xlnet_base_cased', 'model_anno_obj'), 'embed.xlnet_large_cased': ('xlnet_large_cased', 'model_anno_obj'), # classifiers and sentiment models 'classify.trec6.use': ('classifierdl_use_trec6', 'model_anno_obj'), 'classify.trec50.use': ('classifierdl_use_trec50', 'model_anno_obj'), 'classify.questions': ('classifierdl_use_trec50', 
'model_anno_obj'), 'questions': ('classifierdl_use_trec50', 'model_anno_obj'), 'classify.spam.use': ('classifierdl_use_spam', 'model_anno_obj'), 'classify.fakenews.use': ('classifierdl_use_fakenews', 'model_anno_obj'), 'classify.emotion.use': ('classifierdl_use_emotion', 'model_anno_obj'), 'classify.cyberbullying.use': ('classifierdl_use_cyberbullying', 'model_anno_obj'), 'classify.sarcasm.use': ('classifierdl_use_sarcasm', 'model_anno_obj'), 'sentiment.imdb.glove': ('sentimentdl_glove_imdb', 'model_anno_obj'), 'classify.trec6': ('classifierdl_use_trec6', 'model_anno_obj'), # Alias withouth embedding 'classify.trec50': ('classifierdl_use_trec50', 'model_anno_obj'), # Alias withouth embedding 'classify.spam': ('classifierdl_use_spam', 'model_anno_obj'), # Alias withouth embedding 'spam': ('classifierdl_use_spam', 'model_anno_obj'), # Alias withouth embedding 'toxic': ('multiclassifierdl_use_toxic', 'model_anno_obj'), 'classify.fakenews': ('classifierdl_use_fakenews', 'model_anno_obj'), # Alias withouth embedding 'classify.emotion': ('classifierdl_use_emotion', 'model_anno_obj'), # Alias withouth embedding 'classify.cyberbullying': ('classifierdl_use_cyberbullying', 'model_anno_obj'), # Alias withouth embedding 'cyberbullying': ('classifierdl_use_cyberbullying', 'model_anno_obj'), # Alias withouth embedding 'classify.sarcasm': ('classifierdl_use_sarcasm', 'model_anno_obj'), # Alias withouth embedding 'sarcasm': ('classifierdl_use_sarcasm', 'model_anno_obj'), # Alias withouth embedding 'embed.glove.840B_300': ('glove_840B_300', 'model_anno_obj'), 'yake': ('yake', 'model_anno_obj'), # # 2.7.0 new aliases 't5': ('t5_base', 'model_anno_obj'), 'summarize': ('t5_base', 'model_anno_obj', {'setTask': '"summarize: "'}), 'grammar_correctness': ('t5_base', 'model_anno_obj', {'setTask': '"cola sentence: "'}), 'answer_question': ('t5_base', 'model_anno_obj', {'setTask': '"question: "'}), 'classify.sentiment_t5': ('t5_base', 'model_anno_obj', {'setTask': '"sst2 sentence: "'}), } # 
multi lang models pretrained_pipe_references = { 'da': { 'da.explain': 'explain_document_sm', 'da.explain.sm': 'explain_document_sm', 'da.explain.md': 'explain_document_md', 'da.explain.lg': 'explain_document_lg', 'da.ner': 'entity_recognizer_sm', 'da.ner.sm': 'entity_recognizer_sm', 'da.ner.md': 'entity_recognizer_md', 'da.ner.lg': 'entity_recognizer_lg'}, 'nl': { 'nl.explain': 'explain_document_sm', # default 'nl.explain.sm': 'explain_document_sm', 'nl.explain.md': 'explain_document_md', 'nl.explain.lg': 'explain_document_lg', 'nl.ner': 'entity_recognizer_sm', # default,calling it nl.ner this makes creating actual NER object impossible! 'nl.ner.sm': 'entity_recognizer_sm', 'nl.ner.md': 'entity_recognizer_md', 'nl.ner.lg': 'entity_recognizer_lg', }, 'en': { 'en.classify': 'analyze_sentiment', # default classifier 'en.explain': 'explain_document_ml', # default explain 'en.explain.ml': 'explain_document_ml', 'en.explain.dl': 'explain_document_dl', 'en.ner': 'recognize_entities_dl', # default ner 'en.ner.conll': 'recognize_entities_dl', # default ner 'en.ner.dl': 'recognize_entities_dl', 'en.ner.bert': 'recognize_entities_bert', # 'en.ner.onto': 'onto_recognize_entities_sm', # default ner.onto 'en.ner.onto.sm': 'onto_recognize_entities_sm', 'en.ner.onto.lg': 'onto_recognize_entities_lg', # 'en.match.datetime': 'match_datetime', 'en.match.chunks': 'match_chunks', 'en.clean.stop': 'clean_stop', # 'en.clean.pattern': 'clean_pattern', 'en.clean.slang': 'clean_slang', 'en.spell': 'check_spelling_dl', # dfault spell 'en.spell.dl': 'check_spelling_dl', 'en.spell.context': 'check_spelling_dl', 'en.sentiment': 'analyze_sentiment', 'en.classify.sentiment': 'analyze_sentiment', 'en.sentiment.imdb': 'analyze_sentimentdl_use_imdb', 'en.sentiment.imdb.use': 'analyze_sentimentdl_use_imdb', # 'en.sentiment.twitter.use': 'analyze_sentimentdl_use_twitter', 'en.sentiment.twitter': 'analyze_sentimentdl_use_twitter', # 'en.dependency': 'dependency_parse', # 2.7 'en.ner.onto.bert.base': 
'onto_recognize_entities_bert_base', 'en.ner.onto.bert.large': 'onto_recognize_entities_bert_large', 'en.ner.onto.bert.medium': 'onto_recognize_entities_bert_medium', 'en.ner.onto.bert.mini': 'onto_recognize_entities_bert_mini', 'en.ner.onto.bert.small': 'onto_recognize_entities_bert_small', 'en.ner.onto.bert.tiny': 'onto_recognize_entities_bert_tiny', 'en.ner.onto.electra.base': 'onto_recognize_entities_electra_base', 'en.ner.onto.electra.small': 'onto_recognize_entities_electra_small', # 2.7.1 and 2.7.2 "en.sentiment.glove": "analyze_sentimentdl_glove_imdb", "en.sentiment.glove.imdb": "analyze_sentimentdl_glove_imdb", "en.classify.sentiment.glove.imdb": "analyze_sentimentdl_glove_imdb", "en.classify.sentiment.glove": "analyze_sentimentdl_glove_imdb", "en.classify.trec50.component_list": "classifierdl_use_trec50_pipeline", "en.ner.onto.large": "onto_recognize_entities_electra_large", }, 'fa': { 'fa.ner.dl': 'recognize_entities_dl', }, 'he': { 'he.explain_document': 'explain_document_lg', 'he.explain_document.lg': 'explain_document_lg', }, 'ko': { 'ko.explain_document': 'explain_document_lg', 'ko.explain_document.lg': 'explain_document_lg', }, 'sv': { 'sv.explain': 'explain_document_sm', 'sv.explain.sm': 'explain_document_sm', 'sv.explain.md': 'explain_document_md', 'sv.explain.lg': 'explain_document_lg', 'sv.ner': 'entity_recognizer_sm', 'sv.ner.sm': 'entity_recognizer_sm', 'sv.ner.md': 'entity_recognizer_md', 'sv.ner.lg': 'entity_recognizer_lg'}, 'fi': { 'fi.explain': 'explain_document_sm', 'fi.explain.sm': 'explain_document_sm', 'fi.explain.md': 'explain_document_md', 'fi.explain.lg': 'explain_document_lg', 'fi.ner': 'entity_recognizer_sm', 'fi.ner.sm': 'entity_recognizer_sm', 'fi.ner.md': 'entity_recognizer_md', 'fi.ner.lg': 'entity_recognizer_lg'}, 'fr': { 'fr.explain': 'explain_document_lg', # default fr explain 'fr.explain.lg': 'explain_document_lg', 'fr.explain.md': 'explain_document_md', 'fr.ner': 'entity_recognizer_lg', # default fr ner component_list 
'fr.ner.lg': 'entity_recognizer_lg', 'fr.ner.md': 'entity_recognizer_md', }, 'de': { 'de.explain.document': 'explain_document_md', # default de explain 'de.explain.document.md': 'explain_document_md', 'de.explain.document.lg': 'explain_document_lg', 'de.ner.recognizer': 'entity_recognizer_md', # default de ner 'de.ner.recognizer.md': 'entity_recognizer_md', 'de.ner.recognizer.lg': 'entity_recognizer_lg', }, 'it': { 'it.explain.document': 'explain_document_md', # it default explain 'it.explain.document.md': 'explain_document_md', 'it.explain.document.lg': 'explain_document_lg', 'it.ner': 'entity_recognizer_md', # it default ner 'it.ner.md': 'entity_recognizer_md', 'it.ner.lg': 'entity_recognizer_lg', }, 'no': { 'no.explain': 'explain_document_sm', # default no explain 'no.explain.sm': 'explain_document_sm', 'no.explain.md': 'explain_document_md', 'no.explain.lg': 'explain_document_lg', 'no.ner': 'entity_recognizer_sm', # default no ner 'no.ner.sm': 'entity_recognizer_sm', 'no.ner.md': 'entity_recognizer_md', 'no.ner.lg': 'entity_recognizer_lg', }, 'pl': { 'pl.explain': 'explain_document_sm', # defaul pl explain 'pl.explain.sm': 'explain_document_sm', 'pl.explain.md': 'explain_document_md', 'pl.explain.lg': 'explain_document_lg', 'pl.ner': 'entity_recognizer_sm', # default pl ner 'pl.ner.sm': 'entity_recognizer_sm', 'pl.ner.md': 'entity_recognizer_md', 'pl.ner.lg': 'entity_recognizer_lg', }, 'pt': { 'pt.explain': 'explain_document_sm', # default explain pt 'pt.explain.sm': 'explain_document_sm', 'pt.explain.md': 'explain_document_md', 'pt.explain.lg': 'explain_document_lg', 'pt.ner': 'entity_recognizer_sm', # default ner pt 'pt.ner.sm': 'entity_recognizer_sm', 'pt.ner.md': 'entity_recognizer_md', 'pt.ner.lg': 'entity_recognizer_lg', }, 'ru': { 'ru.explain': 'explain_document_sm', # default ru explain 'ru.explain.sm': 'explain_document_sm', 'ru.explain.md': 'explain_document_md', 'ru.explain.lg': 'explain_document_lg', 'ru.ner': 'entity_recognizer_sm', # default ru 
ner 'ru.ner.sm': 'entity_recognizer_sm', 'ru.ner.md': 'entity_recognizer_md', 'ru.ner.lg': 'entity_recognizer_lg', }, 'es': { 'es.explain': 'explain_document_sm', # es expplain deflaut 'es.explain.sm': 'explain_document_sm', 'es.explain.md': 'explain_document_md', 'es.explain.lg': 'explain_document_lg', 'es.ner': 'entity_recognizer_sm', # es ner default 'es.ner.sm': 'entity_recognizer_sm', 'es.ner.md': 'entity_recognizer_md', 'es.ner.lg': 'entity_recognizer_lg', }, 'xx': { 'lang': 'detect_language_375', # default lang classifer 'lang.7': 'detect_language_7', # multi lang detector alias 'lang.20': 'detect_language_20', # multi lang detector alias 'xx.classify.lang.20': 'detect_language_20', # multi lang detector default 'xx.classify.lang.7': 'detect_language_7', # 2.7 lang classifiers 'xx.classify.lang': 'detect_language_375', # default lang classifer 'xx.classify.lang.21': 'detect_language_21', 'xx.classify.lang.43': 'detect_language_43', 'xx.classify.lang.95': 'detect_language_95', 'xx.classify.lang.99': 'detect_language_99', 'xx.classify.lang.220': 'detect_language_220', 'xx.classify.lang.231': 'detect_language_231', 'xx.classify.lang.bigru': 'detect_language_bigru_21', 'lang.21': 'detect_language_21', 'lang.43': 'detect_language_43', 'lang.95': 'detect_language_95', 'lang.99': 'detect_language_99', 'lang.220': 'detect_language_220', 'lang.231': 'detect_language_231', 'lang.bigru': 'detect_language_bigru_21', # 2.7 marian translate component_list references "xx.zlw.translate_to.en": "translate_zlw_en", "xx.en.translate_to.ti": "translate_en_ti", "xx.bem.translate_to.en": "translate_bem_en", "xx.ny.translate_to.en": "translate_ny_en", "xx.en.translate_to.lu": "translate_en_lu", "xx.taw.translate_to.en": "translate_taw_en", "xx.en.translate_to.ga": "translate_en_ga", "xx.en.translate_to.sw": "translate_en_sw", "xx.war.translate_to.en": "translate_war_en", "xx.en.translate_to.hu": "translate_en_hu", "xx.pqe.translate_to.en": "translate_pqe_en", 
"xx.en.translate_to.bem": "translate_en_bem", "xx.en.translate_to.tiv": "translate_en_tiv", "xx.en.translate_to.tll": "translate_en_tll", "xx.en.translate_to.cpp": "translate_en_cpp", "xx.efi.translate_to.en": "translate_efi_en", "xx.en.translate_to.itc": "translate_en_itc", "xx.uk.translate_to.en": "translate_uk_en", "xx.ee.translate_to.en": "translate_ee_en", "xx.nso.translate_to.en": "translate_nso_en", "xx.urj.translate_to.en": "translate_urj_en", "xx.sv.translate_to.en": "translate_sv_en", "xx.en.translate_to.rn": "translate_en_rn", "xx.nic.translate_to.en": "translate_nic_en", "xx.en.translate_to.bcl": "translate_en_bcl", "xx.en.translate_to.lg": "translate_en_lg", "xx.kwy.translate_to.en": "translate_kwy_en", "xx.en.translate_to.gmq": "translate_en_gmq", "xx.en.translate_to.ts": "translate_en_ts", "xx.bnt.translate_to.en": "translate_bnt_en", "xx.en.translate_to.pis": "translate_en_pis", "xx.kwn.translate_to.en": "translate_kwn_en", "xx.fi.translate_to.en": "translate_fi_en", "xx.en.translate_to.gaa": "translate_en_gaa", "xx.afa.translate_to.en": "translate_afa_en", "xx.itc.translate_to.en": "translate_itc_en", "xx.mh.translate_to.en": "translate_mh_en", "xx.en.translate_to.ln": "translate_en_ln", "xx.en.translate_to.zls": "translate_en_zls", "xx.en.translate_to.cy": "translate_en_cy", "xx.et.translate_to.en": "translate_et_en", "xx.en.translate_to.dra": "translate_en_dra", "xx.en.translate_to.sn": "translate_en_sn", "xx.lua.translate_to.en": "translate_lua_en", "xx.ln.translate_to.en": "translate_ln_en", "xx.ja.translate_to.en": "translate_ja_en", "xx.loz.translate_to.en": "translate_loz_en", "xx.en.translate_to.bi": "translate_en_bi", "xx.mg.translate_to.en": "translate_mg_en", "xx.vi.translate_to.en": "translate_vi_en", "xx.en.translate_to.vi": "translate_en_vi", "xx.hy.translate_to.en": "translate_hy_en", "xx.en.translate_to.mt": "translate_en_mt", "xx.ng.translate_to.en": "translate_ng_en", "xx.mkh.translate_to.en": "translate_mkh_en", 
"xx.en.translate_to.cpf": "translate_en_cpf", "xx.wal.translate_to.en": "translate_wal_en", "xx.en.translate_to.crs": "translate_en_crs", "xx.en.translate_to.zle": "translate_en_zle", "xx.en.translate_to.phi": "translate_en_phi", "xx.ine.translate_to.en": "translate_ine_en", "xx.en.translate_to.pap": "translate_en_pap", "xx.en.translate_to.sit": "translate_en_sit", "xx.bg.translate_to.en": "translate_bg_en", "xx.en.translate_to.ml": "translate_en_ml", "xx.en.translate_to.ss": "translate_en_ss", "xx.en.translate_to.tw": "translate_en_tw", "xx.en.translate_to.gv": "translate_en_gv", "xx.ca.translate_to.en": "translate_ca_en", "xx.umb.translate_to.en": "translate_umb_en", "xx.alv.translate_to.en": "translate_alv_en", "xx.gem.translate_to.en": "translate_gem_en", "xx.chk.translate_to.en": "translate_chk_en", "xx.kqn.translate_to.en": "translate_kqn_en", "xx.en.translate_to.afa": "translate_en_afa", "xx.gl.translate_to.en": "translate_gl_en", "xx.en.translate_to.ber": "translate_en_ber", "xx.en.translate_to.ig": "translate_en_ig", "xx.ase.translate_to.en": "translate_ase_en", "xx.en.translate_to.cs": "translate_en_cs", "xx.en.translate_to.pag": "translate_en_pag", "xx.en.translate_to.nic": "translate_en_nic", "xx.en.translate_to.hil": "translate_en_hil", "xx.en.translate_to.cel": "translate_en_cel", "xx.nl.translate_to.en": "translate_nl_en", "xx.en.translate_to.ho": "translate_en_ho", "xx.en.translate_to.inc": "translate_en_inc", "xx.ts.translate_to.en": "translate_ts_en", "xx.en.translate_to.tl": "translate_en_tl", "xx.ve.translate_to.en": "translate_ve_en", "xx.ceb.translate_to.en": "translate_ceb_en", "xx.en.translate_to.iir": "translate_en_iir", "xx.en.translate_to.aav": "translate_en_aav", "xx.en.translate_to.bat": "translate_en_bat", "xx.en.translate_to.alv": "translate_en_alv", "xx.ar.translate_to.en": "translate_ar_en", "xx.fiu.translate_to.en": "translate_fiu_en", "xx.en.translate_to.eu": "translate_en_eu", "xx.is.translate_to.en": "translate_is_en", 
"xx.wa.translate_to.en": "translate_wa_en", "xx.en.translate_to.tn": "translate_en_tn", "xx.ig.translate_to.en": "translate_ig_en", "xx.luo.translate_to.en": "translate_luo_en", "xx.en.translate_to.kwn": "translate_en_kwn", "xx.niu.translate_to.en": "translate_niu_en", "xx.en.translate_to.gl": "translate_en_gl", "xx.en.translate_to.ilo": "translate_en_ilo", "xx.en.translate_to.ur": "translate_en_ur", "xx.cus.translate_to.en": "translate_cus_en", "xx.phi.translate_to.en": "translate_phi_en", "xx.en.translate_to.loz": "translate_en_loz", "xx.tiv.translate_to.en": "translate_tiv_en", "xx.en.translate_to.id": "translate_en_id", "xx.zle.translate_to.en": "translate_zle_en", "xx.en.translate_to.mfe": "translate_en_mfe", "xx.id.translate_to.en": "translate_id_en", "xx.lv.translate_to.en": "translate_lv_en", "xx.en.translate_to.pon": "translate_en_pon", "xx.en.translate_to.sq": "translate_en_sq", "xx.tum.translate_to.en": "translate_tum_en", "xx.pl.translate_to.en": "translate_pl_en", "xx.xh.translate_to.en": "translate_xh_en", "xx.kab.translate_to.en": "translate_kab_en", "xx.tvl.translate_to.en": "translate_tvl_en", "xx.pa.translate_to.en": "translate_pa_en", "xx.iso.translate_to.en": "translate_iso_en", "xx.ho.translate_to.en": "translate_ho_en", "xx.cel.translate_to.en": "translate_cel_en", "xx.en.translate_to.om": "translate_en_om", "xx.kg.translate_to.en": "translate_kg_en", "xx.en.translate_to.lus": "translate_en_lus", "xx.om.translate_to.en": "translate_om_en", "xx.lun.translate_to.en": "translate_lun_en", "xx.crs.translate_to.en": "translate_crs_en", "xx.cy.translate_to.en": "translate_cy_en", "xx.tll.translate_to.en": "translate_tll_en", "xx.gil.translate_to.en": "translate_gil_en", "xx.en.translate_to.mkh": "translate_en_mkh", "xx.en.translate_to.euq": "translate_en_euq", "xx.en.translate_to.sem": "translate_en_sem", "xx.cs.translate_to.en": "translate_cs_en", "xx.en.translate_to.sk": "translate_en_sk", "xx.en.translate_to.bzs": "translate_en_bzs", 
"xx.en.translate_to.trk": "translate_en_trk", "xx.cpf.translate_to.en": "translate_cpf_en", "xx.bi.translate_to.en": "translate_bi_en", "xx.en.translate_to.mul": "translate_en_mul", "xx.en.translate_to.gmw": "translate_en_gmw", "xx.en.translate_to.fi": "translate_en_fi", "xx.en.translate_to.zlw": "translate_en_zlw", "xx.lg.translate_to.en": "translate_lg_en", "xx.en.translate_to.pqe": "translate_en_pqe", "xx.en.translate_to.xh": "translate_en_xh", "xx.en.translate_to.hi": "translate_en_hi", "xx.en.translate_to.nyk": "translate_en_nyk", "xx.th.translate_to.en": "translate_th_en", "xx.en.translate_to.umb": "translate_en_umb", "xx.en.translate_to.af": "translate_en_af", "xx.tpi.translate_to.en": "translate_tpi_en", "xx.ti.translate_to.en": "translate_ti_en", "xx.en.translate_to.chk": "translate_en_chk", "xx.mos.translate_to.en": "translate_mos_en", "xx.en.translate_to.sm": "translate_en_sm", "xx.pon.translate_to.en": "translate_pon_en", "xx.en.translate_to.bg": "translate_en_bg", "xx.en.translate_to.ny": "translate_en_ny", "xx.kl.translate_to.en": "translate_kl_en", "xx.en.translate_to.hy": "translate_en_hy", "xx.nyk.translate_to.en": "translate_nyk_en", "xx.it.translate_to.en": "translate_it_en", "xx.mt.translate_to.en": "translate_mt_en", "xx.pap.translate_to.en": "translate_pap_en", "xx.srn.translate_to.en": "translate_srn_en", "xx.da.translate_to.en": "translate_da_en", "xx.en.translate_to.lue": "translate_en_lue", "xx.rn.translate_to.en": "translate_rn_en", "xx.en.translate_to.tut": "translate_en_tut", "xx.lu.translate_to.en": "translate_lu_en", "xx.ru.translate_to.en": "translate_ru_en", "xx.en.translate_to.toi": "translate_en_toi", "xx.ccs.translate_to.en": "translate_ccs_en", "xx.aav.translate_to.en": "translate_aav_en", "xx.en.translate_to.ha": "translate_en_ha", "xx.rnd.translate_to.en": "translate_rnd_en", "xx.de.translate_to.en": "translate_de_en", "xx.en.translate_to.luo": "translate_en_luo", "xx.fr.translate_to.en": "translate_fr_en", 
"xx.bcl.translate_to.en": "translate_bcl_en", "xx.ilo.translate_to.en": "translate_ilo_en", "xx.en.translate_to.jap": "translate_en_jap", "xx.en.translate_to.fj": "translate_en_fj", "xx.sk.translate_to.en": "translate_sk_en", "xx.bzs.translate_to.en": "translate_bzs_en", "xx.ka.translate_to.en": "translate_ka_en", "xx.ko.translate_to.en": "translate_ko_en", "xx.sq.translate_to.en": "translate_sq_en", "xx.mul.translate_to.en": "translate_mul_en", "xx.en.translate_to.run": "translate_en_run", "xx.sn.translate_to.en": "translate_sn_en", "xx.en.translate_to.pqw": "translate_en_pqw", "xx.ss.translate_to.en": "translate_ss_en", "xx.sm.translate_to.en": "translate_sm_en", "xx.en.translate_to.kwy": "translate_en_kwy", "xx.jap.translate_to.en": "translate_jap_en", "xx.en.translate_to.kqn": "translate_en_kqn", "xx.mk.translate_to.en": "translate_mk_en", "xx.hu.translate_to.en": "translate_hu_en", "xx.en.translate_to.map": "translate_en_map", "xx.yo.translate_to.en": "translate_yo_en", "xx.hi.translate_to.en": "translate_hi_en", "xx.iir.translate_to.en": "translate_iir_en", "xx.en.translate_to.guw": "translate_en_guw", "xx.en.translate_to.es": "translate_en_es", "xx.en.translate_to.gem": "translate_en_gem", "xx.en.translate_to.ht": "translate_en_ht", "xx.zls.translate_to.en": "translate_zls_en", "xx.sg.translate_to.en": "translate_sg_en", "xx.en.translate_to.ty": "translate_en_ty", "xx.en.translate_to.lun": "translate_en_lun", "xx.guw.translate_to.en": "translate_guw_en", "xx.trk.translate_to.en": "translate_trk_en", "xx.mfe.translate_to.en": "translate_mfe_en", "xx.en.translate_to.nl": "translate_en_nl", "xx.en.translate_to.sv": "translate_en_sv", "xx.ber.translate_to.en": "translate_ber_en", "xx.to.translate_to.en": "translate_to_en", "xx.en.translate_to.da": "translate_en_da", "xx.en.translate_to.urj": "translate_en_urj", "xx.inc.translate_to.en": "translate_inc_en", "xx.wls.translate_to.en": "translate_wls_en", "xx.pis.translate_to.en": "translate_pis_en", 
"xx.en.translate_to.mh": "translate_en_mh", "xx.en.translate_to.iso": "translate_en_iso", "xx.en.translate_to.ru": "translate_en_ru", "xx.swc.translate_to.en": "translate_swc_en", "xx.en.translate_to.rnd": "translate_en_rnd", "xx.en.translate_to.nso": "translate_en_nso", "xx.en.translate_to.swc": "translate_en_swc", "xx.ur.translate_to.en": "translate_ur_en", "xx.en.translate_to.ro": "translate_en_ro", "xx.ml.translate_to.en": "translate_ml_en", "xx.grk.translate_to.en": "translate_grk_en", "xx.rw.translate_to.en": "translate_rw_en", "xx.tr.translate_to.en": "translate_tr_en", "xx.gmq.translate_to.en": "translate_gmq_en", "xx.euq.translate_to.en": "translate_euq_en", "xx.en.translate_to.tdt": "translate_en_tdt", "xx.eo.translate_to.en": "translate_eo_en", "xx.cau.translate_to.en": "translate_cau_en", "xx.en.translate_to.mk": "translate_en_mk", "xx.en.translate_to.mr": "translate_en_mr", "xx.af.translate_to.en": "translate_af_en", "xx.run.translate_to.en": "translate_run_en", "xx.en.translate_to.ng": "translate_en_ng", "xx.en.translate_to.mg": "translate_en_mg", "xx.en.translate_to.bnt": "translate_en_bnt", "xx.en.translate_to.kj": "translate_en_kj", "xx.en.translate_to.he": "translate_en_he", "xx.en.translate_to.sla": "translate_en_sla", "xx.en.translate_to.el": "translate_en_el", "xx.ht.translate_to.en": "translate_ht_en", "xx.en.translate_to.et": "translate_en_et", "xx.en.translate_to.poz": "translate_en_poz", "xx.roa.translate_to.en": "translate_roa_en", "xx.en.translate_to.de": "translate_en_de", "xx.fj.translate_to.en": "translate_fj_en", "xx.en.translate_to.lua": "translate_en_lua", "xx.en.translate_to.kg": "translate_en_kg", "xx.en.translate_to.fiu": "translate_en_fiu", "xx.gv.translate_to.en": "translate_gv_en", "xx.cpp.translate_to.en": "translate_cpp_en", "xx.en.translate_to.tpi": "translate_en_tpi", "xx.en.translate_to.grk": "translate_en_grk", "xx.en.translate_to.sal": "translate_en_sal", "xx.en.translate_to.niu": "translate_en_niu", 
"xx.en.translate_to.ca": "translate_en_ca", "xx.en.translate_to.roa": "translate_en_roa", "xx.sal.translate_to.en": "translate_sal_en", "xx.ha.translate_to.en": "translate_ha_en", "xx.sem.translate_to.en": "translate_sem_en", "xx.tn.translate_to.en": "translate_tn_en", "xx.gaa.translate_to.en": "translate_gaa_en", "xx.en.translate_to.to": "translate_en_to", "xx.en.translate_to.ee": "translate_en_ee", "xx.toi.translate_to.en": "translate_toi_en", "xx.lue.translate_to.en": "translate_lue_en", "xx.en.translate_to.rw": "translate_en_rw", "xx.st.translate_to.en": "translate_st_en", "xx.dra.translate_to.en": "translate_dra_en", "xx.en.translate_to.mos": "translate_en_mos", "xx.eu.translate_to.en": "translate_eu_en", "xx.lus.translate_to.en": "translate_lus_en", "xx.sla.translate_to.en": "translate_sla_en", "xx.en.translate_to.ceb": "translate_en_ceb", "xx.art.translate_to.en": "translate_art_en", "xx.bat.translate_to.en": "translate_bat_en", "xx.az.translate_to.en": "translate_az_en", "xx.en.translate_to.ine": "translate_en_ine", "xx.pag.translate_to.en": "translate_pag_en", "xx.yap.translate_to.en": "translate_yap_en", "xx.en.translate_to.eo": "translate_en_eo", "xx.en.translate_to.tvl": "translate_en_tvl", "xx.kj.translate_to.en": "translate_kj_en", "xx.en.translate_to.st": "translate_en_st", "xx.gmw.translate_to.en": "translate_gmw_en", "xx.mr.translate_to.en": "translate_mr_en", "xx.es.translate_to.en": "translate_es_en", "xx.en.translate_to.sg": "translate_en_sg", "xx.en.translate_to.cus": "translate_en_cus", "xx.en.translate_to.it": "translate_en_it", "xx.ga.translate_to.en": "translate_ga_en", "xx.bn.translate_to.en": "translate_bn_en", "xx.en.translate_to.efi": "translate_en_efi", "xx.en.translate_to.az": "translate_en_az", "xx.en.translate_to.zh": "translate_en_zh", "xx.en.translate_to.is": "translate_en_is", "xx.zh.translate_to.en": "translate_zh_en", "xx.hil.translate_to.en": "translate_hil_en", "xx.en.translate_to.ar": "translate_en_ar", 
"xx.tl.translate_to.en": "translate_tl_en", "xx.en.translate_to.gil": "translate_en_gil", "xx.en.translate_to.uk": "translate_en_uk", "xx.en.translate_to.fr": "translate_en_fr", # 3.1 Translator Pipe references "xx.es.translate_to.af": "translate_af_es", "xx.nl.translate_to.af": "translate_af_nl", "xx.eo.translate_to.af": "translate_af_eo", "xx.afa.translate_to.afa": "translate_afa_afa", "xx.sv.translate_to.af": "translate_af_sv", "xx.es.translate_to.aed": "translate_aed_es", "xx.fr.translate_to.af": "translate_af_fr", "xx.fi.translate_to.af": "translate_af_fi", "xx.de.translate_to.af": "translate_af_de", "xx.ru.translate_to.af": "translate_af_ru", "xx.es.translate_to.az": "translate_az_es", "xx.de.translate_to.bcl": "translate_bcl_de", "xx.sv.translate_to.bem": "translate_bem_sv", "xx.tr.translate_to.az": "translate_az_tr", "xx.sv.translate_to.bcl": "translate_bcl_sv", "xx.es.translate_to.ar": "translate_ar_es", "xx.es.translate_to.bem": "translate_bem_es", "xx.ru.translate_to.ar": "translate_ar_ru", "xx.es.translate_to.be": "translate_be_es", "xx.fr.translate_to.bem": "translate_bem_fr", "xx.he.translate_to.ar": "translate_ar_he", "xx.es.translate_to.bcl": "translate_bcl_es", "xx.es.translate_to.ase": "translate_ase_es", "xx.de.translate_to.ar": "translate_ar_de", "xx.pl.translate_to.ar": "translate_ar_pl", "xx.tr.translate_to.ar": "translate_ar_tr", "xx.sv.translate_to.ase": "translate_ase_sv", "xx.fi.translate_to.bcl": "translate_bcl_fi", "xx.el.translate_to.ar": "translate_ar_el", "xx.fr.translate_to.bcl": "translate_bcl_fr", "xx.fi.translate_to.bem": "translate_bem_fi", "xx.fr.translate_to.ase": "translate_ase_fr", "xx.fr.translate_to.ar": "translate_ar_fr", "xx.eo.translate_to.ar": "translate_ar_eo", "xx.it.translate_to.ar": "translate_ar_it", "xx.sv.translate_to.am": "translate_am_sv", "xx.de.translate_to.ase": "translate_ase_de", "xx.uk.translate_to.bg": "translate_bg_uk", "xx.it.translate_to.bg": "translate_bg_it", "xx.sv.translate_to.bzs": 
"translate_bzs_sv", "xx.pt.translate_to.ca": "translate_ca_pt", "xx.es.translate_to.ber": "translate_ber_es", "xx.it.translate_to.ca": "translate_ca_it", "xx.eo.translate_to.bg": "translate_bg_eo", "xx.sv.translate_to.ceb": "translate_ceb_sv", "xx.fr.translate_to.bi": "translate_bi_fr", "xx.sv.translate_to.bg": "translate_bg_sv", "xx.fr.translate_to.ca": "translate_ca_fr", "xx.tr.translate_to.bg": "translate_bg_tr", "xx.es.translate_to.ceb": "translate_ceb_es", "xx.de.translate_to.ca": "translate_ca_de", "xx.fi.translate_to.ceb": "translate_ceb_fi", "xx.es.translate_to.ca": "translate_ca_es", "xx.es.translate_to.bg": "translate_bg_es", "xx.uk.translate_to.ca": "translate_ca_uk", "xx.sv.translate_to.bi": "translate_bi_sv", "xx.sv.translate_to.chk": "translate_chk_sv", "xx.fr.translate_to.ceb": "translate_ceb_fr", "xx.es.translate_to.bzs": "translate_bzs_es", "xx.de.translate_to.crs": "translate_crs_de", "xx.nl.translate_to.ca": "translate_ca_nl", "xx.es.translate_to.chk": "translate_chk_es", "xx.fr.translate_to.ber": "translate_ber_fr", "xx.fi.translate_to.bzs": "translate_bzs_fi", "xx.es.translate_to.crs": "translate_crs_es", "xx.fi.translate_to.bg": "translate_bg_fi", "xx.cpp.translate_to.cpp": "translate_cpp_cpp", "xx.de.translate_to.bg": "translate_bg_de", "xx.es.translate_to.bi": "translate_bi_es", "xx.fr.translate_to.bzs": "translate_bzs_fr", "xx.fr.translate_to.bg": "translate_bg_fr", "xx.fr.translate_to.chk": "translate_chk_fr", "xx.ru.translate_to.bg": "translate_bg_ru", "xx.fi.translate_to.cs": "translate_cs_fi", "xx.ha.translate_to.de": "translate_de_ha", "xx.ee.translate_to.de": "translate_de_ee", "xx.eo.translate_to.de": "translate_de_eo", "xx.gil.translate_to.de": "translate_de_gil", "xx.fj.translate_to.de": "translate_de_fj", "xx.fr.translate_to.de": "translate_de_fr", "xx.sv.translate_to.cs": "translate_cs_sv", "xx.es.translate_to.csn": "translate_csn_es", "xx.ru.translate_to.da": "translate_da_ru", "xx.no.translate_to.da": "translate_da_no", 
"xx.iso.translate_to.de": "translate_de_iso", "xx.eu.translate_to.de": "translate_de_eu", "xx.nl.translate_to.de": "translate_de_nl", "xx.ilo.translate_to.de": "translate_de_ilo", "xx.hr.translate_to.de": "translate_de_hr", "xx.mt.translate_to.de": "translate_de_mt", "xx.es.translate_to.da": "translate_da_es", "xx.ar.translate_to.de": "translate_de_ar", "xx.is.translate_to.de": "translate_de_is", "xx.sv.translate_to.crs": "translate_crs_sv", "xx.fr.translate_to.da": "translate_da_fr", "xx.gaa.translate_to.de": "translate_de_gaa", "xx.niu.translate_to.de": "translate_de_niu", "xx.da.translate_to.de": "translate_de_da", "xx.de.translate_to.da": "translate_da_de", "xx.ase.translate_to.de": "translate_de_ase", "xx.ig.translate_to.de": "translate_de_ig", "xx.lua.translate_to.de": "translate_de_lua", "xx.de.translate_to.de": "translate_de_de", "xx.bi.translate_to.de": "translate_de_bi", "xx.fr.translate_to.cs": "translate_cs_fr", "xx.ms.translate_to.de": "translate_de_ms", "xx.fi.translate_to.crs": "translate_crs_fi", "xx.eo.translate_to.da": "translate_da_eo", "xx.af.translate_to.de": "translate_de_af", "xx.uk.translate_to.cs": "translate_cs_uk", "xx.bg.translate_to.de": "translate_de_bg", "xx.no.translate_to.de": "translate_de_no", "xx.de.translate_to.cs": "translate_cs_de", "xx.it.translate_to.de": "translate_de_it", "xx.ho.translate_to.de": "translate_de_ho", "xx.ln.translate_to.de": "translate_de_ln", "xx.guw.translate_to.de": "translate_de_guw", "xx.efi.translate_to.de": "translate_de_efi", "xx.hil.translate_to.de": "translate_de_hil", "xx.cs.translate_to.de": "translate_de_cs", "xx.es.translate_to.csg": "translate_csg_es", "xx.es.translate_to.de": "translate_de_es", "xx.bcl.translate_to.de": "translate_de_bcl", "xx.ht.translate_to.de": "translate_de_ht", "xx.loz.translate_to.de": "translate_de_loz", "xx.kg.translate_to.de": "translate_de_kg", "xx.eo.translate_to.cs": "translate_cs_eo", "xx.el.translate_to.de": "translate_de_el", "xx.fi.translate_to.de": 
"translate_de_fi", "xx.he.translate_to.de": "translate_de_he", "xx.bzs.translate_to.de": "translate_de_bzs", "xx.fr.translate_to.crs": "translate_crs_fr", "xx.crs.translate_to.de": "translate_de_crs", "xx.fi.translate_to.da": "translate_da_fi", "xx.hu.translate_to.de": "translate_de_hu", "xx.et.translate_to.de": "translate_de_et", "xx.lt.translate_to.de": "translate_de_lt", "xx.ca.translate_to.de": "translate_de_ca", "xx.pl.translate_to.de": "translate_de_pl", "xx.sv.translate_to.el": "translate_el_sv", "xx.de.translate_to.ee": "translate_ee_de", "xx.pag.translate_to.de": "translate_de_pag", "xx.ar.translate_to.el": "translate_el_ar", "xx.nso.translate_to.de": "translate_de_nso", "xx.pon.translate_to.de": "translate_de_pon", "xx.pap.translate_to.de": "translate_de_pap", "xx.fr.translate_to.efi": "translate_efi_fr", "xx.pis.translate_to.de": "translate_de_pis", "xx.de.translate_to.efi": "translate_efi_de", "xx.eo.translate_to.el": "translate_el_eo", "xx.fi.translate_to.ee": "translate_ee_fi", "xx.es.translate_to.ee": "translate_ee_es", "xx.fr.translate_to.ee": "translate_ee_fr", "xx.fi.translate_to.efi": "translate_efi_fi", "xx.fr.translate_to.el": "translate_el_fr", "xx.tl.translate_to.de": "translate_de_tl", "xx.ny.translate_to.de": "translate_de_ny", "xx.uk.translate_to.de": "translate_de_uk", "xx.sv.translate_to.efi": "translate_efi_sv", "xx.sv.translate_to.ee": "translate_ee_sv", "xx.vi.translate_to.de": "translate_de_vi", "xx.fi.translate_to.el": "translate_el_fi", "xx.cs.translate_to.eo": "translate_eo_cs", "xx.bzs.translate_to.es": "translate_es_bzs", "xx.he.translate_to.eo": "translate_eo_he", "xx.hu.translate_to.eo": "translate_eo_hu", "xx.ro.translate_to.eo": "translate_eo_ro", "xx.ber.translate_to.es": "translate_es_ber", "xx.ca.translate_to.es": "translate_es_ca", "xx.bcl.translate_to.es": "translate_es_bcl", "xx.ceb.translate_to.es": "translate_es_ceb", "xx.da.translate_to.eo": "translate_eo_da", "xx.bi.translate_to.es": "translate_es_bi", 
"xx.ee.translate_to.es": "translate_es_ee", "xx.ru.translate_to.eo": "translate_eo_ru", "xx.csg.translate_to.es": "translate_es_csg", "xx.fi.translate_to.eo": "translate_eo_fi", "xx.it.translate_to.eo": "translate_eo_it", "xx.nl.translate_to.eo": "translate_eo_nl", "xx.et.translate_to.es": "translate_es_et", "xx.bg.translate_to.es": "translate_es_bg", "xx.de.translate_to.eo": "translate_eo_de", "xx.ar.translate_to.es": "translate_es_ar", "xx.cs.translate_to.es": "translate_es_cs", "xx.aed.translate_to.es": "translate_es_aed", "xx.ase.translate_to.es": "translate_es_ase", "xx.el.translate_to.es": "translate_es_el", "xx.eo.translate_to.es": "translate_es_eo", "xx.af.translate_to.eo": "translate_eo_af", "xx.af.translate_to.es": "translate_es_af", "xx.pl.translate_to.eo": "translate_eo_pl", "xx.de.translate_to.es": "translate_es_de", "xx.es.translate_to.eo": "translate_eo_es", "xx.da.translate_to.es": "translate_es_da", "xx.crs.translate_to.es": "translate_es_crs", "xx.pt.translate_to.eo": "translate_eo_pt", "xx.eu.translate_to.es": "translate_es_eu", "xx.es.translate_to.es": "translate_es_es", "xx.csn.translate_to.es": "translate_es_csn", "xx.sv.translate_to.eo": "translate_eo_sv", "xx.efi.translate_to.es": "translate_es_efi", "xx.sh.translate_to.eo": "translate_eo_sh", "xx.bg.translate_to.eo": "translate_eo_bg", "xx.fr.translate_to.eo": "translate_eo_fr", "xx.el.translate_to.eo": "translate_eo_el", "xx.pl.translate_to.es": "translate_es_pl", "xx.ro.translate_to.es": "translate_es_ro", "xx.is.translate_to.es": "translate_es_is", "xx.ln.translate_to.es": "translate_es_ln", "xx.to.translate_to.es": "translate_es_to", "xx.no.translate_to.es": "translate_es_no", "xx.nl.translate_to.es": "translate_es_nl", "xx.pag.translate_to.es": "translate_es_pag", "xx.tvl.translate_to.es": "translate_es_tvl", "xx.fr.translate_to.es": "translate_es_fr", "xx.he.translate_to.es": "translate_es_he", "xx.lus.translate_to.es": "translate_es_lus", "xx.hil.translate_to.es": "translate_es_hil", 
"xx.ny.translate_to.es": "translate_es_ny", "xx.pap.translate_to.es": "translate_es_pap", "xx.id.translate_to.es": "translate_es_id", "xx.wls.translate_to.es": "translate_es_wls", "xx.gaa.translate_to.es": "translate_es_gaa", "xx.nso.translate_to.es": "translate_es_nso", "xx.mk.translate_to.es": "translate_es_mk", "xx.mt.translate_to.es": "translate_es_mt", "xx.pis.translate_to.es": "translate_es_pis", "xx.gl.translate_to.es": "translate_es_gl", "xx.sn.translate_to.es": "translate_es_sn", "xx.hr.translate_to.es": "translate_es_hr", "xx.swc.translate_to.es": "translate_es_swc", "xx.lua.translate_to.es": "translate_es_lua", "xx.it.translate_to.es": "translate_es_it", "xx.fj.translate_to.es": "translate_es_fj", "xx.gil.translate_to.es": "translate_es_gil", "xx.sm.translate_to.es": "translate_es_sm", "xx.guw.translate_to.es": "translate_es_guw", "xx.kg.translate_to.es": "translate_es_kg", "xx.tl.translate_to.es": "translate_es_tl", "xx.rn.translate_to.es": "translate_es_rn", "xx.mfs.translate_to.es": "translate_es_mfs", "xx.iso.translate_to.es": "translate_es_iso", "xx.loz.translate_to.es": "translate_es_loz", "xx.tpi.translate_to.es": "translate_es_tpi", "xx.ha.translate_to.es": "translate_es_ha", "xx.ht.translate_to.es": "translate_es_ht", "xx.uk.translate_to.es": "translate_es_uk", "xx.tw.translate_to.es": "translate_es_tw", "xx.st.translate_to.es": "translate_es_st", "xx.sg.translate_to.es": "translate_es_sg", "xx.ilo.translate_to.es": "translate_es_ilo", "xx.ru.translate_to.es": "translate_es_ru", "xx.yo.translate_to.es": "translate_es_yo", "xx.pon.translate_to.es": "translate_es_pon", "xx.niu.translate_to.es": "translate_es_niu", "xx.lt.translate_to.es": "translate_es_lt", "xx.ty.translate_to.es": "translate_es_ty", "xx.ig.translate_to.es": "translate_es_ig", "xx.tzo.translate_to.es": "translate_es_tzo", "xx.rw.translate_to.es": "translate_es_rw", "xx.war.translate_to.es": "translate_es_war", "xx.tll.translate_to.es": "translate_es_tll", "xx.prl.translate_to.es": 
"translate_es_prl", "xx.xh.translate_to.es": "translate_es_xh", "xx.yua.translate_to.es": "translate_es_yua", "xx.ho.translate_to.es": "translate_es_ho", "xx.ve.translate_to.es": "translate_es_ve", "xx.sl.translate_to.es": "translate_es_sl", "xx.tn.translate_to.es": "translate_es_tn", "xx.vi.translate_to.es": "translate_es_vi", "xx.srn.translate_to.es": "translate_es_srn", "xx.fi.translate_to.es": "translate_es_fi", "xx.lua.translate_to.fi": "translate_fi_lua", "xx.ny.translate_to.fi": "translate_fi_ny", "xx.pon.translate_to.fi": "translate_fi_pon", "xx.crs.translate_to.fi": "translate_fi_crs", "xx.nso.translate_to.fi": "translate_fi_nso", "xx.iso.translate_to.fi": "translate_fi_iso", "xx.kqn.translate_to.fi": "translate_fi_kqn", "xx.gaa.translate_to.fi": "translate_fi_gaa", "xx.ru.translate_to.eu": "translate_eu_ru", "xx.eo.translate_to.fi": "translate_fi_eo", "xx.ig.translate_to.fi": "translate_fi_ig", "xx.bem.translate_to.fi": "translate_fi_bem", "xx.es.translate_to.et": "translate_et_es", "xx.fj.translate_to.fi": "translate_fi_fj", "xx.et.translate_to.fi": "translate_fi_et", "xx.bcl.translate_to.fi": "translate_fi_bcl", "xx.fi.translate_to.fi": "translate_fi_fi", "xx.el.translate_to.fi": "translate_fi_el", "xx.efi.translate_to.fi": "translate_fi_efi", "xx.ht.translate_to.fi": "translate_fi_ht", "xx.ceb.translate_to.fi": "translate_fi_ceb", "xx.lg.translate_to.fi": "translate_fi_lg", "xx.pap.translate_to.fi": "translate_fi_pap", "xx.kg.translate_to.fi": "translate_fi_kg", "xx.ee.translate_to.fi": "translate_fi_ee", "xx.lv.translate_to.fi": "translate_fi_lv", "xx.fr.translate_to.et": "translate_et_fr", "xx.de.translate_to.et": "translate_et_de", "xx.bzs.translate_to.fi": "translate_fi_bzs", "xx.mos.translate_to.fi": "translate_fi_mos", "xx.zh.translate_to.es": "translate_es_zh", "xx.id.translate_to.fi": "translate_fi_id", "xx.gil.translate_to.fi": "translate_fi_gil", "xx.pis.translate_to.fi": "translate_fi_pis", "xx.no.translate_to.fi": "translate_fi_no", 
"xx.it.translate_to.fi": "translate_fi_it", "xx.es.translate_to.fi": "translate_fi_es", "xx.ha.translate_to.fi": "translate_fi_ha", "xx.fr.translate_to.fi": "translate_fi_fr", "xx.de.translate_to.fi": "translate_fi_de", "xx.bg.translate_to.fi": "translate_fi_bg", "xx.zai.translate_to.es": "translate_es_zai", "xx.hil.translate_to.fi": "translate_fi_hil", "xx.cs.translate_to.fi": "translate_fi_cs", "xx.es.translate_to.eu": "translate_eu_es", "xx.ilo.translate_to.fi": "translate_fi_ilo", "xx.pag.translate_to.fi": "translate_fi_pag", "xx.ln.translate_to.fi": "translate_fi_ln", "xx.sv.translate_to.et": "translate_et_sv", "xx.niu.translate_to.fi": "translate_fi_niu", "xx.hr.translate_to.fi": "translate_fi_hr", "xx.de.translate_to.eu": "translate_eu_de", "xx.lus.translate_to.fi": "translate_fi_lus", "xx.ru.translate_to.et": "translate_et_ru", "xx.af.translate_to.fi": "translate_fi_af", "xx.mh.translate_to.fi": "translate_fi_mh", "xx.guw.translate_to.fi": "translate_fi_guw", "xx.mfe.translate_to.fi": "translate_fi_mfe", "xx.ho.translate_to.fi": "translate_fi_ho", "xx.fse.translate_to.fi": "translate_fi_fse", "xx.lu.translate_to.fi": "translate_fi_lu", "xx.hu.translate_to.fi": "translate_fi_hu", "xx.mk.translate_to.fi": "translate_fi_mk", "xx.nl.translate_to.fi": "translate_fi_nl", "xx.mg.translate_to.fi": "translate_fi_mg", "xx.mt.translate_to.fi": "translate_fi_mt", "xx.he.translate_to.fi": "translate_fi_he", "xx.fi.translate_to.et": "translate_et_fi", "xx.is.translate_to.fi": "translate_fi_is", "xx.lue.translate_to.fi": "translate_fi_lue", "xx.guw.translate_to.fr": "translate_fr_guw", "xx.ber.translate_to.fr": "translate_fr_ber", "xx.uk.translate_to.fi": "translate_fi_uk", "xx.efi.translate_to.fr": "translate_fr_efi", "xx.tr.translate_to.fi": "translate_fi_tr", "xx.tn.translate_to.fi": "translate_fi_tn", "xx.es.translate_to.fr": "translate_fr_es", "xx.srn.translate_to.fi": "translate_fi_srn", "xx.bcl.translate_to.fr": "translate_fr_bcl", "xx.sl.translate_to.fi": 
"translate_fi_sl", "xx.ht.translate_to.fr": "translate_fr_ht", "xx.zne.translate_to.fi": "translate_fi_zne", "xx.de.translate_to.fr": "translate_fr_de", "xx.war.translate_to.fi": "translate_fi_war", "xx.tpi.translate_to.fi": "translate_fi_tpi", "xx.ca.translate_to.fr": "translate_fr_ca", "xx.yap.translate_to.fi": "translate_fi_yap", "xx.sn.translate_to.fi": "translate_fi_sn", "xx.hr.translate_to.fr": "translate_fr_hr", "xx.gil.translate_to.fr": "translate_fr_gil", "xx.id.translate_to.fr": "translate_fr_id", "xx.sv.translate_to.fi": "translate_fi_sv", "xx.toi.translate_to.fi": "translate_fi_toi", "xx.sk.translate_to.fi": "translate_fi_sk", "xx.he.translate_to.fr": "translate_fr_he", "xx.sq.translate_to.fi": "translate_fi_sq", "xx.ve.translate_to.fi": "translate_fi_ve", "xx.tw.translate_to.fi": "translate_fi_tw", "xx.tvl.translate_to.fi": "translate_fi_tvl", "xx.hil.translate_to.fr": "translate_fr_hil", "xx.sw.translate_to.fi": "translate_fi_sw", "xx.eo.translate_to.fr": "translate_fr_eo", "xx.xh.translate_to.fi": "translate_fi_xh", "xx.bi.translate_to.fr": "translate_fr_bi", "xx.ru.translate_to.fi": "translate_fi_ru", "xx.ceb.translate_to.fr": "translate_fr_ceb", "xx.ig.translate_to.fr": "translate_fr_ig", "xx.el.translate_to.fr": "translate_fr_el", "xx.sm.translate_to.fi": "translate_fi_sm", "xx.to.translate_to.fi": "translate_fi_to", "xx.ase.translate_to.fr": "translate_fr_ase", "xx.yo.translate_to.fi": "translate_fi_yo", "xx.sg.translate_to.fi": "translate_fi_sg", "xx.rw.translate_to.fi": "translate_fi_rw", "xx.ts.translate_to.fi": "translate_fi_ts", "xx.wls.translate_to.fi": "translate_fi_wls", "xx.ho.translate_to.fr": "translate_fr_ho", "xx.tll.translate_to.fi": "translate_fi_tll", "xx.st.translate_to.fi": "translate_fi_st", "xx.fiu.translate_to.fiu": "translate_fiu_fiu", "xx.ro.translate_to.fi": "translate_fi_ro", "xx.tiv.translate_to.fi": "translate_fi_tiv", "xx.ha.translate_to.fr": "translate_fr_ha", "xx.ee.translate_to.fr": "translate_fr_ee", 
"xx.gaa.translate_to.fr": "translate_fr_gaa", "xx.hu.translate_to.fr": "translate_fr_hu", "xx.ty.translate_to.fi": "translate_fi_ty", "xx.fr.translate_to.fj": "translate_fj_fr", "xx.run.translate_to.fi": "translate_fi_run", "xx.bem.translate_to.fr": "translate_fr_bem", "xx.bzs.translate_to.fr": "translate_fr_bzs", "xx.fj.translate_to.fr": "translate_fr_fj", "xx.ar.translate_to.fr": "translate_fr_ar", "xx.swc.translate_to.fi": "translate_fi_swc", "xx.crs.translate_to.fr": "translate_fr_crs", "xx.bg.translate_to.fr": "translate_fr_bg", "xx.af.translate_to.fr": "translate_fr_af", "xx.loz.translate_to.fr": "translate_fr_loz", "xx.st.translate_to.fr": "translate_fr_st", "xx.tn.translate_to.fr": "translate_fr_tn", "xx.srn.translate_to.fr": "translate_fr_srn", "xx.to.translate_to.fr": "translate_fr_to", "xx.sk.translate_to.fr": "translate_fr_sk", "xx.tum.translate_to.fr": "translate_fr_tum", "xx.ts.translate_to.fr": "translate_fr_ts", "xx.iso.translate_to.fr": "translate_fr_iso", "xx.sv.translate_to.fr": "translate_fr_sv", "xx.mt.translate_to.fr": "translate_fr_mt", "xx.pap.translate_to.fr": "translate_fr_pap", "xx.wls.translate_to.fr": "translate_fr_wls", "xx.lua.translate_to.fr": "translate_fr_lua", "xx.ro.translate_to.fr": "translate_fr_ro", "xx.tll.translate_to.fr": "translate_fr_tll", "xx.ilo.translate_to.fr": "translate_fr_ilo", "xx.ve.translate_to.fr": "translate_fr_ve", "xx.ny.translate_to.fr": "translate_fr_ny", "xx.tpi.translate_to.fr": "translate_fr_tpi", "xx.uk.translate_to.fr": "translate_fr_uk", "xx.ln.translate_to.fr": "translate_fr_ln", "xx.mfe.translate_to.fr": "translate_fr_mfe", "xx.lue.translate_to.fr": "translate_fr_lue", "xx.mos.translate_to.fr": "translate_fr_mos", "xx.pon.translate_to.fr": "translate_fr_pon", "xx.tvl.translate_to.fr": "translate_fr_tvl", "xx.run.translate_to.fr": "translate_fr_run", "xx.pag.translate_to.fr": "translate_fr_pag", "xx.sg.translate_to.fr": "translate_fr_sg", "xx.no.translate_to.fr": "translate_fr_no", 
"xx.ty.translate_to.fr": "translate_fr_ty", "xx.tl.translate_to.fr": "translate_fr_tl", "xx.sl.translate_to.fr": "translate_fr_sl", "xx.tiv.translate_to.fr": "translate_fr_tiv", "xx.rw.translate_to.fr": "translate_fr_rw", "xx.lus.translate_to.fr": "translate_fr_lus", "xx.swc.translate_to.fr": "translate_fr_swc", "xx.sm.translate_to.fr": "translate_fr_sm", "xx.pl.translate_to.fr": "translate_fr_pl", "xx.kg.translate_to.fr": "translate_fr_kg", "xx.niu.translate_to.fr": "translate_fr_niu", "xx.lg.translate_to.fr": "translate_fr_lg", "xx.ms.translate_to.fr": "translate_fr_ms", "xx.nso.translate_to.fr": "translate_fr_nso", "xx.war.translate_to.fr": "translate_fr_war", "xx.xh.translate_to.fr": "translate_fr_xh", "xx.pis.translate_to.fr": "translate_fr_pis", "xx.tw.translate_to.fr": "translate_fr_tw", "xx.kwy.translate_to.fr": "translate_fr_kwy", "xx.rnd.translate_to.fr": "translate_fr_rnd", "xx.vi.translate_to.fr": "translate_fr_vi", "xx.lu.translate_to.fr": "translate_fr_lu", "xx.mh.translate_to.fr": "translate_fr_mh", "xx.ru.translate_to.fr": "translate_fr_ru", "xx.sn.translate_to.fr": "translate_fr_sn", "xx.kqn.translate_to.fr": "translate_fr_kqn", "xx.ar.translate_to.he": "translate_he_ar", "xx.de.translate_to.he": "translate_he_de", "xx.es.translate_to.gil": "translate_gil_es", "xx.de.translate_to.gaa": "translate_gaa_de", "xx.fr.translate_to.hu": "translate_hu_fr", "xx.fr.translate_to.gil": "translate_gil_fr", "xx.de.translate_to.guw": "translate_guw_de", "xx.fr.translate_to.ht": "translate_ht_fr", "xx.uk.translate_to.he": "translate_he_uk", "xx.fi.translate_to.hu": "translate_hu_fi", "xx.uk.translate_to.hu": "translate_hu_uk", "xx.zne.translate_to.fr": "translate_fr_zne", "xx.sv.translate_to.gaa": "translate_gaa_sv", "xx.es.translate_to.guw": "translate_guw_es", "xx.gmq.translate_to.gmq": "translate_gmq_gmq", "xx.fi.translate_to.hil": "translate_hil_fi", "xx.fi.translate_to.guw": "translate_guw_fi", "xx.es.translate_to.he": "translate_he_es", 
"xx.ur.translate_to.hi": "translate_hi_ur", "xx.de.translate_to.hil": "translate_hil_de", "xx.gmw.translate_to.gmw": "translate_gmw_gmw", "xx.fi.translate_to.gaa": "translate_gaa_fi", "xx.fi.translate_to.he": "translate_he_fi", "xx.eo.translate_to.hu": "translate_hu_eo", "xx.fi.translate_to.ht": "translate_ht_fi", "xx.yo.translate_to.fr": "translate_fr_yo", "xx.sv.translate_to.hr": "translate_hr_sv", "xx.fr.translate_to.ha": "translate_ha_fr", "xx.fi.translate_to.ha": "translate_ha_fi", "xx.sv.translate_to.ha": "translate_ha_sv", "xx.pt.translate_to.gl": "translate_gl_pt", "xx.fr.translate_to.guw": "translate_guw_fr", "xx.es.translate_to.ht": "translate_ht_es", "xx.de.translate_to.hu": "translate_hu_de", "xx.sv.translate_to.ht": "translate_ht_sv", "xx.es.translate_to.hr": "translate_hr_es", "xx.fr.translate_to.gaa": "translate_gaa_fr", "xx.ru.translate_to.he": "translate_he_ru", "xx.es.translate_to.gl": "translate_gl_es", "xx.ru.translate_to.hy": "translate_hy_ru", "xx.fi.translate_to.gil": "translate_gil_fi", "xx.sv.translate_to.hu": "translate_hu_sv", "xx.sv.translate_to.gil": "translate_gil_sv", "xx.fi.translate_to.fse": "translate_fse_fi", "xx.gem.translate_to.gem": "translate_gem_gem", "xx.es.translate_to.ha": "translate_ha_es", "xx.it.translate_to.he": "translate_he_it", "xx.sv.translate_to.guw": "translate_guw_sv", "xx.sv.translate_to.he": "translate_he_sv", "xx.yap.translate_to.fr": "translate_fr_yap", "xx.fr.translate_to.hr": "translate_hr_fr", "xx.eo.translate_to.he": "translate_he_eo", "xx.es.translate_to.gaa": "translate_gaa_es", "xx.fi.translate_to.hr": "translate_hr_fi", "xx.fr.translate_to.he": "translate_he_fr", "xx.fi.translate_to.ilo": "translate_ilo_fi", "xx.sv.translate_to.iso": "translate_iso_sv", "xx.he.translate_to.ja": "translate_ja_he", "xx.fi.translate_to.id": "translate_id_fi", "xx.de.translate_to.ja": "translate_ja_de", "xx.he.translate_to.it": "translate_it_he", "xx.it.translate_to.ja": "translate_ja_it", "xx.is.translate_to.it": 
"translate_it_is", "xx.bg.translate_to.ja": "translate_ja_bg", "xx.de.translate_to.ig": "translate_ig_de", "xx.bg.translate_to.it": "translate_it_bg", "xx.es.translate_to.id": "translate_id_es", "xx.fr.translate_to.id": "translate_id_fr", "xx.es.translate_to.ja": "translate_ja_es", "xx.sv.translate_to.ja": "translate_ja_sv", "xx.es.translate_to.iso": "translate_iso_es", "xx.es.translate_to.ilo": "translate_ilo_es", "xx.it.translate_to.is": "translate_is_it", "xx.sv.translate_to.it": "translate_it_sv", "xx.sv.translate_to.is": "translate_is_sv", "xx.ru.translate_to.ja": "translate_ja_ru", "xx.es.translate_to.kg": "translate_kg_es", "xx.fi.translate_to.ig": "translate_ig_fi", "xx.fr.translate_to.iso": "translate_iso_fr", "xx.de.translate_to.ko": "translate_ko_de", "xx.sv.translate_to.ilo": "translate_ilo_sv", "xx.es.translate_to.is": "translate_is_es", "xx.da.translate_to.ja": "translate_ja_da", "xx.nl.translate_to.ja": "translate_ja_nl", "xx.inc.translate_to.inc": "translate_inc_inc", "xx.de.translate_to.is": "translate_is_de", "xx.fr.translate_to.is": "translate_is_fr", "xx.lt.translate_to.it": "translate_it_lt", "xx.sv.translate_to.ig": "translate_ig_sv", "xx.de.translate_to.ilo": "translate_ilo_de", "xx.ar.translate_to.it": "translate_it_ar", "xx.fr.translate_to.kg": "translate_kg_fr", "xx.vi.translate_to.ja": "translate_ja_vi", "xx.ru.translate_to.ka": "translate_ka_ru", "xx.uk.translate_to.it": "translate_it_uk", "xx.vi.translate_to.it": "translate_it_vi", "xx.ms.translate_to.it": "translate_it_ms", "xx.ar.translate_to.ja": "translate_ja_ar", "xx.eo.translate_to.is": "translate_is_eo", "xx.ca.translate_to.it": "translate_it_ca", "xx.sh.translate_to.ja": "translate_ja_sh", "xx.fi.translate_to.ja": "translate_ja_fi", "xx.iir.translate_to.iir": "translate_iir_iir", "xx.itc.translate_to.itc": "translate_itc_itc", "xx.ms.translate_to.ja": "translate_ja_ms", "xx.fr.translate_to.it": "translate_it_fr", "xx.fr.translate_to.ja": "translate_ja_fr", 
"xx.pt.translate_to.ja": "translate_ja_pt", "xx.eo.translate_to.it": "translate_it_eo", "xx.fi.translate_to.iso": "translate_iso_fi", "xx.pl.translate_to.ja": "translate_ja_pl", "xx.tr.translate_to.ja": "translate_ja_tr", "xx.es.translate_to.ig": "translate_ig_es", "xx.fr.translate_to.ig": "translate_ig_fr", "xx.sv.translate_to.id": "translate_id_sv", "xx.hu.translate_to.ja": "translate_ja_hu", "xx.sv.translate_to.kg": "translate_kg_sv", "xx.es.translate_to.it": "translate_it_es", "xx.ine.translate_to.ine": "translate_ine_ine", "xx.de.translate_to.it": "translate_it_de", "xx.fi.translate_to.is": "translate_is_fi", "xx.es.translate_to.mk": "translate_mk_es", "xx.es.translate_to.lue": "translate_lue_es", "xx.es.translate_to.lv": "translate_lv_es", "xx.fi.translate_to.lue": "translate_lue_fi", "xx.es.translate_to.ln": "translate_ln_es", "xx.fr.translate_to.loz": "translate_loz_fr", "xx.sv.translate_to.kwy": "translate_kwy_sv", "xx.es.translate_to.lus": "translate_lus_es", "xx.fr.translate_to.lv": "translate_lv_fr", "xx.fr.translate_to.lu": "translate_lu_fr", "xx.de.translate_to.lt": "translate_lt_de", "xx.tr.translate_to.lt": "translate_lt_tr", "xx.fr.translate_to.lus": "translate_lus_fr", "xx.es.translate_to.mg": "translate_mg_es", "xx.sv.translate_to.lua": "translate_lua_sv", "xx.fr.translate_to.lg": "translate_lg_fr", "xx.fr.translate_to.kwy": "translate_kwy_fr", "xx.es.translate_to.lt": "translate_lt_es", "xx.sv.translate_to.ko": "translate_ko_sv", "xx.es.translate_to.kqn": "translate_kqn_es", "xx.fr.translate_to.ko": "translate_ko_fr", "xx.sv.translate_to.kqn": "translate_kqn_sv", "xx.fi.translate_to.ko": "translate_ko_fi", "xx.es.translate_to.mh": "translate_mh_es", "xx.fr.translate_to.lua": "translate_lua_fr", "xx.it.translate_to.lt": "translate_lt_it", "xx.sv.translate_to.lt": "translate_lt_sv", "xx.es.translate_to.lu": "translate_lu_es", "xx.fi.translate_to.lua": "translate_lua_fi", "xx.fr.translate_to.kqn": "translate_kqn_fr", "xx.de.translate_to.loz": 
"translate_loz_de", "xx.fr.translate_to.ms": "translate_ms_fr", "xx.fr.translate_to.lt": "translate_lt_fr", "xx.ru.translate_to.lv": "translate_lv_ru", "xx.ms.translate_to.ms": "translate_ms_ms", "xx.sv.translate_to.lus": "translate_lus_sv", "xx.fr.translate_to.lue": "translate_lue_fr", "xx.fi.translate_to.lu": "translate_lu_fi", "xx.eo.translate_to.lt": "translate_lt_eo", "xx.fi.translate_to.mk": "translate_mk_fi", "xx.es.translate_to.ko": "translate_ko_es", "xx.sv.translate_to.lue": "translate_lue_sv", "xx.pl.translate_to.lt": "translate_lt_pl", "xx.es.translate_to.mfe": "translate_mfe_es", "xx.fi.translate_to.loz": "translate_loz_fi", "xx.sv.translate_to.loz": "translate_loz_sv", "xx.ru.translate_to.ko": "translate_ko_ru", "xx.fi.translate_to.lg": "translate_lg_fi", "xx.fi.translate_to.mh": "translate_mh_fi", "xx.sv.translate_to.lv": "translate_lv_sv", "xx.hu.translate_to.ko": "translate_ko_hu", "xx.es.translate_to.lua": "translate_lua_es", "xx.fi.translate_to.lv": "translate_lv_fi", "xx.ru.translate_to.lt": "translate_lt_ru", "xx.de.translate_to.ms": "translate_ms_de", "xx.fi.translate_to.lus": "translate_lus_fi", "xx.es.translate_to.lg": "translate_lg_es", "xx.de.translate_to.ln": "translate_ln_de", "xx.es.translate_to.mfs": "translate_mfs_es", "xx.fr.translate_to.mk": "translate_mk_fr", "xx.fr.translate_to.ln": "translate_ln_fr", "xx.es.translate_to.loz": "translate_loz_es", "xx.sv.translate_to.lu": "translate_lu_sv", "xx.it.translate_to.ms": "translate_ms_it", "xx.sv.translate_to.lg": "translate_lg_sv", "xx.ar.translate_to.pl": "translate_pl_ar", "xx.fr.translate_to.ro": "translate_ro_fr", "xx.sv.translate_to.niu": "translate_niu_sv", "xx.eo.translate_to.pl": "translate_pl_eo", "xx.nl.translate_to.no": "translate_no_nl", "xx.es.translate_to.no": "translate_no_es", "xx.es.translate_to.pag": "translate_pag_es", "xx.ru.translate_to.rn": "translate_rn_ru", "xx.sv.translate_to.pag": "translate_pag_sv", "xx.uk.translate_to.pt": "translate_pt_uk", 
"xx.uk.translate_to.pl": "translate_pl_uk", "xx.de.translate_to.pl": "translate_pl_de", "xx.sv.translate_to.nl": "translate_nl_sv", "xx.fr.translate_to.no": "translate_no_fr", "xx.es.translate_to.niu": "translate_niu_es", "xx.uk.translate_to.no": "translate_no_uk", "xx.lt.translate_to.pl": "translate_pl_lt", "xx.tl.translate_to.pt": "translate_pt_tl", "xx.gl.translate_to.pt": "translate_pt_gl", "xx.da.translate_to.ru": "translate_ru_da", "xx.da.translate_to.no": "translate_no_da", "xx.uk.translate_to.nl": "translate_nl_uk", "xx.sv.translate_to.pon": "translate_pon_sv", "xx.fr.translate_to.pis": "translate_pis_fr", "xx.fr.translate_to.niu": "translate_niu_fr", "xx.af.translate_to.nl": "translate_nl_af", "xx.fi.translate_to.nso": "translate_nso_fi", "xx.fi.translate_to.pon": "translate_pon_fi", "xx.de.translate_to.pap": "translate_pap_de", "xx.de.translate_to.rn": "translate_rn_de", "xx.es.translate_to.pon": "translate_pon_es", "xx.es.translate_to.pis": "translate_pis_es", "xx.ca.translate_to.pt": "translate_pt_ca", "xx.sv.translate_to.rnd": "translate_rnd_sv", "xx.sv.translate_to.pl": "translate_pl_sv", "xx.ru.translate_to.no": "translate_no_ru", "xx.fi.translate_to.niu": "translate_niu_fi", "xx.de.translate_to.pag": "translate_pag_de", "xx.fr.translate_to.pl": "translate_pl_fr", "xx.fi.translate_to.no": "translate_no_fi", "xx.pl.translate_to.no": "translate_no_pl", "xx.de.translate_to.nso": "translate_nso_de", "xx.fr.translate_to.rn": "translate_rn_fr", "xx.sv.translate_to.nso": "translate_nso_sv", "xx.sv.translate_to.ro": "translate_ro_sv", "xx.no.translate_to.pl": "translate_pl_no", "xx.fr.translate_to.nl": "translate_nl_fr", "xx.es.translate_to.nso": "translate_nso_es", "xx.no.translate_to.nl": "translate_nl_no", "xx.fi.translate_to.pis": "translate_pis_fi", "xx.ca.translate_to.nl": "translate_nl_ca", "xx.es.translate_to.nl": "translate_nl_es", "xx.es.translate_to.ny": "translate_ny_es", "xx.fr.translate_to.pap": "translate_pap_fr", "xx.fi.translate_to.nl": 
"translate_nl_fi", "xx.sv.translate_to.no": "translate_no_sv", "xx.fr.translate_to.pon": "translate_pon_fr", "xx.fr.translate_to.rnd": "translate_rnd_fr", "xx.es.translate_to.pap": "translate_pap_es", "xx.es.translate_to.prl": "translate_prl_es", "xx.eo.translate_to.ro": "translate_ro_eo", "xx.sv.translate_to.pis": "translate_pis_sv", "xx.af.translate_to.ru": "translate_ru_af", "xx.fr.translate_to.nso": "translate_nso_fr", "xx.eo.translate_to.pt": "translate_pt_eo", "xx.ar.translate_to.ru": "translate_ru_ar", "xx.fr.translate_to.mt": "translate_mt_fr", "xx.es.translate_to.rn": "translate_rn_es", "xx.sv.translate_to.mt": "translate_mt_sv", "xx.de.translate_to.niu": "translate_niu_de", "xx.es.translate_to.mt": "translate_mt_es", "xx.es.translate_to.pl": "translate_pl_es", "xx.fi.translate_to.pag": "translate_pag_fi", "xx.de.translate_to.no": "translate_no_de", "xx.de.translate_to.ny": "translate_ny_de", "xx.fi.translate_to.mt": "translate_mt_fi", "xx.no.translate_to.no": "translate_no_no", "xx.eo.translate_to.nl": "translate_nl_eo", "xx.bg.translate_to.ru": "translate_ru_bg", "xx.fi.translate_to.pap": "translate_pap_fi", "xx.fi.translate_to.ro": "translate_ro_fi", "xx.sv.translate_to.st": "translate_st_sv", "xx.kg.translate_to.sv": "translate_sv_kg", "xx.sv.translate_to.sq": "translate_sq_sv", "xx.ee.translate_to.sv": "translate_sv_ee", "xx.es.translate_to.srn": "translate_srn_es", "xx.lv.translate_to.ru": "translate_ru_lv", "xx.cs.translate_to.sv": "translate_sv_cs", "xx.ha.translate_to.sv": "translate_sv_ha", "xx.kqn.translate_to.sv": "translate_sv_kqn", "xx.fr.translate_to.rw": "translate_rw_fr", "xx.fr.translate_to.sn": "translate_sn_fr", "xx.eu.translate_to.ru": "translate_ru_eu", "xx.fi.translate_to.st": "translate_st_fi", "xx.efi.translate_to.sv": "translate_sv_efi", "xx.ho.translate_to.sv": "translate_sv_ho", "xx.id.translate_to.sv": "translate_sv_id", "xx.eo.translate_to.sv": "translate_sv_eo", "xx.guw.translate_to.sv": "translate_sv_guw", 
"xx.sv.translate_to.sk": "translate_sk_sv", "xx.fr.translate_to.srn": "translate_srn_fr", "xx.ceb.translate_to.sv": "translate_sv_ceb", "xx.es.translate_to.sq": "translate_sq_es", "xx.sv.translate_to.rw": "translate_rw_sv", "xx.is.translate_to.sv": "translate_sv_is", "xx.es.translate_to.sm": "translate_sm_es", "xx.bcl.translate_to.sv": "translate_sv_bcl", "xx.kwy.translate_to.sv": "translate_sv_kwy", "xx.es.translate_to.run": "translate_run_es", "xx.el.translate_to.sv": "translate_sv_el", "xx.es.translate_to.sk": "translate_sk_es", "xx.iso.translate_to.sv": "translate_sv_iso", "xx.lu.translate_to.sv": "translate_sv_lu", "xx.af.translate_to.sv": "translate_sv_af", "xx.bg.translate_to.sv": "translate_sv_bg", "xx.fr.translate_to.sm": "translate_sm_fr", "xx.hr.translate_to.sv": "translate_sv_hr", "xx.sv.translate_to.sn": "translate_sn_sv", "xx.no.translate_to.ru": "translate_ru_no", "xx.fr.translate_to.sg": "translate_sg_fr", "xx.es.translate_to.sl": "translate_sl_es", "xx.bzs.translate_to.sv": "translate_sv_bzs", "xx.fr.translate_to.st": "translate_st_fr", "xx.hu.translate_to.sv": "translate_sv_hu", "xx.sv.translate_to.sg": "translate_sg_sv", "xx.sem.translate_to.sem": "translate_sem_sem", "xx.uk.translate_to.sh": "translate_sh_uk", "xx.ln.translate_to.sv": "translate_sv_ln", "xx.fi.translate_to.sk": "translate_sk_fi", "xx.ht.translate_to.sv": "translate_sv_ht", "xx.es.translate_to.st": "translate_st_es", "xx.fr.translate_to.ru": "translate_ru_fr", "xx.chk.translate_to.sv": "translate_sv_chk", "xx.fr.translate_to.sk": "translate_sk_fr", "xx.lg.translate_to.sv": "translate_sv_lg", "xx.sv.translate_to.srn": "translate_srn_sv", "xx.crs.translate_to.sv": "translate_sv_crs", "xx.uk.translate_to.ru": "translate_ru_uk", "xx.et.translate_to.ru": "translate_ru_et", "xx.et.translate_to.sv": "translate_sv_et", "xx.es.translate_to.rw": "translate_rw_es", "xx.sla.translate_to.sla": "translate_sla_sla", "xx.ru.translate_to.sl": "translate_sl_ru", "xx.fj.translate_to.sv": 
"translate_sv_fj", "xx.es.translate_to.sn": "translate_sn_es", "xx.lua.translate_to.sv": "translate_sv_lua", "xx.hil.translate_to.sv": "translate_sv_hil", "xx.es.translate_to.ru": "translate_ru_es", "xx.lue.translate_to.sv": "translate_sv_lue", "xx.gaa.translate_to.sv": "translate_sv_gaa", "xx.hy.translate_to.ru": "translate_ru_hy", "xx.bem.translate_to.sv": "translate_sv_bem", "xx.sv.translate_to.run": "translate_run_sv", "xx.gil.translate_to.sv": "translate_sv_gil", "xx.lus.translate_to.sv": "translate_sv_lus", "xx.he.translate_to.ru": "translate_ru_he", "xx.vi.translate_to.ru": "translate_ru_vi", "xx.he.translate_to.sv": "translate_sv_he", "xx.sv.translate_to.ru": "translate_ru_sv", "xx.fi.translate_to.ru": "translate_ru_fi", "xx.es.translate_to.sv": "translate_sv_es", "xx.es.translate_to.sg": "translate_sg_es", "xx.eo.translate_to.ru": "translate_ru_eo", "xx.lv.translate_to.sv": "translate_sv_lv", "xx.fi.translate_to.sg": "translate_sg_fi", "xx.es.translate_to.ssp": "translate_ssp_es", "xx.ilo.translate_to.sv": "translate_sv_ilo", "xx.fi.translate_to.sv": "translate_sv_fi", "xx.lt.translate_to.ru": "translate_ru_lt", "xx.bi.translate_to.sv": "translate_sv_bi", "xx.sv.translate_to.sl": "translate_sl_sv", "xx.fr.translate_to.sv": "translate_sv_fr", "xx.uk.translate_to.sl": "translate_sl_uk", "xx.fi.translate_to.sl": "translate_sl_fi", "xx.sl.translate_to.ru": "translate_ru_sl", "xx.ig.translate_to.sv": "translate_sv_ig", "xx.ase.translate_to.sv": "translate_sv_ase", "xx.eo.translate_to.sh": "translate_sh_eo", "xx.fr.translate_to.sl": "translate_sl_fr", "xx.es.translate_to.tl": "translate_tl_es", "xx.sv.translate_to.tw": "translate_tw_sv", "xx.lt.translate_to.tr": "translate_tr_lt", "xx.fi.translate_to.tll": "translate_tll_fi", "xx.sn.translate_to.sv": "translate_sv_sn", "xx.tn.translate_to.sv": "translate_sv_tn", "xx.sv.translate_to.toi": "translate_toi_sv", "xx.uk.translate_to.sv": "translate_sv_uk", "xx.tiv.translate_to.sv": "translate_sv_tiv", 
"xx.sk.translate_to.sv": "translate_sv_sk", "xx.ty.translate_to.sv": "translate_sv_ty", "xx.es.translate_to.toi": "translate_toi_es", "xx.rw.translate_to.sv": "translate_sv_rw", "xx.ny.translate_to.sv": "translate_sv_ny", "xx.rnd.translate_to.sv": "translate_sv_rnd", "xx.es.translate_to.tn": "translate_tn_es", "xx.sv.translate_to.tn": "translate_tn_sv", "xx.es.translate_to.tvl": "translate_tvl_es", "xx.pon.translate_to.sv": "translate_sv_pon", "xx.ve.translate_to.sv": "translate_sv_ve", "xx.fr.translate_to.tvl": "translate_tvl_fr", "xx.es.translate_to.tum": "translate_tum_es", "xx.run.translate_to.sv": "translate_sv_run", "xx.de.translate_to.tl": "translate_tl_de", "xx.fi.translate_to.tw": "translate_tw_fi", "xx.es.translate_to.ty": "translate_ty_es", "xx.fr.translate_to.toi": "translate_toi_fr", "xx.sv.translate_to.tll": "translate_tll_sv", "xx.sg.translate_to.sv": "translate_sv_sg", "xx.az.translate_to.tr": "translate_tr_az", "xx.es.translate_to.ts": "translate_ts_es", "xx.fr.translate_to.ts": "translate_ts_fr", "xx.fr.translate_to.th": "translate_th_fr", "xx.zne.translate_to.sv": "translate_sv_zne", "xx.tw.translate_to.sv": "translate_sv_tw", "xx.mh.translate_to.sv": "translate_sv_mh", "xx.pag.translate_to.sv": "translate_sv_pag", "xx.fr.translate_to.tum": "translate_tum_fr", "xx.no.translate_to.sv": "translate_sv_no", "xx.ts.translate_to.sv": "translate_sv_ts", "xx.mt.translate_to.sv": "translate_sv_mt", "xx.yo.translate_to.sv": "translate_sv_yo", "xx.fr.translate_to.to": "translate_to_fr", "xx.sv.translate_to.sv": "translate_sv_sv", "xx.fi.translate_to.toi": "translate_toi_fi", "xx.ro.translate_to.sv": "translate_sv_ro", "xx.es.translate_to.tw": "translate_tw_es", "xx.niu.translate_to.sv": "translate_sv_niu", "xx.uk.translate_to.tr": "translate_tr_uk", "xx.to.translate_to.sv": "translate_sv_to", "xx.fi.translate_to.ts": "translate_ts_fi", "xx.tll.translate_to.sv": "translate_sv_tll", "xx.fr.translate_to.tll": "translate_tll_fr", "xx.pt.translate_to.tl": 
"translate_tl_pt", "xx.nso.translate_to.sv": "translate_sv_nso", "xx.sq.translate_to.sv": "translate_sv_sq", "xx.sv.translate_to.tpi": "translate_tpi_sv", "xx.yap.translate_to.sv": "translate_sv_yap", "xx.sv.translate_to.tr": "translate_tr_sv", "xx.fr.translate_to.swc": "translate_swc_fr", "xx.nl.translate_to.sv": "translate_sv_nl", "xx.fi.translate_to.ty": "translate_ty_fi", "xx.fr.translate_to.tr": "translate_tr_fr", "xx.sv.translate_to.tum": "translate_tum_sv", "xx.swc.translate_to.sv": "translate_sv_swc", "xx.fi.translate_to.swc": "translate_swc_fi", "xx.eo.translate_to.tr": "translate_tr_eo", "xx.xh.translate_to.sv": "translate_sv_xh", "xx.sv.translate_to.tvl": "translate_tvl_sv", "xx.sl.translate_to.sv": "translate_sv_sl", "xx.tum.translate_to.sv": "translate_sv_tum", "xx.es.translate_to.to": "translate_to_es", "xx.fr.translate_to.tn": "translate_tn_fr", "xx.sv.translate_to.ty": "translate_ty_sv", "xx.sv.translate_to.swc": "translate_swc_sv", "xx.mos.translate_to.sv": "translate_sv_mos", "xx.ar.translate_to.tr": "translate_tr_ar", "xx.ru.translate_to.sv": "translate_sv_ru", "xx.srn.translate_to.sv": "translate_sv_srn", "xx.pis.translate_to.sv": "translate_sv_pis", "xx.pap.translate_to.sv": "translate_sv_pap", "xx.tvl.translate_to.sv": "translate_sv_tvl", "xx.sv.translate_to.to": "translate_to_sv", "xx.th.translate_to.sv": "translate_sv_th", "xx.war.translate_to.sv": "translate_sv_war", "xx.sv.translate_to.ts": "translate_ts_sv", "xx.fr.translate_to.tw": "translate_tw_fr", "xx.st.translate_to.sv": "translate_sv_st", "xx.fr.translate_to.tiv": "translate_tiv_fr", "xx.tpi.translate_to.sv": "translate_sv_tpi", "xx.fi.translate_to.tvl": "translate_tvl_fi", "xx.fr.translate_to.ty": "translate_ty_fr", "xx.sm.translate_to.sv": "translate_sv_sm", "xx.es.translate_to.swc": "translate_swc_es", "xx.sv.translate_to.tiv": "translate_tiv_sv", "xx.toi.translate_to.sv": "translate_sv_toi", "xx.mfe.translate_to.sv": "translate_sv_mfe", "xx.wls.translate_to.sv": 
"translate_sv_wls", "xx.umb.translate_to.sv": "translate_sv_umb", "xx.es.translate_to.tr": "translate_tr_es", "xx.es.translate_to.tll": "translate_tll_es", "xx.pt.translate_to.uk": "translate_uk_pt", "xx.it.translate_to.zh": "translate_zh_it", "xx.no.translate_to.uk": "translate_uk_no", "xx.sh.translate_to.uk": "translate_uk_sh", "xx.sv.translate_to.wls": "translate_wls_sv", "xx.pl.translate_to.uk": "translate_uk_pl", "xx.es.translate_to.yo": "translate_yo_es", "xx.es.translate_to.war": "translate_war_es", "xx.sv.translate_to.zh": "translate_zh_sv", "xx.tr.translate_to.uk": "translate_uk_tr", "xx.fi.translate_to.war": "translate_war_fi", "xx.de.translate_to.zh": "translate_zh_de", "xx.uk.translate_to.zh": "translate_zh_uk", "xx.eo.translate_to.vi": "translate_vi_eo", "xx.bg.translate_to.zh": "translate_zh_bg", "xx.es.translate_to.zne": "translate_zne_es", "xx.fr.translate_to.uk": "translate_uk_fr", "xx.zls.translate_to.zls": "translate_zls_zls", "xx.fr.translate_to.yo": "translate_yo_fr", "xx.bg.translate_to.uk": "translate_uk_bg", "xx.fr.translate_to.xh": "translate_xh_fr", "xx.ca.translate_to.uk": "translate_uk_ca", "xx.fi.translate_to.zh": "translate_zh_fi", "xx.es.translate_to.zai": "translate_zai_es", "xx.es.translate_to.uk": "translate_uk_es", "xx.nl.translate_to.uk": "translate_uk_nl", "xx.sv.translate_to.yap": "translate_yap_sv", "xx.he.translate_to.uk": "translate_uk_he", "xx.sl.translate_to.uk": "translate_uk_sl", "xx.es.translate_to.ve": "translate_ve_es", "xx.zlw.translate_to.zlw": "translate_zlw_zlw", "xx.es.translate_to.tzo": "translate_tzo_es", "xx.hu.translate_to.uk": "translate_uk_hu", "xx.de.translate_to.vi": "translate_vi_de", "xx.fi.translate_to.yo": "translate_yo_fi", "xx.ru.translate_to.uk": "translate_uk_ru", "xx.ms.translate_to.zh": "translate_zh_ms", "xx.urj.translate_to.urj": "translate_urj_urj", "xx.it.translate_to.uk": "translate_uk_it", "xx.sv.translate_to.war": "translate_war_sv", "xx.fr.translate_to.wls": "translate_wls_fr", 
"xx.zle.translate_to.zle": "translate_zle_zle", "xx.vi.translate_to.zh": "translate_zh_vi", "xx.es.translate_to.vsl": "translate_vsl_es", "xx.fi.translate_to.zne": "translate_zne_fi", "xx.fi.translate_to.uk": "translate_uk_fi", "xx.ru.translate_to.vi": "translate_vi_ru", "xx.nl.translate_to.zh": "translate_zh_nl", "xx.sv.translate_to.xh": "translate_xh_sv", "xx.es.translate_to.xh": "translate_xh_es", "xx.he.translate_to.zh": "translate_zh_he", "xx.fr.translate_to.war": "translate_war_fr", "xx.fr.translate_to.zne": "translate_zne_fr", "xx.sv.translate_to.yo": "translate_yo_sv", "xx.fr.translate_to.vi": "translate_vi_fr", "xx.it.translate_to.vi": "translate_vi_it", "xx.sv.translate_to.zne": "translate_zne_sv", "xx.fr.translate_to.yap": "translate_yap_fr", "xx.cs.translate_to.uk": "translate_uk_cs", "xx.es.translate_to.vi": "translate_vi_es", "xx.de.translate_to.uk": "translate_uk_de", "xx.sv.translate_to.uk": "translate_uk_sv", }, } pretrained_models_references = {'ab': {'ab.speech2text.wav2vec2': 'asr_hf_challenge_test', 'ab.speech2text.wav2vec2.base': 'asr_baseline', 'ab.speech2text.wav2vec2.base_gpu': 'asr_baseline_gpu', 'ab.speech2text.wav2vec2.by_mofe': 'asr_speech_sprint_test', 'ab.speech2text.wav2vec2.gpu': 'asr_hf_challenge_test_gpu', 'ab.speech2text.wav2vec2.v2': 'asr_wav2vec2_common_voice_ab_demo', 'ab.speech2text.wav2vec2.v2_gpu': 'asr_wav2vec2_common_voice_ab_demo_gpu', 'ab.speech2text.wav2vec_xls': 'asr_xls_test', 'ab.speech2text.wav2vec_xlsr': 'asr_xls_r_ab_spanish', 'ab.speech2text.wav2vec_xlsr.by_baaastien': 'asr_xls_r_ab_test_by_baaastien', 'ab.speech2text.wav2vec_xlsr.by_cahya': 'asr_xls_r_ab_test_by_cahya', 'ab.speech2text.wav2vec_xlsr.by_chmanoj': 'asr_xls_r_demo_test', 'ab.speech2text.wav2vec_xlsr.by_fitods': 'asr_xls_r_ab_test_by_fitods', 'ab.speech2text.wav2vec_xlsr.by_hf_test': 'asr_xls_r_ab_test_by_hf_test', 'ab.speech2text.wav2vec_xlsr.by_mattchurgin': 'asr_xls_r_eng', 'ab.speech2text.wav2vec_xlsr.by_muneson': 'asr_xls_r_ab_test_by_muneson', 
'ab.speech2text.wav2vec_xlsr.by_pablouribe': 'asr_xls_r_ab_test_by_pablouribe', 'ab.speech2text.wav2vec_xlsr.gpu': 'asr_xls_r_ab_spanish_gpu', 'ab.speech2text.wav2vec_xlsr.gpu.by_cahya': 'asr_xls_r_ab_test_by_cahya_gpu', 'ab.speech2text.wav2vec_xlsr.gpu.by_chmanoj': 'asr_xls_r_demo_test_gpu', 'ab.speech2text.wav2vec_xlsr.gpu.by_fitods': 'asr_xls_r_ab_test_by_fitods_gpu', 'ab.speech2text.wav2vec_xlsr.gpu.by_hf_test': 'asr_xls_r_ab_test_by_hf_test_gpu', 'ab.speech2text.wav2vec_xlsr.gpu.by_mattchurgin': 'asr_xls_r_eng_gpu', 'ab.speech2text.wav2vec_xlsr.gpu.by_pablouribe': 'asr_xls_r_ab_test_by_pablouribe_gpu', 'ab.speech2text.wav2vec_xlsr.v2_300m': 'asr_wav2vec2_xls_r_300m_ab_cv8', 'ab.speech2text.wav2vec_xlsr.v2_300m_gpu': 'asr_wav2vec2_xls_r_300m_ab_cv8_gpu'}, 'af': {'af.embed.w2v_cc_300d': 'w2v_cc_300d', 'af.lemma': 'lemma', 'af.lemma.afribooms': 'lemma_afribooms', 'af.pos': 'pos_afribooms', 'af.stopwords': 'stopwords_iso', 'af.stopwords.iso': 'stopwords_iso'}, 'als': {'als.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'am': {'am.embed.am_roberta': 'roberta_embeddings_am_roberta', 'am.embed.w2v_cc_300d': 'w2v_cc_300d', 'am.embed.xlm_roberta': 'xlm_roberta_base_finetuned_amharic', 'am.embed_sentence.xlm_roberta': 'sent_xlm_roberta_base_finetuned_amharic', 'am.lemma': 'lemma', 'am.ner.xlmr_roberta.base_finetuned': 'xlmroberta_ner_xlm_roberta_base_finetuned_amharic_finetuned_ner_amharic', 'am.ner.xlmr_roberta.base_finetuned_amharic.by_mbeukman': 'xlmroberta_ner_xlm_roberta_base_finetuned_ner_amharic', 'am.ner.xlmr_roberta.base_finetuned_swahili_amharic.by_mbeukman': 'xlmroberta_ner_xlm_roberta_base_finetuned_swahili_finetuned_ner_amharic', 'am.pos': 'pos_ud_att', 'am.stopwords': 'stopwords_iso'}, 'an': {'an.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'ar': {'ar.answer_question.arabert.v2': 'bert_qa_arabert_v2', 'ar.answer_question.bert': 'bert_qa_arap_qa_bert', 'ar.answer_question.bert.cased_multilingual_base': 'bert_qa_multilingual_bert_base_cased_arabic', 
'ar.answer_question.bert.large_v2': 'bert_qa_arap_qa_bert_large_v2', 'ar.answer_question.bert.squad_arcd.finetuned': 'bert_qa_arabert_finetuned_arcd', 'ar.answer_question.bert.v2': 'bert_qa_arap_qa_bert_v2', 'ar.answer_question.electra': 'electra_qa_AraELECTRA_discriminator_SOQAL', 'ar.answer_question.squad_arcd.electra': 'electra_qa_araElectra_SQUAD_ARCD', 'ar.answer_question.squad_arcd.electra.768d': 'electra_qa_araElectra_SQUAD_ARCD_768', 'ar.answer_question.squad_arcd.electra.base': 'electra_qa_AraElectra_base_finetuned_ARCD', 'ar.answer_question.tydiqa.electra.base': 'electra_qa_ara_base_artydiqa', 'ar.answer_question.xlm_roberta.base': 'xlm_roberta_qa_xlm_roberta_base_arabic', 'ar.answer_question.xlm_roberta.large': 'xlm_roberta_qa_xlm_roberta_large_arabic_qa', 'ar.classify.bert': 'bert_classifier_arabic_marbert_dialect_identification_city', 'ar.classify.bert.base': 'bert_classifier_bert_base_arabic_camelbert_ca_poetry', 'ar.classify.bert.base_finetuned': 'bert_classifier_base_arabic_camel_mix_poetry_finetuned_qawaf', 'ar.classify.bert.by_ammar_alhaj_ali': 'bert_classifier_arabic_marbert_poetry_classification', 'ar.classify.bert.by_azizkh': 'bert_classifier_autotrain_j_multi_classification_1181044057', 'ar.classify.bert.by_m47labs': 'bert_classifier_binary_classification_arabic', 'ar.classify.bert.by_researchaccount': 'bert_classifier_sa_sub5', 'ar.classify.bert.by_yah216': 'bert_classifier_poem_qafiyah_detection', 'ar.classify.bert.hate.': 'bert_classifier_dehatebert_mono_arabic', 'ar.classify.bert.news.': 'bert_classifier_arabic_marbert_news_article_classification', 'ar.classify.bert.news.by_m47labs': 'bert_classifier_ara_multiclass_news', 'ar.classify.bert.sentiment.': 'bert_classifier_arabic_marbert_sentiment', 'ar.classify.bert.sentiment.base': 'bert_classifier_bert_base_arabic_camelbert_ca_sentiment', 'ar.classify.bert.sentiment.base_da_sentiment.by_camel_lab': 'bert_classifier_bert_base_arabic_camelbert_da_sentiment', 
'ar.classify.bert.sentiment.base_mix_sentiment.by_camel_lab': 'bert_classifier_bert_base_arabic_camelbert_mix_sentiment', 'ar.classify.bert.sentiment.base_msa_sentiment.by_camel_lab': 'bert_classifier_bert_base_arabic_camelbert_msa_sentiment', 'ar.classify.bert.twitter.base': 'bert_classifier_bert_base_arabic_camelbert_msa_did_madar_twitter5', 'ar.classify.da_poetry.bert.base.by_camel_lab': 'bert_classifier_bert_base_arabic_camelbert_da_poetry', 'ar.classify.mix_did_madar_corpus26.bert.base.by_camel_lab': 'bert_classifier_bert_base_arabic_camelbert_mix_did_madar_corpus26', 'ar.classify.mix_did_madar_corpus6.bert.base.by_camel_lab': 'bert_classifier_bert_base_arabic_camelbert_mix_did_madar_corpus6', 'ar.classify.mix_did_nadi.bert.base.by_camel_lab': 'bert_classifier_bert_base_arabic_camelbert_mix_did_nadi', 'ar.classify.mix_poetry.bert.base.by_camel_lab': 'bert_classifier_bert_base_arabic_camelbert_mix_poetry', 'ar.classify.msa_did_nadi.bert.base.by_camel_lab': 'bert_classifier_bert_base_arabic_camelbert_msa_did_nadi', 'ar.classify.msa_poetry.bert.base.by_camel_lab': 'bert_classifier_bert_base_arabic_camelbert_msa_poetry', 'ar.classify.poetry.bert.by_zenkri': 'bert_classifier_autotrain_arabic_poetry_by_subject_920730227', 'ar.classify.poetry.v2.bert.by_zenkri': 'bert_classifier_autotrain_arabic_poetry_by_subject_920730230', 'ar.embed': 'arabic_w2v_cc_300d', 'ar.embed.AraBertMo_base_V1': 'bert_embeddings_AraBertMo_base_V1', 'ar.embed.Ara_DialectBERT': 'bert_embeddings_Ara_DialectBERT', 'ar.embed.DarijaBERT': 'bert_embeddings_DarijaBERT', 'ar.embed.MARBERT': 'bert_embeddings_MARBERT', 'ar.embed.MARBERTv2': 'bert_embeddings_MARBERTv2', 'ar.embed.albert': 'albert_embeddings_albert_base_arabic', 'ar.embed.albert_large_arabic': 'albert_embeddings_albert_large_arabic', 'ar.embed.albert_xlarge_arabic': 'albert_embeddings_albert_xlarge_arabic', 'ar.embed.aner': 'arabic_w2v_cc_300d', 'ar.embed.aner.300d': 'arabic_w2v_cc_300d', 'ar.embed.arabert_c19': 
'bert_embeddings_arabert_c19', 'ar.embed.arbert': 'bert_embeddings_ARBERT', 'ar.embed.bert': 'bert_embeddings_arbert', 'ar.embed.bert.base': 'bert_embeddings_base_arabert', 'ar.embed.bert.base.by_asafaya': 'bert_embeddings_base_arabic', 'ar.embed.bert.base.v1.by_aubmindlab': 'bert_embeddings_base_arabertv01', 'ar.embed.bert.base.v2.by_aubmindlab': 'bert_embeddings_base_arabertv02', 'ar.embed.bert.base_mix.by_camel_lab': 'bert_embeddings_base_arabic_camel_mix', 'ar.embed.bert.base_msa.by_camel_lab': 'bert_embeddings_base_arabic_camel_msa', 'ar.embed.bert.base_msa_eighth.by_camel_lab': 'bert_embeddings_base_arabic_camel_msa_eighth', 'ar.embed.bert.base_msa_half.by_camel_lab': 'bert_embeddings_base_arabic_camel_msa_half', 'ar.embed.bert.base_msa_quarter.by_camel_lab': 'bert_embeddings_base_arabic_camel_msa_quarter', 'ar.embed.bert.base_msa_sixteenth.by_camel_lab': 'bert_embeddings_base_arabic_camel_msa_sixteenth', 'ar.embed.bert.by_ubc_nlp': 'bert_embeddings_marbert', 'ar.embed.bert.cased_base': 'bert_embeddings_base_ar_cased', 'ar.embed.bert.large': 'bert_embeddings_large_arabertv02', 'ar.embed.bert.large.by_asafaya': 'bert_embeddings_large_arabic', 'ar.embed.bert.medium': 'bert_embeddings_medium_arabic', 'ar.embed.bert.mini': 'bert_embeddings_mini_arabic', 'ar.embed.bert.v2': 'bert_embeddings_marbertv2', 'ar.embed.bert.v2_base': 'bert_embeddings_base_arabertv2', 'ar.embed.bert.v2_large': 'bert_embeddings_large_arabertv2', 'ar.embed.bert_base_arabert': 'bert_embeddings_bert_base_arabert', 'ar.embed.bert_base_arabertv01': 'bert_embeddings_bert_base_arabertv01', 'ar.embed.bert_base_arabertv02': 'bert_embeddings_bert_base_arabertv02', 'ar.embed.bert_base_arabertv02_twitter': 'bert_embeddings_bert_base_arabertv02_twitter', 'ar.embed.bert_base_arabertv2': 'bert_embeddings_bert_base_arabertv2', 'ar.embed.bert_base_arabic': 'bert_embeddings_bert_base_arabic', 'ar.embed.bert_base_arabic_camelbert_mix': 'bert_embeddings_bert_base_arabic_camelbert_mix', 
'ar.embed.bert_base_arabic_camelbert_msa': 'bert_embeddings_bert_base_arabic_camelbert_msa', 'ar.embed.bert_base_arabic_camelbert_msa_eighth': 'bert_embeddings_bert_base_arabic_camelbert_msa_eighth', 'ar.embed.bert_base_arabic_camelbert_msa_half': 'bert_embeddings_bert_base_arabic_camelbert_msa_half', 'ar.embed.bert_base_arabic_camelbert_msa_quarter': 'bert_embeddings_bert_base_arabic_camelbert_msa_quarter', 'ar.embed.bert_base_arabic_camelbert_msa_sixteenth': 'bert_embeddings_bert_base_arabic_camelbert_msa_sixteenth', 'ar.embed.bert_base_qarib': 'bert_embeddings_bert_base_qarib', 'ar.embed.bert_base_qarib60_1790k': 'bert_embeddings_bert_base_qarib60_1790k', 'ar.embed.bert_base_qarib60_860k': 'bert_embeddings_bert_base_qarib60_860k', 'ar.embed.bert_large_arabertv02': 'bert_embeddings_bert_large_arabertv02', 'ar.embed.bert_large_arabertv02_twitter': 'bert_embeddings_bert_large_arabertv02_twitter', 'ar.embed.bert_large_arabertv2': 'bert_embeddings_bert_large_arabertv2', 'ar.embed.bert_large_arabic': 'bert_embeddings_bert_large_arabic', 'ar.embed.bert_medium_arabic': 'bert_embeddings_bert_medium_arabic', 'ar.embed.bert_mini_arabic': 'bert_embeddings_bert_mini_arabic', 'ar.embed.cbow': 'arabic_w2v_cc_300d', 'ar.embed.cbow.300d': 'arabic_w2v_cc_300d', 'ar.embed.distilbert': 'distilbert_embeddings_distilbert_base_ar_cased', 'ar.embed.dziribert': 'bert_embeddings_dziribert', 'ar.embed.electra.base': 'electra_embeddings_araelectra_base_generator', 'ar.embed.glove': 'arabic_w2v_cc_300d', 'ar.embed.mbert_ar_c19': 'bert_embeddings_mbert_ar_c19', 'ar.embed.multi_dialect_bert_base_arabic': 'bert_embeddings_multi_dialect_bert_base_arabic', 'ar.lemma': 'lemma_padt', 'ar.lemma.padt': 'lemma_padt', 'ar.ner': 'aner_cc_300d', 'ar.ner.ANER': 'bert_ner_ANER', 'ar.ner.aner': 'aner_cc_300d', 'ar.ner.arabert_ner': 'bert_ner_arabert_ner', 'ar.ner.arabic_camelbert_da_ner': 'bert_ner_bert_base_arabic_camelbert_da_ner', 'ar.ner.arabic_camelbert_mix_ner': 
'bert_ner_bert_base_arabic_camelbert_mix_ner', 'ar.ner.arabic_camelbert_msa_ner': 'bert_ner_bert_base_arabic_camelbert_msa_ner', 'ar.ner.bert': 'bert_ner_arabic_ner', 'ar.ner.bert.base': 'bert_ner_bert_base_arabic_camelbert_ca_ner', 'ar.ner.bert.by_holako': 'bert_ner_ner_camelbert', 'ar.ner.bert.wikiann.': 'bert_token_classifier_ara_ner', 'ar.ner.multilingual_cased_ner_hrl': 'bert_ner_bert_base_multilingual_cased_ner_hrl', 'ar.ner.pos.base': 'bert_pos_bert_base_arabic_camelbert_ca_pos_msa', 'ar.ner.xlmr_roberta': 'xlmroberta_ner_lge_panx_dataset', 'ar.ner.xlmr_roberta.base': 'xlmroberta_ner_base_panx_dataset', 'ar.ner.xlmr_roberta.base_finetuned': 'xlmroberta_ner_base_finetuned_recipe', 'ar.poem_meter.classify.bert.by_yah216': 'bert_classifier_arabic_poem_meter_3', 'ar.poem_meter.classify.bert.v2.by_yah216': 'bert_classifier_arabic_poem_meter_classification', 'ar.pos': 'pos_padt', 'ar.pos.arabic_camelbert_ca_pos_egy': 'bert_pos_bert_base_arabic_camelbert_ca_pos_egy', 'ar.pos.arabic_camelbert_ca_pos_glf': 'bert_pos_bert_base_arabic_camelbert_ca_pos_glf', 'ar.pos.arabic_camelbert_da_pos_egy': 'bert_pos_bert_base_arabic_camelbert_da_pos_egy', 'ar.pos.arabic_camelbert_da_pos_glf': 'bert_pos_bert_base_arabic_camelbert_da_pos_glf', 'ar.pos.arabic_camelbert_da_pos_msa': 'bert_pos_bert_base_arabic_camelbert_da_pos_msa', 'ar.pos.arabic_camelbert_mix_pos_egy': 'bert_pos_bert_base_arabic_camelbert_mix_pos_egy', 'ar.pos.arabic_camelbert_mix_pos_glf': 'bert_pos_bert_base_arabic_camelbert_mix_pos_glf', 'ar.pos.arabic_camelbert_mix_pos_msa': 'bert_pos_bert_base_arabic_camelbert_mix_pos_msa', 'ar.pos.arabic_camelbert_msa_pos_egy': 'bert_pos_bert_base_arabic_camelbert_msa_pos_egy', 'ar.pos.arabic_camelbert_msa_pos_glf': 'bert_pos_bert_base_arabic_camelbert_msa_pos_glf', 'ar.pos.arabic_camelbert_msa_pos_msa': 'bert_pos_bert_base_arabic_camelbert_msa_pos_msa', 'ar.pos.padt': 'pos_padt', 'ar.stopwords': 'stopwords_iso', 'ar.stopwords.iso': 'stopwords_iso'}, 'arz': 
{'arz.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'as': {'as.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'ast': {'ast.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'az': {'az.embed.w2v_cc_300d': 'w2v_cc_300d', 'az.stopwords': 'stopwords_iso'}, 'azb': {'azb.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'ba': {'ba.embed.w2v_cc_300d': 'w2v_cc_300d', 'ba.speech2text.wav2vec_xlsr.v2_large_300m': 'asr_wav2vec2_large_xls_r_300m_bashkir_cv7_opt', 'ba.speech2text.wav2vec_xlsr.v2_large_300m_gpu': 'asr_wav2vec2_large_xls_r_300m_bashkir_cv7_opt_gpu'}, 'bar': {'bar.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'bcl': {'bcl.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'be': {'be.embed.w2v_cc_300d': 'w2v_cc_300d', 'be.lemma': 'lemma_hse', 'be.pos': 'pos_hse', 'be.pos.hse': 'pos_hse'}, 'bem': { 'bem.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xls_r_1b_bemba_fds', 'bem.speech2text.wav2vec_xlsr.v2_large.by_csikasote': 'asr_wav2vec2_large_xlsr_bemba', 'bem.speech2text.wav2vec_xlsr.v2_large_300m': 'asr_wav2vec2_large_xls_r_300m_bemba_fds', 'bem.speech2text.wav2vec_xlsr.v2_large_300m_gpu': 'asr_wav2vec2_large_xls_r_300m_bemba_fds_gpu', 'bem.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xls_r_1b_bemba_fds_gpu', 'bem.speech2text.wav2vec_xlsr.v2_large_gpu.by_csikasote': 'asr_wav2vec2_large_xlsr_bemba_gpu'}, 'bg': {'bg.embed.bert.cased_base': 'bert_embeddings_base_bg_cased', 'bg.embed.roberta.base': 'roberta_embeddings_base_bulgarian', 'bg.embed.roberta.small': 'roberta_embeddings_small_bulgarian', 'bg.embed.w2v_cc_300d': 'w2v_cc_300d', 'bg.lemma': 'lemma', 'bg.lemma.btb': 'lemma_btb', 'bg.pos': 'pos_btb', 'bg.pos.ud_btb': 'pos_ud_btb', 'bg.speech2text.wav2vec_xlsr.v2_large_300m': 'asr_wav2vec2_large_xls_r_300m_d2', 'bg.speech2text.wav2vec_xlsr.v2_large_300m_d2_gpu': 'asr_wav2vec2_large_xls_r_300m_d2_gpu', 'bg.stopwords': 'stopwords_iso', 'bg.stopwords.iso': 'stopwords_iso'}, 'bh': {'bh.embed.w2v_cc_300d': 'w2v_cc_300d', 'bh.lemma': 'lemma', 'bh.pos': 'pos_ud_bhtb'}, 'bn': { 
'bn.answer_question.tydiqa.multi_lingual_bert': 'bert_qa_mbert_bengali_tydiqa_qa', 'bn.classify.roberta': 'roberta_classifier_autotrain_citizen_nlu_bn_1370652766', 'bn.embed': 'bengali_cc_300d', 'bn.embed.bangala_bert': 'bert_embeddings_bangla_bert_base', 'bn.embed.bangla_bert': 'bert_embeddings_bangla_bert', 'bn.embed.bert': 'bert_embeddings_indic_transformers', 'bn.embed.bert.base': 'bert_embeddings_bangla_base', 'bn.embed.distil_bert': 'distilbert_embeddings_indic_transformers', 'bn.embed.glove': 'bengali_cc_300d', 'bn.embed.indic_transformers_bn_bert': 'bert_embeddings_indic_transformers_bn_bert', 'bn.embed.indic_transformers_bn_distilbert': 'distilbert_embeddings_indic_transformers_bn_distilbert', 'bn.embed.muril_adapted_local': 'bert_embeddings_muril_adapted_local', 'bn.embed.roberta': 'roberta_embeddings_indic_transformers', 'bn.embed.w2v_cc_300d': 'w2v_cc_300d', 'bn.embed.xlmr_roberta': 'xlmroberta_embeddings_indic_transformers_bn_xlmroberta', 'bn.lemma': 'lemma', 'bn.ner': 'bengaliner_cc_300d', 'bn.ner.glove': 'ner_jifs_glove_840B_300d', 'bn.ner.jifs': 'ner_jifs_glove_840B_300d', 'bn.pos': 'pos_msri', 'bn.stopwords': 'stopwords_iso', 'bn.stopwords.iso': 'stopwords_iso'}, 'bo': {'bo.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'bpy': {'bpy.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'br': {'br.embed.w2v_cc_300d': 'w2v_cc_300d', 'br.lemma': 'lemma', 'br.pos': 'pos_ud_keb', 'br.stopwords': 'stopwords_br'}, 'bs': {'bs.detect_sentence': 'sentence_detector_dl', 'bs.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'ca': { 'ca.answer_question.roberta.base_cased': 'roberta_qa_roberta_base_ca_cased_qa', 'ca.answer_question.roberta.cased_base': 'roberta_qa_base_ca_cased', 'ca.answer_question.roberta.cased_v2_base': 'roberta_qa_base_ca_v2_cased', 'ca.answer_question.roberta.squad.base': 'roberta_qa_roberta_base_squad', 'ca.classify.roberta.base_finetuned': 'roberta_classifier_base_ca_finetuned_cyberbullying_catalan', 'ca.classify.roberta.base_finetuned.by_jonatangk': 
'roberta_classifier_base_ca_finetuned_tecla', 'ca.classify.roberta.cased_base': 'roberta_classifier_base_ca_sts_cased', 'ca.classify.roberta.cased_base.tc.by_projecte_aina': 'roberta_classifier_base_ca_tc_cased', 'ca.classify.roberta.cased_base.te.by_projecte_aina': 'roberta_classifier_base_ca_te_cased', 'ca.classify.roberta.cased_v2_base': 'roberta_classifier_base_ca_v2_sts_cased', 'ca.classify.roberta.cased_v2_base.tc.by_projecte_aina': 'roberta_classifier_base_ca_v2_tc_cased', 'ca.classify.roberta.cased_v2_base.te.by_projecte_aina': 'roberta_classifier_base_ca_v2_te_cased', 'ca.classify.roberta.catalonia_independence.base_finetuned': 'roberta_classifier_base_ca_finetuned_catalonia_independence_detector', 'ca.embed.w2v_cc_300d': 'w2v_cc_300d', 'ca.lemma': 'lemma_spacylookup', 'ca.lemma.ancora': 'lemma_ancora', 'ca.lemma.spacylookup': 'lemma_spacylookup', 'ca.ner.pos.universal_dependencies.': 'roberta_pos_veganuary_pos', 'ca.ner.roberta.cased_base': 'roberta_ner_roberta_base_ca_cased_ner', 'ca.pos': 'pos_ud_ancora', 'ca.pos.ancora': 'pos_ancora', 'ca.speech2text.wav2vec2.voxpopuli.v2_large': 'asr_wav2vec2_large_100k_voxpopuli_catala_by_ccoreilly', 'ca.speech2text.wav2vec2.voxpopuli.v2_large.by_softcatala': 'asr_wav2vec2_large_100k_voxpopuli_catala_by_softcatala', 'ca.speech2text.wav2vec2.voxpopuli.v2_large_gpu': 'asr_wav2vec2_large_100k_voxpopuli_catala_by_ccoreilly_gpu', 'ca.speech2text.wav2vec2.voxpopuli.v2_large_gpu.by_softcatala': 'asr_wav2vec2_large_100k_voxpopuli_catala_by_softcatala_gpu', 'ca.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_catalan', 'ca.stopwords': 'stopwords_iso', 'ca.stopwords.iso': 'stopwords_iso'}, 'ce': {'ce.embed.w2v_cc_300d': 'w2v_cc_300d', 'ce.ner.bert.conll.finetuned': 'bert_ner_ce_bert_finetuned_ner'}, 'ceb': {'ceb.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'ch': {'ch.answer_question.roberta': 'roberta_qa_addi_ch_roberta', 'ch.answer_question.xlm_roberta': 'xlm_roberta_qa_ADDI_CH_XLM_R'}, 'ckb': {}, 'co': 
{'co.embed.roberta.small': 'roberta_embeddings_codeberta_small_v1', 'co.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'cop': {'cop.lemma': 'lemma_scriptorium', 'cop.lemma.scriptorium': 'lemma_scriptorium', 'cop.pos': 'pos_scriptorium'}, 'cs': {'cs.embed.bert': 'bert_embeddings_fernet_c5', 'cs.embed.roberta.news.': 'roberta_embeddings_fernet_news', 'cs.embed.w2v_cc_300d': 'w2v_cc_300d', 'cs.lemma': 'lemma_cltt', 'cs.lemma.cac': 'lemma_cac', 'cs.lemma.cltt': 'lemma_cltt', 'cs.lemma.fictree': 'lemma_fictree', 'cs.lemma.pdt': 'lemma_pdt', 'cs.lemma.spacylookup': 'lemma_spacylookup', 'cs.pos': 'pos_fictree', 'cs.pos.cac': 'pos_cac', 'cs.pos.cltt': 'pos_cltt', 'cs.pos.fictree': 'pos_fictree', 'cs.pos.pdt': 'pos_pdt', 'cs.pos.ud_pdt': 'pos_ud_pdt', 'cs.speech2text.wav2vec_xlsr.v2_300m': 'asr_wav2vec2_xls_r_300m_250', 'cs.speech2text.wav2vec_xlsr.v2_300m_gpu': 'asr_wav2vec2_xls_r_300m_250_gpu', 'cs.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_czech', 'cs.speech2text.wav2vec_xlsr.v2_large.by_arampacha': 'asr_wav2vec2_large_xlsr_czech', 'cs.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_czech_gpu', 'cs.stopwords': 'stopwords_iso', 'cs.stopwords.iso': 'stopwords_iso'}, 'cu': {'cu.lemma': 'lemma_proiel', 'cu.lemma.proiel': 'lemma_proiel', 'cu.pos': 'pos_proiel'}, 'cv': {'cv.answer_question.roberta': 'roberta_qa_cv_custom_ds', 'cv.answer_question.roberta.by_sunitha': 'roberta_qa_cv_merge_ds', 'cv.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'cy': {'cy.embed.w2v_cc_300d': 'w2v_cc_300d', 'cy.lemma': 'lemma', 'cy.lemma.ccg': 'lemma_ccg', 'cy.pos.ccg': 'pos_ccg'}, 'da': {'da.answer_question.squad.bert': 'bert_qa_danish_bert_botxo_qa_squad', 'da.answer_question.squad.xlmr_roberta.base': 'xlm_roberta_qa_xlmr_base_texas_squad_da_da_saattrupdan', 'da.classify.bert': 'bert_classifier_analytical', 'da.classify.bert.by_larskjeldgaard': 'bert_classifier_larskjeldgaard_senda', 'da.classify.bert.by_nikolajmunch': 'bert_classifier_danish_emotion_classification', 
'da.classify.bert.by_pin': 'bert_classifier_pin_senda', 'da.classify.bert.go_emotions.': 'bert_classifier_da_hyggebert', 'da.classify.bert.hate.': 'bert_classifier_da_hatespeech_classification', 'da.classify.bert.hate.by_danlp': 'bert_classifier_da_hatespeech_detection', 'da.classify.bert.sentiment.': 'bert_sequence_classifier_sentiment', 'da.classify.bert.sentiment.by_danlp': 'bert_classifier_da_tone_sentiment_polarity', 'da.classify.bert.twitter.': 'bert_classifier_da_tone_subjective_objective', 'da.classify.emotion.bert.by_danlp': 'bert_classifier_da_emotion_classification', 'da.classify.emotion_binary.bert.by_danlp': 'bert_classifier_da_emotion_binary', 'da.classify.xlmr_roberta': 'xlmroberta_classifier_da_xlmr_ned', 'da.embed.bert.cased_base': 'bert_embeddings_base_da_cased', 'da.embed.w2v_cc_300d': 'w2v_cc_300d', 'da.lemma': 'lemma_spacylookup', 'da.lemma.ddt': 'lemma_ddt', 'da.lemma.spacylookup': 'lemma_spacylookup', 'da.ner': 'dane_ner_6B_100', 'da.ner.6B_100D': 'dane_ner_6B_100', 'da.ner.6B_300D': 'dane_ner_6B_300', 'da.ner.840B_300D': 'dane_ner_840B_300', 'da.ner.bert': 'bert_ner_da_bert_ner', 'da.ner.bert.by_maltehb': 'bert_ner_danish_bert_botxo_ner_dane', 'da.ner.pos': 'bert_pos_bert_punct_restoration_da_alvenir', 'da.ner.xlmr_roberta': 'xlmroberta_ner_employment_contract_ner', 'da.pos': 'pos_ud_ddt', 'da.pos.ddt': 'pos_ddt', 'da.speech2text.wav2vec2.v2_base': 'asr_alvenir_wav2vec2_base_nst_cv9', 'da.speech2text.wav2vec2.v2_base.by_alvenir': 'asr_wav2vec2_base_nst', 'da.speech2text.wav2vec2.v2_base_gpu': 'asr_wav2vec2_base_nst_gpu', 'da.speech2text.wav2vec_xlsr.300m': 'asr_xls_r_300m_danish_nst_cv9', 'da.speech2text.wav2vec_xlsr.300m.by_chcaa': 'asr_xls_r_300m_nst_cv9', 'da.speech2text.wav2vec_xlsr.300m_gpu': 'asr_xls_r_300m_nst_cv9_gpu', 'da.speech2text.wav2vec_xlsr.v2_300m': 'asr_wav2vec2_xls_r_300m_ftspeech', 'da.stopwords': 'stopwords_iso'}, 'de': {'de.answer_question.bert': 'bert_qa_GBERTQnA', 'de.answer_question.bert.cased_multilingual_base': 
'bert_qa_multilingual_bert_base_cased_german', 'de.answer_question.electra': 'electra_qa_German_question_answer', 'de.answer_question.electra.base': 'electra_qa_g_base_germanquad', 'de.answer_question.electra.distilled_base': 'electra_qa_g_base_germanquad_distilled', 'de.answer_question.electra.large': 'electra_qa_g_large_germanquad', 'de.answer_question.roberta': 'roberta_qa_addi_de_roberta', 'de.answer_question.squad_de_tuned.xlmr_roberta.base.by_saattrupdan': 'xlm_roberta_qa_xlmr_base_texas_squad_de_de_saattrupdan', 'de.answer_question.squad_spanish_tuned.xlmr_roberta.base.by_saattrupdan': 'xlm_roberta_qa_xlmr_base_texas_squad_es_es_saattrupdan', 'de.answer_question.squadv2.bert': 'bert_qa_bert_multi_english_german_squad2', 'de.answer_question.squadv2.electra.base': 'electra_qa_base_squad2', 'de.answer_question.xlm_roberta': 'xlm_roberta_qa_ADDI_DE_XLM_R', 'de.answer_question.xlm_roberta.base': 'xlm_roberta_qa_xlm_roberta_base_german', 'de.classify.bert': 'bert_classifier_autonlp_doctor_de_24595545', 'de.classify.bert.base': 'bert_classifier_gbert_base_germandpr_reranking', 'de.classify.bert.base_finetuned': 'bert_classifier_gbert_base_finetuned_cefr', 'de.classify.bert.by_muhtasham': 'bert_classifier_autonlp_doctor_de_24595546', 'de.classify.bert.cased_base': 'bert_classifier_bert_base_german_cased_german_hotel_classification', 'de.classify.bert.cased_base.by_mathking': 'bert_classifier_bert_base_german_cased_gnad10', 'de.classify.bert.hate.': 'bert_classifier_dehatebert_mono_german', 'de.classify.bert.hate.cased_base': 'bert_classifier_base_german_cased_hatespeech_germeval18coarse', 'de.classify.bert.large': 'bert_classifier_gbert_large_jobad_classification_34', 'de.classify.bert.multi2convai.': 'bert_classifier_multi2convai_corona', 'de.classify.bert.news_sentiment.': 'bert_classifier_german_news_sentiment', 'de.classify.bert.pawsx_xtreme.cased_base_finetuned': 'bert_sequence_classifier_base_german_dbmdz_cased_finetuned_pawsx', 'de.classify.bert.sentiment.': 
'bert_sequence_classifier_sentiment', 'de.classify.bert.sentiment.by_oliverguhr': 'bert_classifier_german_sentiment', 'de.classify.bert.sentiment.cased_base': 'bert_classifier_bert_base_german_cased_german_hotel_sentiment', 'de.classify.bert.sentiment.cased_base.by_deepset': 'bert_classifier_base_german_cased_sentiment_germeval17', 'de.classify.bert.sentiment_twitter.': 'bert_classifier_german_sentiment_twitter', 'de.classify.bert.uncased_base': 'bert_classifier_bert_base_uncased_german_multilable_classification', 'de.classify.distil_bert': 'distilbert_sequence_classifier_autonlp_doctor_de_24595544', 'de.classify.distil_bert.base': 'distilbert_base_sequence_classifier_toxicity', 'de.classify.distil_bert.base.by_ml6team': 'distilbert_sequence_classifier_cross_encoder_mmarco_german_distilbert_base', 'de.classify.distil_bert.cased_base': 'distilbert_sequence_classifier_distilbert_base_german_cased_toxic_comments', 'de.classify.gewerke.bert.by_cm_mueller': 'bert_classifier_bacnet_klassifizierung_gewerke', 'de.classify.heizungstechnik.bert.by_cm_mueller': 'bert_classifier_bacnet_klassifizierung_heizungstechnik', 'de.classify.kaeltettechnik.bert.by_cm_mueller': 'bert_classifier_bacnet_klassifizierung_kaeltettechnik', 'de.classify.logistics.bert.multi2convai.by_inovex': 'bert_classifier_multi2convai_logistics', 'de.classify.news': 'classifierdl_bert_news', 'de.classify.news_sentiment.bert': 'bert_sequence_classifier_news_sentiment', 'de.classify.quality.bert.multi2convai.by_inovex': 'bert_classifier_multi2convai_quality', 'de.classify.raumlufttechnik.bert.by_cm_mueller': 'bert_classifier_bacnet_klassifizierung_raumlufttechnik', 'de.classify.roberta': 'roberta_classifier_autonlp_doctor_de_24595548', 'de.classify.sanitaertechnik.bert.by_cm_mueller': 'bert_classifier_bacnet_klassifizierung_sanitaertechnik', 'de.classify.sentiment.bert': 'classifierdl_bert_sentiment', 'de.embed.albert_german_ner': 'albert_embeddings_albert_german_ner', 'de.embed.bert': 
'bert_base_german_cased', 'de.embed.bert.base': 'bert_embeddings_g_base', 'de.embed.bert.by_smanjil': 'bert_embeddings_german_medbert', 'de.embed.bert.cased_base': 'bert_embeddings_base_de_cased', 'de.embed.bert.cased_base.by_dbmdz': 'bert_embeddings_dbmdz_base_german_cased', 'de.embed.bert.cased_base.by_uploaded by huggingface': 'bert_embeddings_base_german_cased', 'de.embed.bert.finance': 'bert_sentence_embeddings_financial', 'de.embed.bert.large': 'bert_embeddings_g_large', 'de.embed.bert.uncased': 'bert_base_german_uncased', 'de.embed.bert.uncased_base': 'bert_embeddings_base_german_uncased', 'de.embed.bert_base_5lang_cased': 'bert_embeddings_bert_base_5lang_cased', 'de.embed.bert_base_de_cased': 'bert_embeddings_bert_base_de_cased', 'de.embed.bert_base_german_cased_oldvocab': 'bert_embeddings_bert_base_german_cased_oldvocab', 'de.embed.bert_base_german_dbmdz_cased': 'bert_embeddings_bert_base_german_dbmdz_cased', 'de.embed.bert_base_german_dbmdz_uncased': 'bert_embeddings_bert_base_german_dbmdz_uncased', 'de.embed.bert_base_german_uncased': 'bert_embeddings_bert_base_german_uncased', 'de.embed.bert_base_historical_german_rw_cased': 'bert_embeddings_bert_base_historical_german_rw_cased', 'de.embed.distilbert_base_de_cased': 'distilbert_embeddings_distilbert_base_de_cased', 'de.embed.distilbert_base_german_cased': 'distilbert_embeddings_distilbert_base_german_cased', 'de.embed.electra.base': 'electra_embeddings_gelectra_base_generator', 'de.embed.electra.cased_base_64d': 'electra_embeddings_electra_base_gc4_64k_0_cased_generator', 'de.embed.electra.cased_base_gc4_64k_100000.by_stefan_it': 'electra_embeddings_electra_base_gc4_64k_100000_cased_generator', 'de.embed.electra.cased_base_gc4_64k_1000000.by_stefan_it': 'electra_embeddings_electra_base_gc4_64k_1000000_cased_generator', 'de.embed.electra.cased_base_gc4_64k_200000.by_stefan_it': 'electra_embeddings_electra_base_gc4_64k_200000_cased_generator', 'de.embed.electra.cased_base_gc4_64k_300000.by_stefan_it': 
'electra_embeddings_electra_base_gc4_64k_300000_cased_generator', 'de.embed.electra.cased_base_gc4_64k_400000.by_stefan_it': 'electra_embeddings_electra_base_gc4_64k_400000_cased_generator', 'de.embed.electra.cased_base_gc4_64k_500000.by_stefan_it': 'electra_embeddings_electra_base_gc4_64k_500000_cased_generator', 'de.embed.electra.cased_base_gc4_64k_600000.by_stefan_it': 'electra_embeddings_electra_base_gc4_64k_600000_cased_generator', 'de.embed.electra.cased_base_gc4_64k_700000.by_stefan_it': 'electra_embeddings_electra_base_gc4_64k_700000_cased_generator', 'de.embed.electra.cased_base_gc4_64k_800000.by_stefan_it': 'electra_embeddings_electra_base_gc4_64k_800000_cased_generator', 'de.embed.electra.cased_base_gc4_64k_900000.by_stefan_it': 'electra_embeddings_electra_base_gc4_64k_900000_cased_generator', 'de.embed.electra.large': 'electra_embeddings_gelectra_large_generator', 'de.embed.gbert_base': 'bert_embeddings_gbert_base', 'de.embed.gbert_large': 'bert_embeddings_gbert_large', 'de.embed.german_financial_statements_bert': 'bert_embeddings_german_financial_statements_bert', 'de.embed.medbert': 'bert_embeddings_German_MedBERT', 'de.embed.roberta': 'roberta_embeddings_hotelbert', 'de.embed.roberta.small': 'roberta_embeddings_hotelbert_small', 'de.embed.roberta_base_wechsel_german': 'roberta_embeddings_roberta_base_wechsel_german', 'de.embed_sentence.bert.base_cased': 'sent_bert_base_cased', 'de.lemma': 'lemma_spacylookup', 'de.lemma.gsd': 'lemma_gsd', 'de.lemma.hdt': 'lemma_hdt', 'de.lemma.spacylookup': 'lemma_spacylookup', 'de.ner': 'wikiner_840B_300', 'de.ner.bert': 'bert_ner_bert_de_ner', 'de.ner.bert.base': 'bert_ner_gbert_base_germaner', 'de.ner.bert.base_finetuned': 'bert_token_classifier_base_german_finetuned_ler', 'de.ner.bert.by_elenanereiss': 'bert_token_classifier_german_ler', 'de.ner.bert.by_severinsimmler': 'bert_ner_german_press_bert', 'de.ner.bert.by_tariqyousef': 'bert_ner_german_intensifiers_tagging', 'de.ner.bert.by_vera_pro': 
'bert_ner_bert_mention_de_vera_pro', 'de.ner.bert.cased_base': 'bert_ner_bert_base_german_cased_20000_ner', 'de.ner.bert.cased_base.by_domischwimmbeck': 'bert_ner_bert_base_german_cased_fine_tuned_ner', 'de.ner.bert.large': 'bert_ner_gbert_large_germaner', 'de.ner.bert.legal': 'bert_token_classifier_ner_legal', 'de.ner.bert.own_data.cased_base.by_domischwimmbeck': 'bert_ner_bert_base_german_cased_own_data_ner', 'de.ner.bert.uncased_base': 'bert_ner_bert_base_german_cased_20000_ner_uncased', 'de.ner.distil_bert.cased_base': 'distilbert_ner_distilbert_base_german_europeana_cased_germeval_14', 'de.ner.literary.bert.by_severinsimmler': 'bert_ner_literary_german_bert', 'de.ner.pos': 'bert_pos_bert_punct_restoration_de_alvenir', 'de.ner.pos.universal_dependencies.base': 'bert_pos_bert_base_german_upos', 'de.ner.pos.universal_dependencies.large': 'bert_pos_bert_large_german_upos', 'de.ner.wikiner': 'wikiner_840B_300', 'de.ner.wikiner.glove.6B_300': 'wikiner_6B_300', 'de.ner.wikiner.glove.840B_300': 'wikiner_840B_300', 'de.ner.xlm': 'xlm_roberta_large_token_classifier_conll03', 'de.ner.xlmr_roberta.base_finetuned': 'xlmroberta_ner_atlantis_base_finetuned_panx', 'de.ner.xlmr_roberta.base_finetuned.by_evs': 'xlmroberta_ner_evs_base_finetuned_panx', 'de.ner.xlmr_roberta.base_finetuned.by_xugenpeng': 'xlmroberta_ner_xugenpeng_base_finetuned_panx', 'de.ner.xlmr_roberta.conll.large_finetuned': 'xlmroberta_ner_xlm_roberta_large_finetuned_conll03_german', 'de.ner.xlmr_roberta.large': 'xlmroberta_ner_gpt2_large_detector_de_v1', 'de.ner.xlmr_roberta.xtreme.base_finetuned': 'xlmroberta_ner_akshat_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_ArneD': 'xlmroberta_ner_arned_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_Cole': 'xlmroberta_ner_cole_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_KayKozaronek': 'xlmroberta_ner_kaykozaronek_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_Leizhang': 
'xlmroberta_ner_leizhang_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_ManqingLiu': 'xlmroberta_ner_manqingliu_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_Neha2608': 'xlmroberta_ner_neha2608_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_Ninh': 'xlmroberta_ner_ninh_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_Rgl73': 'xlmroberta_ner_rgl73_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_SelamatPagi': 'xlmroberta_ner_selamatpagi_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_SimulSt': 'xlmroberta_ner_simulst_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_V3RX2000': 'xlmroberta_ner_v3rx2000_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_andyjennings': 'xlmroberta_ner_andyjennings_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_ardallie': 'xlmroberta_ner_ardallie_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_arrandi': 'xlmroberta_ner_arrandi_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_aytugkaya': 'xlmroberta_ner_aytugkaya_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_be4rr': 'xlmroberta_ner_be4rr_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_cj_mills': 'xlmroberta_ner_cj_mills_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_claytonsamples': 'xlmroberta_ner_claytonsamples_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_clisi2000': 'xlmroberta_ner_clisi2000_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_coolzhao': 'xlmroberta_ner_coolzhao_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_danhsf': 'xlmroberta_ner_danhsf_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_davidenam': 'xlmroberta_ner_davinam_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_dfsj': 
'xlmroberta_ner_dfsj_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_dkasti': 'xlmroberta_ner_dkasti_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_edwardjross': 'xlmroberta_ner_edwardjross_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_ericklerouge123': 'xlmroberta_ner_ericklerouge123_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_flood': 'xlmroberta_ner_flood_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_furyhawk': 'xlmroberta_ner_furyhawk_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_fvector': 'xlmroberta_ner_fvector_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_gbennett': 'xlmroberta_ner_gbennett_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_hadxu': 'xlmroberta_ner_hadxu_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_haesun': 'xlmroberta_ner_haesun_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_harish3110': 'xlmroberta_ner_harish3110_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_hugsao123': 'xlmroberta_ner_hugsao123_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_iis2009002': 'xlmroberta_ner_iis2009002_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_imyday': 'xlmroberta_ner_imyday_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_jamesmarcel': 'xlmroberta_ner_jamesmarcel_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_jamie613': 'xlmroberta_ner_jamie613_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_jasonyim2': 'xlmroberta_ner_jasonyim2_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_jdang': 'xlmroberta_ner_jdang_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_jdeboever': 'xlmroberta_ner_jboever_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_jgriffi': 
'xlmroberta_ner_jgriffi_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_jonfrank': 'xlmroberta_ner_jonfrank_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_k3nneth': 'xlmroberta_ner_k3nneth_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_leixu': 'xlmroberta_ner_leixu_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_lijingxin': 'xlmroberta_ner_lijingxin_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_mertyrgn': 'xlmroberta_ner_mertyrgn_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_miyagawaorj': 'xlmroberta_ner_miyagawaorj_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_moghis': 'xlmroberta_ner_base_finetuned_panx_de_data', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_mubikan': 'xlmroberta_ner_mubikan_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_naam': 'xlmroberta_ner_naam_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_naomiyjchen': 'xlmroberta_ner_naomiyjchen_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_netoass': 'xlmroberta_ner_netoass_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_novarac23': 'xlmroberta_ner_novarac23_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_olpa': 'xlmroberta_ner_olpa_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_osanseviero': 'xlmroberta_ner_osanseviero_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_pdroberts': 'xlmroberta_ner_pdroberts_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_pglauner': 'xlmroberta_ner_pglauner_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_pitspits': 'xlmroberta_ner_pitspits_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_radev': 'xlmroberta_ner_rav_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_rishav_hub': 
'xlmroberta_ner_rishav_hub_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_rishiyoung': 'xlmroberta_ner_rishiyoung_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_robkayinto': 'xlmroberta_ner_robkayinto_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_skr3178': 'xlmroberta_ner_skr3178_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_transformersbook': 'xlmroberta_ner_transformersbook_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_venturaville': 'xlmroberta_ner_venturaville_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_victen': 'xlmroberta_ner_victen_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_xliu128': 'xlmroberta_ner_xliu128_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_yomexa': 'xlmroberta_ner_yomexa_base_finetuned_panx', 'de.ner.xlmr_roberta.xtreme.base_finetuned.by_zdepablo': 'xlmroberta_ner_zpablo_base_finetuned_panx', 'de.pos': 'pos_ud_hdt', 'de.pos.gsd': 'pos_gsd', 'de.pos.hdt': 'pos_hdt', 'de.pos.ud_hdt': 'pos_ud_hdt', 'de.speech2text.wav2vec2.v2_base': 'asr_wav2vec2_base_german', 'de.speech2text.wav2vec2.v2_base.by_oliverguhr': 'asr_wav2vec2_base_german_cv9', 'de.speech2text.wav2vec2.v2_base_gpu': 'asr_wav2vec2_base_german_cv9_gpu', 'de.speech2text.wav2vec2.v2_base_gpu.by_aware_ai': 'asr_wav2vec2_base_german_gpu', 'de.speech2text.wav2vec2.v2_gpu_s103_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_0_austria_10_s103_gpu', 'de.speech2text.wav2vec2.v2_gpu_s377_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_0_austria_10_s377_gpu', 'de.speech2text.wav2vec2.v2_gpu_s37_exp': 'asr_exp_w2v2t_r_wav2vec2_s37_gpu', 'de.speech2text.wav2vec2.v2_gpu_s3_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_5_austria_5_s3_gpu', 'de.speech2text.wav2vec2.v2_gpu_s445_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_8_austria_2_s445_gpu', 'de.speech2text.wav2vec2.v2_gpu_s460_exp': 'asr_exp_w2v2t_r_wav2vec2_s460_gpu', 
'de.speech2text.wav2vec2.v2_gpu_s466_exp': 'asr_exp_w2v2t_r_wav2vec2_s466_gpu', 'de.speech2text.wav2vec2.v2_gpu_s468_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_2_austria_8_s468_gpu', 'de.speech2text.wav2vec2.v2_gpu_s527_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_10_austria_0_s527_gpu', 'de.speech2text.wav2vec2.v2_gpu_s545_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_10_austria_0_s545_gpu', 'de.speech2text.wav2vec2.v2_gpu_s627_vp_exp': 'asr_exp_w2v2t_vp_100k_s627_gpu', 'de.speech2text.wav2vec2.v2_gpu_s732_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_2_austria_8_s732_gpu', 'de.speech2text.wav2vec2.v2_gpu_s756_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_0_austria_10_s756_gpu', 'de.speech2text.wav2vec2.v2_gpu_s779_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_10_austria_0_s779_gpu', 'de.speech2text.wav2vec2.v2_gpu_s803_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_5_austria_5_s803_gpu', 'de.speech2text.wav2vec2.v2_gpu_s957_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_2_austria_8_s957_gpu', 'de.speech2text.wav2vec2.v2_gpu_s95_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_5_austria_5_s95_gpu', 'de.speech2text.wav2vec2.v2_gpu_s962_vp_exp': 'asr_exp_w2v2t_vp_s962_gpu', 'de.speech2text.wav2vec2.v2_gpu_s982_exp': 'asr_exp_w2v2t_wav2vec2_s982_gpu', 'de.speech2text.wav2vec2.v2_s103_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_0_austria_10_s103', 'de.speech2text.wav2vec2.v2_s184_vp_exp': 'asr_exp_w2v2t_vp_s184', 'de.speech2text.wav2vec2.v2_s377_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_0_austria_10_s377', 'de.speech2text.wav2vec2.v2_s37_exp': 'asr_exp_w2v2t_r_wav2vec2_s37', 'de.speech2text.wav2vec2.v2_s3_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_5_austria_5_s3', 'de.speech2text.wav2vec2.v2_s445_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_8_austria_2_s445', 'de.speech2text.wav2vec2.v2_s460_exp': 'asr_exp_w2v2t_r_wav2vec2_s460', 'de.speech2text.wav2vec2.v2_s466_exp': 'asr_exp_w2v2t_r_wav2vec2_s466', 'de.speech2text.wav2vec2.v2_s468_vp_exp': 
'asr_exp_w2v2r_vp_100k_accent_germany_2_austria_8_s468', 'de.speech2text.wav2vec2.v2_s527_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_10_austria_0_s527', 'de.speech2text.wav2vec2.v2_s545_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_10_austria_0_s545', 'de.speech2text.wav2vec2.v2_s627_vp_exp': 'asr_exp_w2v2t_vp_100k_s627', 'de.speech2text.wav2vec2.v2_s732_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_2_austria_8_s732', 'de.speech2text.wav2vec2.v2_s756_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_0_austria_10_s756', 'de.speech2text.wav2vec2.v2_s779_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_10_austria_0_s779', 'de.speech2text.wav2vec2.v2_s803_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_5_austria_5_s803', 'de.speech2text.wav2vec2.v2_s946_vp_exp': 'asr_exp_w2v2t_vp_s946', 'de.speech2text.wav2vec2.v2_s957_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_2_austria_8_s957', 'de.speech2text.wav2vec2.v2_s95_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_germany_5_austria_5_s95', 'de.speech2text.wav2vec2.v2_s962_vp_exp': 'asr_exp_w2v2t_vp_s962', 'de.speech2text.wav2vec2.v2_s982_exp': 'asr_exp_w2v2t_wav2vec2_s982', 'de.speech2text.wav2vec_xlsr.gender_male_female.v2_gpu_s204_exp': 'asr_exp_w2v2r_xls_r_gender_male_10_female_0_s204_gpu', 'de.speech2text.wav2vec_xlsr.gender_male_female.v2_gpu_s896_exp': 'asr_exp_w2v2r_xls_r_gender_male_5_female_5_s896_gpu', 'de.speech2text.wav2vec_xlsr.gender_male_female.v2_s204_exp': 'asr_exp_w2v2r_xls_r_gender_male_10_female_0_s204', 'de.speech2text.wav2vec_xlsr.gender_male_female.v2_s896_exp': 'asr_exp_w2v2r_xls_r_gender_male_5_female_5_s896', 'de.speech2text.wav2vec_xlsr.gender_male_female.v2_s922_exp': 'asr_exp_w2v2r_xls_r_gender_male_0_female_10_s922', 'de.speech2text.wav2vec_xlsr.v2_300m': 'asr_wav2vec2_xls_r_300m_english_by_aware_ai', 'de.speech2text.wav2vec_xlsr.v2_300m_gpu': 'asr_wav2vec2_xls_r_300m_english_by_aware_ai_gpu', 'de.speech2text.wav2vec_xlsr.v2_gpu_s143_exp': 'asr_exp_w2v2t_xls_r_s143_gpu', 
'de.speech2text.wav2vec_xlsr.v2_gpu_s350_exp': 'asr_exp_w2v2r_xls_r_accent_germany_0_austria_10_s350_gpu', 'de.speech2text.wav2vec_xlsr.v2_gpu_s381_exp': 'asr_exp_w2v2r_xls_r_accent_germany_0_austria_10_s381_gpu', 'de.speech2text.wav2vec_xlsr.v2_gpu_s412_exp': 'asr_exp_w2v2r_xls_r_accent_germany_5_austria_5_s412_gpu', 'de.speech2text.wav2vec_xlsr.v2_gpu_s42_exp': 'asr_exp_w2v2r_xls_r_accent_germany_8_austria_2_s42_gpu', 'de.speech2text.wav2vec_xlsr.v2_gpu_s438_exp': 'asr_exp_w2v2t_xls_r_s438_gpu', 'de.speech2text.wav2vec_xlsr.v2_gpu_s458_exp': 'asr_exp_w2v2r_xls_r_accent_germany_2_austria_8_s458_gpu', 'de.speech2text.wav2vec_xlsr.v2_gpu_s534_exp': 'asr_exp_w2v2r_xls_r_accent_germany_5_austria_5_s534_gpu', 'de.speech2text.wav2vec_xlsr.v2_gpu_s543_exp': 'asr_exp_w2v2r_xls_r_accent_germany_2_austria_8_s543_gpu', 'de.speech2text.wav2vec_xlsr.v2_gpu_s673_exp': 'asr_exp_w2v2r_xls_r_accent_germany_0_austria_10_s673_gpu', 'de.speech2text.wav2vec_xlsr.v2_gpu_s728_exp': 'asr_exp_w2v2r_xls_r_accent_germany_10_austria_0_s728_gpu', 'de.speech2text.wav2vec_xlsr.v2_gpu_s886_exp': 'asr_exp_w2v2r_xls_r_accent_germany_10_austria_0_s886_gpu', 'de.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_german_by_facebook', 'de.speech2text.wav2vec_xlsr.v2_large.by_aware_ai': 'asr_wav2vec2_large_xlsr_53_german_with_lm', 'de.speech2text.wav2vec_xlsr.v2_large.by_jonatasgrosman': 'asr_wav2vec2_large_xlsr_53_german_by_jonatasgrosman', 'de.speech2text.wav2vec_xlsr.v2_large.by_marcel': 'asr_wav2vec2_large_xlsr_53_german_by_marcel', 'de.speech2text.wav2vec_xlsr.v2_large.by_maxidl': 'asr_wav2vec2_large_xlsr_german_by_maxidl', 'de.speech2text.wav2vec_xlsr.v2_large.by_mehdihosseinimoghadam': 'asr_wav2vec2_large_xlsr_53_german', 'de.speech2text.wav2vec_xlsr.v2_large.by_oliverguhr': 'asr_wav2vec2_large_xlsr_53_german_by_oliverguhr', 'de.speech2text.wav2vec_xlsr.v2_large_cv8.by_oliverguhr': 'asr_wav2vec2_large_xlsr_53_german_cv8', 'de.speech2text.wav2vec_xlsr.v2_large_cv8_gpu.by_oliverguhr': 
'asr_wav2vec2_large_xlsr_53_german_cv8_gpu', 'de.speech2text.wav2vec_xlsr.v2_large_cv9.by_oliverguhr': 'asr_wav2vec2_large_xlsr_53_german_cv9', 'de.speech2text.wav2vec_xlsr.v2_large_cv9_gpu.by_oliverguhr': 'asr_wav2vec2_large_xlsr_53_german_cv9_gpu', 'de.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_german_gpu', 'de.speech2text.wav2vec_xlsr.v2_large_gpu.by_facebook': 'asr_wav2vec2_large_xlsr_53_german_by_facebook_gpu', 'de.speech2text.wav2vec_xlsr.v2_large_gpu.by_marcel': 'asr_wav2vec2_large_xlsr_53_german_by_marcel_gpu', 'de.speech2text.wav2vec_xlsr.v2_large_gpu.by_maxidl': 'asr_wav2vec2_large_xlsr_german_by_maxidl_gpu', 'de.speech2text.wav2vec_xlsr.v2_large_gpu.by_oliverguhr': 'asr_wav2vec2_large_xlsr_53_german_by_oliverguhr_gpu', 'de.speech2text.wav2vec_xlsr.v2_s143_exp': 'asr_exp_w2v2t_xls_r_s143', 'de.speech2text.wav2vec_xlsr.v2_s295_exp': 'asr_exp_w2v2r_xls_r_accent_germany_10_austria_0_s295', 'de.speech2text.wav2vec_xlsr.v2_s350_exp': 'asr_exp_w2v2r_xls_r_accent_germany_0_austria_10_s350', 'de.speech2text.wav2vec_xlsr.v2_s368_exp': 'asr_exp_w2v2r_xls_r_accent_germany_2_austria_8_s368', 'de.speech2text.wav2vec_xlsr.v2_s381_exp': 'asr_exp_w2v2r_xls_r_accent_germany_0_austria_10_s381', 'de.speech2text.wav2vec_xlsr.v2_s412_exp': 'asr_exp_w2v2r_xls_r_accent_germany_5_austria_5_s412', 'de.speech2text.wav2vec_xlsr.v2_s42_exp': 'asr_exp_w2v2r_xls_r_accent_germany_8_austria_2_s42', 'de.speech2text.wav2vec_xlsr.v2_s438_exp': 'asr_exp_w2v2t_xls_r_s438', 'de.speech2text.wav2vec_xlsr.v2_s458_exp': 'asr_exp_w2v2r_xls_r_accent_germany_2_austria_8_s458', 'de.speech2text.wav2vec_xlsr.v2_s534_exp': 'asr_exp_w2v2r_xls_r_accent_germany_5_austria_5_s534', 'de.speech2text.wav2vec_xlsr.v2_s543_exp': 'asr_exp_w2v2r_xls_r_accent_germany_2_austria_8_s543', 'de.speech2text.wav2vec_xlsr.v2_s673_exp': 'asr_exp_w2v2r_xls_r_accent_germany_0_austria_10_s673', 'de.speech2text.wav2vec_xlsr.v2_s728_exp': 'asr_exp_w2v2r_xls_r_accent_germany_10_austria_0_s728', 
'de.speech2text.wav2vec_xlsr.v2_s886_exp': 'asr_exp_w2v2r_xls_r_accent_germany_10_austria_0_s886', 'de.speech2text.wav2vec_xlsr.v3_large.by_marcel': 'asr_wav2vec2_large_xlsr_german_demo', 'de.stopwords': 'stopwords_iso', 'de.stopwords.iso': 'stopwords_iso'}, 'diq': {'diq.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'dv': { 'dv.classify.electra.news.small': 'electra_classifier_dv_small_news_classification', 'dv.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'el': {'el.answer_question.bert': 'bert_qa_qacombination_bert_el_Danastos', 'el.answer_question.bert.danastos': 'bert_qa_nq_bert_el_danastos', 'el.answer_question.bert.news_sqa.': 'bert_qa_newsqa_bert_el_danastos', 'el.answer_question.bert.news_sqa.by_danastos': 'bert_qa_newsqa_el_4', 'el.answer_question.bert.squad.': 'bert_qa_squad_bert_el_danastos', 'el.answer_question.bert.squad.v1.by_danastos': 'bert_qa_nq_squad_el_3', 'el.answer_question.bert.squad.v2.by_danastos': 'bert_qa_nq_squad_el_4', 'el.answer_question.bert.squad.v3.by_danastos': 'bert_qa_squad_el_4', 'el.answer_question.bert.trivia.': 'bert_qa_triviaqa_bert_el_danastos', 'el.answer_question.bert.trivia.by_danastos': 'bert_qa_triviaqa_el_4', 'el.answer_question.bert_v1.by_danastos': 'bert_qa_nq_el_4', 'el.answer_question.bert_v2.by_danastos': 'bert_qa_qacombination_el_4', 'el.answer_question.bert_v3.by_danastos': 'bert_qa_qacombined_el_3', 'el.answer_question.bert_v4.by_danastos': 'bert_qa_qacombined_el_4', 'el.classify.roberta': 'roberta_classifier_mediawatch_el_topics', 'el.classify.roberta.by_cvcio': 'roberta_classifier_comments_el_toxic', 'el.embed.bert.base_uncased': 'bert_base_uncased', 'el.embed.bert.cased_base': 'bert_embeddings_base_el_cased', 'el.embed.bert.uncased_base': 'bert_embeddings_greeksocial_base_greek_uncased_v1', 'el.embed.roberta.uncased_base': 'roberta_embeddings_palobert_base_greek_uncased_v1', 'el.embed_sentence.bert.base_uncased': 'sent_bert_base_uncased', 'el.lemma': 'lemma', 'el.lemma.gdt': 'lemma_gdt', 'el.pos': 'pos_ud_gdt', 'el.pos.gdt': 
'pos_gdt', 'el.pos.ud_gdt': 'pos_ud_gdt', 'el.speech2text.wav2vec2': 'asr_greek_lsr_1', 'el.speech2text.wav2vec_xlsr.v2': 'asr_xlsr_53_wav2vec_greek', 'el.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_greek_by_perelluis13', 'el.speech2text.wav2vec_xlsr.v2_large.by_bagus': 'asr_wav2vec2_large_xlsr_bahasa_indonesia', 'el.speech2text.wav2vec_xlsr.v2_large.by_jonatasgrosman': 'asr_wav2vec2_large_xlsr_53_greek_by_jonatasgrosman', 'el.speech2text.wav2vec_xlsr.v2_large.by_lighteternal': 'asr_wav2vec2_large_xlsr_53_greek_by_lighteternal', 'el.speech2text.wav2vec_xlsr.v2_large.by_skylord': 'asr_wav2vec2_large_xlsr_greek_1', 'el.speech2text.wav2vec_xlsr.v2_large.by_vasilis': 'asr_wav2vec2_large_xlsr_53_greek_by_vasilis', 'el.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_greek_by_perelluis13_gpu', 'el.speech2text.wav2vec_xlsr.v2_large_gpu.by_bagus': 'asr_wav2vec2_large_xlsr_bahasa_indonesia_gpu', 'el.speech2text.wav2vec_xlsr.v2_large_gpu.by_lighteternal': 'asr_wav2vec2_large_xlsr_53_greek_by_lighteternal_gpu', 'el.speech2text.wav2vec_xlsr.v2_large_gpu.by_skylord': 'asr_wav2vec2_large_xlsr_greek_1_gpu', 'el.speech2text.wav2vec_xlsr.v3_large.by_skylord': 'asr_wav2vec2_large_xlsr_greek_2', 'el.speech2text.wav2vec_xlsr.v3_large_gpu.by_skylord': 'asr_wav2vec2_large_xlsr_greek_2_gpu', 'el.stopwords': 'stopwords_iso', 'el.stopwords.iso': 'stopwords_iso'}, 'eml': {'eml.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'en': {'en.bert.zero_shot_classifier' : 'bert_base_cased_zero_shot_classifier_xnli', 'en.distilbert.zero_shot_classifier': 'distilbert_base_zero_shot_classifier_uncased_mnli', 'en.roberta.zero_shot_classifier': 'roberta_base_zero_shot_classifier_nli', 'en.classify_image.swin.tiny':'image_classifier_swin_tiny_patch4_window7_224', 'en.speech2text.hubert': 'asr_hubert_large_ls960', 'en.speech2text.hubert.large_ls960': 'asr_hubert_large_ls960', 'en.albert': 'albert_base_uncased', 'en.answer_qu estion.mqa_cls.bert.by_xraychen': 'bert_qa_mqa_cls', 
'en.answer_question.albert': 'albert_qa_generic', 'en.answer_question.albert.by_AyushPJ': 'albert_qa_ai_club_inductions_21_nlp', 'en.answer_question.albert.by_SalmanMo': 'albert_qa_QA_1e', 'en.answer_question.albert.by_nlpunibo': 'albert_qa_nlpunibo', 'en.answer_question.albert.by_rowan1224': 'albert_qa_slp', 'en.answer_question.albert.by_saburbutt': 'albert_qa_generic', 'en.answer_question.albert.xl': 'albert_qa_xlarge_finetuned', 'en.answer_question.attribute_correction_mlm.bert.uncased_base.by_ksabeh': 'bert_qa_base_uncased_attribute_correction_mlm', 'en.answer_question.attribute_correction_mlm_titles.bert.uncased_base.by_ksabeh': 'bert_qa_base_uncased_attribute_correction_mlm_titles', 'en.answer_question.bert': 'bert_qa_no_need_to_name_this', 'en.answer_question.bert.32d': 'bert_qa_bert_set_date_1_lr_2e_5_bs_32_ep_4', 'en.answer_question.bert.abs': 'bert_qa_bertlargeabsa', 'en.answer_question.bert.augmented': 'bert_qa_augmented', 'en.answer_question.bert.base': 'bert_qa_bert_base_faquad', 'en.answer_question.bert.base.by_peggyhuang': 'bert_qa_finetune_bert_base_v1', 'en.answer_question.bert.base.by_ricardo-filho': 'bert_qa_bert_base_faquad', 'en.answer_question.bert.base.by_slavka': 'bert_qa_xdzadi00_based_v4', 'en.answer_question.bert.base.by_xraychen': 'bert_qa_mqa_baseline', 'en.answer_question.bert.base.serini.cmrc': 'bert_qa_bertserini_base_cmrc', 'en.answer_question.bert.base_cased': 'bert_qa_bert_base_turkish_cased_finetuned_lr_2e_05_epochs_3', 'en.answer_question.bert.base_cased.by_CenIA': 'bert_qa_bert_base_spanish_wwm_cased_finetuned_qa_tar', 'en.answer_question.bert.base_cased.by_husnu': 'bert_qa_bert_base_turkish_cased_finetuned_lr_2e_05_epochs_3', 'en.answer_question.bert.base_cased.by_nntadotzip': 'bert_qa_bert_base_cased_IUChatbot_ontologyDts', 'en.answer_question.bert.base_uncased': 'bert_qa_bert_base_uncased_coqa', 'en.answer_question.bert.base_uncased.by_CenIA': 'bert_qa_bert_base_spanish_wwm_uncased_finetuned_qa_tar', 
'en.answer_question.bert.base_uncased.by_machine2049': 'bert_qa_bert_base_uncased_finetuned_duorc_bert', 'en.answer_question.bert.base_uncased.by_peggyhuang': 'bert_qa_bert_base_uncased_coqa', 'en.answer_question.bert.base_uncased.by_vanadhi': 'bert_qa_bert_base_uncased_fiqa_flm_sq_flit', 'en.answer_question.bert.base_v2': 'bert_qa_finetune_bert_base_v2', 'en.answer_question.bert.base_v3.by_peggyhuang': 'bert_qa_finetune_bert_base_v3', 'en.answer_question.bert.by_Danastos': 'bert_qa_nq_bert_el_Danastos', 'en.answer_question.bert.by_ForutanRad': 'bert_qa_bert_fa_QA_v1', 'en.answer_question.bert.by_HankyStyle': 'bert_qa_Multi_ling_BERT', 'en.answer_question.bert.by_LenaSchmidt': 'bert_qa_no_need_to_name_this', 'en.answer_question.bert.by_Rocketknight1': 'bert_qa_bert_finetuned_qa', 'en.answer_question.bert.by_SanayCo': 'bert_qa_model_output', 'en.answer_question.bert.by_aozorahime': 'bert_qa_my_new_model', 'en.answer_question.bert.by_aymanm419': 'bert_qa_araSpeedest', 'en.answer_question.bert.by_enoriega': 'bert_qa_rule_softmatching', 'en.answer_question.bert.by_ericRosello': 'bert_qa_results', 'en.answer_question.bert.by_hendrixcosta': 'bert_qa_hendrixcosta', 'en.answer_question.bert.by_internetoftim': 'bert_qa_demo', 'en.answer_question.bert.by_jackh1995': 'bert_qa_bert_finetuned_jackh1995', 'en.answer_question.bert.by_krinal214': 'bert_qa_bert_all_translated', 'en.answer_question.bert.by_manav': 'bert_qa_causal_qa', 'en.answer_question.bert.by_mezes': 'bert_qa_eauction_section_parsing_from_pretrained', 'en.answer_question.bert.by_motiondew': 'bert_qa_bert_finetuned_lr2_e5_b16_ep2', 'en.answer_question.bert.by_mrm8488': 'bert_qa_ManuERT_for_xqua', 'en.answer_question.bert.by_nlpunibo': 'bert_qa_bert', 'en.answer_question.bert.by_nvkha': 'bert_qa_bert_qa_vi_nvkha', 'en.answer_question.bert.by_piEsposito': 'bert_qa_braquad_bert_qna', 'en.answer_question.bert.by_ponmari': 'bert_qa_questionansweing', 'en.answer_question.bert.by_songhee': 'bert_qa_i_manual_m', 
'en.answer_question.bert.by_voidful': 'bert_qa_question_answering_zh_voidful', 'en.answer_question.bert.by_z-uo': 'bert_qa_bert_qasper', 'en.answer_question.bert.cased_base': 'bert_qa_base_cased_iuchatbot_ontologydts_berttokenizer_12april2022', 'en.answer_question.bert.cased_base_finetuned': 'bert_qa_base_cased_finetuned_log_parser_winlogbeat_nowhitespac', 'en.answer_question.bert.cased_base_finetuned.by_slavka': 'bert_qa_base_cased_finetuned_log_parser_winlogbeat_nowhitespace_larg', 'en.answer_question.bert.cased_multilingual_base_finetuned': 'bert_qa_base_multilingual_cased_finetuned_viquad', 'en.answer_question.bert.distilled_base_uncased': 'bert_qa_distilbert_base_uncased_finetuned_custom', 'en.answer_question.bert.docvqa.base_uncased.by_tiennvcs': 'bert_qa_bert_base_uncased_finetuned_docvqa', 'en.answer_question.bert.infovqa.base_uncased.by_tiennvcs': 'bert_qa_bert_base_uncased_finetuned_infovqa', 'en.answer_question.bert.large': 'bert_qa_bert_large_question_answering_finetuned_legal', 'en.answer_question.bert.large.by_Sounak': 'bert_qa_bert_large_finetuned', 'en.answer_question.bert.large.by_atharvamundada99': 'bert_qa_bert_large_question_answering_finetuned_legal', 'en.answer_question.bert.large.by_ricardo-filho': 'bert_qa_bert_large_faquad', 'en.answer_question.bert.large_cased': 'bert_qa_muril_large_cased_hita_qa', 'en.answer_question.bert.large_uncased': 'bert_qa_bert_large_uncased_finetuned_docvqa', 'en.answer_question.bert.legal': 'bert_qa_legal', 'en.answer_question.bert.mlqa.base': 'bert_qa_base_multi_mlqa_dev', 'en.answer_question.bert.mlqa.finetuned': 'bert_qa_mbert_finetuned_mlqa_dev', 'en.answer_question.bert.multilingual_arabic_tuned_base_cased.by_bhavikardeshna': 'bert_qa_multilingual_bert_base_cased_arabic', 'en.answer_question.bert.multilingual_english_tuned_base_cased.by_bhavikardeshna': 'bert_qa_multilingual_bert_base_cased_english', 'en.answer_question.bert.multilingual_german_tuned_base_cased.by_bhavikardeshna': 
'bert_qa_multilingual_bert_base_cased_german', 'en.answer_question.bert.multilingual_hindi_tuned_base_cased.by_bhavikardeshna': 'bert_qa_multilingual_bert_base_cased_hindi', 'en.answer_question.bert.multilingual_spanish_tuned_base_cased.by_bhavikardeshna': 'bert_qa_multilingual_bert_base_cased_spanish', 'en.answer_question.bert.multilingual_vietnamese_tuned_base_cased.by_bhavikardeshna': 'bert_qa_multilingual_bert_base_cased_vietnamese', 'en.answer_question.bert.news_sqa.uncased_large': 'bert_qa_unqover_large_uncased_newsqa', 'en.answer_question.bert.sd1.by_motiondew': 'bert_qa_sd1', 'en.answer_question.bert.sd2.by_motiondew': 'bert_qa_sd2', 'en.answer_question.bert.sd2_lr_5e_5_bs_32_e_3': 'bert_qa_sd2_lr_5e_5_bs_32_e_3', 'en.answer_question.bert.sd3.by_motiondew': 'bert_qa_sd3', 'en.answer_question.bert.set_date_1_impartit_4.by_motiondew': 'bert_qa_set_date_1_impartit_4', 'en.answer_question.bert.sim.by_xraychen': 'bert_qa_mqa_sim', 'en.answer_question.bert.small': 'bert_qa_sd1_small', 'en.answer_question.bert.small.sd2_small.by_motiondew': 'bert_qa_sd2_small', 'en.answer_question.bert.small.sd3_small.by_motiondew': 'bert_qa_sd3_small', 'en.answer_question.bert.squad.': 'bert_qa_kd_squad1.1', 'en.answer_question.bert.squad.by_nadav': 'bert_qa_macsquad', 'en.answer_question.bert.squad.cased_base_finetuned': 'bert_qa_baru98_base_cased_finetuned_squad', 'en.answer_question.bert.squad.cased_base_finetuned.by_anas_awadalla': 'bert_qa_base_cased_finetuned_squad_r3f', 'en.answer_question.bert.squad.cased_base_finetuned.by_victorlee071200': 'bert_qa_victorlee071200_base_cased_finetuned_squad', 'en.answer_question.bert.squad.cased_multilingual_base_finetuned': 'bert_qa_khanh_base_multilingual_cased_finetuned_squad', 'en.answer_question.bert.squad.distilled_uncased_base_finetuned': 'bert_qa_negfir_distilbert_base_uncased_finetuned_squad', 'en.answer_question.bert.squad.finetuned': 'bert_qa_aiyshwariya_finetuned_squad', 
'en.answer_question.bert.squad.finetuned.by_Callmenicky': 'bert_qa_callmenicky_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_CherylTSW': 'bert_qa_cheryltsw_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_DaisyMak': 'bert_qa_finetuned_squad_transformerfrozen_testtoken', 'en.answer_question.bert.squad.finetuned.by_LeoFelix': 'bert_qa_leofelix_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_MyMild': 'bert_qa_mymild_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_Nausheen': 'bert_qa_nausheen_finetuned_squad_accelera', 'en.answer_question.bert.squad.finetuned.by_ParanoidAndroid': 'bert_qa_paranoidandroid_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_WounKai': 'bert_qa_wounkai_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_akmal2500': 'bert_qa_akmal2500_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_chanifrusydi': 'bert_qa_chanifrusydi_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_cjjie': 'bert_qa_cjjie_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_ericw0530': 'bert_qa_ericw0530_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_irenelizihui': 'bert_qa_irenelizihui_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_lewtun': 'bert_qa_lewtun_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_mkkc58': 'bert_qa_mkkc58_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_qgrantq': 'bert_qa_qgrantq_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_susghosh': 'bert_qa_susghosh_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_wiselinjayajos': 'bert_qa_wiselinjayajos_finetuned_squad', 'en.answer_question.bert.squad.finetuned.by_yossra': 'bert_qa_yossra_finetuned_squad', 'en.answer_question.bert.squad.finetuned_accelera.by_spasis': 'bert_qa_spasis_finetuned_squad_accelera', 'en.answer_question.bert.squad.finetuned_squad.by_KFlash': 'bert_qa_kflash_finetuned_squad', 
'en.answer_question.bert.squad.finetuned_squad.by_SebastianS': 'bert_qa_sebastians_finetuned_squad', 'en.answer_question.bert.squad.finetuned_squad.by_clementgyj': 'bert_qa_clementgyj_finetuned_squad', 'en.answer_question.bert.squad.finetuned_squad.by_spasis': 'bert_qa_spasis_finetuned_squad', 'en.answer_question.bert.squad.finetuned_squad_50k.by_clementgyj': 'bert_qa_finetuned_squad_50k', 'en.answer_question.bert.squad.finetuned_squad_accelera.by_KFlash': 'bert_qa_kflash_finetuned_squad_accelera', 'en.answer_question.bert.squad.finetuned_squad_accelera.by_SebastianS': 'bert_qa_sebastians_finetuned_squad_accelera', 'en.answer_question.bert.squad.mini_finetuned': 'bert_qa_mini_finetuned_squad', 'en.answer_question.bert.squad.tiny': 'bert_qa_tinybert_general_4l_312d_squad', 'en.answer_question.bert.squad.tiny_finetuned': 'bert_qa_tiny_wrslb_finetuned_squadv1', 'en.answer_question.bert.squad.uncased_base_finetuned': 'bert_qa_base_uncased_squad_v1.0_finetuned', 'en.answer_question.bert.squad.uncased_finetuned': 'bert_qa_uncased_finetuned_squad_indonesian', 'en.answer_question.bert.squad.uncased_mini_lm_mini': 'bert_qa_minilm_l12_h384_uncased_squad', 'en.answer_question.bert.squad.uncased_seed_0_base_1024d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_1024_finetuned_squad_seed_0', 'en.answer_question.bert.squad.uncased_seed_0_base_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_16_finetuned_squad_seed_0', 'en.answer_question.bert.squad.uncased_seed_10_base_1024d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_1024_finetuned_squad_seed_10', 'en.answer_question.bert.squad.uncased_seed_10_base_128d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_128_finetuned_squad_seed_10', 'en.answer_question.bert.squad.uncased_seed_10_base_256d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_256_finetuned_squad_seed_10', 'en.answer_question.bert.squad.uncased_seed_10_base_32d_finetuned_few_shot': 
'bert_qa_base_uncased_few_shot_k_32_finetuned_squad_seed_10', 'en.answer_question.bert.squad.uncased_seed_10_base_512d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_512_finetuned_squad_seed_10', 'en.answer_question.bert.squad.uncased_seed_10_base_64d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_64_finetuned_squad_seed_10', 'en.answer_question.bert.squad.uncased_seed_10_base_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_16_finetuned_squad_seed_10', 'en.answer_question.bert.squad.uncased_seed_2_base_1024d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_1024_finetuned_squad_seed_2', 'en.answer_question.bert.squad.uncased_seed_2_base_128d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_128_finetuned_squad_seed_2', 'en.answer_question.bert.squad.uncased_seed_2_base_256d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_256_finetuned_squad_seed_2', 'en.answer_question.bert.squad.uncased_seed_2_base_32d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_32_finetuned_squad_seed_2', 'en.answer_question.bert.squad.uncased_seed_2_base_512d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_512_finetuned_squad_seed_2', 'en.answer_question.bert.squad.uncased_seed_2_base_64d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_64_finetuned_squad_seed_2', 'en.answer_question.bert.squad.uncased_seed_2_base_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_16_finetuned_squad_seed_2', 'en.answer_question.bert.squad.uncased_seed_42_base_128d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_128_finetuned_squad_seed_42', 'en.answer_question.bert.squad.uncased_seed_4_base_1024d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_1024_finetuned_squad_seed_4', 'en.answer_question.bert.squad.uncased_seed_4_base_128d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_128_finetuned_squad_seed_4', 'en.answer_question.bert.squad.uncased_seed_4_base_256d_finetuned_few_shot': 
'bert_qa_base_uncased_few_shot_k_256_finetuned_squad_seed_4', 'en.answer_question.bert.squad.uncased_seed_4_base_32d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_32_finetuned_squad_seed_4', 'en.answer_question.bert.squad.uncased_seed_4_base_512d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_512_finetuned_squad_seed_4', 'en.answer_question.bert.squad.uncased_seed_4_base_64d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_64_finetuned_squad_seed_4', 'en.answer_question.bert.squad.uncased_seed_4_base_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_16_finetuned_squad_seed_4', 'en.answer_question.bert.squad.uncased_seed_6_base_1024d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_1024_finetuned_squad_seed_6', 'en.answer_question.bert.squad.uncased_seed_6_base_128d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_128_finetuned_squad_seed_6', 'en.answer_question.bert.squad.uncased_seed_6_base_256d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_256_finetuned_squad_seed_6', 'en.answer_question.bert.squad.uncased_seed_6_base_32d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_32_finetuned_squad_seed_6', 'en.answer_question.bert.squad.uncased_seed_6_base_512d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_512_finetuned_squad_seed_6', 'en.answer_question.bert.squad.uncased_seed_6_base_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_16_finetuned_squad_seed_6', 'en.answer_question.bert.squad.uncased_seed_8_base_1024d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_1024_finetuned_squad_seed_8', 'en.answer_question.bert.squad.uncased_seed_8_base_128d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_128_finetuned_squad_seed_8', 'en.answer_question.bert.squad.uncased_seed_8_base_256d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_256_finetuned_squad_seed_8', 'en.answer_question.bert.squad.uncased_seed_8_base_32d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_32_finetuned_squad_seed_8', 
'en.answer_question.bert.squad.uncased_seed_8_base_512d_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_512_finetuned_squad_seed_8', 'en.answer_question.bert.squad.uncased_seed_8_base_finetuned_few_shot': 'bert_qa_base_uncased_few_shot_k_16_finetuned_squad_seed_8', 'en.answer_question.bert.squadv2.': 'bert_qa_emanuals_squad2.0', 'en.answer_question.bert.squadv2.base': 'bert_qa_unsup_consert_base_squad2.0', 'en.answer_question.bert.squadv2.base_epochs_1': 'bert_qa_rule_based_hier_quadruplet_0.1_epochs_1_shard_1_squad2.0', 'en.answer_question.bert.squadv2.based_hier_quadruplet_epochs_1_shard_1_squad2.by_AnonymousSub': 'bert_qa_rule_based_hier_quadruplet_epochs_1_shard_1_squad2.0', 'en.answer_question.bert.squadv2.based_hier_triplet_0.by_AnonymousSub': 'bert_qa_rule_based_hier_triplet_0.1_epochs_1_shard_1_squad2.0', 'en.answer_question.bert.squadv2.based_hier_triplet_epochs_1_shard_1_kldiv_squad2.by_AnonymousSub': 'bert_qa_rule_based_hier_triplet_epochs_1_shard_1_kldiv_squad2.0', 'en.answer_question.bert.squadv2.based_hier_triplet_epochs_1_shard_1_squad2.by_AnonymousSub': 'bert_qa_rule_based_hier_triplet_epochs_1_shard_1_squad2.0', 'en.answer_question.bert.squadv2.based_only_classfn_epochs_1_shard_1_squad2.by_AnonymousSub': 'bert_qa_rule_based_only_classfn_epochs_1_shard_1_squad2.0', 'en.answer_question.bert.squadv2.based_quadruplet_epochs_1_shard_1_squad2.by_AnonymousSub': 'bert_qa_rule_based_quadruplet_epochs_1_shard_1_squad2.0', 'en.answer_question.bert.squadv2.based_triplet_epochs_1_shard_1_squad2.by_AnonymousSub': 'bert_qa_rule_based_triplet_epochs_1_shard_1_squad2.0', 'en.answer_question.bert.squadv2.by_anonymoussub': 'bert_qa_specter_model_squad2.0', 'en.answer_question.bert.squadv2.cased_v2_base_finetuned': 'bert_qa_base_cased_finetuned_squad_v2', 'en.answer_question.bert.squadv2.large_tiny_768d': 'bert_qa_tinybert_6l_768d_squad2_large_teach', 'en.answer_question.bert.squadv2.large_tiny_768d.by_MichelBartels': 
'bert_qa_tinybert_6l_768d_squad2_large_teacher_dummy', 'en.answer_question.bert.squadv2.uncased_2l_128d_a2a_128d': 'bert_qa_uncased_l_2_h_128_a_2_squad2', 'en.answer_question.bert.squadv2.uncased_base': 'bert_qa_base_uncased_squad2.0', 'en.answer_question.bert.squadv2.uncased_mini_lm_mini_finetuned': 'bert_qa_ahujaniharika95_minilm_uncased_squad2_finetuned_squad', 'en.answer_question.bert.squadv2.uncased_mini_lm_mini_finetuned.by_Renukswamy': 'bert_qa_renukswamy_minilm_uncased_squad2_finetuned_squad', 'en.answer_question.bert.squadv2.uncased_v2': 'bert_qa_multi_uncased_trained_squadv2', 'en.answer_question.bert.squadv2.uncased_v2_base_finetuned': 'bert_qa_base_uncased_squad_v2.0_finetuned', 'en.answer_question.bert.squadv2.v2_mini_finetuned': 'bert_qa_mini_finetuned_squadv2', 'en.answer_question.bert.squadv2.v2_tiny_finetuned': 'bert_qa_tiny_finetuned_squadv2', 'en.answer_question.bert.squadv2_contracts.uncased_v2_base_finetuned': 'bert_qa_base_uncased_contracts_finetuned_on_squadv2', 'en.answer_question.bert.squadv2_cord19.uncased_2l_128d_a2a_128d': 'bert_qa_uncased_l_2_h_128_a_2_cord19_200616_squad2', 'en.answer_question.bert.squadv2_cord19.uncased_6l_128d_a2a_128d': 'bert_qa_uncased_l_6_h_128_a_2_cord19_200616_squad2', 'en.answer_question.bert.squadv2_covid.uncased_2l_128d_a2a_128d': 'bert_qa_uncased_l_2_h_128_a_2_squad2_covid_qna', 'en.answer_question.bert.squadv2_covid_cord19.uncased_2l_128d_a2a_128d': 'bert_qa_uncased_l_2_h_128_a_2_cord19_200616_squad2_covid_qna', 'en.answer_question.bert.squadv2_covid_cord19.uncased_2l_512d_a8a_512d': 'bert_qa_uncased_l_2_h_512_a_8_cord19_200616_squad2_covid_qna', 'en.answer_question.bert.squadv2_covid_cord19.uncased_6l_128d_a2a_128d': 'bert_qa_uncased_l_6_h_128_a_2_cord19_200616_squad2_covid_qna', 'en.answer_question.bert.techqa.': 'bert_qa_consert_techqa', 'en.answer_question.bert.tquad.xtremedistiled_uncased_finetuned_epochs_3': 'bert_qa_xtremedistil_l6_h256_uncased_tquad_finetuned_lr_2e_05_epochs_3', 
'en.answer_question.bert.tquad.xtremedistiled_uncased_finetuned_epochs_6': 'bert_qa_xtremedistil_l6_h256_uncased_tquad_finetuned_lr_2e_05_epochs_6', 'en.answer_question.bert.trivia.base_1024d': 'bert_qa_base_1024_full_trivia', 'en.answer_question.bert.tydiqa.': 'bert_qa_part_2_mbert_model_e1', 'en.answer_question.bert.uncased_base': 'bert_qa_base_uncased_attribute_correction', 'en.answer_question.bert.uncased_base.by_michaelrglass': 'bert_qa_base_uncased_ssp', 'en.answer_question.bert.uncased_base_finetuned': 'bert_qa_base_uncased_pretrain_finetuned_coqa_fal', 'en.answer_question.bert.uncased_base_finetuned.by_alistvt': 'bert_qa_base_uncased_pretrain_finetuned_coqa_falttened', 'en.answer_question.bert.uncased_large': 'bert_qa_large_uncased_ssp', 'en.answer_question.bert.uncased_large_finetuned': 'bert_qa_large_uncased_finetuned_infovqa', 'en.answer_question.bert.uncased_large_finetuned.by_tiennvcs': 'bert_qa_large_uncased_finetuned_vi_infovqa', 'en.answer_question.bert.unsupsim.by_xraychen': 'bert_qa_mqa_unsupsim', 'en.answer_question.bert.v1.lr_2e_5_bs_32_ep_3.by_motiondew': 'bert_qa_set_date_1_lr_2e_5_bs_32_ep_3', 'en.answer_question.bert.v2.lr_2e_5_bs_32_ep_3.by_motiondew': 'bert_qa_set_date_2_lr_2e_5_bs_32_ep_3', 'en.answer_question.bert.v3.lr_2e_5_bs_32_ep_3.by_motiondew': 'bert_qa_set_date_3_lr_2e_5_bs_32_ep_3', 'en.answer_question.bert.v4.lr_2e_5_bs_32_ep_4.by_motiondew': 'bert_qa_set_date_2_lr_2e_5_bs_32_ep_4', 'en.answer_question.bert.v5.lr_2e_5_bs_32_ep_4.by_motiondew': 'bert_qa_set_date_3_lr_2e_5_bs_32_ep_4', 'en.answer_question.bert.v6.lr_3e_5_bs_32_ep_3.by_motiondew': 'bert_qa_set_date_1_lr_3e_5_bs_32_ep_3', 'en.answer_question.bert.v7.lr_3e_5_bs_32_ep_3.by_motiondew': 'bert_qa_set_date_2_lr_3e_5_bs_32_ep_3', 'en.answer_question.bert.v8.lr_3e_5_bs_32_ep_3.by_motiondew': 'bert_qa_set_date_3_lr_3e_5_bs_32_ep_3', 'en.answer_question.bert.vi_infovqa.base_uncased.by_tiennvcs': 'bert_qa_bert_base_uncased_finetuned_vi_infovqa', 
'en.answer_question.bert.xtremedistiled_uncased_lr_2e_05_epochs_3.by_husnu': 'bert_qa_xtremedistil_l6_h256_uncased_finetuned_lr_2e_05_epochs_3', 'en.answer_question.bert.xtremedistiled_uncased_lr_2e_05_epochs_6.by_husnu': 'bert_qa_xtremedistil_l6_h256_uncased_finetuned_lr_2e_05_epochs_6', 'en.answer_question.bert.zero_shot': 'bert_qa_fewrel_zero_shot', 'en.answer_question.bert.zero_shot.by_fractalego': 'bert_qa_fewrel_zero_shot', 'en.answer_question.bert.zero_shot.by_krinal214': 'bert_qa_zero_shot', 'en.answer_question.bio_clinical.bert': 'bert_qa_sagemaker_BioclinicalBERT_ADR', 'en.answer_question.bio_medical.bert.base': 'bert_qa_biomedical_slot_filling_reader_base', 'en.answer_question.bio_medical.bert.large': 'bert_qa_biomedical_slot_filling_reader_large', 'en.answer_question.biobert': 'bert_qa_biobert_bioasq', 'en.answer_question.biobert.bio_medical.': 'bert_qa_biobert_v1.1_biomedicalquestionanswering', 'en.answer_question.biobert.squad.cased_large': 'bert_qa_biobert_large_cased_v1.1_squad', 'en.answer_question.chaii.bert.base_cased': 'bert_qa_bert_base_cased_chaii', 'en.answer_question.chaii.bert.cased': 'bert_qa_bert_multi_cased_finetuned_chaii', 'en.answer_question.chaii.bert.large_uncased_uncased_whole_word_masking.by_SauravMaheshkar': 'bert_qa_bert_large_uncased_whole_word_masking_chaii', 'en.answer_question.chaii.bert.large_uncased_uncased_whole_word_masking_finetuned.by_SauravMaheshkar': 'bert_qa_bert_large_uncased_whole_word_masking_finetuned_chaii', 'en.answer_question.chaii.bert.multilingual_base_cased': 'bert_qa_bert_base_multilingual_cased_finetuned_chaii', 'en.answer_question.chaii.bert.uncased': 'bert_qa_bert_multi_uncased_finetuned_chaii', 'en.answer_question.chaii.distil_bert': 'distilbert_qa_multi_finetuned_for_xqua_on_chaii', 'en.answer_question.chaii.distil_bert.base_cased': 'distilbert_qa_base_cased_distilled_chaii', 'en.answer_question.chaii.distil_bert.base_uncased': 'distilbert_qa_base_uncased_distilled_chaii', 
'en.answer_question.chaii.electra.base': 'electra_qa_base_chaii', 'en.answer_question.chaii.roberta.base': 'roberta_qa_roberta_base_chaii', 'en.answer_question.chaii.xlm_roberta.base': 'xlm_roberta_qa_xlm_roberta_base_finetuned_chaii', 'en.answer_question.chaii.xlm_roberta.base.by_SauravMaheshkar': 'xlm_roberta_qa_xlm_roberta_base_chaii', 'en.answer_question.chaii.xlm_roberta.base.by_tyqiangz': 'xlm_roberta_qa_xlm_roberta_base_finetuned_chaii', 'en.answer_question.chaii.xlm_roberta.large.by_SauravMaheshkar': 'xlm_roberta_qa_xlm_roberta_large_chaii', 'en.answer_question.chaii.xlm_roberta.large_multi.by_SauravMaheshkar': 'xlm_roberta_qa_xlm_multi_roberta_large_chaii', 'en.answer_question.clinical.distil_bert': 'distilbert_qa_BERT_ClinicalQA', 'en.answer_question.conll.distil_bert.base_uncased': 'distilbert_qa_base_uncased_qa_with_ner', 'en.answer_question.cord19.bert.by_JAlexis': 'bert_qa_Bertv1_fine', 'en.answer_question.cord19.bert.small': 'bert_qa_bert_small_cord19qa', 'en.answer_question.cord19.prueba_bert.by_JAlexis': 'bert_qa_PruebaBert', 'en.answer_question.covid.distil_bert.a.by_rahulkuruvilla': 'distilbert_qa_COVID_DistilBERTa', 'en.answer_question.covid.distil_bert.b.by_rahulkuruvilla': 'distilbert_qa_COVID_DistilBERTb', 'en.answer_question.covid.distil_bert.c.by_rahulkuruvilla': 'distilbert_qa_COVID_DistilBERTc', 'en.answer_question.covid.longformer': 'longformer_qa_covid', 'en.answer_question.covid_bert.a.by_rahulkuruvilla': 'bert_qa_COVID_BERTa', 'en.answer_question.covid_bert.b.by_rahulkuruvilla': 'bert_qa_COVID_BERTb', 'en.answer_question.covid_bert.c.by_rahulkuruvilla': 'bert_qa_COVID_BERTc', 'en.answer_question.cuad.roberta.base': 'roberta_qa_marshmellow77_roberta_base_cuad', 'en.answer_question.cuad.roberta.base.by_Gam': 'roberta_qa_roberta_base_finetuned_cuad', 'en.answer_question.cuad.roberta.base.by_Rakib': 'roberta_qa_roberta_base_on_cuad', 'en.answer_question.cuad.roberta.base.by_akdeniz27': 'roberta_qa_akdeniz27_roberta_base_cuad', 
'en.answer_question.cuad.roberta.base.by_marshmellow77': 'roberta_qa_marshmellow77_roberta_base_cuad', 'en.answer_question.cuad.roberta.large': 'roberta_qa_roberta_large_cuad', 'en.answer_question.cuad_gam.roberta.base.by_Gam': 'roberta_qa_roberta_base_finetuned_cuad_gam', 'en.answer_question.distil_bert': 'distilbert_qa_projectmodel_bert', 'en.answer_question.distil_bert.base': 'distilbert_qa_robustqa_baseline_02', 'en.answer_question.distil_bert.base.by_leemii18': 'distilbert_qa_robustqa_baseline_02', 'en.answer_question.distil_bert.base.by_minhdang241': 'distilbert_qa_robustqa_baseline_01', 'en.answer_question.distil_bert.base_cased': 'distilbert_qa_bert_base_cased_finetuned_log_parser_winlogbeat', 'en.answer_question.distil_bert.base_cased.by_Slavka': 'distilbert_qa_bert_base_cased_finetuned_log_parser_winlogbeat', 'en.answer_question.distil_bert.base_cased.by_adamlin': 'distilbert_qa_base_cased_sgd_qa_step5000', 'en.answer_question.distil_bert.base_config1.by_nlpunibo': 'distilbert_qa_base_config1', 'en.answer_question.distil_bert.base_config2.by_nlpunibo': 'distilbert_qa_base_config2', 'en.answer_question.distil_bert.base_config3.by_nlpunibo': 'distilbert_qa_base_config3', 'en.answer_question.distil_bert.base_uncased': 'distilbert_qa_base_uncased_finetuned_advers', 'en.answer_question.distil_bert.base_uncased.by_T-qualizer': 'distilbert_qa_base_uncased_finetuned_advers', 'en.answer_question.distil_bert.base_uncased.by_charlieoneill': 'distilbert_qa_base_uncased_gradient_clinic', 'en.answer_question.distil_bert.base_uncased.by_datarpit': 'distilbert_qa_base_uncased_finetuned_natural_questions', 'en.answer_question.distil_bert.base_uncased.by_machine2049': 'distilbert_qa_base_uncased_finetuned_duorc_', 'en.answer_question.distil_bert.base_uncased.by_tiennvcs': 'distilbert_qa_base_uncased_finetuned_infovqa', 'en.answer_question.distil_bert.by_Ifenna': 'distilbert_qa_dbert_3epoch', 'en.answer_question.distil_bert.by_LucasS': 'distilbert_qa_distilBertABSA', 
'en.answer_question.distil_bert.by_Sarmad': 'distilbert_qa_projectmodel_bert', 'en.answer_question.distil_bert.by_Sounak': 'distilbert_qa_finetuned', 'en.answer_question.distil_bert.by_ajaypyatha': 'distilbert_qa_sdsqna', 'en.answer_question.distil_bert.by_alinemati': 'distilbert_qa_BERT', 'en.answer_question.distil_bert.by_keras-io': 'distilbert_qa_transformers_qa', 'en.answer_question.distil_bert.by_minhdang241': 'distilbert_qa_robustqa_tapt', 'en.answer_question.distil_bert.by_pakupoko': 'distilbert_qa_bizlin_distil_model', 'en.answer_question.distil_bert.by_poom-sci': 'distilbert_qa_qa', 'en.answer_question.distil_bert.custom.by_aszidon': 'distilbert_qa_custom', 'en.answer_question.distil_bert.custom3.by_aszidon': 'distilbert_qa_custom3', 'en.answer_question.distil_bert.custom4.by_aszidon': 'distilbert_qa_custom4', 'en.answer_question.distil_bert.custom5.by_aszidon': 'distilbert_qa_custom5', 'en.answer_question.distil_bert.flat_n_max.by_mcurmei': 'distilbert_qa_flat_N_max', 'en.answer_question.distil_bert.log_parser.by_Slavka': 'distilbert_qa_distil_bert_finetuned_log_parser_1', 'en.answer_question.distil_bert.log_parser_winlogbeat.by_Slavka': 'distilbert_qa_distil_bert_finetuned_log_parser_winlogbeat', 'en.answer_question.distil_bert.single_label_n_max.by_mcurmei': 'distilbert_qa_single_label_N_max', 'en.answer_question.distil_bert.single_label_n_max_long_training.by_mcurmei': 'distilbert_qa_single_label_N_max_long_training', 'en.answer_question.distil_bert.unique_n_max.by_mcurmei': 'distilbert_qa_unique_N_max', 'en.answer_question.electra': 'electra_qa_TestQA2', 'en.answer_question.electra.by_Andranik': 'electra_qa_TestQA2', 'en.answer_question.electra.by_carlosserquen': 'electra_qa_elctrafp', 'en.answer_question.electra.by_rowan1224': 'electra_qa_slp', 'en.answer_question.electra.finetuning_1': 'electra_qa_DSPFirst_Finetuning_1', 'en.answer_question.electra.finetuning_2': 'electra_qa_DSPFirst_Finetuning_2', 'en.answer_question.electra.finetuning_3': 
'electra_qa_DSPFirst_Finetuning_3', 'en.answer_question.electra.finetuning_4': 'electra_qa_DSPFirst_Finetuning_4', 'en.answer_question.electra.finetuning_5': 'electra_qa_DSPFirst_Finetuning_5', 'en.answer_question.klue.bert': 'bert_qa_Klue_CommonSense_model', 'en.answer_question.klue.bert.multilingual_base_cased': 'bert_qa_bert_base_multilingual_cased_finetuned_klue', 'en.answer_question.klue.xlm_roberta.base': 'xlm_roberta_qa_klue_mrc_roberta_base', 'en.answer_question.korquad.bert.multilingual_base_cased': 'bert_qa_bert_base_multilingual_cased_korquad_v1', 'en.answer_question.korquad.bert.multilingual_base_cased.by_eliza-dukim': 'bert_qa_bert_base_multilingual_cased_korquad_v1', 'en.answer_question.korquad.bert.multilingual_base_cased.by_sangrimlee': 'bert_qa_bert_base_multilingual_cased_korquad', 'en.answer_question.korquad.xlm_roberta.large': 'xlm_roberta_qa_xlm_roberta_large_korquad_mask', 'en.answer_question.link_bert.squad.base_finetuned': 'bert_qa_linkbert_base_finetuned_squad', 'en.answer_question.longformer': 'longformer_qa_ponmari', 'en.answer_question.longformer.by_Nomi97': 'longformer_qa_Chatbot', 'en.answer_question.longformer.by_manishiitg': 'longformer_qa_recruit', 'en.answer_question.longformer.by_ponmari': 'longformer_qa_ponmari', 'en.answer_question.longformer.large': 'longformer_qa_recruit_large', 'en.answer_question.longformer.v2': 'longformer_qa_recruit_v2', 'en.answer_question.mitmovie_squad.roberta.by_thatdramebaazguy': 'roberta_qa_movie_roberta_MITmovie_squad', 'en.answer_question.mlqa.bert.base_cased': 'bert_qa_bert_base_spanish_wwm_cased_finetuned_qa_mlqa', 'en.answer_question.mlqa.bert.base_uncased': 'bert_qa_bert_base_spanish_wwm_uncased_finetuned_qa_mlqa', 'en.answer_question.movie_squad.roberta.base': 'roberta_qa_roberta_base_MITmovie_squad', 'en.answer_question.movie_squad.roberta.by_thatdramebaazguy': 'roberta_qa_movie_roberta_squad', 'en.answer_question.movie_squadv2.bert.large_uncased': 
'bert_qa_bert_large_uncased_whole_word_masking_squad2_with_ner_mit_movie_with_neg_with_repeat', 'en.answer_question.mqa_cls.bert.by_xraychen': 'bert_qa_mqa_cls', 'en.answer_question.multi_lingual_bert': 'bert_qa_mBERT_all_ty_SQen_SQ20_1', 'en.answer_question.multi_lingual_bert.by_horsbug98': 'bert_qa_Part_1_mBERT_Model_E2', 'en.answer_question.multi_lingual_bert.by_krinal214': 'bert_qa_mBERT_all_ty_SQen_SQ20_1', 'en.answer_question.news.bert': 'bert_qa_newsqa_bert_el_Danastos', 'en.answer_question.news.bert.base_uncased': 'bert_qa_unqover_bert_base_uncased_newsqa', 'en.answer_question.news.bert.base_uncased.by_mirbostani': 'bert_qa_bert_base_uncased_finetuned_newsqa', 'en.answer_question.news.bert.base_uncased.by_tli8hf': 'bert_qa_unqover_bert_base_uncased_newsqa', 'en.answer_question.news.bert.by_AnonymousSub': 'bert_qa_news_pretrain_bert_FT_newsqa', 'en.answer_question.news.bert.by_Danastos': 'bert_qa_newsqa_bert_el_Danastos', 'en.answer_question.news.bert.fpdm_ft.by_AnonymousSub': 'bert_qa_fpdm_bert_FT_newsqa', 'en.answer_question.news.bert.fpdm_ft_new.by_AnonymousSub': 'bert_qa_fpdm_bert_FT_new_newsqa', 'en.answer_question.news.bert.fpdm_hier_ft.by_AnonymousSub': 'bert_qa_fpdm_hier_bert_FT_newsqa', 'en.answer_question.news.bert.fpdm_hier_ft_by_AnonymousSub': 'bert_qa_fpdm_hier_bert_FT_new_newsqa', 'en.answer_question.news.bert.ft.by_AnonymousSub': 'bert_qa_bert_FT_newsqa', 'en.answer_question.news.bert.ft_new.by_AnonymousSub': 'bert_qa_bert_FT_new_newsqa', 'en.answer_question.news.bert.new.by_AnonymousSub': 'bert_qa_news_pretrain_bert_FT_new_newsqa', 'en.answer_question.news.bert.qa_fpdm_triplet_ft.by_AnonymousSub': 'bert_qa_fpdm_triplet_bert_FT_newsqa', 'en.answer_question.news.bert.qa_fpdm_triplet_ft_new.by_AnonymousSub': 'bert_qa_fpdm_triplet_bert_FT_new_newsqa', 'en.answer_question.news.distil_bert.base_uncased': 'distilbert_qa_unqover_base_uncased_newsqa', 'en.answer_question.news.roberta.base': 'roberta_qa_unqover_roberta_base_newsqa', 
'en.answer_question.news.roberta.large': 'roberta_qa_unqover_roberta_large_newsqa', 'en.answer_question.news.roberta.qa_fpdm_hier_roberta_ft_new_newsqa.by_AnonymousSub': 'roberta_qa_fpdm_hier_roberta_FT_new_newsqa', 'en.answer_question.news.roberta.qa_fpdm_hier_roberta_ft_newsqa.by_AnonymousSub': 'roberta_qa_fpdm_hier_roberta_FT_newsqa', 'en.answer_question.news.roberta.qa_fpdm_roberta_ft_newsqa.by_AnonymousSub': 'roberta_qa_fpdm_roberta_FT_newsqa', 'en.answer_question.news.roberta.qa_fpdm_triplet_roberta_ft_new_newsqa.by_AnonymousSub': 'roberta_qa_fpdm_triplet_roberta_FT_new_newsqa', 'en.answer_question.news.roberta.qa_fpdm_triplet_roberta_ft_newsqa.by_AnonymousSub': 'roberta_qa_fpdm_triplet_roberta_FT_newsqa', 'en.answer_question.news.roberta.qa_ft.by_AnonymousSub': 'roberta_qa_news_pretrain_roberta_FT_newsqa', 'en.answer_question.news.roberta.qa_ft_new.by_AnonymousSub': 'roberta_qa_news_pretrain_roberta_FT_new_newsqa', 'en.answer_question.news.roberta.qa_roberta_ft_new_newsqa.by_AnonymousSub': 'roberta_qa_roberta_FT_new_newsqa', 'en.answer_question.news.roberta.qa_roberta_ft_newsqa.by_AnonymousSub': 'roberta_qa_roberta_FT_newsqa', 'en.answer_question.output_files.bert.by_sunitha': 'bert_qa_output_files', 'en.answer_question.pubmed.bert.base_uncased': 'bert_qa_Sotireas_BiomedNLP_PubMedBERT_base_uncased_abstract_fulltext_ContaminationQAmodel_PubmedBERT', 'en.answer_question.pubmed.bert.base_uncased.by_Shushant': 'bert_qa_Shushant_BiomedNLP_PubMedBERT_base_uncased_abstract_fulltext_ContaminationQAmodel_PubmedBERT', 'en.answer_question.pubmed.bert.base_uncased.by_Sotireas': 'bert_qa_Sotireas_BiomedNLP_PubMedBERT_base_uncased_abstract_fulltext_ContaminationQAmodel_PubmedBERT', 'en.answer_question.roberta': 'roberta_qa_robertaBaseABSA', 'en.answer_question.roberta.756523213.by_AlirezaBaneshi': 'roberta_qa_autotrain_test2_756523213', 'en.answer_question.roberta.756523214.by_AlirezaBaneshi': 'roberta_qa_autotrain_test2_756523214', 'en.answer_question.roberta.augmented': 
'roberta_qa_roberta_unaugmentedv3', 'en.answer_question.roberta.base': 'roberta_qa_roberta_base_custom_QA', 'en.answer_question.roberta.base.by_123tarunanand': 'roberta_qa_roberta_base_finetuned', 'en.answer_question.roberta.base.by_eAsyle': 'roberta_qa_roberta_base_custom_QA', 'en.answer_question.roberta.base.by_emr-se-miniproject': 'roberta_qa_roberta_base_emr', 'en.answer_question.roberta.base.by_nlpconnect': 'roberta_qa_dpr_nq_reader_roberta_base', 'en.answer_question.roberta.base.by_rsvp-ai': 'roberta_qa_bertserini_roberta_base', 'en.answer_question.roberta.base_v2': 'roberta_qa_dpr_nq_reader_roberta_base_v2', 'en.answer_question.roberta.by_AmazonScience': 'roberta_qa_qanlu', 'en.answer_question.roberta.by_Andranik': 'roberta_qa_TestQaV1', 'en.answer_question.roberta.by_AyushPJ': 'roberta_qa_ai_club_inductions_21_nlp_roBERTa', 'en.answer_question.roberta.by_Beri': 'roberta_qa_legal_qa', 'en.answer_question.roberta.by_CNT-UPenn': 'roberta_qa_RoBERTa_for_seizureFrequency_QA', 'en.answer_question.roberta.by_Ching': 'roberta_qa_negation_detector', 'en.answer_question.roberta.by_LucasS': 'roberta_qa_robertaBaseABSA', 'en.answer_question.roberta.by_Mr-Wick': 'roberta_qa_Roberta', 'en.answer_question.roberta.by_Nakul24': 'roberta_qa_RoBERTa_emotion_extraction', 'en.answer_question.roberta.by_amazonscience': 'roberta_qa_nlu', 'en.answer_question.roberta.by_aravind-812': 'roberta_qa_roberta_train_json', 'en.answer_question.roberta.by_arjunth2001': 'roberta_qa_priv_qna', 'en.answer_question.roberta.by_billfrench': 'roberta_qa_cyberlandr_door', 'en.answer_question.roberta.by_nlpunibo': 'roberta_qa_nlpunibo_roberta', 'en.answer_question.roberta.by_pierrerappolt': 'roberta_qa_cart', 'en.answer_question.roberta.by_shmuelamar': 'roberta_qa_REQA_RoBERTa', 'en.answer_question.roberta.by_stevemobs': 'roberta_qa_quales_iberlef', 'en.answer_question.roberta.by_sunitha': 'roberta_qa_roberta_customds_finetune', 'en.answer_question.roberta.by_veronica320': 
'roberta_qa_QA_for_Event_Extraction', 'en.answer_question.roberta.by_vesteinn': 'roberta_qa_IceBERT_QA', 'en.answer_question.roberta.by_yirenl2': 'roberta_qa_plm', 'en.answer_question.roberta.by_z-uo': 'roberta_qa_roberta_qasper', 'en.answer_question.roberta.by_z_uo': 'roberta_qa_sper', 'en.answer_question.roberta.carbonblog': 'roberta_qa_carbonblog', 'en.answer_question.roberta.ch_tuned.by_Gantenbein': 'roberta_qa_ADDI_CH_RoBERTa', 'en.answer_question.roberta.cuad.base': 'roberta_qa_akdeniz27_base_cuad', 'en.answer_question.roberta.cuad.base.by_Rakib': 'roberta_qa_base_on_cuad', 'en.answer_question.roberta.cuad.base.by_marshmellow77': 'roberta_qa_marshmellow77_base_cuad', 'en.answer_question.roberta.cuad.base_finetuned': 'roberta_qa_base_cuad_finetuned', 'en.answer_question.roberta.cuad.large': 'roberta_qa_large_cuad', 'en.answer_question.roberta.cv_custom_ds.by_sunitha': 'roberta_qa_CV_Custom_DS', 'en.answer_question.roberta.cv_merge_ds.by_sunitha': 'roberta_qa_CV_Merge_DS', 'en.answer_question.roberta.de_tuned.by_Gantenbein': 'roberta_qa_ADDI_DE_RoBERTa', 'en.answer_question.roberta.eda_and_parav3.by_comacrae': 'roberta_qa_roberta_eda_and_parav3', 'en.answer_question.roberta.edav3.by_comacrae': 'roberta_qa_roberta_edav3', 'en.answer_question.roberta.fi_tuned.by_Gantenbein': 'roberta_qa_ADDI_FI_RoBERTa', 'en.answer_question.roberta.fr_tuned.by_Gantenbein': 'roberta_qa_ADDI_FR_RoBERTa', 'en.answer_question.roberta.it_tuned.by_Gantenbein': 'roberta_qa_ADDI_IT_RoBERTa', 'en.answer_question.roberta.large_init_large_seed_0.by_anas-awadalla': 'roberta_qa_roberta_large_initialization_seed_0', 'en.answer_question.roberta.large_seed_0.by_anas-awadalla': 'roberta_qa_roberta_large_data_seed_0', 'en.answer_question.roberta.large_seed_4': 'roberta_qa_roberta_large_data_seed_4', 'en.answer_question.roberta.paraphrasev3.by_comacrae': 'roberta_qa_roberta_paraphrasev3', 'en.answer_question.roberta.squad.base': 'roberta_qa_base_squad', 
'en.answer_question.roberta.squad.base.by_csarron': 'roberta_qa_base_squad_v1', 'en.answer_question.roberta.squad.base_finetuned': 'roberta_qa_base_1b_1_finetuned_squadv1', 'en.answer_question.roberta.squad_movie.': 'roberta_qa_movie_mitmovie_squad', 'en.answer_question.roberta.squad_movie.base': 'roberta_qa_base_mitmovie_squad', 'en.answer_question.roberta.squad_movie.by_thatdramebaazguy': 'roberta_qa_movie_squad', 'en.answer_question.roberta.squadv2.base': 'roberta_qa_autoevaluate_base_squad2', 'en.answer_question.roberta.squadv2.base.by_deepset': 'roberta_qa_deepset_base_squad2', 'en.answer_question.roberta.squadv2.base.by_navteca': 'roberta_qa_navteca_base_squad2', 'en.answer_question.roberta.squadv2.base.by_ydshieh': 'roberta_qa_ydshieh_base_squad2', 'en.answer_question.roberta.squadv2.distilled_base': 'roberta_qa_base_squad2_distilled', 'en.answer_question.roberta.squadv2.large': 'roberta_qa_deepset_large_squad2', 'en.answer_question.roberta.squadv2.large.by_navteca': 'roberta_qa_navteca_large_squad2', 'en.answer_question.roberta.squadv2.large_finetuned': 'roberta_qa_large_finetuned_squad2', 'en.answer_question.roberta.squadv2.tiny': 'roberta_qa_tiny_squad2', 'en.answer_question.roberta.squadv2.v2_base': 'roberta_qa_base_squad_v2', 'en.answer_question.roberta.squadv2.v2_base_finetuned': 'roberta_qa_base_1b_1_finetuned_squadv2', 'en.answer_question.roberta.synqa.large': 'roberta_qa_large_syn', 'en.answer_question.roberta.synqa.large.by_mbartolo': 'roberta_qa_large_syn_ext', 'en.answer_question.roberta.techqa_cline.by_AnonymousSub': 'roberta_qa_cline_techqa', 'en.answer_question.roberta.techqa_cline_emanuals.by_AnonymousSub': 'roberta_qa_cline_emanuals_techqa', 'en.answer_question.roberta.techqa_declutr.by_AnonymousSub': 'roberta_qa_declutr_techqa', 'en.answer_question.roberta.techqa_declutr_emanuals.by_AnonymousSub': 'roberta_qa_declutr_emanuals_techqa', 'en.answer_question.roberta.testabsa.by_eAsyle': 'roberta_qa_testABSA', 
'en.answer_question.roberta.testabsa3.by_eAsyle': 'roberta_qa_testABSA3', 'en.answer_question.roberta.tiny_6l_768d': 'roberta_qa_tiny_6l_768d', 'en.answer_question.roberta.tiny_768d': 'roberta_qa_tinyroberta_6l_768d', 'en.answer_question.roberta.unaugv3.by_comacrae': 'roberta_qa_roberta_unaugv3', 'en.answer_question.roberta_absa': 'roberta_qa_robertaABSA', 'en.answer_question.scibert.scibert.': 'bert_qa_scibert_coqa', 'en.answer_question.scibert.scibert.v2': 'bert_qa_finetune_scibert_v2', 'en.answer_question.scibert.v2': 'bert_qa_nolog_SciBert_v2', 'en.answer_question.span_bert': 'bert_qa_Spanbert_emotion_extraction', 'en.answer_question.span_bert.by_Nakul24': 'bert_qa_Spanbert_emotion_extraction', 'en.answer_question.span_bert.by_manishiitg': 'bert_qa_spanbert_recruit_qa', 'en.answer_question.span_bert.large': 'bert_qa_spanbert_large_recruit_qa', 'en.answer_question.span_bert.squad.base_finetuned': 'bert_qa_spanbert_base_finetuned_squad_r3f', 'en.answer_question.span_bert.squad.cased_seed_0_base_128d_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_128_finetuned_squad_seed_0', 'en.answer_question.span_bert.squad.cased_seed_0_base_256d_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_256_finetuned_squad_seed_0', 'en.answer_question.span_bert.squad.cased_seed_0_base_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_16_finetuned_squad_seed_0', 'en.answer_question.span_bert.squad.cased_seed_10_base_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_16_finetuned_squad_seed_10', 'en.answer_question.span_bert.squad.cased_seed_2_base_256d_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_256_finetuned_squad_seed_2', 'en.answer_question.span_bert.squad.cased_seed_2_base_512d_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_512_finetuned_squad_seed_2', 'en.answer_question.span_bert.squad.cased_seed_2_base_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_16_finetuned_squad_seed_2', 
'en.answer_question.span_bert.squad.cased_seed_42_base_128d_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_128_finetuned_squad_seed_42', 'en.answer_question.span_bert.squad.cased_seed_4_base_256d_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_256_finetuned_squad_seed_4', 'en.answer_question.span_bert.squad.cased_seed_4_base_32d_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_32_finetuned_squad_seed_4', 'en.answer_question.span_bert.squad.cased_seed_4_base_512d_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_512_finetuned_squad_seed_4', 'en.answer_question.span_bert.squad.cased_seed_4_base_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_16_finetuned_squad_seed_4', 'en.answer_question.span_bert.squad.cased_seed_6_base_256d_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_256_finetuned_squad_seed_6', 'en.answer_question.span_bert.squad.cased_seed_6_base_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_16_finetuned_squad_seed_6', 'en.answer_question.span_bert.squad.cased_seed_8_base_256d_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_256_finetuned_squad_seed_8', 'en.answer_question.span_bert.squad.cased_seed_8_base_32d_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_32_finetuned_squad_seed_8', 'en.answer_question.span_bert.squad.cased_seed_8_base_64d_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_64_finetuned_squad_seed_8', 'en.answer_question.span_bert.squad.cased_seed_8_base_finetuned_few_shot': 'bert_qa_spanbert_base_cased_few_shot_k_16_finetuned_squad_seed_8', 'en.answer_question.sqac.bert.base_cased': 'bert_qa_bert_base_spanish_wwm_cased_finetuned_qa_sqac', 'en.answer_question.sqac.bert.base_uncased': 'bert_qa_bert_base_spanish_wwm_uncased_finetuned_qa_sqac', 'en.answer_question.squad.albert': 'albert_qa_squad_2.0', 'en.answer_question.squad.albert.base_v2': 'albert_qa_base_v2_squad', 'en.answer_question.squad.albert.by_SS8': 
'albert_qa_squad_2.0', 'en.answer_question.squad.albert.by_rowan1224': 'albert_qa_squad_slp', 'en.answer_question.squad.albert.xl': 'albert_qa_xlarge_finetuned_squad', 'en.answer_question.squad.albert.xxl': 'albert_qa_xxlarge_finetuned_squad', 'en.answer_question.squad.bert': 'bert_qa_neuralmagic_bert_squad_12layer_0sparse', 'en.answer_question.squad.bert.accelerate.by_KevinChoi': 'bert_qa_KevinChoi_bert_finetuned_squad_accelerate', 'en.answer_question.squad.bert.accelerate.by_huggingface-course': 'bert_qa_huggingface_course_bert_finetuned_squad_accelerate', 'en.answer_question.squad.bert.accelerate.by_peterhsu': 'bert_qa_peterhsu_bert_finetuned_squad_accelerate', 'en.answer_question.squad.bert.accelerate.by_youngjae': 'bert_qa_youngjae_bert_finetuned_squad_accelerate', 'en.answer_question.squad.bert.augmented': 'bert_qa_augmented_Squad_Translated', 'en.answer_question.squad.bert.base': 'bert_qa_bert_mini_wrslb_finetuned_squadv1', 'en.answer_question.squad.bert.base.by_mrm8488': 'bert_qa_bert_mini_wrslb_finetuned_squadv1', 'en.answer_question.squad.bert.base.by_rsvp-ai': 'bert_qa_bertserini_bert_base_squad', 'en.answer_question.squad.bert.base.by_vuiseng9': 'bert_qa_bert_base_squadv1', 'en.answer_question.squad.bert.base.by_xraychen': 'bert_qa_squad_baseline', 'en.answer_question.squad.bert.base.by_zhufy': 'bert_qa_squad_en_bert_base', 'en.answer_question.squad.bert.base_cased': 'bert_qa_andresestevez_bert_base_cased_finetuned_squad', 'en.answer_question.squad.bert.base_cased.by_KB': 'bert_qa_bert_base_swedish_cased_squad_experimental', 'en.answer_question.squad.bert.base_cased.by_Seongkyu': 'bert_qa_Seongkyu_bert_base_cased_finetuned_squad', 'en.answer_question.squad.bert.base_cased.by_SreyanG-NVIDIA': 'bert_qa_SreyanG_NVIDIA_bert_base_cased_finetuned_squad', 'en.answer_question.squad.bert.base_cased.by_andresestevez': 'bert_qa_andresestevez_bert_base_cased_finetuned_squad', 'en.answer_question.squad.bert.base_cased.by_batterydata': 
'bert_qa_bert_base_cased_squad_v1', 'en.answer_question.squad.bert.base_cased.by_ncduy': 'bert_qa_bert_base_cased_finetuned_squad_test', 'en.answer_question.squad.bert.base_uncased': 'bert_qa_SreyanG_NVIDIA_bert_base_uncased_finetuned_squad', 'en.answer_question.squad.bert.base_uncased.1.1_block_sparse_0.32_v1.by_madlag': 'bert_qa_bert_base_uncased_squad1.1_block_sparse_0.32_v1', 'en.answer_question.squad.bert.base_uncased.by_HomayounSadri': 'bert_qa_HomayounSadri_bert_base_uncased_finetuned_squad', 'en.answer_question.squad.bert.base_uncased.by_Intel': 'bert_qa_bert_base_uncased_squadv1.1_sparse_80_1x4_block_pruneofa', 'en.answer_question.squad.bert.base_uncased.by_SreyanG-NVIDIA': 'bert_qa_SreyanG_NVIDIA_bert_base_uncased_finetuned_squad', 'en.answer_question.squad.bert.base_uncased.by_SupriyaArun': 'bert_qa_SupriyaArun_bert_base_uncased_finetuned_squad', 'en.answer_question.squad.bert.base_uncased.by_Tianle': 'bert_qa_Tianle_bert_base_uncased_finetuned_squad', 'en.answer_question.squad.bert.base_uncased.by_bdickson': 'bert_qa_bdickson_bert_base_uncased_finetuned_squad', 'en.answer_question.squad.bert.base_uncased.by_csarron': 'bert_qa_csarron_bert_base_uncased_squad_v1', 'en.answer_question.squad.bert.base_uncased.by_jgammack': 'bert_qa_MTL_bert_base_uncased_ww_squad', 'en.answer_question.squad.bert.base_uncased.by_jimypbr': 'bert_qa_jimypbr_bert_base_uncased_squad', 'en.answer_question.squad.bert.base_uncased.by_kaporter': 'bert_qa_kaporter_bert_base_uncased_finetuned_squad', 'en.answer_question.squad.bert.base_uncased.by_lewtun': 'bert_qa_bert_base_uncased_finetuned_squad_v1', 'en.answer_question.squad.bert.base_uncased.by_madlag': 'bert_qa_bert_base_uncased_squad_v1_sparse0.25', 'en.answer_question.squad.bert.base_uncased.by_srmukundb': 'bert_qa_srmukundb_bert_base_uncased_finetuned_squad', 'en.answer_question.squad.bert.base_uncased.by_tli8hf': 'bert_qa_unqover_bert_base_uncased_squad', 'en.answer_question.squad.bert.base_uncased.by_victoraavila': 
'bert_qa_victoraavila_bert_base_uncased_finetuned_squad', 'en.answer_question.squad.bert.base_uncased.by_vuiseng9': 'bert_qa_vuiseng9_bert_base_uncased_squad', 'en.answer_question.squad.bert.base_uncased.x1.16_f88.1_d8_unstruct.by_madlag': 'bert_qa_bert_base_uncased_squadv1_x1.16_f88.1_d8_unstruct_v1', 'en.answer_question.squad.bert.base_uncased_1024d_seed_42': 'bert_qa_bert_base_uncased_few_shot_k_1024_finetuned_squad_seed_42', 'en.answer_question.squad.bert.base_uncased_128d_seed_0': 'bert_qa_bert_base_uncased_few_shot_k_128_finetuned_squad_seed_0', 'en.answer_question.squad.bert.base_uncased_1_block_sparse_0.13_v1.by_madlag': 'bert_qa_bert_base_uncased_squadv1_x1.84_f88.7_d36_hybrid_filled_v1', 'en.answer_question.squad.bert.base_uncased_1_block_sparse_0.20_v1.by_madlag': 'bert_qa_bert_base_uncased_squad1.1_block_sparse_0.13_v1', 'en.answer_question.squad.bert.base_uncased_256d_seed_0': 'bert_qa_bert_base_uncased_few_shot_k_256_finetuned_squad_seed_0', 'en.answer_question.squad.bert.base_uncased_32d_seed_0': 'bert_qa_bert_base_uncased_few_shot_k_32_finetuned_squad_seed_0', 'en.answer_question.squad.bert.base_uncased_512d_seed_0': 'bert_qa_bert_base_uncased_few_shot_k_512_finetuned_squad_seed_0', 'en.answer_question.squad.bert.base_uncased_64d_seed_0': 'bert_qa_bert_base_uncased_few_shot_k_64_finetuned_squad_seed_0', 'en.answer_question.squad.bert.base_uncased_l3.by_howey': 'bert_qa_bert_base_uncased_squad_L3', 'en.answer_question.squad.bert.base_uncased_l6.by_howey': 'bert_qa_bert_base_uncased_squad_L6', 'en.answer_question.squad.bert.base_uncased_seed_42': 'bert_qa_bert_base_uncased_few_shot_k_16_finetuned_squad_seed_42', 'en.answer_question.squad.bert.base_uncased_v2': 'bert_qa_bert_base_uncased_squad1.1_pruned_x3.2_v2', 'en.answer_question.squad.bert.base_uncased_v2.by_ericRosello': 'bert_qa_bert_base_uncased_finetuned_squad_frozen_v2', 'en.answer_question.squad.bert.base_uncased_v2.by_madlag': 'bert_qa_bert_base_uncased_squad1.1_pruned_x3.2_v2', 
'en.answer_question.squad.bert.base_uncased_x1.16_f88.1_d8_unstruct_v1.by_madlag': 'bert_qa_bert_base_uncased_squad1.1_block_sparse_0.20_v1', 'en.answer_question.squad.bert.base_uncased_x1.84_f88.7_d36_hybrid_filled_v1.by_madlag': 'bert_qa_bert_base_uncased_squadv1_x1.96_f88.3_d27_hybrid_filled_opt_v1', 'en.answer_question.squad.bert.base_uncased_x1.96_f88.3_d27_hybrid_filled_opt_v1.by_madlag': 'bert_qa_bert_base_uncased_squadv1_x2.01_f89.2_d30_hybrid_rewind_opt_v1', 'en.answer_question.squad.bert.base_uncased_x2.01_f89.2_d30_hybrid_rewind_opt_v1.by_madlag': 'bert_qa_bert_base_uncased_squadv1_x2.32_f86.6_d15_hybrid_v1', 'en.answer_question.squad.bert.base_uncased_x2.32_f86.6_d15_hybrid_v1.by_madlag': 'bert_qa_bert_base_uncased_squadv1_x2.44_f87.7_d26_hybrid_filled_v1', 'en.answer_question.squad.bert.base_uncased_x2.44_f87.7_d26_hybrid_filled_v1.by_madlag': 'bert_qa_bert_base_uncased_squad1.1_block_sparse_0.07_v1', 'en.answer_question.squad.bert.by_Alexander-Learn': 'bert_qa_Alexander_Learn_bert_finetuned_squad', 'en.answer_question.squad.bert.by_ArpanZS': 'bert_qa_debug_squad', 'en.answer_question.squad.bert.by_DaisyMak': 'bert_qa_bert_finetuned_squad_accelerate_10epoch_transformerfrozen', 'en.answer_question.squad.bert.by_Danastos': 'bert_qa_squad_bert_el_Danastos', 'en.answer_question.squad.bert.by_FardinSaboori': 'bert_qa_FardinSaboori_bert_finetuned_squad', 'en.answer_question.squad.bert.by_Ghost1': 'bert_qa_bert_finetuned_squad1', 'en.answer_question.squad.bert.by_Harsit': 'bert_qa_Harsit_bert_finetuned_squad', 'en.answer_question.squad.bert.by_KevinChoi': 'bert_qa_KevinChoi_bert_finetuned_squad', 'en.answer_question.squad.bert.by_Kutay': 'bert_qa_fine_tuned_squad_aip', 'en.answer_question.squad.bert.by_Laikokwei': 'bert_qa_Laikokwei_bert_finetuned_squad', 'en.answer_question.squad.bert.by_Neulvo': 'bert_qa_Neulvo_bert_finetuned_squad', 'en.answer_question.squad.bert.by_andresestevez': 'bert_qa_andresestevez_bert_finetuned_squad_accelerate', 
'en.answer_question.squad.bert.by_ankitkupadhyay': 'bert_qa_ankitkupadhyay_bert_finetuned_squad', 'en.answer_question.squad.bert.by_datauma': 'bert_qa_datauma_bert_finetuned_squad', 'en.answer_question.squad.bert.by_hendrixcosta': 'bert_qa_bertimbau_squad1.1', 'en.answer_question.squad.bert.by_huggingface-course': 'bert_qa_huggingface_course_bert_finetuned_squad', 'en.answer_question.squad.bert.by_jatinshah': 'bert_qa_jatinshah_bert_finetuned_squad', 'en.answer_question.squad.bert.by_maroo93': 'bert_qa_squad1.1', 'en.answer_question.squad.bert.by_mrbalazs5': 'bert_qa_mrbalazs5_bert_finetuned_squad', 'en.answer_question.squad.bert.by_mrp': 'bert_qa_mrp_bert_finetuned_squad', 'en.answer_question.squad.bert.by_nickmuchi': 'bert_qa_nickmuchi_bert_finetuned_squad', 'en.answer_question.squad.bert.by_peterhsu': 'bert_qa_tf_bert_finetuned_squad', 'en.answer_question.squad.bert.by_ruselkomp': 'bert_qa_tests_finetuned_squad_test_bert', 'en.answer_question.squad.bert.by_spacemanidol': 'bert_qa_neuralmagic_bert_squad_12layer_0sparse', 'en.answer_question.squad.bert.by_stevemobs': 'bert_qa_bert_finetuned_squad_pytorch', 'en.answer_question.squad.bert.by_vanichandna': 'bert_qa_muril_finetuned_squad', 'en.answer_question.squad.bert.by_youngjae': 'bert_qa_youngjae_bert_finetuned_squad', 'en.answer_question.squad.bert.cased': 'bert_qa_bert_multi_cased_squad_sv_marbogusz', 'en.answer_question.squad.bert.distilled_base_uncased': 'bert_qa_kamilali_distilbert_base_uncased_finetuned_squad', 'en.answer_question.squad.bert.distilled_base_uncased.by_huggingface': 'bert_qa_prunebert_base_uncased_6_finepruned_w_distil_squad', 'en.answer_question.squad.bert.distilled_base_uncased.by_juliusco': 'bert_qa_juliusco_distilbert_base_uncased_finetuned_squad', 'en.answer_question.squad.bert.distilled_base_uncased.by_kamilali': 'bert_qa_kamilali_distilbert_base_uncased_finetuned_squad', 'en.answer_question.squad.bert.large': 'bert_qa_sbert_large_nlu_ru_finetuned_squad', 
'en.answer_question.squad.bert.large.by_rsvp-ai': 'bert_qa_bertserini_bert_large_squad', 'en.answer_question.squad.bert.large.by_ruselkomp': 'bert_qa_sbert_large_nlu_ru_finetuned_squad', 'en.answer_question.squad.bert.large_cased': 'bert_qa_bert_large_cased_whole_word_masking_finetuned_squad', 'en.answer_question.squad.bert.large_uncased': 'bert_qa_bert_large_uncased_whole_word_masking_finetuned_squad', 'en.answer_question.squad.bert.large_uncased.by_Graphcore': 'bert_qa_Graphcore_bert_large_uncased_squad', 'en.answer_question.squad.bert.large_uncased.by_haddadalwi': 'bert_qa_bert_large_uncased_whole_word_masking_finetuned_squad_finetuned_islamic_squad', 'en.answer_question.squad.bert.large_uncased.by_howey': 'bert_qa_howey_bert_large_uncased_squad', 'en.answer_question.squad.bert.large_uncased.by_internetoftim': 'bert_qa_internetoftim_bert_large_uncased_squad', 'en.answer_question.squad.bert.large_uncased.by_ofirzaf': 'bert_qa_ofirzaf_bert_large_uncased_squad', 'en.answer_question.squad.bert.large_uncased.by_uploaded by huggingface': 'bert_qa_bert_large_uncased_whole_word_masking_finetuned_squad', 'en.answer_question.squad.bert.large_uncased_sparse_80_1x4_block_pruneofa.by_Intel': 'bert_qa_bert_large_uncased_squadv1.1_sparse_80_1x4_block_pruneofa', 'en.answer_question.squad.bert.large_uncased_sparse_90_unstructured.by_Intel': 'bert_qa_bert_large_uncased_squadv1.1_sparse_90_unstructured', 'en.answer_question.squad.bert.medium': 'bert_qa_bert_medium_wrslb_finetuned_squadv1', 'en.answer_question.squad.bert.medium.by_anas-awadalla': 'bert_qa_bert_medium_finetuned_squad', 'en.answer_question.squad.bert.medium.by_mrm8488': 'bert_qa_bert_medium_wrslb_finetuned_squadv1', 'en.answer_question.squad.bert.medium_finetuned.by_anas-awadalla': 'bert_qa_bert_medium_pretrained_finetuned_squad', 'en.answer_question.squad.bert.mini_lm_base_uncased': 'bert_qa_MiniLM_L12_H384_uncased_finetuned_squad', 'en.answer_question.squad.bert.ms_tuned.base.by_zhufy': 
'bert_qa_squad_ms_bert_base', 'en.answer_question.squad.bert.multilingual_base_cased': 'bert_qa_salti_bert_base_multilingual_cased_finetuned_squad', 'en.answer_question.squad.bert.multilingual_base_cased.by_Paul-Vinh': 'bert_qa_Paul_Vinh_bert_base_multilingual_cased_finetuned_squad', 'en.answer_question.squad.bert.multilingual_base_cased.by_salti': 'bert_qa_salti_bert_base_multilingual_cased_finetuned_squad', 'en.answer_question.squad.bert.multilingual_base_cased.by_vanichandna': 'bert_qa_bert_base_multilingual_cased_finetuned_squadv1', 'en.answer_question.squad.bert.multilingual_base_uncased': 'bert_qa_bert_base_multilingual_uncased_finetuned_squad', 'en.answer_question.squad.bert.sl256.by_vuiseng9': 'bert_qa_bert_l_squadv1.1_sl256', 'en.answer_question.squad.bert.sl384.by_vuiseng9': 'bert_qa_bert_l_squadv1.1_sl384', 'en.answer_question.squad.bert.small': 'bert_qa_bert_small_wrslb_finetuned_squadv1', 'en.answer_question.squad.bert.small.by_anas-awadalla': 'bert_qa_bert_small_finetuned_squad', 'en.answer_question.squad.bert.small.by_mrm8488': 'bert_qa_bert_small_wrslb_finetuned_squadv1', 'en.answer_question.squad.bert.small_finetuned.by_anas-awadalla': 'bert_qa_bert_small_pretrained_finetuned_squad', 'en.answer_question.squad.bert.tiny': 'bert_qa_bert_tiny_finetuned_squad', 'en.answer_question.squad.bert.v1.1.by_maroo93': 'bert_qa_squad1.1_1', 'en.answer_question.squad.bert.v1.by_vanichandna': 'bert_qa_muril_finetuned_squadv1', 'en.answer_question.squad.bert.v2.by_peterhsu': 'bert_qa_peterhsu_bert_finetuned_squad', 'en.answer_question.squad.bert.v2.by_ruselkomp': 'bert_qa_tests_finetuned_squad_test_bert_2', 'en.answer_question.squad.biobert.base_cased': 'bert_qa_biobert_base_cased_v1.1_squad_finetuned_biobert', 'en.answer_question.squad.biobert.base_cased.by_dmis-lab': 'bert_qa_biobert_base_cased_v1.1_squad', 'en.answer_question.squad.biobert.base_cased.by_juliusco': 'bert_qa_biobert_base_cased_v1.1_squad_finetuned_biobert', 
'en.answer_question.squad.bioformer.cased': 'bert_qa_bioformer_cased_v1.0_squad1', 'en.answer_question.squad.covid_bert': 'bert_qa_covidbert_squad', 'en.answer_question.squad.covid_biobert.base_cased': 'bert_qa_biobert_base_cased_v1.1_squad_finetuned_covbiobert', 'en.answer_question.squad.covid_roberta.base_cased': 'bert_qa_biobert_base_cased_v1.1_squad_finetuned_covdrobert', 'en.answer_question.squad.distil_bert': 'distilbert_qa_checkpoint_500_finetuned_squad', 'en.answer_question.squad.distil_bert.base': 'distilbert_qa_base_finetuned_squad', 'en.answer_question.squad.distil_bert.base_cased': 'distilbert_qa_base_cased_distilled_squad_finetuned_squad_test', 'en.answer_question.squad.distil_bert.base_cased.by_ncduy': 'distilbert_qa_base_cased_distilled_squad_finetuned_squad_test', 'en.answer_question.squad.distil_bert.base_cased.by_uploaded by huggingface': 'distilbert_qa_base_cased_distilled_squad', 'en.answer_question.squad.distil_bert.base_small_cased': 'distilbert_qa_base_cased_distilled_squad_finetuned_squad_small', 'en.answer_question.squad.distil_bert.base_tiny_cased': 'distilbert_qa_tiny_base_cased_distilled_squad', 'en.answer_question.squad.distil_bert.base_uncased': 'distilbert_qa_MYX4567_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_21iridescent': 'distilbert_qa_21iridescent_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_Adrian': 'distilbert_qa_Adrian_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_Ayoola': 'distilbert_qa_Ayoola_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_FOFer': 'distilbert_qa_FOFer_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_Firat': 'distilbert_qa_Firat_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_Gayathri': 'distilbert_qa_Gayathri_base_uncased_finetuned_squad', 
'en.answer_question.squad.distil_bert.base_uncased.by_Hoang': 'distilbert_qa_Hoang_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_HomayounSadri': 'distilbert_qa_HomayounSadri_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_MYX4567': 'distilbert_qa_MYX4567_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_Nadhiya': 'distilbert_qa_Nadhiya_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_ParulChaudhari': 'distilbert_qa_ParulChaudhari_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_Plimpton': 'distilbert_qa_Plimpton_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_Raphaelg9': 'distilbert_qa_Raphaelg9_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_Rocketknight1': 'distilbert_qa_Rocketknight1_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_SEISHIN': 'distilbert_qa_SEISHIN_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_Shashidhar': 'distilbert_qa_Shashidhar_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_Sourabh714': 'distilbert_qa_Sourabh714_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_SupriyaArun': 'distilbert_qa_SupriyaArun_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_Thitaree': 'distilbert_qa_Thitaree_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_Tianle': 'distilbert_qa_Tianle_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_V3RX2000': 'distilbert_qa_V3RX2000_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_Wiam': 'distilbert_qa_Wiam_base_uncased_finetuned_squad', 
'en.answer_question.squad.distil_bert.base_uncased.by_aaraki': 'distilbert_qa_aaraki_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_abhinavkulkarni': 'distilbert_qa_abhinavkulkarni_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_akr': 'distilbert_qa_akr_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_andi611': 'distilbert_qa_andi611_base_uncased_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_anurag0077': 'distilbert_qa_base_uncased_finetuned_squad3', 'en.answer_question.squad.distil_bert.base_uncased.by_arvalinno': 'distilbert_qa_arvalinno_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_avioo1': 'distilbert_qa_avioo1_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_bdickson': 'distilbert_qa_bdickson_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_caiosantillo': 'distilbert_qa_caiosantillo_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_deepakvk': 'distilbert_qa_base_uncased_distilled_squad_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_emre': 'distilbert_qa_emre_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_en': 'distilbert_qa_en_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_fadhilarkan': 'distilbert_qa_fadhilarkan_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_gokulkarthik': 'distilbert_qa_gokulkarthik_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_graviraja': 'distilbert_qa_graviraja_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_guhuawuli': 'distilbert_qa_guhuawuli_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_hark99': 
'distilbert_qa_hark99_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_hcy11': 'distilbert_qa_hcy11_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_hiiii23': 'distilbert_qa_hiiii23_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_holtin': 'distilbert_qa_base_uncased_holtin_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_huggingfaceepita': 'distilbert_qa_huggingfaceepita_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_huxxx657': 'distilbert_qa_huxxx657_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_jgammack': 'distilbert_qa_jgammack_base_uncased_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_jhoonk': 'distilbert_qa_jhoonk_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_jsunster': 'distilbert_qa_jsunster_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_kaggleodin': 'distilbert_qa_kaggleodin_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_lewtun': 'distilbert_qa_base_uncased_finetuned_squad_v1', 'en.answer_question.squad.distil_bert.base_uncased.by_machine2049': 'distilbert_qa_base_uncased_finetuned_squad_', 'en.answer_question.squad.distil_bert.base_uncased.by_manudotc': 'distilbert_qa_transformers_base_uncased_finetuneQA_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_sunitha': 'distilbert_qa_base_uncased_3feb_2022_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_tli8hf': 'distilbert_qa_unqover_base_uncased_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_tucan9389': 'distilbert_qa_tucan9389_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_uploaded by huggingface': 'distilbert_qa_base_uncased_distilled_squad', 
'en.answer_question.squad.distil_bert.base_uncased.by_usami': 'distilbert_qa_usami_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_vitusya': 'distilbert_qa_vitusya_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_vkmr': 'distilbert_qa_vkmr_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased.by_vkrishnamoorthy': 'distilbert_qa_vkrishnamoorthy_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased_colab.by_Adrian': 'distilbert_qa_base_uncased_finetuned_squad_colab', 'en.answer_question.squad.distil_bert.base_uncased_full.by_holtin': 'distilbert_qa_base_uncased_holtin_finetuned_full_squad', 'en.answer_question.squad.distil_bert.base_uncased_mtl.by_jgammack': 'distilbert_qa_MTL_base_uncased_squad', 'en.answer_question.squad.distil_bert.base_uncased_sae.by_jgammack': 'distilbert_qa_SAE_base_uncased_squad', 'en.answer_question.squad.distil_bert.base_uncased_v2': 'distilbert_qa_base_uncased_finetuned_indosquad_v2', 'en.answer_question.squad.distil_bert.base_uncased_v2.by_arvalinno': 'distilbert_qa_base_uncased_finetuned_indosquad_v2', 'en.answer_question.squad.distil_bert.base_uncased_v2.by_ericRosello': 'distilbert_qa_base_uncased_finetuned_squad_frozen_v2', 'en.answer_question.squad.distil_bert.base_uncased_v2.by_holtin': 'distilbert_qa_holtin_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.base_uncased_v2.by_huxxx657': 'distilbert_qa_base_uncased_finetuned_jumbling_squad_15', 'en.answer_question.squad.distil_bert.base_uncased_v3.by_anurag0077': 'distilbert_qa_anurag0077_base_uncased_finetuned_squad', 'en.answer_question.squad.distil_bert.by_AyushPJ': 'distilbert_qa_test_squad_trained_finetuned_squad', 'en.answer_question.squad.distil_bert.by_ZYW': 'distilbert_qa_test_squad_trained', 'en.answer_question.squad.distil_bert.by_abhilash1910': 'distilbert_qa_squadv1', 'en.answer_question.squad.distil_bert.by_rowan1224': 
'distilbert_qa_squad_slp', 'en.answer_question.squad.distil_bert.by_sunitha': 'distilbert_qa_AQG_CV_Squad', 'en.answer_question.squad.distil_bert.by_tabo': 'distilbert_qa_checkpoint_500_finetuned_squad', 'en.answer_question.squad.electra': 'electra_qa_squad_slp', 'en.answer_question.squad.electra.base': 'electra_qa_base_discriminator_finetuned_squad', 'en.answer_question.squad.electra.base.by_Palak': 'electra_qa_google_base_discriminator_squad', 'en.answer_question.squad.electra.base.by_mrm8488': 'electra_qa_base_finetuned_squadv1', 'en.answer_question.squad.electra.base.by_usami': 'electra_qa_base_discriminator_finetuned_squad', 'en.answer_question.squad.electra.base.by_valhalla': 'electra_qa_base_discriminator_finetuned_squadv1', 'en.answer_question.squad.electra.large': 'electra_qa_large_squad', 'en.answer_question.squad.electra.large.by_howey': 'electra_qa_large_squad', 'en.answer_question.squad.electra.large.by_mrm8488': 'electra_qa_large_finetuned_squadv1', 'en.answer_question.squad.electra.small': 'electra_qa_small_finetuned_squadv1', 'en.answer_question.squad.electra.small.by_Palak': 'electra_qa_google_small_discriminator_squad', 'en.answer_question.squad.electra.small.by_bdickson': 'electra_qa_small_discriminator_finetuned_squad_1', 'en.answer_question.squad.electra.small.by_hankzhong': 'electra_qa_hankzhong_small_discriminator_finetuned_squad', 'en.answer_question.squad.electra.small.by_mrm8488': 'electra_qa_small_finetuned_squadv1', 'en.answer_question.squad.electra.small_v2.by_bdickson': 'electra_qa_small_discriminator_finetuned_squad_2', 'en.answer_question.squad.ixam_bert.by_MarcBrun': 'bert_qa_ixambert_finetuned_squad', 'en.answer_question.squad.ixam_bert.eu_en_tunedby_MarcBrun': 'bert_qa_ixambert_finetuned_squad_eu_en_MarcBrun', 'en.answer_question.squad.ixam_bert.eu_tuned.by_MarcBrun': 'bert_qa_ixambert_finetuned_squad_eu_MarcBrun', 'en.answer_question.squad.link_bert.large': 'bert_qa_linkbert_large_finetuned_squad', 
'en.answer_question.squad.multi_lingual_bert.by_ZYW': 'bert_qa_squad_mbert_model', 'en.answer_question.squad.multi_lingual_bert.en_de_es.by_ZYW': 'bert_qa_squad_mbert_en_de_es_model', 'en.answer_question.squad.multi_lingual_bert.en_de_es_vi_zh.by_ZYW': 'bert_qa_squad_mbert_en_de_es_vi_zh_model', 'en.answer_question.squad.multi_lingual_bert.v2.by_ZYW': 'bert_qa_squad_mbert_model_2', 'en.answer_question.squad.roberta': 'roberta_qa_roberta_l_squadv1.1', 'en.answer_question.squad.roberta.base': 'roberta_qa_roberta_base_1B_1_finetuned_squadv1', 'en.answer_question.squad.roberta.base.by_Firat': 'roberta_qa_Firat_roberta_base_finetuned_squad', 'en.answer_question.squad.roberta.base.by_ahmedattia143': 'roberta_qa_roberta_squadv1_base', 'en.answer_question.squad.roberta.base.by_csarron': 'roberta_qa_roberta_base_squad_v1', 'en.answer_question.squad.roberta.base.by_huxxx657': 'roberta_qa_huxxx657_roberta_base_finetuned_squad', 'en.answer_question.squad.roberta.base.by_jgammack': 'roberta_qa_roberta_base_squad', 'en.answer_question.squad.roberta.base.by_mrm8488': 'roberta_qa_roberta_base_1B_1_finetuned_squadv1', 'en.answer_question.squad.roberta.base.by_rahulchakwate': 'roberta_qa_rahulchakwate_roberta_base_finetuned_squad', 'en.answer_question.squad.roberta.base.by_tli8hf': 'roberta_qa_unqover_roberta_base_squad', 'en.answer_question.squad.roberta.base_1024d_seed_0': 'roberta_qa_roberta_base_few_shot_k_1024_finetuned_squad_seed_0', 'en.answer_question.squad.roberta.base_1024d_seed_10': 'roberta_qa_roberta_base_few_shot_k_1024_finetuned_squad_seed_10', 'en.answer_question.squad.roberta.base_1024d_seed_2': 'roberta_qa_roberta_base_few_shot_k_1024_finetuned_squad_seed_2', 'en.answer_question.squad.roberta.base_1024d_seed_4': 'roberta_qa_roberta_base_few_shot_k_1024_finetuned_squad_seed_4', 'en.answer_question.squad.roberta.base_1024d_seed_42': 'roberta_qa_roberta_base_few_shot_k_1024_finetuned_squad_seed_42', 'en.answer_question.squad.roberta.base_1024d_seed_6': 
'roberta_qa_roberta_base_few_shot_k_1024_finetuned_squad_seed_6', 'en.answer_question.squad.roberta.base_1024d_seed_8': 'roberta_qa_roberta_base_few_shot_k_1024_finetuned_squad_seed_8', 'en.answer_question.squad.roberta.base_128d_seed_0': 'roberta_qa_roberta_base_few_shot_k_128_finetuned_squad_seed_0', 'en.answer_question.squad.roberta.base_128d_seed_10': 'roberta_qa_roberta_base_few_shot_k_128_finetuned_squad_seed_10', 'en.answer_question.squad.roberta.base_128d_seed_2': 'roberta_qa_roberta_base_few_shot_k_128_finetuned_squad_seed_2', 'en.answer_question.squad.roberta.base_128d_seed_4': 'roberta_qa_roberta_base_few_shot_k_128_finetuned_squad_seed_4', 'en.answer_question.squad.roberta.base_128d_seed_42': 'roberta_qa_roberta_base_few_shot_k_128_finetuned_squad_seed_42', 'en.answer_question.squad.roberta.base_128d_seed_6': 'roberta_qa_roberta_base_few_shot_k_128_finetuned_squad_seed_6', 'en.answer_question.squad.roberta.base_128d_seed_8': 'roberta_qa_roberta_base_few_shot_k_128_finetuned_squad_seed_8', 'en.answer_question.squad.roberta.base_256d_seed_0': 'roberta_qa_roberta_base_few_shot_k_256_finetuned_squad_seed_0', 'en.answer_question.squad.roberta.base_256d_seed_10': 'roberta_qa_roberta_base_few_shot_k_256_finetuned_squad_seed_10', 'en.answer_question.squad.roberta.base_256d_seed_2': 'roberta_qa_roberta_base_few_shot_k_256_finetuned_squad_seed_2', 'en.answer_question.squad.roberta.base_256d_seed_4': 'roberta_qa_roberta_base_few_shot_k_256_finetuned_squad_seed_4', 'en.answer_question.squad.roberta.base_256d_seed_6': 'roberta_qa_roberta_base_few_shot_k_256_finetuned_squad_seed_6', 'en.answer_question.squad.roberta.base_256d_seed_8': 'roberta_qa_roberta_base_few_shot_k_256_finetuned_squad_seed_8', 'en.answer_question.squad.roberta.base_32d_seed_0': 'roberta_qa_roberta_base_few_shot_k_32_finetuned_squad_seed_0', 'en.answer_question.squad.roberta.base_32d_seed_10': 'roberta_qa_roberta_base_few_shot_k_32_finetuned_squad_seed_10', 
'en.answer_question.squad.roberta.base_32d_seed_2': 'roberta_qa_roberta_base_few_shot_k_32_finetuned_squad_seed_2', 'en.answer_question.squad.roberta.base_32d_seed_4': 'roberta_qa_roberta_base_few_shot_k_32_finetuned_squad_seed_4', 'en.answer_question.squad.roberta.base_32d_seed_6': 'roberta_qa_roberta_base_few_shot_k_32_finetuned_squad_seed_6', 'en.answer_question.squad.roberta.base_32d_seed_8': 'roberta_qa_roberta_base_few_shot_k_32_finetuned_squad_seed_8', 'en.answer_question.squad.roberta.base_512d_seed_0': 'roberta_qa_roberta_base_few_shot_k_512_finetuned_squad_seed_0', 'en.answer_question.squad.roberta.base_512d_seed_10': 'roberta_qa_roberta_base_few_shot_k_512_finetuned_squad_seed_10', 'en.answer_question.squad.roberta.base_512d_seed_2': 'roberta_qa_roberta_base_few_shot_k_512_finetuned_squad_seed_2', 'en.answer_question.squad.roberta.base_512d_seed_4': 'roberta_qa_roberta_base_few_shot_k_512_finetuned_squad_seed_4', 'en.answer_question.squad.roberta.base_512d_seed_6': 'roberta_qa_roberta_base_few_shot_k_512_finetuned_squad_seed_6', 'en.answer_question.squad.roberta.base_512d_seed_8': 'roberta_qa_roberta_base_few_shot_k_512_finetuned_squad_seed_8', 'en.answer_question.squad.roberta.base_64d_seed_0': 'roberta_qa_roberta_base_few_shot_k_64_finetuned_squad_seed_0', 'en.answer_question.squad.roberta.base_64d_seed_10': 'roberta_qa_roberta_base_few_shot_k_64_finetuned_squad_seed_10', 'en.answer_question.squad.roberta.base_64d_seed_2': 'roberta_qa_roberta_base_few_shot_k_64_finetuned_squad_seed_2', 'en.answer_question.squad.roberta.base_64d_seed_4': 'roberta_qa_roberta_base_few_shot_k_64_finetuned_squad_seed_4', 'en.answer_question.squad.roberta.base_64d_seed_6': 'roberta_qa_roberta_base_few_shot_k_64_finetuned_squad_seed_6', 'en.answer_question.squad.roberta.base_64d_seed_8': 'roberta_qa_roberta_base_few_shot_k_64_finetuned_squad_seed_8', 'en.answer_question.squad.roberta.base_deletion_10.by_huxxx657': 'roberta_qa_roberta_base_finetuned_deletion_squad_10', 
'en.answer_question.squad.roberta.base_deletion_15.by_huxxx657': 'roberta_qa_roberta_base_finetuned_deletion_squad_15', 'en.answer_question.squad.roberta.base_sae.by_jgammack': 'roberta_qa_SAE_roberta_base_squad', 'en.answer_question.squad.roberta.base_scrambled_10.by_huxxx657': 'roberta_qa_roberta_base_finetuned_scrambled_squad_10', 'en.answer_question.squad.roberta.base_scrambled_10_new.by_huxxx657': 'roberta_qa_roberta_base_finetuned_scrambled_squad_10_new', 'en.answer_question.squad.roberta.base_scrambled_15.by_huxxx657': 'roberta_qa_roberta_base_finetuned_scrambled_squad_15', 'en.answer_question.squad.roberta.base_scrambled_15_v2.by_huxxx657': 'roberta_qa_roberta_base_finetuned_scrambled_squad_15_new', 'en.answer_question.squad.roberta.base_scrambled_5.by_huxxx657': 'roberta_qa_roberta_base_finetuned_scrambled_squad_5', 'en.answer_question.squad.roberta.base_scrambled_sq.by_huxxx657': 'roberta_qa_roberta_base_finetuned_scrambled_squad_5_new', 'en.answer_question.squad.roberta.base_seed_0': 'roberta_qa_roberta_base_few_shot_k_16_finetuned_squad_seed_0', 'en.answer_question.squad.roberta.base_seed_10': 'roberta_qa_roberta_base_few_shot_k_16_finetuned_squad_seed_10', 'en.answer_question.squad.roberta.base_seed_2': 'roberta_qa_roberta_base_few_shot_k_16_finetuned_squad_seed_2', 'en.answer_question.squad.roberta.base_seed_4': 'roberta_qa_roberta_base_few_shot_k_16_finetuned_squad_seed_4', 'en.answer_question.squad.roberta.base_seed_42': 'roberta_qa_roberta_base_few_shot_k_16_finetuned_squad_seed_42', 'en.answer_question.squad.roberta.base_seed_6': 'roberta_qa_roberta_base_few_shot_k_16_finetuned_squad_seed_6', 'en.answer_question.squad.roberta.base_seed_8': 'roberta_qa_roberta_base_few_shot_k_16_finetuned_squad_seed_8', 'en.answer_question.squad.roberta.base_v1.by_huxxx657': 'roberta_qa_roberta_base_finetuned_squad_1', 'en.answer_question.squad.roberta.base_v2.by_huxxx657': 'roberta_qa_roberta_base_finetuned_squad_2', 
'en.answer_question.squad.roberta.base_v3.by_huxxx657': 'roberta_qa_roberta_base_finetuned_squad_3', 'en.answer_question.squad.roberta.by_cgou': 'roberta_qa_fin_RoBERTa_v1_finetuned_squad', 'en.answer_question.squad.roberta.by_sunitha': 'roberta_qa_Roberta_Custom_Squad_DS', 'en.answer_question.squad.roberta.by_vuiseng9': 'roberta_qa_roberta_l_squadv1.1', 'en.answer_question.squad.roberta.distilled': 'roberta_qa_distilroberta_finetuned_squadv1', 'en.answer_question.squad.roberta.distilled_base': 'roberta_qa_distilroberta_base_squad', 'en.answer_question.squad.roberta.large': 'roberta_qa_roberta_large_squad_v1', 'en.answer_question.squad.roberta.large.by_csarron': 'roberta_qa_roberta_large_squad_v1', 'en.answer_question.squad.roberta.large.by_rahulchakwate': 'roberta_qa_roberta_large_finetuned_squad', 'en.answer_question.squad.scibert': 'bert_qa_scibert_nli_squad', 'en.answer_question.squad.scibert.by_amoux': 'bert_qa_scibert_nli_squad', 'en.answer_question.squad.scibert.by_ixa-ehu': 'bert_qa_SciBERT_SQuAD_QuAC', 'en.answer_question.squad.scibert.uncased': 'bert_qa_scibert_scivocab_uncased_squad', 'en.answer_question.squad.span_bert': 'bert_qa_spanbert_finetuned_squadv1', 'en.answer_question.squad.span_bert.base_cased_1024d_seed_0': 'bert_qa_spanbert_base_cased_few_shot_k_1024_finetuned_squad_seed_0', 'en.answer_question.squad.span_bert.base_cased_1024d_seed_10': 'bert_qa_spanbert_base_cased_few_shot_k_1024_finetuned_squad_seed_10', 'en.answer_question.squad.span_bert.base_cased_1024d_seed_2': 'bert_qa_spanbert_base_cased_few_shot_k_1024_finetuned_squad_seed_2', 'en.answer_question.squad.span_bert.base_cased_1024d_seed_4': 'bert_qa_spanbert_base_cased_few_shot_k_1024_finetuned_squad_seed_4', 'en.answer_question.squad.span_bert.base_cased_1024d_seed_42': 'bert_qa_spanbert_base_cased_few_shot_k_1024_finetuned_squad_seed_42', 'en.answer_question.squad.span_bert.base_cased_1024d_seed_6': 'bert_qa_spanbert_base_cased_few_shot_k_1024_finetuned_squad_seed_6', 
'en.answer_question.squad.span_bert.base_cased_1024d_seed_8': 'bert_qa_spanbert_base_cased_few_shot_k_1024_finetuned_squad_seed_8', 'en.answer_question.squad.span_bert.base_cased_128d_seed_10': 'bert_qa_spanbert_base_cased_few_shot_k_128_finetuned_squad_seed_10', 'en.answer_question.squad.span_bert.base_cased_128d_seed_4': 'bert_qa_spanbert_base_cased_few_shot_k_128_finetuned_squad_seed_4', 'en.answer_question.squad.span_bert.base_cased_128d_seed_6': 'bert_qa_spanbert_base_cased_few_shot_k_128_finetuned_squad_seed_6', 'en.answer_question.squad.span_bert.base_cased_128d_seed_8': 'bert_qa_spanbert_base_cased_few_shot_k_128_finetuned_squad_seed_8', 'en.answer_question.squad.span_bert.base_cased_256d_seed_10': 'bert_qa_spanbert_base_cased_few_shot_k_256_finetuned_squad_seed_10', 'en.answer_question.squad.span_bert.base_cased_32d_seed_0': 'bert_qa_spanbert_base_cased_few_shot_k_32_finetuned_squad_seed_0', 'en.answer_question.squad.span_bert.base_cased_32d_seed_10': 'bert_qa_spanbert_base_cased_few_shot_k_32_finetuned_squad_seed_10', 'en.answer_question.squad.span_bert.base_cased_32d_seed_2': 'bert_qa_spanbert_base_cased_few_shot_k_32_finetuned_squad_seed_2', 'en.answer_question.squad.span_bert.base_cased_32d_seed_6': 'bert_qa_spanbert_base_cased_few_shot_k_32_finetuned_squad_seed_6', 'en.answer_question.squad.span_bert.base_cased_512d_seed_0': 'bert_qa_spanbert_base_cased_few_shot_k_512_finetuned_squad_seed_0', 'en.answer_question.squad.span_bert.base_cased_512d_seed_10': 'bert_qa_spanbert_base_cased_few_shot_k_512_finetuned_squad_seed_10', 'en.answer_question.squad.span_bert.base_cased_512d_seed_6': 'bert_qa_spanbert_base_cased_few_shot_k_512_finetuned_squad_seed_6', 'en.answer_question.squad.span_bert.base_cased_512d_seed_8': 'bert_qa_spanbert_base_cased_few_shot_k_512_finetuned_squad_seed_8', 'en.answer_question.squad.span_bert.base_cased_64d_seed_0': 'bert_qa_spanbert_base_cased_few_shot_k_64_finetuned_squad_seed_0', 
'en.answer_question.squad.span_bert.base_cased_64d_seed_10': 'bert_qa_spanbert_base_cased_few_shot_k_64_finetuned_squad_seed_10', 'en.answer_question.squad.span_bert.base_cased_64d_seed_2': 'bert_qa_spanbert_base_cased_few_shot_k_64_finetuned_squad_seed_2', 'en.answer_question.squad.span_bert.base_cased_64d_seed_4': 'bert_qa_spanbert_base_cased_few_shot_k_64_finetuned_squad_seed_4', 'en.answer_question.squad.span_bert.base_cased_64d_seed_6': 'bert_qa_spanbert_base_cased_few_shot_k_64_finetuned_squad_seed_6', 'en.answer_question.squad.span_bert.base_cased_seed_42': 'bert_qa_spanbert_base_cased_few_shot_k_16_finetuned_squad_seed_42', 'en.answer_question.squad.xlm_roberta': 'xlm_roberta_qa_xlm_roberta_squad_v1.1', 'en.answer_question.squad.xlm_roberta.by_jakobwes': 'xlm_roberta_qa_xlm_roberta_squad_v1.1', 'en.answer_question.squad.xlm_roberta.by_meghana': 'xlm_roberta_qa_hitalmqa_finetuned_squad', 'en.answer_question.squad_battery.bert.base_uncased': 'bert_qa_batterydata_bert_base_uncased_squad_v1', 'en.answer_question.squad_battery.bert.cased.by_batterydata': 'bert_qa_batterybert_cased_squad_v1', 'en.answer_question.squad_battery.bert.cased_only_bert.by_batterydata': 'bert_qa_batteryonlybert_cased_squad_v1', 'en.answer_question.squad_battery.bert.uncased.by_batterydata': 'bert_qa_batterybert_uncased_squad_v1', 'en.answer_question.squad_battery.bert.uncased_only_bert.by_batterydata': 'bert_qa_batteryonlybert_uncased_squad_v1', 'en.answer_question.squad_battery.scibert.cased': 'bert_qa_batteryscibert_cased_squad_v1', 'en.answer_question.squad_battery.scibert.uncased': 'bert_qa_batteryscibert_uncased_squad_v1', 'en.answer_question.squad_ben_tel.bert.by_krinal214': 'bert_qa_bert_all_squad_ben_tel_context', 'en.answer_question.squad_covid.bert': 'bert_qa_covid_squad', 'en.answer_question.squad_pubmed.biobert': 'bert_qa_biobert_v1.1_pubmed_finetuned_squad', 'en.answer_question.squad_translated.bert.by_krinal214': 'bert_qa_bert_all_squad_all_translated', 
'en.answer_question.squad_translated.bert.que.by_krinal214': 'bert_qa_bert_all_squad_que_translated', 'en.answer_question.squadv2.albert.base_v2': 'albert_qa_vumichien_base_v2_squad2', 'en.answer_question.squadv2.albert.base_v2.by_elgeish': 'albert_qa_cs224n_squad2.0_base_v2', 'en.answer_question.squadv2.albert.base_v2.by_vumichien': 'albert_qa_vumichien_base_v2_squad2', 'en.answer_question.squadv2.albert.large_v2': 'albert_qa_cs224n_squad2.0_large_v2', 'en.answer_question.squadv2.albert.xl_v2': 'albert_qa_xlarge_v2_squad_v2', 'en.answer_question.squadv2.albert.xxl': 'albert_qa_xxlarge_v1_finetuned_squad2', 'en.answer_question.squadv2.albert.xxl.by_elgeish': 'albert_qa_cs224n_squad2.0_xxlarge_v1', 'en.answer_question.squadv2.albert.xxl.by_replydotai': 'albert_qa_xxlarge_v1_finetuned_squad2', 'en.answer_question.squadv2.albert.xxl.by_sultan': 'albert_qa_BioM_xxlarge_SQuAD2', 'en.answer_question.squadv2.albert.xxl_512d': 'albert_qa_xxlargev1_squad2_512', 'en.answer_question.squadv2.albert.xxl_v2': 'albert_qa_xxlarge_v2_squad2', 'en.answer_question.squadv2.bert': 'bert_qa_squad2.0', 'en.answer_question.squadv2.bert.base': 'bert_qa_bert_base_finetuned_squad2', 'en.answer_question.squadv2.bert.base_cased': 'bert_qa_tf_bert_base_cased_squad2', 'en.answer_question.squadv2.bert.base_cased.by_deepset': 'bert_base_cased_qa_squad2', 'en.answer_question.squadv2.bert.base_cased.by_vumichien': 'bert_qa_tf_bert_base_cased_squad2', 'en.answer_question.squadv2.bert.base_cased.by_ydshieh': 'bert_qa_ydshieh_bert_base_cased_squad2', 'en.answer_question.squadv2.bert.base_uncased': 'bert_qa_deepset_bert_base_uncased_squad2', 'en.answer_question.squadv2.bert.base_uncased.by_Vasanth': 'bert_qa_bert_base_uncased_qa_squad2', 'en.answer_question.squadv2.bert.base_uncased.by_deepset': 'bert_qa_deepset_bert_base_uncased_squad2', 'en.answer_question.squadv2.bert.base_uncased.by_twmkn9': 'bert_qa_twmkn9_bert_base_uncased_squad2', 'en.answer_question.squadv2.bert.base_uncased_v2': 
'bert_qa_bert_base_uncased_finetuned_squad_v2', 'en.answer_question.squadv2.bert.base_v2.by_mrm8488': 'bert_qa_bert_mini_finetuned_squadv2', 'en.answer_question.squadv2.bert.base_v2_5.by_mrm8488': 'bert_qa_bert_mini_5_finetuned_squadv2', 'en.answer_question.squadv2.bert.by_augustoortiz': 'bert_qa_bert_finetuned_squad2', 'en.answer_question.squadv2.bert.by_maroo93': 'bert_qa_squad2.0', 'en.answer_question.squadv2.bert.by_pinecone': 'bert_qa_bert_reader_squad2', 'en.answer_question.squadv2.bert.distilled': 'bert_qa_xdistil_l12_h384_squad2', 'en.answer_question.squadv2.bert.distilled_medium': 'bert_qa_bert_medium_squad2_distilled', 'en.answer_question.squadv2.bert.large': 'bert_qa_muril_large_squad2', 'en.answer_question.squadv2.bert.large.by_Sindhu': 'bert_qa_muril_large_squad2', 'en.answer_question.squadv2.bert.large.by_phiyodr': 'bert_qa_bert_large_finetuned_squad2', 'en.answer_question.squadv2.bert.large_tiny_768d.by_MichelBartels': 'bert_qa_tinybert_6l_768d_squad2_large_teacher_finetuned', 'en.answer_question.squadv2.bert.large_tiny_768d_v2.by_MichelBartels': 'bert_qa_tinybert_6l_768d_squad2_large_teacher_finetuned_step1', 'en.answer_question.squadv2.bert.large_uncased': 'bert_qa_bert_large_uncased_whole_word_masking_squad2_with_ner_mit_restaurant_with_neg_with_repeat', 'en.answer_question.squadv2.bert.large_uncased.by_Salesforce': 'bert_qa_qaconv_bert_large_uncased_whole_word_masking_squad2', 'en.answer_question.squadv2.bert.large_uncased.by_andi611': 'bert_qa_bert_large_uncased_whole_word_masking_squad2_with_ner_mit_restaurant_with_neg_with_repeat', 'en.answer_question.squadv2.bert.large_uncased.by_deepset': 'bert_qa_bert_large_uncased_whole_word_masking_squad2', 'en.answer_question.squadv2.bert.large_uncased_v2.by_madlag': 'bert_qa_bert_large_uncased_squadv2', 'en.answer_question.squadv2.bert.large_uncased_v2_x2.15_f83.2_d25_hybrid.by_madlag': 'bert_qa_bert_large_uncased_wwm_squadv2_x2.15_f83.2_d25_hybrid_v1', 
'en.answer_question.squadv2.bert.large_uncased_v2_x2.63_f82.6_d16_hybrid.by_madlag': 'bert_qa_bert_large_uncased_wwm_squadv2_x2.63_f82.6_d16_hybrid_v1', 'en.answer_question.squadv2.bert.large_uncased_whole_word_masking_v2.by_madlag': 'bert_qa_bert_large_uncased_whole_word_masking_finetuned_squadv2', 'en.answer_question.squadv2.bert.medium_v2': 'bert_qa_bert_medium_finetuned_squadv2', 'en.answer_question.squadv2.bert.mini_lm_base_uncased': 'bert_qa_minilm_uncased_squad2', 'en.answer_question.squadv2.bert.small.by_mrm8488': 'bert_qa_bert_small_finetuned_squadv2', 'en.answer_question.squadv2.bert.small_v2.by_mrm8488': 'bert_qa_bert_small_2_finetuned_squadv2', 'en.answer_question.squadv2.bert.tiny_.by_mrm8488': 'bert_qa_bert_tiny_finetuned_squadv2', 'en.answer_question.squadv2.bert.tiny_768d': 'bert_qa_tinybert_6l_768d_squad2', 'en.answer_question.squadv2.bert.tiny_v2.by_mrm8488': 'bert_qa_bert_tiny_2_finetuned_squadv2', 'en.answer_question.squadv2.bert.tiny_v3.by_mrm8488': 'bert_qa_bert_tiny_3_finetuned_squadv2', 'en.answer_question.squadv2.bert.tiny_v4.by_mrm8488': 'bert_qa_bert_tiny_4_finetuned_squadv2', 'en.answer_question.squadv2.bert.tiny_v5.by_mrm8488': 'bert_qa_bert_tiny_5_finetuned_squadv2', 'en.answer_question.squadv2.bert.uncased_10l_512d_a8a_512d': 'bert_qa_bert_uncased_L_10_H_512_A_8_squad2', 'en.answer_question.squadv2.bert.uncased_2l_512d_a8a_512d': 'bert_qa_bert_uncased_L_2_H_512_A_8_squad2', 'en.answer_question.squadv2.bert.uncased_4l_256d_a4a_256d': 'bert_qa_bert_uncased_L_4_H_256_A_4_squad2', 'en.answer_question.squadv2.bert.uncased_4l_512d_a8a_512d': 'bert_qa_bert_uncased_L_4_H_512_A_8_squad2', 'en.answer_question.squadv2.bert.uncased_4l_768d_a12a_768d': 'bert_qa_bert_uncased_L_4_H_768_A_12_squad2', 'en.answer_question.squadv2.bert.uncased_6l_128d_a2a_128d': 'bert_qa_bert_uncased_L_6_H_128_A_2_squad2', 'en.answer_question.squadv2.biobert.cased': 'bert_qa_biobert_squad2_cased', 'en.answer_question.squadv2.biobert.cased.by_clagator': 
'bert_qa_biobert_squad2_cased', 'en.answer_question.squadv2.biobert.cased.by_ptnv-s': 'bert_qa_biobert_squad2_cased_finetuned_squad', 'en.answer_question.squadv2.deberta': 'deberta_v3_xsmall_qa_squad2', 'en.answer_question.squadv2.distil_bert.base': 'distilbert_qa_base_squad2_custom_dataset', 'en.answer_question.squadv2.distil_bert.base_cased': 'distilbert_base_cased_qa_squad2', 'en.answer_question.squadv2.distil_bert.base_uncased': 'distilbert_qa_mvonwyl_base_uncased_finetuned_squad2', 'en.answer_question.squadv2.distil_bert.base_uncased.by_andi611': 'distilbert_qa_base_uncased_squad2_with_ner_mit_restaurant_with_neg_with_repeat', 'en.answer_question.squadv2.distil_bert.base_uncased.by_anurag0077': 'distilbert_qa_anurag0077_base_uncased_finetuned_squad2', 'en.answer_question.squadv2.distil_bert.base_uncased.by_mvonwyl': 'distilbert_qa_mvonwyl_base_uncased_finetuned_squad2', 'en.answer_question.squadv2.distil_bert.base_uncased.by_tabo': 'distilbert_qa_tabo_base_uncased_finetuned_squad2', 'en.answer_question.squadv2.distil_bert.base_uncased.by_twmkn9': 'distilbert_qa_base_uncased_squad2', 'en.answer_question.squadv2.distil_bert.by_threem': 'distilbert_qa_mysquadv2_finetuned_squad', 'en.answer_question.squadv2.distil_bert.v2.by_threem': 'distilbert_qa_mysquadv2_8Jan22_finetuned_squad', 'en.answer_question.squadv2.electra.base': 'electra_qa_base_best_squad2', 'en.answer_question.squadv2.electra.base.by_PremalMatalia': 'electra_qa_base_best_squad2', 'en.answer_question.squadv2.electra.base.by_navteca': 'electra_qa_base_squad2', 'en.answer_question.squadv2.electra.base.by_sultan': 'electra_qa_BioM_Base_SQuAD2', 'en.answer_question.squadv2.electra.base_v2': 'electra_qa_base_finetuned_squadv2', 'en.answer_question.squadv2.electra.large': 'electra_qa_BioM_Large_SQuAD2', 'en.answer_question.squadv2.electra.large.by_sultan': 'electra_qa_BioM_Large_SQuAD2', 'en.answer_question.squadv2.electra.large.by_superspray': 'electra_qa_large_discriminator_squad2_custom_dataset', 
'en.answer_question.squadv2.electra.large_512d': 'electra_qa_large_discriminator_squad2_512', 'en.answer_question.squadv2.electra.small_v2': 'electra_qa_small_finetuned_squadv2', 'en.answer_question.squadv2.longformer.base': 'longformer_base_base_qa_squad2', 'en.answer_question.squadv2.longformer.base_v2': 'longformer_qa_base_4096_finetuned_squadv2', 'en.answer_question.squadv2.roberta.base': 'roberta_qa_navteca_roberta_base_squad2', 'en.answer_question.squadv2.roberta.base.by_21iridescent': 'roberta_qa_RoBERTa_base_finetuned_squad2_lwt', 'en.answer_question.squadv2.roberta.base.by_AnonymousSub': 'roberta_qa_roberta_base_squad2.0', 'en.answer_question.squadv2.roberta.base.by_PremalMatalia': 'roberta_qa_roberta_base_best_squad2', 'en.answer_question.squadv2.roberta.base.by_Shappey': 'roberta_qa_roberta_base_QnA_squad2_trained', 'en.answer_question.squadv2.roberta.base.by_Teepika': 'roberta_qa_roberta_base_squad2_finetuned_selqa', 'en.answer_question.squadv2.roberta.base.by_avioo1': 'roberta_qa_avioo1_roberta_base_squad2_finetuned_squad', 'en.answer_question.squadv2.roberta.base.by_deepakvk': 'roberta_qa_deepakvk_roberta_base_squad2_finetuned_squad', 'en.answer_question.squadv2.roberta.base.by_deepset': 'roberta_base_qa_squad2', 'en.answer_question.squadv2.roberta.base.by_mvonwyl': 'roberta_qa_roberta_base_finetuned_squad2', 'en.answer_question.squadv2.roberta.base.by_navteca': 'roberta_qa_navteca_roberta_base_squad2', 'en.answer_question.squadv2.roberta.base.by_nlpconnect': 'roberta_qa_roberta_base_squad2_nq', 'en.answer_question.squadv2.roberta.base.by_prk': 'roberta_qa_prk_roberta_base_squad2_finetuned_squad', 'en.answer_question.squadv2.roberta.base.by_shahrukhx01': 'roberta_qa_roberta_base_squad2_boolq_baseline', 'en.answer_question.squadv2.roberta.base.by_sumba': 'roberta_qa_sumba_roberta_base_squad2_finetuned_squad', 'en.answer_question.squadv2.roberta.base.by_ydshieh': 'roberta_qa_ydshieh_roberta_base_squad2', 
'en.answer_question.squadv2.roberta.base_rule_based_hier_quadruplet_0.1_epochs_1_shard_1.by_AnonymousSub': 'roberta_qa_rule_based_roberta_hier_quadruplet_0.1_epochs_1_shard_1_squad2.0', 'en.answer_question.squadv2.roberta.base_rule_based_hier_quadruplet_epochs_1_shard_1.by_AnonymousSub': 'roberta_qa_rule_based_roberta_hier_quadruplet_epochs_1_shard_1_squad2.0', 'en.answer_question.squadv2.roberta.base_rule_based_hier_triplet_0.1_epochs_1_shard_1.by_AnonymousSub': 'roberta_qa_rule_based_roberta_hier_triplet_0.1_epochs_1_shard_1_squad2.0', 'en.answer_question.squadv2.roberta.base_rule_based_hier_triplet_epochs_1_shard_1.by_AnonymousSub': 'roberta_qa_rule_based_roberta_hier_triplet_epochs_1_shard_1_squad2.0', 'en.answer_question.squadv2.roberta.base_rule_based_only_classfn_epochs_1_shard_1.by_AnonymousSub': 'roberta_qa_rule_based_roberta_only_classfn_epochs_1_shard_1_squad2.0', 'en.answer_question.squadv2.roberta.base_rule_based_only_classfn_twostage_epochs_1_shard_1.by_AnonymousSub': 'roberta_qa_rule_based_roberta_only_classfn_twostage_epochs_1_shard_1_squad2.0', 'en.answer_question.squadv2.roberta.base_rule_based_quadruplet_epochs_1_shard_1.by_AnonymousSub': 'roberta_qa_rule_based_roberta_bert_quadruplet_epochs_1_shard_1_squad2.0', 'en.answer_question.squadv2.roberta.base_rule_based_twostage_quadruplet_epochs_1_shard_1.by_AnonymousSub': 'roberta_qa_rule_based_roberta_twostage_quadruplet_epochs_1_shard_1_squad2.0', 'en.answer_question.squadv2.roberta.base_rule_based_twostagequadruplet_hier_epochs_1_shard_1.by_AnonymousSub': 'roberta_qa_rule_based_roberta_twostagequadruplet_hier_epochs_1_shard_1_squad2.0', 'en.answer_question.squadv2.roberta.base_rule_based_twostagetriplet_epochs_1_shard_1.by_AnonymousSub': 'roberta_qa_rule_based_roberta_twostagetriplet_epochs_1_shard_1_squad2.0', 'en.answer_question.squadv2.roberta.base_rule_based_twostagetriplet_hier_epochs_1_shard_1.by_AnonymousSub': 'roberta_qa_rule_based_roberta_twostagetriplet_hier_epochs_1_shard_1_squad2.0', 
'en.answer_question.squadv2.roberta.base_ruletriplet_epochs_1_shard_1.by_AnonymousSub': 'roberta_qa_rule_based_roberta_bert_triplet_epochs_1_shard_1_squad2.0', 'en.answer_question.squadv2.roberta.base_v2': 'roberta_qa_roberta_base_1B_1_finetuned_squadv2', 'en.answer_question.squadv2.roberta.base_v2.by_AyushPJ': 'roberta_qa_ai_club_inductions_21_nlp_roBERTa_base_squad_v2', 'en.answer_question.squadv2.roberta.base_v2.by_mrm8488': 'roberta_qa_roberta_base_1B_1_finetuned_squadv2', 'en.answer_question.squadv2.roberta.cline.by_AnonymousSub': 'roberta_qa_cline_squad2.0', 'en.answer_question.squadv2.roberta.declutr.by_AnonymousSub': 'roberta_qa_declutr_model_squad2.0', 'en.answer_question.squadv2.roberta.distilled_base': 'roberta_qa_distilroberta_base_finetuned_squad2_lwt', 'en.answer_question.squadv2.roberta.distilled_base.by_21iridescent': 'roberta_qa_distilroberta_base_finetuned_squad2_lwt', 'en.answer_question.squadv2.roberta.distilled_base.by_deepset': 'roberta_qa_roberta_base_squad2_distilled', 'en.answer_question.squadv2.roberta.distilled_base.by_twmkn9': 'roberta_qa_distilroberta_base_squad2', 'en.answer_question.squadv2.roberta.distilled_base_128d_32d_v2': 'roberta_qa_distilrobert_base_squadv2_328seq_128stride_test', 'en.answer_question.squadv2.roberta.distilled_base_v2': 'roberta_qa_distilroberta_base_squad_v2', 'en.answer_question.squadv2.roberta.emanuals.by_AnonymousSub': 'roberta_qa_EManuals_RoBERTa_squad2.0', 'en.answer_question.squadv2.roberta.large': 'roberta_qa_roberta_large_squad2', 'en.answer_question.squadv2.roberta.large.by_Salesforce': 'roberta_qa_qaconv_roberta_large_squad2', 'en.answer_question.squadv2.roberta.large.by_deepset': 'roberta_qa_roberta_large_squad2_hp', 'en.answer_question.squadv2.roberta.large.by_navteca': 'roberta_qa_roberta_large_squad2', 'en.answer_question.squadv2.roberta.large.by_phiyodr': 'roberta_qa_roberta_large_finetuned_squad2', 'en.answer_question.squadv2.roberta.tiny.by_deepset': 'roberta_qa_tinyroberta_squad2', 
'en.answer_question.squadv2.roberta.tiny.v2.by_deepset': 'roberta_qa_tinyroberta_squad2_step1', 'en.answer_question.squadv2.scibert.uncased_v2': 'bert_qa_scibert_scivocab_uncased_squad_v2', 'en.answer_question.squadv2.span_bert.v2': 'bert_qa_spanbert_finetuned_squadv2', 'en.answer_question.squadv2.xlm_roberta.base': 'xlm_roberta_base_qa_squad2', 'en.answer_question.squadv2.xlm_roberta.base.by_deepset': 'xlm_roberta_base_qa_squad2', 'en.answer_question.squadv2.xlm_roberta.base_24465514.by_teacookies': 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465514', 'en.answer_question.squadv2.xlm_roberta.base_24465515.by_teacookies': 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465515', 'en.answer_question.squadv2.xlm_roberta.base_24465516.by_teacookies': 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465516', 'en.answer_question.squadv2.xlm_roberta.base_24465517.by_teacookies': 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465517', 'en.answer_question.squadv2.xlm_roberta.base_24465518.by_teacookies': 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465518', 'en.answer_question.squadv2.xlm_roberta.base_24465519.by_teacookies': 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465519', 'en.answer_question.squadv2.xlm_roberta.base_24465520.by_teacookies': 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465520', 'en.answer_question.squadv2.xlm_roberta.base_24465521.by_teacookies': 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465521', 'en.answer_question.squadv2.xlm_roberta.base_24465522.by_teacookies': 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465522', 'en.answer_question.squadv2.xlm_roberta.base_24465523.by_teacookies': 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465523', 'en.answer_question.squadv2.xlm_roberta.base_24465524.by_teacookies': 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465524', 'en.answer_question.squadv2.xlm_roberta.base_24465525.by_teacookies': 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465525', 'en.answer_question.squadv2.xlm_roberta.base_v2': 
'xlm_roberta_qa_squadv2_xlm_roberta_base', 'en.answer_question.squadv2.xlm_roberta.distilled_base': 'xlm_roberta_qa_xlm_roberta_base_squad2_distilled', 'en.answer_question.squadv2.xlm_roberta.large': 'xlm_roberta_qa_xlm_roberta_large_squad2', 'en.answer_question.squadv2_bioasq8b.electra.base': 'electra_qa_BioM_Base_SQuAD2_BioASQ8B', 'en.answer_question.squadv2_bioasq8b.electra.large': 'electra_qa_BioM_Large_SQuAD2_BioASQ8B', 'en.answer_question.squadv2_chaii.xlm_roberta.distilled_base': 'xlm_roberta_qa_xlm_roberta_base_squad2_distilled_finetuned_chaii', 'en.answer_question.squadv2_chaii.xlm_roberta.distilled_base_small': 'xlm_roberta_qa_xlm_roberta_base_squad2_distilled_finetuned_chaii_small', 'en.answer_question.squadv2_chemical.bert.uncased': 'bert_qa_chemical_bert_uncased_squad2', 'en.answer_question.squadv2_conll.bert.large_uncased.by_andi611': 'bert_qa_bert_large_uncased_whole_word_masking_squad2_with_ner_conll2003_with_neg_with_repeat', 'en.answer_question.squadv2_conll.bert.large_uncased_pistherea.by_andi611': 'bert_qa_bert_large_uncased_whole_word_masking_squad2_with_ner_Pistherea_conll2003_with_neg_with_repeat', 'en.answer_question.squadv2_conll.bert.large_uncased_pwhatisthe.by_andi611': 'bert_qa_bert_large_uncased_whole_word_masking_squad2_with_ner_Pwhatisthe_conll2003_with_neg_with_repeat', 'en.answer_question.squadv2_conll.distil_bert.base_uncased.by_andi611': 'distilbert_qa_base_uncased_squad2_with_ner', 'en.answer_question.squadv2_conll.distil_bert.base_uncased_with_neg.by_andi611': 'distilbert_qa_base_uncased_squad2_with_ner_with_neg', 'en.answer_question.squadv2_conll.distil_bert.base_uncased_with_neg_with_multi.by_andi611': 'distilbert_qa_base_uncased_squad2_with_ner_with_neg_with_multi', 'en.answer_question.squadv2_conll.distil_bert.base_uncased_with_neg_with_multi_with_repeat.by_andi611': 'distilbert_qa_base_uncased_squad2_with_ner_with_neg_with_multi_with_repeat', 
'en.answer_question.squadv2_conll.distil_bert.base_uncased_with_neg_with_repeat.by_andi611': 'distilbert_qa_base_uncased_squad2_with_ner_with_neg_with_repeat', 'en.answer_question.squadv2_cord19.bert.small': 'bert_qa_bert_small_cord19_squad2', 'en.answer_question.squadv2_cord19.bert.uncased_10l_512d_a8a_512d': 'bert_qa_bert_uncased_L_10_H_512_A_8_cord19_200616_squad2', 'en.answer_question.squadv2_cord19.bert.uncased_2l_512d_a8a_512d': 'bert_qa_bert_uncased_L_2_H_512_A_8_cord19_200616_squad2', 'en.answer_question.squadv2_cord19.bert.uncased_4l_256d_a4a_256d': 'bert_qa_bert_uncased_L_4_H_256_A_4_cord19_200616_squad2', 'en.answer_question.squadv2_cord19.bert.uncased_4l_512d_a8a_512d': 'bert_qa_bert_uncased_L_4_H_512_A_8_cord19_200616_squad2', 'en.answer_question.squadv2_cord19.bert.uncased_4l_768d_a12a_768d': 'bert_qa_bert_uncased_L_4_H_768_A_12_cord19_200616_squad2', 'en.answer_question.squadv2_covid.albert.xxl_v2': 'albert_qa_xxlarge_v2_squad2_covid_deepset', 'en.answer_question.squadv2_covid.bert.base_uncased': 'bert_qa_bert_base_uncased_squad2_covid_qa_deepset', 'en.answer_question.squadv2_covid.bert.large_uncased': 'bert_qa_bert_large_uncased_squad2_covid_qa_deepset', 'en.answer_question.squadv2_covid.bert.uncased_10l_512d_a8a_512d': 'bert_qa_bert_uncased_L_10_H_512_A_8_squad2_covid_qna', 'en.answer_question.squadv2_covid.bert.uncased_2l_512d_a8a_512d': 'bert_qa_bert_uncased_L_2_H_512_A_8_squad2_covid_qna', 'en.answer_question.squadv2_covid.bert.uncased_4l_256d_a4a_256d': 'bert_qa_bert_uncased_L_4_H_256_A_4_squad2_covid_qna', 'en.answer_question.squadv2_covid.bert.uncased_4l_512d_a8a_512d': 'bert_qa_bert_uncased_L_4_H_512_A_8_squad2_covid_qna', 'en.answer_question.squadv2_covid.bert.uncased_4l_768d_a12a_768d': 'bert_qa_bert_uncased_L_4_H_768_A_12_squad2_covid_qna', 'en.answer_question.squadv2_covid.bert.uncased_6l_128d_a2a_128d': 'bert_qa_bert_uncased_L_6_H_128_A_2_squad2_covid_qna', 'en.answer_question.squadv2_covid.distil_bert.base_uncased': 
'distilbert_qa_base_uncased_squad2_covid_qa_deepset', 'en.answer_question.squadv2_covid.electra.base': 'electra_qa_base_squad2_covid_deepset', 'en.answer_question.squadv2_covid.roberta.base': 'roberta_qa_roberta_base_squad2_covid', 'en.answer_question.squadv2_covid.roberta.base.by_armageddon': 'roberta_qa_roberta_base_squad2_covid_qa_deepset', 'en.answer_question.squadv2_covid.roberta.base.by_deepset': 'roberta_qa_roberta_base_squad2_covid', 'en.answer_question.squadv2_covid.roberta.large': 'roberta_qa_roberta_large_squad2_covid_qa_deepset', 'en.answer_question.squadv2_covid_cord19.bert.uncased_10l_512d_a8a_512d': 'bert_qa_bert_uncased_L_10_H_512_A_8_cord19_200616_squad2_covid_qna', 'en.answer_question.squadv2_covid_cord19.bert.uncased_4l_256d_a4a_256d': 'bert_qa_bert_uncased_L_4_H_256_A_4_cord19_200616_squad2_covid_qna', 'en.answer_question.squadv2_covid_cord19.bert.uncased_4l_512d_a8a_512d': 'bert_qa_bert_uncased_L_4_H_512_A_8_cord19_200616_squad2_covid_qna', 'en.answer_question.squadv2_covid_cord19.bert.uncased_4l_768d_a12a_768d': 'bert_qa_bert_uncased_L_4_H_768_A_12_cord19_200616_squad2_covid_qna', 'en.answer_question.squadv2_pubmed.bert.v2': 'bert_qa_pubmed_bert_squadv2', 'en.answer_question.squadv2_pubmed.biobert.v2': 'bert_qa_biobert_v1.1_pubmed_squad_v2', 'en.answer_question.squadv2_pubmed.sapbert': 'bert_qa_sapbert_from_pubmedbert_squad2', 'en.answer_question.synqa.electra.large': 'electra_qa_large_synqa', 'en.answer_question.synqa.roberta.large.by_mbartolo': 'roberta_qa_roberta_large_synqa', 'en.answer_question.synqa_ext.roberta.large.by_mbartolo': 'roberta_qa_roberta_large_synqa_ext', 'en.answer_question.tapas': 'table_qa_table_question_answering_tapas', 'en.answer_question.tapas.by_uploaded by huggingface': 'table_qa_tapas_temporary_repo', 'en.answer_question.tapas.sqa.base_finetuned': 'table_qa_tapas_base_finetuned_sqa', 'en.answer_question.tapas.sqa.large_finetuned': 'table_qa_tapas_large_finetuned_sqa', 
'en.answer_question.tapas.sqa.medium_finetuned': 'table_qa_tapas_medium_finetuned_sqa', 'en.answer_question.tapas.sqa.mini_finetuned': 'table_qa_tapas_mini_finetuned_sqa', 'en.answer_question.tapas.sqa.small_finetuned': 'table_qa_tapas_small_finetuned_sqa', 'en.answer_question.tapas.sqa.tiny_finetuned': 'table_qa_tapas_tiny_finetuned_sqa', 'en.answer_question.tapas.wikisql.base_finetuned': 'table_qa_tapas_base_finetuned_wikisql_supervised', 'en.answer_question.tapas.wikisql.large_finetuned': 'table_qa_tapas_large_finetuned_wikisql_supervised', 'en.answer_question.tapas.wikisql.medium_finetuned': 'table_qa_tapas_medium_finetuned_wikisql_supervised', 'en.answer_question.tapas.wikisql.small_finetuned': 'table_qa_tapas_small_finetuned_wikisql_supervised', 'en.answer_question.tapas.wtq.large_finetuned': 'table_qa_tapas_large_finetuned_wtq', 'en.answer_question.tapas.wtq.medium_finetuned': 'table_qa_tapas_medium_finetuned_wtq', 'en.answer_question.tapas.wtq.mini_finetuned': 'table_qa_tapas_mini_finetuned_wtq', 'en.answer_question.tapas.wtq.small_finetuned': 'table_qa_tapas_small_finetuned_wtq', 'en.answer_question.tapas.wtq.tiny_finetuned': 'table_qa_tapas_tiny_finetuned_wtq', 'en.answer_question.tquad.bert.xtremedistiled_uncased': 'bert_qa_xtremedistil_l6_h256_uncased_TQUAD_finetuned_lr_2e_05_epochs_9', 'en.answer_question.trial.bert.by_sunitha': 'bert_qa_Trial_3_Results', 'en.answer_question.trivia.albert.xxl': 'albert_qa_xxlarge_tweetqa', 'en.answer_question.trivia.bert': 'bert_qa_fine_tuned_tweetqa_aip', 'en.answer_question.trivia.bert.base_1024d': 'bert_qa_bert_base_1024_full_trivia_copied_embeddings', 'en.answer_question.trivia.bert.base_2048.by_MrAnderson': 'bert_qa_bert_base_2048_full_trivia_copied_embeddings', 'en.answer_question.trivia.bert.base_4096.by_MrAnderson': 'bert_qa_bert_base_4096_full_trivia_copied_embeddings', 'en.answer_question.trivia.bert.base_512d': 'bert_qa_bert_base_512_full_trivia', 'en.answer_question.trivia.bert.by_Danastos': 
'bert_qa_triviaqa_bert_el_Danastos', 'en.answer_question.trivia.bert.by_Kutay': 'bert_qa_fine_tuned_tweetqa_aip', 'en.answer_question.trivia.distil_bert.base_uncased': 'distilbert_qa_base_uncased_finetuned_triviaqa', 'en.answer_question.trivia.longformer.large': 'longformer_qa_large_4096_finetuned_triviaqa', 'en.answer_question.trivia.roberta': 'roberta_qa_roberta_fine_tuned_tweet_sentiment_extractor', 'en.answer_question.trivia.roberta.base': 'roberta_qa_roberta_base_tweetqa_model', 'en.answer_question.trivia.roberta.large': 'roberta_qa_roberta_large_tweetqa', 'en.answer_question.trivia.xlmr_roberta.large': 'xlm_roberta_qa_xlmroberta_large_tweetqa', 'en.answer_question.tydiqa.bert': 'bert_qa_bert_all', 'en.answer_question.tydiqa.bert.multilingual': 'bert_qa_Part_2_BERT_Multilingual_Dutch_Model_E1', 'en.answer_question.tydiqa.distil_bert': 'distilbert_qa_multi_finetuned_for_xqua_on_tydiqa', 'en.answer_question.tydiqa.multi_lingual_bert': 'bert_qa_Part_2_mBERT_Model_E2', 'en.answer_question.tydiqa.roberta': 'roberta_qa_roberta_tydiqa', 'en.answer_question.tydiqa.xlm_roberta': 'xlm_roberta_qa_xlm_all', 'en.answer_question.tydiqa.xlm_roberta.3lang': 'xlm_roberta_qa_xlm_3lang', 'en.answer_question.tydiqa.xlm_roberta.by_horsbug98': 'xlm_roberta_qa_Part_1_XLM_Model_E1', 'en.answer_question.tydiqa.xlm_roberta.by_krinal214': 'xlm_roberta_qa_xlm_all', 'en.answer_question.tydiqa.xlm_roberta.v2.by_horsbug98': 'xlm_roberta_qa_Part_2_XLM_Model_E1', 'en.answer_question.xlm_roberta': 'xlm_roberta_qa_mrc2reader', 'en.answer_question.xlm_roberta.base': 'xlm_roberta_qa_xlm_roberta_base_finetune_qa', 'en.answer_question.xlm_roberta.by_Dongjae': 'xlm_roberta_qa_mrc2reader', 'en.answer_question.xlm_roberta.by_Srini99': 'xlm_roberta_qa_TQA', 'en.answer_question.xlm_roberta.by_anukaver': 'xlm_roberta_qa_xlm_roberta_est_qa', 'en.answer_question.xlm_roberta.by_jeew': 'xlm_roberta_qa_xlm_roberta_ckpt_95000', 'en.answer_question.xlm_roberta.by_laifuchicago': 'xlm_roberta_qa_farm2tran', 
'en.answer_question.xlm_roberta.by_ncthuan': 'xlm_roberta_qa_xlm_l_uetqa', 'en.answer_question.xlm_roberta.distilled': 'xlm_roberta_qa_distill_xlm_mrc', 'en.answer_question.xlm_roberta.fine_tune_24465520_26265897': 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265897', 'en.answer_question.xlm_roberta.fine_tune_24465520_26265898': 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265898', 'en.answer_question.xlm_roberta.fine_tune_24465520_26265899': 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265899', 'en.answer_question.xlm_roberta.fine_tune_24465520_26265900': 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265900', 'en.answer_question.xlm_roberta.fine_tune_24465520_26265901': 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265901', 'en.answer_question.xlm_roberta.fine_tune_24465520_26265902': 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265902', 'en.answer_question.xlm_roberta.fine_tune_24465520_26265903': 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265903', 'en.answer_question.xlm_roberta.fine_tune_24465520_26265904': 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265904', 'en.answer_question.xlm_roberta.fine_tune_24465520_26265905': 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265905', 'en.answer_question.xlm_roberta.fine_tune_24465520_26265906': 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265906', 'en.answer_question.xlm_roberta.fine_tune_24465520_26265907': 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265907', 'en.answer_question.xlm_roberta.fine_tune_24465520_26265908': 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265908', 'en.answer_question.xlm_roberta.fine_tune_24465520_26265909': 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265909', 'en.answer_question.xlm_roberta.fine_tune_24465520_26265910': 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265910', 'en.answer_question.xlm_roberta.fine_tune_24465520_26265911': 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265911', 
'en.answer_question.xlm_roberta.fr_tuned.by_Gantenbein': 'roberta_qa_ADDI_FR_XLM_R', 'en.answer_question.xlmr_roberta': 'xlm_roberta_qa_XLMr_ENIS_QA_IsQ_EnA', 'en.answer_question.xquad.bert.multilingual_base': 'bert_qa_bert_base_multilingual_xquad', 'en.answer_question.xquad.xlm_roberta.base': 'xlm_roberta_qa_xlm_roberta_base_xquad', 'en.answer_question.xquad.xlm_roberta.large': 'xlm_roberta_qa_xlm_roberta_large_xquad', 'en.answer_question.xquad_chaii.bert.cased': 'bert_qa_bert_multi_cased_finedtuned_xquad_chaii', 'en.answer_question.xquad_squad.bert.cased': 'bert_qa_bert_multi_cased_finetuned_xquadv1_finetuned_squad_colab', 'en.bert': 'bert_base_uncased', 'en.chunk': 'default_chunker', 'en.classify.acts_feedback.roberta.by_mp6kv': 'roberta_classifier_acts_feedback1', 'en.classify.ag_news.longformer': 'longformer_base_sequence_classifier_ag_news', 'en.classify.ag_news.xlnet': 'xlnet_base_sequence_classifier_ag_news', 'en.classify.albert.ag_news': 'albert_base_sequence_classifier_ag_news', 'en.classify.albert.imdb': 'albert_base_sequence_classifier_imdb', 'en.classify.bbc.roberta.by_abhishek': 'roberta_classifier_autonlp_bbc_37249301', 'en.classify.bert': 'bert_sequence_classifier_antisemitism', 'en.classify.bert.64d': 'bert_classifier_autonlp_cai_out_of_scope_649919116', 'en.classify.bert.amazon.uncased_base': 'bert_classifier_base_uncased_amazon_polarity', 'en.classify.bert.amazon_sentiment.': 'bert_classifier_amazon_review_sentiment_analysis', 'en.classify.bert.banking.': 'bert_classifier_bert_banking77', 'en.classify.bert.base': 'bert_classifier_base_gpt2detector_topp92', 'en.classify.bert.base.by_beomi': 'bert_classifier_beep_kc_base_bias', 'en.classify.bert.base.by_cffl': 'bert_classifier_base_styleclassification_subjective_neutral', 'en.classify.bert.base.by_zamachi': 'bert_classifier_base_for_multilabel_sentence_classification', 'en.classify.bert.base_finetuned': 'bert_sequence_classifier_electricidad_base_finetuned_sst2', 
'en.classify.bert.base_random.by_baykenney': 'bert_classifier_base_gpt2detector_random', 'en.classify.bert.base_topk40.by_baykenney': 'bert_classifier_base_gpt2detector_topk40', 'en.classify.bert.base_topp96.by_baykenney': 'bert_classifier_base_gpt2detector_topp96', 'en.classify.bert.battery.cased': 'bert_classifier_batterybert_cased_abstract', 'en.classify.bert.battery.cased.by_batterydata': 'bert_classifier_batteryonlybert_cased_abstract', 'en.classify.bert.battery.uncased': 'bert_classifier_batterybert_uncased_abstract', 'en.classify.bert.battery.uncased.by_batterydata': 'bert_classifier_batteryonlybert_uncased_abstract', 'en.classify.bert.bio_pubmed.by_pritamdeka': 'bert_classifier_bio_pubmed200krct', 'en.classify.bert.by_404e': 'bert_classifier_autotrain_formality_1026434913', 'en.classify.bert.by_adamlin': 'bert_classifier_text_cls', 'en.classify.bert.by_adrianmoses': 'bert_classifier_autonlp_auto_nlp_lyrics_classification_19333717', 'en.classify.bert.by_aimendo': 'bert_classifier_autonlp_triage_35248482', 'en.classify.bert.by_ajay191191': 'bert_classifier_autonlp_test_530014983', 'en.classify.bert.by_akilesh96': 'bert_classifier_autonlp_mrcooper_text_classification_529614927', 'en.classify.bert.by_alperiox': 'bert_classifier_autonlp_user_review_classification_536415182', 'en.classify.bert.by_astarostap': 'bert_classifier_autonlp_antisemitism_2_21194454', 'en.classify.bert.by_asvs': 'bert_classifier_qs', 'en.classify.bert.by_aujer': 'bert_classifier_ni_model_8_19', 'en.classify.bert.by_aychang': 'bert_sequence_classifier_trec_coarse', 'en.classify.bert.by_benword': 'bert_classifier_autotrain_apm2_1212245840', 'en.classify.bert.by_braveoni': 'bert_classifier_2ch_text_classification', 'en.classify.bert.by_bush': 'bert_classifier_autonlp_bp_29016523', 'en.classify.bert.by_crcb': 'bert_classifier_dvs_f', 'en.classify.bert.by_danl': 'bert_classifier_scientific_challenges_and_directions', 'en.classify.bert.by_deepesh0x': 
'bert_classifier_autotrain_wikipedia_sst_2_1034235509', 'en.classify.bert.by_ecoli': 'bert_classifier_sb', 'en.classify.bert.by_edmundhui': 'bert_classifier_mental_health_trainer', 'en.classify.bert.by_erica': 'bert_classifier_krm_sa3', 'en.classify.bert.by_ericpeter': 'bert_classifier_comments_text_classification_model', 'en.classify.bert.by_esiebomajeremiah': 'bert_classifier_autonlp_email_classification_657119381', 'en.classify.bert.by_fan_s': 'bert_classifier_reddit_tc', 'en.classify.bert.by_financeinc': 'bert_classifier_finbert_fls', 'en.classify.bert.by_foundkim': 'bert_classifier_topic', 'en.classify.bert.by_hcklab': 'bert_classifier_bi_classification', 'en.classify.bert.by_huaen': 'bert_classifier_question_detection', 'en.classify.bert.by_idrimadrid': 'bert_classifier_autonlp_creator_classifications_4021083', 'en.classify.bert.by_imzachjohnson': 'bert_classifier_autonlp_spinner_check_16492731', 'en.classify.bert.by_jerimee': 'bert_classifier_autotrain_dontknowwhatimdoing_980432459', 'en.classify.bert.by_jonaskoenig': 'bert_classifier_topic_classification_04', 'en.classify.bert.by_joniponi': 'bert_classifier_multilabel_inpatient_comments_14labels', 'en.classify.bert.by_jpreilly123': 'bert_classifier_emojify_mvp', 'en.classify.bert.by_juancavallotti': 'bert_classifier_sentence', 'en.classify.bert.by_juliensimon': 'bert_classifier_autonlp_song_lyrics_18753417', 'en.classify.bert.by_justpyschitry': 'bert_classifier_medical_article_by_icd_11_chapter', 'en.classify.bert.by_kamivao': 'bert_classifier_autonlp_entity_selection_5771228', 'en.classify.bert.by_khu1998': 'bert_classifier_clog_assessment_model', 'en.classify.bert.by_leonweber': 'bert_classifier_semantic_relations', 'en.classify.bert.by_lewtun': 'bert_classifier_test_hub_pr_1', 'en.classify.bert.by_lucifermorninstar011': 'bert_classifier_autotrain_lucifer_multi_auto_831626529', 'en.classify.bert.by_lysandre': 'bert_classifier_dum', 'en.classify.bert.by_m3tafl0ps': 
'bert_classifier_autonlp_nlpisfun_251844', 'en.classify.bert.by_madhurjindalworkmail': 'bert_classifier_autonlp_gibb_detect_515314387', 'en.classify.bert.by_marieke93': 'bert_classifier_evidence_types', 'en.classify.bert.by_mim': 'bert_classifier_pro_cell_expert', 'en.classify.bert.by_mlkorra': 'bert_classifier_ogbv_gder_hi_mlkorra', 'en.classify.bert.by_mohsenfayyaz': 'bert_classifier_toxicity', 'en.classify.bert.by_nbroad': 'bert_classifier_esg', 'en.classify.bert.by_ndavid': 'bert_classifier_autotrain_trec_fine_739422530', 'en.classify.bert.by_nicktien': 'bert_classifier_taipeiqa_v1', 'en.classify.bert.by_nikunjbjj': 'bert_classifier_jd_resume_model', 'en.classify.bert.by_nitishkumargundapu793': 'bert_classifier_autotrain_chat_bot_responses_949231426', 'en.classify.bert.by_palakagl': 'bert_classifier_bert_textclassification', 'en.classify.bert.by_panashe': 'bert_classifier_autonlp_eo_590516680', 'en.classify.bert.by_patrickquick': 'bert_classifier_berticelli', 'en.classify.bert.by_priyamm': 'bert_classifier_autotrain_keywordextraction_882328335', 'en.classify.bert.by_prompsit': 'bert_classifier_paraphrase', 'en.classify.bert.by_prosusai': 'bert_classifier_finbert', 'en.classify.bert.by_ptro': 'bert_classifier_model1_test', 'en.classify.bert.by_pwz98': 'bert_classifier_test_model', 'en.classify.bert.by_sampathkethineedi': 'bert_classifier_industry_classification_api', 'en.classify.bert.by_santarabantoosoo': 'bert_classifier_pathology_meningioma', 'en.classify.bert.by_sasha': 'bert_classifier_regardv3', 'en.classify.bert.by_sbrandeis': 'bert_classifier_autonlp_emotion_clf', 'en.classify.bert.by_serenay': 'bert_classifier_autonlp_emotion_14722565', 'en.classify.bert.by_sgugger': 'bert_classifier_test_dynamic_pipeline', 'en.classify.bert.by_shuvam': 'bert_classifier_autonlp_college_classification_164469', 'en.classify.bert.by_slowturtle': 'bert_classifier_topic_v5', 'en.classify.bert.by_snap': 'bert_classifier_autotrain_argument_feedback_1154042511', 
'en.classify.bert.by_someshfengde': 'bert_classifier_autonlp_kaggledays_625717986', 'en.classify.bert.by_tcaputi': 'bert_classifier_guns_relevant', 'en.classify.bert.by_tristantristantristan': 'bert_classifier_rumor', 'en.classify.bert.by_unitary': 'bert_classifier_toxic', 'en.classify.bert.by_vijaygoriya': 'bert_classifier_vijaygoriya_test_trainer', 'en.classify.bert.by_vinaydngowda': 'bert_classifier_robertabase_ana4', 'en.classify.bert.by_vreese2414': 'bert_classifier_autotrain_test_frank_896929583', 'en.classify.bert.by_world_wide': 'bert_classifier_sent_sci_irrelevance', 'en.classify.bert.by_yonichi': 'bert_classifier_cbert', 'en.classify.bert.by_zwang199': 'bert_classifier_autonlp_traffic_nlp_451311592', 'en.classify.bert.cased_base': 'bert_classifier_bert_base_cased_abstract', 'en.classify.bert.cased_base.by_aychang': 'bert_classifier_bert_base_cased_trec_coarse', 'en.classify.bert.cased_base.by_dipesh': 'bert_classifier_intent_classification_base_cased', 'en.classify.bert.cased_base.by_seongju': 'bert_classifier_kor_3i4k_base_cased', 'en.classify.bert.cased_base_finetuned': 'bert_classifier_base_cased_finetuned_sst2', 'en.classify.bert.cased_base_medium': 'bert_classifier_base_cased_cv_studio_name_medium', 'en.classify.bert.cased_large': 'bert_classifier_mnli_large_cased', 'en.classify.bert.cased_large.by_boychaboy': 'bert_classifier_snli_large_cased', 'en.classify.bert.catalonia_independence.': 'bert_classifier_autonlp_text_classification_catalonia_independence_autonlp_633018323', 'en.classify.bert.chemical.': 'bert_classifier_autotrain_chemprot_re_838426740', 'en.classify.bert.cl.by_yuan1729': 'bert_classifier_cl_1', 'en.classify.bert.clinical.': 'bert_classifier_core_clinical_mortality_prediction', 'en.classify.bert.clinical.by_bvanaken': 'bert_classifier_clinical_assertion_negation', 'en.classify.bert.cola1.': 'bert_classifier_autonlp_cola_gram_208681', 'en.classify.bert.cola1.by_abirate': 'bert_classifier_fine_tuned_cola', 
'en.classify.bert.cola1.by_vanhoan': 'bert_classifier_fine_tuned_cola1', 'en.classify.bert.cola2.by_vanhoan': 'bert_classifier_fine_tuned_cola2', 'en.classify.bert.cord19.': 'bert_classifier_coronabert', 'en.classify.bert.covid.': 'bert_classifier_covid_misinfo', 'en.classify.bert.distilled_512d_tiny_512d': 'bert_classifier_tiny_sst2_distilled_l4_h_512', 'en.classify.bert.distilled_mini': 'bert_classifier_mini_sst2_distilled', 'en.classify.bert.distilled_tiny': 'bert_classifier_tiny_aug_sst2_distilled', 'en.classify.bert.distilled_tiny.by_lupinlevorace': 'bert_classifier_lupinlevorace_tiny_sst2_distilled', 'en.classify.bert.distilled_tiny.by_sayan01': 'bert_classifier_tiny_qqp_distilled', 'en.classify.bert.distilled_tiny.by_smith123': 'bert_classifier_tiny_sst2_distilled_l6_h128', 'en.classify.bert.email_spam.': 'bert_classifier_trash_mail_cls_2022', 'en.classify.bert.emotion.distilled_uncased_base_finetuned': 'bert_classifier_distil_base_uncased_finetuned_emotion', 'en.classify.bert.emotion.finetuned': 'bert_classifier_finetuned_emotion', 'en.classify.bert.emotion.mini_lm_mini_finetuned': 'bert_classifier_minilm_finetuned_emotion_nm', 'en.classify.bert.emotion.mini_lm_mini_finetuned.by_lewtun': 'bert_classifier_minilm_finetuned_emotion', 'en.classify.bert.emotion.uncased_base': 'bert_classifier_bhadresh_savani_base_uncased_emotion', 'en.classify.bert.emotion.uncased_base.by_nateraw': 'bert_classifier_nateraw_base_uncased_emotion', 'en.classify.bert.emotion.xtremedistiled': 'bert_classifier_xtremedistil_emotion', 'en.classify.bert.emotion.xtremedistiled.by_bergum': 'bert_classifier_xtremedistil_l6_h384_emotion', 'en.classify.bert.fever.mini_lm_mini': 'bert_classifier_minilm_l6_mnli_fever_docnli_ling_2c', 'en.classify.bert.finetuned': 'bert_classifier_amitkayal_finetuned_sem_eval_english', 'en.classify.bert.finetuned.by_abderrahim2': 'bert_classifier_finetuned_location', 'en.classify.bert.finetuned.by_has_abi': 'bert_classifier_finetuned_resumes_sections', 
'en.classify.bert.finetuned.by_hazrulakmal': 'bert_classifier_benchmark_finetuned', 'en.classify.bert.finetuned.by_jboomc': 'bert_classifier_rotten_tomatoes_finetuned', 'en.classify.bert.finetuned.by_joniponi': 'bert_classifier_joniponi_finetuned_sem_eval_english', 'en.classify.bert.finetuned.by_nickmuchi': 'bert_classifier_sec_finetuned_finance_classification', 'en.classify.bert.glue.': 'bert_classifier_autotrain_gluemodels_1010733562', 'en.classify.bert.glue.6l_256d_a8a_256d': 'bert_classifier_sead_l_6_h_256_a_8_mrpc', 'en.classify.bert.glue.6l_384d_a12a': 'bert_classifier_sead_l_6_h_384_a_12_mrpc', 'en.classify.bert.glue.base': 'bert_classifier_platzi_base_mrpc_glue_omar_espejel', 'en.classify.bert.glue.by_jxuhf': 'bert_classifier_fine_tuning_text_classification_model_habana_gaudi', 'en.classify.bert.glue.by_navsad': 'bert_classifier_navid_test', 'en.classify.bert.glue.by_sgugger': 'bert_classifier_glue_mrpc', 'en.classify.bert.glue.cased_base_finetuned': 'bert_classifier_bert_base_cased_finetuned_mnli', 'en.classify.bert.glue.cased_large_finetuned': 'bert_classifier_bert_large_cased_finetuned_mrpc', 'en.classify.bert.glue.cased_large_finetuned.by_gchhablani': 'bert_classifier_bert_large_cased_finetuned_rte', 'en.classify.bert.glue.cased_large_whole_word_masking': 'bert_classifier_large_cased_whole_word_masking_sst2', 'en.classify.bert.glue.distilled_tiny': 'bert_classifier_ibrahim2030_tiny_sst2_distilled', 'en.classify.bert.glue.distilled_tiny.by_ilana': 'bert_classifier_ilana_tiny_sst2_distilled', 'en.classify.bert.glue.distilled_tiny.by_nbhimte': 'bert_classifier_tiny_mnli_distilled', 'en.classify.bert.glue.distilled_tiny.by_philschmid': 'bert_classifier_philschmid_tiny_sst2_distilled', 'en.classify.bert.glue.distilled_tiny_mobile.by_gokuls': 'bert_classifier_tiny_sst2_mobile_distillation', 'en.classify.bert.glue.finetuned': 'bert_classifier_bert_finetuned_mrpc', 'en.classify.bert.glue.finetuned.by_Riad': 'bert_classifier_riad_finetuned_mrpc', 
'en.classify.bert.glue.finetuned.by_deepesh0x': 'bert_classifier_autotrain_gluefinetunedmodel_1013533786', 'en.classify.bert.glue.finetuned.by_sgugger': 'bert_classifier_sgugger_finetuned_mrpc', 'en.classify.bert.glue.finetuned.by_shahma': 'bert_classifier_shahma_finetuned_mrpc', 'en.classify.bert.glue.finetuned.by_wiselinjayajos': 'bert_classifier_wiselinjayajos_finetuned_mrpc', 'en.classify.bert.glue.finetuning_': 'bert_classifier_bert_finetuning_cn', 'en.classify.bert.glue.mobile_multi_teacher_distilled_tiny.by_gokuls': 'bert_classifier_tiny_sst2_1_mobile_and_multi_teacher_distillation', 'en.classify.bert.glue.mobile_only_distilled_tiny.by_gokuls': 'bert_classifier_tiny_sst2_1_mobile_only_distillation', 'en.classify.bert.glue.only_distilled_tiny.by_gokuls': 'bert_classifier_tiny_sst2_1_mobile_2_only_distillation', 'en.classify.bert.glue.small_finetuned': 'bert_classifier_small_finetuned_glue_rte', 'en.classify.bert.glue.ssts2.distilled_tiny.by_gokuls': 'bert_classifier_tiny_sst2_distilled_model', 'en.classify.bert.glue.ssts2.mobile.distilled_tiny.by_gokuls': 'bert_classifier_tiny_sst2_1_mobile_2_distillation', 'en.classify.bert.glue.tiny_finetuned': 'bert_classifier_tiny_finetuned_glue_rte', 'en.classify.bert.glue.uncased_base': 'bert_classifier_intel_base_uncased_mrpc', 'en.classify.bert.glue.uncased_base.by_JeremiahZ': 'bert_classifier_jeremiahz_base_uncased_mrpc', 'en.classify.bert.glue.uncased_mini_lm_mini': 'bert_classifier_minilm_l12_h384_uncased_mrpc', 'en.classify.bert.glue.v1.by_junzai': 'bert_classifier_demo', 'en.classify.bert.glue.v2.by_junzai': 'bert_classifier_demotest', 'en.classify.bert.glue_cola1.': 'bert_classifier_sgugger_fine_tuned_cola', 'en.classify.bert.glue_cola1.by_vanhoan': 'bert_classifier_vanhoan_fine_tuned_cola', 'en.classify.bert.glue_cola1.cased_base_finetuned': 'bert_classifier_bert_base_cased_finetuned_cola', 'en.classify.bert.glue_cola1.cased_large_finetuned': 'bert_classifier_bert_large_cased_finetuned_cola', 
'en.classify.bert.glue_cola1.uncased_base_finetuned': 'bert_classifier_base_uncased_finetuned_glue_cola', 'en.classify.bert.glue_gold_labels.distilled_tiny.by_gokuls': 'bert_classifier_tiny_sst2_1_mobile_2_3_gold_labels_distillation', 'en.classify.bert.glue_mrpc.cased_base_finetuned.by_gchhablani': 'bert_classifier_bert_base_cased_finetuned_mrpc', 'en.classify.bert.glue_qnli.6l_256d_a8a_256d.by_course5i': 'bert_classifier_sead_l_6_h_256_a_8_qnli', 'en.classify.bert.glue_qnli.6l_384d_a12a.by_course5i': 'bert_classifier_sead_l_6_h_384_a_12_qnli', 'en.classify.bert.glue_qnli.cased_base_finetuned.by_gchhablani': 'bert_classifier_bert_base_cased_finetuned_qnli', 'en.classify.bert.glue_qqp.6l_256d_a8a_256d.by_course5i': 'bert_classifier_sead_l_6_h_256_a_8_qqp', 'en.classify.bert.glue_qqp.6l_384d_a12a.by_course5i': 'bert_classifier_sead_l_6_h_384_a_12_qqp', 'en.classify.bert.glue_qqp.cased_base_finetuned.by_gchhablani': 'bert_classifier_bert_base_cased_finetuned_qqp', 'en.classify.bert.glue_rte.6l_256d_a8a_256d.by_course5i': 'bert_classifier_sead_l_6_h_256_a_8_rte', 'en.classify.bert.glue_rte.6l_384d_a12a.by_course5i': 'bert_classifier_sead_l_6_h_384_a_12_rte', 'en.classify.bert.glue_rte.cased_base_finetuned.by_gchhablani': 'bert_classifier_bert_base_cased_finetuned_rte', 'en.classify.bert.glue_sst2.6l_256d_a8a_256d.by_course5i': 'bert_classifier_sead_l_6_h_256_a_8_sst2', 'en.classify.bert.glue_sst2.6l_384d_a12a.by_course5i': 'bert_classifier_sead_l_6_h_384_a_12_sst2', 'en.classify.bert.glue_sst2.cased_base_finetuned.by_gchhablani': 'bert_classifier_bert_base_cased_finetuned_sst2', 'en.classify.bert.go_emotions.xtremedistiled': 'bert_classifier_xtremedistil_l6_h384_go_emotion', 'en.classify.bert.go_emotions.xtremedistiled_uncased': 'bert_classifier_xtremedistil_l6_h256_uncased_go_emotion', 'en.classify.bert.hate.': 'bert_sequence_classifier_hatexplain', 'en.classify.bert.hate.by_crcb': 'bert_classifier_hateval_re', 'en.classify.bert.hate.by_risingodegua': 
'bert_classifier_hate_speech_detector', 'en.classify.bert.hate.large': 'bert_classifier_bert_large_hatexplain', 'en.classify.bert.hate.mono_english.by_hate_speech_cnerg': 'bert_classifier_dehatebert_mono_english', 'en.classify.bert.hate.mono_indonesian.by_hate_speech_cnerg': 'bert_classifier_dehate_mono_indonesian', 'en.classify.bert.hate.uncased_base': 'bert_classifier_bert_base_uncased_hatexplain', 'en.classify.bert.hate.uncased_base.by_hate_speech_cnerg': 'bert_classifier_bert_base_uncased_hatexplain_rationale_two', 'en.classify.bert.imdb.': 'bert_classifier_autonlp_imdb_classification_596216804', 'en.classify.bert.imdb.1h': 'bert_classifier_bert_imdb_1hidden', 'en.classify.bert.imdb.by_mmcquade11': 'bert_classifier_autonlp_imdb_test_21134442', 'en.classify.bert.imdb.uncased_base': 'bert_classifier_fabriceyhc_base_uncased_imdb', 'en.classify.bert.imdb.uncased_base.by_nateraw': 'bert_classifier_nateraw_base_uncased_imdb', 'en.classify.bert.imdb.v1.by_abhishek': 'bert_classifier_autonlp_imdb_eval_71421', 'en.classify.bert.imdb.v2.by_abhishek': 'bert_classifier_autotrain_imdbtestmodel_9215210', 'en.classify.bert.imdb_sentiment.': 'bert_classifier_autonlp_imdb_sentiment_analysis_623817873', 'en.classify.bert.imdb_sentiment.by_yosemite': 'bert_classifier_autonlp_imdb_sentiment_analysis_english_470512388', 'en.classify.bert.joy.': 'bert_classifier_emo_nojoylove', 'en.classify.bert.klue.cased_multilingual_base': 'bert_classifier_klue_tc_base_multilingual_cased', 'en.classify.bert.lang': 'bert_classifier_manglish_offensive_language_identification', 'en.classify.bert.large': 'bert_classifier_bert_large_mnli', 'en.classify.bert.large.by_tehrannlp_org': 'bert_classifier_bert_large_sst2', 'en.classify.bert.large.random.by_baykenney': 'bert_classifier_large_gpt2detector_random', 'en.classify.bert.large.topk40.by_baykenney': 'bert_classifier_large_gpt2detector_topk40', 'en.classify.bert.mini_lm_mini': 'bert_classifier_minilm_l6_mnli', 
'en.classify.bert.mini_lm_mini.by_moritzlaurer': 'bert_classifier_minilm_l6_mnli_binary', 'en.classify.bert.movie_hate.uncased_base': 'bert_classifier_bert_based_uncased_hatespeech_movies', 'en.classify.bert.multi2convai.': 'bert_classifier_multi2convai_corona', 'en.classify.bert.multi2convai.logistics.by_inovex': 'bert_classifier_multi2convai_logistics', 'en.classify.bert.multi2convai.quality.by_inovex': 'bert_classifier_multi2convai_quality', 'en.classify.bert.news.': 'bert_sequence_classifier_age_news', 'en.classify.bert.news.by_abhishek': 'bert_classifier_autonlp_bbc_news_classification_37229289', 'en.classify.bert.news.cased_base': 'bert_classifier_base_cased_clickbait_news', 'en.classify.bert.news.cased_base.by_elozano': 'bert_classifier_base_cased_news_category', 'en.classify.bert.news.mini_finetuned': 'bert_sequence_classifier_mini_finetuned_age_news_classification', 'en.classify.bert.news.tiny_finetuned': 'bert_sequence_classifier_tiny_finetuned_fake_news_detection', 'en.classify.bert.news.uncased_base': 'bert_classifier_bert_base_uncased_ag_news', 'en.classify.bert.non_cl.by_yuan1729': 'bert_classifier_non_cl', 'en.classify.bert.pubmed.': 'bert_classifier_multi_label_classification_of_pubmed_articles', 'en.classify.bert.pubmed.by_pritamdeka': 'bert_classifier_pubmed_pubmed200krct', 'en.classify.bert.pubmed_bluebert.uncased_12l_768d_a12a_768d': 'bert_classifier_bluebert_pubmed_uncased_l_12_h_768_a_12_pub_section', 'en.classify.bert.sa_sub1.by_researchaccount': 'bert_classifier_sa_sub1', 'en.classify.bert.sa_sub2.by_researchaccount': 'bert_classifier_sa_sub2', 'en.classify.bert.sa_sub3.by_researchaccount': 'bert_classifier_sa_sub3', 'en.classify.bert.sa_sub4.by_researchaccount': 'bert_classifier_sa_sub4', 'en.classify.bert.sentiment.': 'bert_classifier_autonlp_sentiment_detection_1781580', 'en.classify.bert.sentiment.base': 'bert_classifier_base_finance_sentiment_noisy_search', 'en.classify.bert.sentiment.by_ahmedrachid': 
'bert_classifier_financialbert_sentiment_analysis', 'en.classify.bert.sentiment.by_charly': 'bert_classifier_autotrain_sentiment_4_812425472', 'en.classify.bert.sentiment.by_gilf': 'bert_classifier_english_yelp_sentiment', 'en.classify.bert.sentiment.by_ktangri': 'bert_classifier_autotrain_financial_sentiment_765323474', 'en.classify.bert.sentiment.by_shahidul034': 'bert_classifier_drug_sentiment_analysis', 'en.classify.bert.sentiment.by_souvikcmsa': 'bert_classifier_bert_sentiment_analysis', 'en.classify.bert.sentiment.by_tomato': 'bert_classifier_sentiment_analysis', 'en.classify.bert.sentiment.cased_base': 'bert_classifier_base_cased_tamil_mix_sentiment', 'en.classify.bert.sentiment.finetuned': 'bert_classifier_auditor_sentiment_finetuned', 'en.classify.bert.sentiment_twitter.': 'bert_classifier_twitter_sentiment', 'en.classify.bert.sms_spam.': 'bert_sequence_classifier_sms_spam', 'en.classify.bert.sms_spam.tiny_finetuned': 'bert_sequence_classifier_tiny_finetuned_sms_spam_detection', 'en.classify.bert.tiny': 'bert_classifier_tiny_best', 'en.classify.bert.tiny.by_chatwithnerd': 'bert_classifier_tiny_master', 'en.classify.bert.tiny_finetuned': 'bert_sequence_classifier_tiny_finetuned_yahoo_answers_topics', 'en.classify.bert.tweet.': 'bert_classifier_bert_tweet_eval_emotion', 'en.classify.bert.tweet.base_128d': 'bert_classifier_autotrain_base_tweeteval_1281248996', 'en.classify.bert.tweet.base_128d_v1.by_sasha': 'bert_classifier_autotrain_base_tweeteval_1281248997', 'en.classify.bert.tweet.base_128d_v2.by_sasha': 'bert_classifier_autotrain_base_tweeteval_1281248998', 'en.classify.bert.tweet.base_128d_v3.by_sasha': 'bert_classifier_autotrain_base_tweeteval_1281248999', 'en.classify.bert.tweet.base_128d_v4.by_sasha': 'bert_classifier_autotrain_base_tweeteval_1281249000', 'en.classify.bert.tweet.by_dtai_kuleuven': 'bert_classifier_m_corona_tweets_belgium_topics', 'en.classify.bert.tweet_sentiment.': 'bert_classifier_english_tweetsentiment', 
'en.classify.bert.uncased': 'bert_classifier_sci_uncased_topics', 'en.classify.bert.uncased_base': 'bert_classifier_bert_base_uncased_abstract', 'en.classify.bert.uncased_base.by_boychaboy': 'bert_classifier_snli_base_uncased', 'en.classify.bert.uncased_base.by_ehsanaghazadeh': 'bert_classifier_based_uncased_sst2_e1', 'en.classify.bert.uncased_base.by_li': 'bert_classifier_base_uncased_qnli', 'en.classify.bert.uncased_base.by_splend1dchan': 'bert_classifier_base_uncased_slue_goldtrascription_e3_lr1e_4', 'en.classify.bert.uncased_base.by_tennessejoyce': 'bert_classifier_titlewave_base_uncased', 'en.classify.bert.uncased_base.by_tobias': 'bert_classifier_base_uncased_english_multilable_classification', 'en.classify.bert.uncased_base.by_tomhosking': 'bert_classifier_base_uncased_debiased_nli', 'en.classify.bert.uncased_base_finetuned': 'bert_classifier_bert_base_uncased_finetuned_surveyclassification', 'en.classify.bert.uncased_base_finetuned.by_hazrulakmal': 'bert_classifier_base_uncased_finetuned', 'en.classify.bert.uncased_base_finetuned.by_transformersbook': 'bert_classifier_base_uncased_finetuned_clinc', 'en.classify.bert.uncased_base_finetuned.by_yuetian': 'bert_classifier_base_uncased_finetuned_plutchik_emotion', 'en.classify.bert.uncased_mini_lm_mini': 'bert_classifier_minilm_l12_h384_uncased_sst2_all_train', 'en.classify.bert.v1.by_bshlgrs': 'bert_classifier_autonlp_classification_9522090', 'en.classify.bert.v1.by_clem': 'bert_classifier_autonlp_test3_2101779', 'en.classify.bert.v1by_shogumbo': 'bert_classifier_testing3_multilavel', 'en.classify.bert.v2': 'bert_classifier_sponsorblock_v2', 'en.classify.bert.v2.by_bshlgrs': 'bert_classifier_autonlp_classification_with_all_labellers_9532137', 'en.classify.bert.v2.by_clem': 'bert_classifier_autonlp_test3_2101782', 'en.classify.bert.v2_2m': 'bert_classifier_autotrain_apmv2multiclass_1216046004', 'en.classify.bert.v2by_shogumbo': 'bert_classifier_testing4_multilabel', 'en.classify.bert.v3.by_bshlgrs': 
'bert_classifier_autonlp_old_data_trained_10022181', 'en.classify.bert.wnli_glue.6l_256d_a8a_256d': 'bert_classifier_sead_l_6_h_256_a_8_wnli', 'en.classify.bert.wnli_glue.6l_384d_a12a': 'bert_classifier_sead_l_6_h_384_a_12_wnli', 'en.classify.bert.wnli_glue.cased_base_finetuned': 'bert_classifier_bert_base_cased_finetuned_wnli', 'en.classify.bert.wnli_glue.cased_large_finetuned': 'bert_classifier_bert_large_cased_finetuned_wnli', 'en.classify.bert.xtreme.': 'bert_classifier_extreme_go_emotion', 'en.classify.bert.xtremedistiled_uncased': 'bert_classifier_xtremedistil_l12_h384_uncased_pub_section', 'en.classify.bert_sequence.ag_news': 'bert_base_sequence_classifier_ag_news', 'en.classify.bert_sequence.dbpedia_14': 'bert_base_sequence_classifier_dbpedia_14', 'en.classify.bert_sequence.dehatebert_mono': 'bert_sequence_classifier_dehatebert_mono', 'en.classify.bert_sequence.finbert': 'bert_sequence_classifier_finbert', 'en.classify.bert_sequence.imdb': 'bert_base_sequence_classifier_imdb', 'en.classify.bert_sequence.imdb_large': 'bert_large_sequence_classifier_imdb', 'en.classify.beto_bert': 'bert_classifier_beto_4d', 'en.classify.beto_bert.sentiment.': 'bert_classifier_beto_headlines_sentiment_analysis', 'en.classify.biobert': 'bert_classifier_biobert_v1.1_pub_section', 'en.classify.bioformer.cased': 'bert_classifier_bioformer_cased_v1.0_mnli', 'en.classify.bioformer.cased.by_bioformers': 'bert_classifier_bioformer_cased_v1.0_qnli', 'en.classify.carer.roberta.by_crcb': 'roberta_classifier_carer_new', 'en.classify.carer.roberta.v2.by_crcb': 'roberta_classifier_carer_2', 'en.classify.claim.bert.by_thinkcerca': 'bert_classifier_claim_hugging', 'en.classify.counterargument.bert.by_thinkcerca': 'bert_classifier_counterargument_hugging', 'en.classify.ctrl44.roberta.by_liamcripwell': 'roberta_classifier_ctrl44', 'en.classify.ctrl44_clf.roberta.by_liamcripwell': 'roberta_classifier_ctrl44_clf', 'en.classify.curiosity_bio.bert.by_k_partha': 'bert_classifier_curiosity_bio', 
'en.classify.cyberbullying': 'classifierdl_use_cyberbullying', 'en.classify.cyberbullying.use': 'classifierdl_use_cyberbullying', 'en.classify.cyberlandr_ai.bert.v1.by_billfrench': 'bert_classifier_autonlp_cyberlandr_ai_4_614417500', 'en.classify.cyberlandr_ai.bert.v2.by_billfrench': 'bert_classifier_autonlp_cyberlandr_ai_4_614417501', 'en.classify.dbpedia': 'deberta_v3_small_sequence_classifier_dbpedia_14', 'en.classify.decision_style_bio.bert.by_k_partha': 'bert_classifier_decision_style_bio', 'en.classify.distil_bert': 'distilbert_sequence_classifier_wiki_complexity', 'en.classify.distil_bert.base.by_datasets': 'distilbert_base_sequence_classifier_airlines', 'en.classify.distil_bert.by_abhishek': 'distilbert_sequence_classifier_autonlp_toxic_new_30516963', 'en.classify.distil_bert.by_bitmorse': 'distilbert_sequence_classifier_autonlp_ks_530615016', 'en.classify.distil_bert.by_chijioke': 'distilbert_sequence_classifier_autonlp_mono_625317956', 'en.classify.distil_bert.by_clem': 'distilbert_sequence_classifier_autonlp_test3_2101787', 'en.classify.distil_bert.by_d4data': 'distilbert_sequence_classifier_bias_detection_model', 'en.classify.distil_bert.by_danitg95': 'distilbert_sequence_classifier_autotrain_kaggle_effective_arguments_1086739296', 'en.classify.distil_bert.by_furiouslyasleep': 'distilbert_sequence_classifier_markingmulticlass', 'en.classify.distil_bert.by_juliensimon': 'distilbert_sequence_classifier_autonlp_song_lyrics_18753423', 'en.classify.distil_bert.by_lingwave_admin': 'distilbert_sequence_classifier_state_op_detector', 'en.classify.distil_bert.by_madhurjindal': 'distilbert_sequence_classifier_autonlp_gibberish_detector_492513457', 'en.classify.distil_bert.by_martin_ha': 'distilbert_sequence_classifier_toxic_comment_model', 'en.classify.distil_bert.by_mhaegeman': 'distilbert_sequence_classifier_autotrain_country_recognition_1059336697', 'en.classify.distil_bert.by_moritzlaurer': 'distilbert_sequence_classifier_policy_distilbert_7d', 
'en.classify.distil_bert.by_palakagl': 'distilbert_sequence_classifier_distilbert_multiclass_textclassification', 'en.classify.distil_bert.by_preetiha': 'distilbert_sequence_classifier_clause_classification', 'en.classify.distil_bert.by_sampathkethineedi': 'distilbert_sequence_classifier_industry_classification', 'en.classify.distil_bert.by_sathira': 'distilbert_sequence_classifier_autotrain_mbtinlp_798824628', 'en.classify.distil_bert.by_someshfengde': 'distilbert_sequence_classifier_autonlp_kaggledays_625717992', 'en.classify.distil_bert.by_valurank': 'distilbert_sequence_classifier_distilbert_quality', 'en.classify.distil_bert.by_world_wide': 'distilbert_sequence_classifier_is_legit_kwd_march_27', 'en.classify.distil_bert.by_xinsignia': 'distilbert_sequence_classifier_autotrain_online_orders_755323156', 'en.classify.distil_bert.cased_base': 'distilbert_sequence_classifier_arxiv_topics_distilbert_base_cased', 'en.classify.distil_bert.cased_base.by_aychang': 'distilbert_sequence_classifier_distilbert_base_cased_trec_coarse', 'en.classify.distil_bert.emotion.uncased_base': 'distilbert_sequence_classifier_bert_base_uncased_emotion', 'en.classify.distil_bert.emotion.uncased_base.by_bhadresh_savani': 'distilbert_sequence_classifier_bhadresh_savani_distilbert_base_uncased_emotion', 'en.classify.distil_bert.emotion.uncased_base.by_nanopass': 'distilbert_sequence_classifier_distilbert_base_uncased_emotion_2', 'en.classify.distil_bert.emotion.uncased_base.by_philschmid': 'distilbert_sequence_classifier_philschmid_distilbert_base_uncased_emotion', 'en.classify.distil_bert.environment.': 'distilbert_sequence_classifier_environmental_due_diligence_model', 'en.classify.distil_bert.finetuned': 'distilbert_sequence_classifier_autotrain_finetunedmodelbert_1034335535', 'en.classify.distil_bert.finetuned.by_svenstahlmann': 'distilbert_sequence_classifier_finetuned_distilbert_needmining', 'en.classify.distil_bert.glue.': 'distilbert_sequence_classifier_testing', 
'en.classify.distil_bert.glue.uncased_base_finetuned': 'distilbert_sequence_classifier_distilbert_base_uncased_finetuned_sst_2_english', 'en.classify.distil_bert.go_emotions.uncased_base': 'distilbert_sequence_classifier_distilbert_base_uncased_go_emotions_student', 'en.classify.distil_bert.imdb.': 'distilbert_sequence_classifier_autotrain_imdb_1166543171', 'en.classify.distil_bert.ma_mlc.by_couchcat': 'distilbert_sequence_classifier_ma_mlc_v7_distil', 'en.classify.distil_bert.ma_sa.by_couchcat': 'distilbert_sequence_classifier_ma_sa_v7_distil', 'en.classify.distil_bert.multi.by_lucifermorninstar011': 'distilbert_sequence_classifier_autotrain_lucifer_multi_844026969', 'en.classify.distil_bert.multi_all.by_lucifermorninstar011': 'distilbert_sequence_classifier_autotrain_lucifer_multi_auto_all_837626708', 'en.classify.distil_bert.news.uncased_base': 'distilbert_classifier_base_uncased_newspop_student', 'en.classify.distil_bert.news.uncased_base.by_andi611': 'distilbert_sequence_classifier_distilbert_base_uncased_ner_agnews', 'en.classify.distil_bert.news.uncased_base.by_joeddav': 'distilbert_sequence_classifier_distilbert_base_uncased_agnews_student', 'en.classify.distil_bert.news.uncased_base_finetuned': 'distilbert_sequence_classifier_distilbert_base_uncased_finetuned_ag_news', 'en.classify.distil_bert.sentiment.': 'distilbert_sequence_classifier_sentimentanalysisdistillbert', 'en.classify.distil_bert.sentiment.by_ck42': 'distilbert_sequence_classifier_sentiment_analysis_sbcbi', 'en.classify.distil_bert.sentiment.uncased_base': 'distilbert_sequence_classifier_distilbert_base_uncased_sentiment_sst2', 'en.classify.distil_bert.tweet.': 'distilbert_sequence_classifier_distilbert_tweet_eval_emotion', 'en.classify.distil_bert.tweet.by_bgoel4132': 'distilbert_sequence_classifier_tweet_disaster_classifier', 'en.classify.distil_bert.tweet.by_m_newhauser': 'distilbert_sequence_classifier_distilbert_political_tweets', 'en.classify.distil_bert.tweet.by_monsia': 
'distilbert_sequence_classifier_autonlp_tweets_classification_23044997', 'en.classify.distil_bert.tweet_emotion.uncased_base_finetuned': 'distilbert_sequence_classifier_distilbert_base_uncased_finetuned_emotion_en_tweets', 'en.classify.distil_bert.tweet_sentiment.': 'distilbert_sequence_classifier_autonlp_tweet_sentiment_extraction_20114061', 'en.classify.distil_bert.uncased_base': 'distilbert_sequence_classifier_distilbert_base_uncased_if', 'en.classify.distil_bert.uncased_base.by_andi611': 'distilbert_sequence_classifier_distilbert_base_uncased_qa_boolq', 'en.classify.distil_bert.uncased_base_finetuned': 'distilbert_sequence_classifier_ane_distilbert_base_uncased_finetuned_sst_2_english', 'en.classify.distil_bert.uncased_base_finetuned.by_nsi319': 'distilbert_sequence_classifier_distilbert_base_uncased_finetuned_app', 'en.classify.distil_bert.uncased_base_finetuned.by_speeqo': 'distilbert_sequence_classifier_speeqo_distilbert_base_uncased_finetuned_sst_2_english', 'en.classify.distil_roberta.distilled': 'roberta_classifier_finetune_emotion_distilroberta', 'en.classify.distil_roberta.distilled.by_aristotletan': 'roberta_classifier_scim_distilroberta', 'en.classify.distil_roberta.distilled.by_jpcorb20': 'roberta_classifier_toxic_detector_distilroberta', 'en.classify.distil_roberta.distilled_base_finetuned': 'roberta_sequence_classifier_distilroberta_base_finetuned_suicide_depression', 'en.classify.distil_roberta.news.distilled_finetuned': 'roberta_sequence_classifier_distilroberta_finetuned_age_news_classification', 'en.classify.distil_roberta.news_sentiment.distilled_finetuned': 'roberta_sequence_classifier_distilroberta_finetuned_financial_news_sentiment_analysis', 'en.classify.distil_roberta.sentiment.distilled_finetuned': 'roberta_sequence_classifier_distilroberta_finetuned_rotten_tomatoes_sentiment_analysis', 'en.classify.distil_roberta.tweet_hate.distilled_finetuned': 'roberta_sequence_classifier_distilroberta_finetuned_tweets_hate_speech', 
'en.classify.distilbert_sequence.ag_news': 'distilbert_base_sequence_classifier_ag_news', 'en.classify.distilbert_sequence.amazon_polarity': 'distilbert_base_sequence_classifier_amazon_polarity', 'en.classify.distilbert_sequence.banking77': 'distilbert_sequence_classifier_banking77', 'en.classify.distilbert_sequence.emotion': 'distilbert_sequence_classifier_emotion', 'en.classify.distilbert_sequence.imdb': 'distilbert_base_sequence_classifier_imdb', 'en.classify.distilbert_sequence.industry': 'distilbert_sequence_classifier_industry', 'en.classify.distilbert_sequence.policy': 'distilbert_sequence_classifier_policy', 'en.classify.distilbert_sequence.sst2': 'distilbert_sequence_classifier_sst2', 'en.classify.e2e': 'multiclassifierdl_use_e2e', 'en.classify.electra': 'electra_classifier_mfma', 'en.classify.electra.base': 'electra_classifier_base_discriminator_offenseval2019_downsample', 'en.classify.electra.fever.large': 'electra_classifier_large_discriminator_snli_mnli_fever_anli_r1_r2_r3_nli', 'en.classify.electra.hate.large': 'electra_classifier_large_discriminator_nli_efl_hateval', 'en.classify.electra.imdb.small_finetuned': 'electra_classifier_small_finetuned_imdb', 'en.classify.electra.tweet.large': 'electra_classifier_large_discriminator_nli_efl_tweeteval', 'en.classify.emotion': 'classifierdl_use_emotion', 'en.classify.emotion.bert': 'bert_sequence_classifier_emotion', 'en.classify.emotion.bert.minilm_v1.by_dinithi': 'bert_classifier_minlm_finetuned_emotionnew1', 'en.classify.emotion.bert.minilm_v2.by_dinithi': 'bert_classifier_minlm_finetuned_emotionnew2', 'en.classify.emotion.use': 'classifierdl_use_emotion', 'en.classify.esg.bert.by_yiyanghkust': 'bert_classifier_finbert_esg', 'en.classify.extra_bio.bert.by_k_partha': 'bert_classifier_extra_bio', 'en.classify.fakenews': 'classifierdl_use_fakenews', 'en.classify.fakenews.use': 'classifierdl_use_fakenews', 'en.classify.feedback_intent_test.roberta.by_mp6kv': 'roberta_classifier_feedback_intent_test', 
'en.classify.finance_sentiment': 'bert_sequence_classifier_finbert_tone', 'en.classify.food.distil_bert.base': 'distilbert_base_sequence_classifier_food', 'en.classify.fred2.roberta.by_abhishek': 'roberta_classifier_autonlp_fred2_2682064', 'en.classify.hashtag_to_hashtag.bert.by_bryan0123': 'bert_classifier_hashtag_to_hashtag', 'en.classify.hashtag_to_hashtag_20.bert.by_bryan0123': 'bert_classifier_hashtag_to_hashtag_20', 'en.classify.imdb.longformer': 'longformer_base_sequence_classifier_imdb', 'en.classify.imdb.xlnet': 'xlnet_base_sequence_classifier_imdb', 'en.classify.intent.airline': 'classifierdl_use_atis', 'en.classify.intent.atis': 'classifierdl_use_atis', 'en.classify.iqa_classification.roberta.by_mp6kv': 'roberta_classifier_iqa_classification', 'en.classify.isear_bert.roberta.by_crcb': 'roberta_classifier_isear_bert', 'en.classify.main_intent_test.roberta.by_mp6kv': 'roberta_classifier_main_intent_test', 'en.classify.manibert.by_niksmer': 'roberta_classifier_manibert', 'en.classify.mbert.multi2convai.quality.by_inovex': 'bert_classifier_multi2convai_quality_en_mbert', 'en.classify.mnli.bert.cased_base.by_boychaboy': 'bert_classifier_mnli_base_cased_3', 'en.classify.mnli.roberta.base.by_boychaboy': 'roberta_classifier_mnli_base', 'en.classify.multi.bert.by_palakagl': 'bert_classifier_bert_multiclass_textclassification', 'en.classify.news.deberta': 'deberta_v3_xsmall_sequence_classifier_ag_news', 'en.classify.news.deberta.small': 'deberta_v3_small_sequence_classifier_ag_news', 'en.classify.not_interested..bert.by_aujer': 'bert_classifier_autotrain_not_interested_2_1213045881', 'en.classify.offense..bert.uncased_base.by_mohsenfayyaz': 'bert_classifier_base_uncased_offenseval2019_unbalanced', 'en.classify.offensive.roberta.beta.by_khailai': 'roberta_classifier_offensive_beta', 'en.classify.offensive.roberta.by_khailai': 'roberta_classifier_offensive', 'en.classify.paper_feedback_intent.roberta.by_mp6kv': 'roberta_classifier_paper_feedback_intent', 
'en.classify.policyberta.by_niksmer': 'roberta_classifier_policyberta_7d', 'en.classify.prot_bfd_localization.bert.by_rostlab': 'bert_classifier_prot_bfd_localization', 'en.classify.prot_bfd_membrane.bert.by_rostlab': 'bert_classifier_prot_bfd_membrane', 'en.classify.pubmed_bert.pubmed.uncased_base': 'bert_classifier_biomednlp_pubmedbert_base_uncased_abstract_fulltext_pub_section', 'en.classify.pump_intent_test.roberta.by_mp6kv': 'roberta_classifier_pump_intent_test', 'en.classify.qn.roberta.by_lucianpopa': 'roberta_classifier_autotrain_qn_classification_1015534072', 'en.classify.qqp.distil_bert.base': 'distilbert_base_sequence_classifier_qqp', 'en.classify.question': 'classifierdl_use_trec50', 'en.classify.question_vs_statement': 'bert_sequence_classifier_question_statement', 'en.classify.questionpair': 'classifierdl_electra_questionpair', 'en.classify.questions': 'classifierdl_use_trec50', 'en.classify.questions.airline': 'classifierdl_use_atis', 'en.classify.questions.atis': 'classifierdl_use_atis', 'en.classify.reasoning.bert.by_thinkcerca': 'bert_classifier_reasoning_hugging', 'en.classify.rile.roberta.by_niksmer': 'roberta_classifier_rile', 'en.classify.roberta': 'roberta_classifier_autonlp_cml_412010597', 'en.classify.roberta.128d': 'roberta_classifier_autotrain_neurips_chanllenge_1287149282', 'en.classify.roberta.32d': 'roberta_classifier_autonlp_formality_scoring_2_32597818', 'en.classify.roberta.adverse_drug_event': 'roberta_classifier_parrot_adequacy_model', 'en.classify.roberta.ag_news': 'roberta_base_sequence_classifier_ag_news', 'en.classify.roberta.all_mnli.large.by_veronica320': 'roberta_classifier_spte_large_all_mnli', 'en.classify.roberta.amazon.': 'roberta_classifier_autotrain_amazon_shoe_reviews_classification_1104340243', 'en.classify.roberta.banking.': 'roberta_classifier_banking77', 'en.classify.roberta.banking.distilled_finetuned': 'roberta_classifier_distil_finetuned_banking77', 'en.classify.roberta.base': 
'roberta_classifier_base_formality_ranker', 'en.classify.roberta.base.by_mohsenfayyaz': 'roberta_classifier_base_toxicity', 'en.classify.roberta.base.by_onewithnickelcoins': 'roberta_classifier_base_stars', 'en.classify.roberta.base.by_stremie': 'roberta_classifier_base_clickbait', 'en.classify.roberta.base_4h': 'roberta_classifier_base_adr_smm4h2022', 'en.classify.roberta.base_finetuned': 'roberta_sequence_classifier_codebert_base_finetuned_detect_insecure_cod', 'en.classify.roberta.base_finetuned.by_aristotletan': 'roberta_classifier_aristotletan_base_finetuned_sst2', 'en.classify.roberta.base_finetuned.by_peter2000': 'roberta_classifier_base_finetuned_osdg', 'en.classify.roberta.by_aaroncu': 'roberta_classifier_attribute_classification', 'en.classify.roberta.by_alecmullen': 'roberta_classifier_autonlp_group_classification_441411446', 'en.classify.roberta.by_anamika': 'roberta_classifier_autonlp_fa_473312409', 'en.classify.roberta.by_anthos23': 'roberta_classifier_my_awesome_model', 'en.classify.roberta.by_avacaondata': 'roberta_classifier_bertin_exist22_task1', 'en.classify.roberta.by_biu_nlp': 'roberta_classifier_superpal', 'en.classify.roberta.by_blinjrm': 'roberta_classifier_finsent', 'en.classify.roberta.by_cathy': 'roberta_classifier_reranking_model', 'en.classify.roberta.by_chkla': 'roberta_classifier_argument', 'en.classify.roberta.by_cjbarrie': 'roberta_classifier_autotrain_atc2', 'en.classify.roberta.by_deepesh0x': 'roberta_classifier_autotrain_mlsec_1013333734', 'en.classify.roberta.by_ds198799': 'roberta_classifier_autonlp_predict_roi_1_29797730', 'en.classify.roberta.by_edwin923': 'roberta_classifier_fyp2022', 'en.classify.roberta.by_emekaboris': 'roberta_classifier_autonlp_txc_17923124', 'en.classify.roberta.by_furiouslyasleep': 'roberta_classifier_unhappyzebra100', 'en.classify.roberta.by_idjotherwise': 'roberta_classifier_autonlp_reading_prediction_172506', 'en.classify.roberta.by_johny201': 'roberta_classifier_autotrain_article_pred_1142742075', 
'en.classify.roberta.by_julien_c': 'roberta_classifier_reactiongif', 'en.classify.roberta.by_kamuuung': 'roberta_classifier_autonlp_lessons_tagging_606217261', 'en.classify.roberta.by_kco4776': 'roberta_classifier_soongsil_bert_wellness', 'en.classify.roberta.by_lucifermorninstar011': 'roberta_classifier_autotrain_lucifer_multi_auto_all_837626712', 'en.classify.roberta.by_luiz826': 'roberta_classifier_to_music_genre', 'en.classify.roberta.by_lumalik': 'roberta_classifier_vent_emotion', 'en.classify.roberta.by_masifayub': 'roberta_classifier_autotrain_pan_976832386', 'en.classify.roberta.by_nihaldsouza1': 'roberta_classifier_yelp_rating_classification', 'en.classify.roberta.by_nlpscholars': 'roberta_classifier_earning_call_transcript_classification', 'en.classify.roberta.by_noob123': 'roberta_classifier_autotrain_app_review_train_1314150168', 'en.classify.roberta.by_nthanhha26': 'roberta_classifier_autotrain_test_project_879428192', 'en.classify.roberta.by_p_christ': 'roberta_classifier_qandaclassifier', 'en.classify.roberta.by_palakagl': 'roberta_classifier_multiclass_textclassification', 'en.classify.roberta.by_paulagarciaserrano': 'roberta_classifier_depression_detection', 'en.classify.roberta.by_phucle': 'roberta_classifier_lro_v1.0.2a', 'en.classify.roberta.by_rabiaqayyum': 'roberta_classifier_autotrain_mental_health_analysis_752423172', 'en.classify.roberta.by_rti_international': 'roberta_classifier_rota', 'en.classify.roberta.by_saattrupdan': 'roberta_classifier_verdict', 'en.classify.roberta.by_shenzy': 'roberta_classifier_sentence_classification4designtutor', 'en.classify.roberta.by_shubh024': 'roberta_classifier_autotrain_intentclassificationfilipino_715021714', 'en.classify.roberta.by_siddish': 'roberta_classifier_autotrain_yes_or_no_on_circa_1009033469', 'en.classify.roberta.by_sindhu': 'roberta_classifier_emo', 'en.classify.roberta.by_skolkovoinstitute': 'roberta_classifier_toxicity', 'en.classify.roberta.by_tayyaba': 
'roberta_classifier_autotrain_pan_977432399', 'en.classify.roberta.by_unitary': 'roberta_classifier_unbiased_toxic', 'en.classify.roberta.by_unjustify': 'roberta_classifier_autotrain_commonsense_1_696121179', 'en.classify.roberta.by_wanghao2023': 'roberta_classifier_uganda_labor_market_interview_text_classification', 'en.classify.roberta.by_wanyu': 'roberta_classifier_iterater_intention', 'en.classify.roberta.by_xyhy': 'roberta_classifier_autonlp_123_478412765', 'en.classify.roberta.by_zamachi': 'roberta_classifier_for_multilabel_sentence_classification', 'en.classify.roberta.by_zwang199': 'roberta_classifier_autonlp_traffic_nlp_binary_537215209', 'en.classify.roberta.car_review.v1.by_qualitydatalab': 'roberta_classifier_autotrain_car_review_project_966432120', 'en.classify.roberta.car_review.v2.by_qualitydatalab': 'roberta_classifier_autotrain_car_review_project_966432121', 'en.classify.roberta.clickbait.distilled.by_valurank': 'roberta_classifier_distil_clickbait', 'en.classify.roberta.clinc.large_finetuned.by_lewtun': 'roberta_classifier_lewtun_large_finetuned_clinc', 'en.classify.roberta.clinc.v1.large_finetuned.by_lewtun': 'roberta_classifier_large_finetuned_clinc_314', 'en.classify.roberta.clinc.v2.large_finetuned.by_lewtun': 'roberta_classifier_large_finetuned_clinc_1234567', 'en.classify.roberta.clinc.v2large_finetuned.by_lewtun': 'roberta_classifier_large_finetuned_clinc_1', 'en.classify.roberta.clinc.v3large_finetuned.by_lewtun': 'roberta_classifier_large_finetuned_clinc_12', 'en.classify.roberta.clinc.v4large_finetuned.by_lewtun': 'roberta_classifier_large_finetuned_clinc_123', 'en.classify.roberta.clinc.v5large_finetuned.by_lewtun': 'roberta_classifier_large_finetuned_clinc_1234', 'en.classify.roberta.clinc.v6large_finetuned.by_lewtun': 'roberta_classifier_large_finetuned_clinc_12345', 'en.classify.roberta.clinc.v7large_finetuned.by_lewtun': 'roberta_classifier_large_finetuned_clinc_123456', 'en.classify.roberta.clinc.v9large_finetuned.by_lewtun': 
'roberta_classifier_large_finetuned_clinc_3141', 'en.classify.roberta.cord19_sentiment.': 'roberta_classifier_discord_nft_sentiment', 'en.classify.roberta.covid.': 'roberta_classifier_autonlp_bert_covid_407910467', 'en.classify.roberta.covid.by_anelnurkayeva': 'roberta_classifier_autonlp_covid_432211280', 'en.classify.roberta.covid.by_moritzlaurer': 'roberta_classifier_covid_policy_21', 'en.classify.roberta.covid_news.': 'roberta_classifier_autonlp_fake_covid_news_36769078', 'en.classify.roberta.crypto.': 'roberta_classifier_cryptobert', 'en.classify.roberta.crypto_sentiment.': 'roberta_classifier_autotrain_nlu_crypto_sentiment_analysis_754123133', 'en.classify.roberta.cuad.': 'roberta_classifier_cuad_contract_type', 'en.classify.roberta.current.distilled.by_valurank': 'roberta_classifier_distil_current', 'en.classify.roberta.distilled': 'roberta_classifier_distil_bias', 'en.classify.roberta.distilled.by_anthos23': 'roberta_classifier_fs_distil_fine_tuned', 'en.classify.roberta.distilled_base': 'roberta_classifier_emotion_english_distil_base', 'en.classify.roberta.distilled_base.by_azizbarank': 'roberta_classifier_distil_base_sst2_distilled', 'en.classify.roberta.distilled_base.by_j_hartmann': 'roberta_classifier_concreteness_english_distil_base', 'en.classify.roberta.distilled_finetuned': 'roberta_classifier_distil_finetuned_financial_text_classification', 'en.classify.roberta.distilled_finetuned.by_narrativa': 'roberta_classifier_distil_finetuned_stereotype_detection', 'en.classify.roberta.distilled_mini_lm_mini': 'roberta_classifier_minilm_l12_clinc_distilled', 'en.classify.roberta.distilled_mini_lm_mini.by_moshew': 'roberta_classifier_minilm_l6_clinc_distilled', 'en.classify.roberta.distilled_v2_mini_lm_mini_finetuned': 'roberta_classifier_lewtun_minilmv2_l12_h384_distilled_finetuned_clinc', 'en.classify.roberta.distilled_v2_mini_lm_mini_finetuned.by_optimum': 'roberta_classifier_optimum_minilmv2_l12_h384_distilled_finetuned_clinc', 
'en.classify.roberta.distilled_v2_mini_lm_mini_finetuned.by_philschmid': 'roberta_classifier_philschmid_minilmv2_l12_h384_distilled_finetuned_clinc', 'en.classify.roberta.election_relevancy_best.by_sefaozalpadl': 'roberta_classifier_election_relevancy_best', 'en.classify.roberta.email_spam.distilled_v2_mini_lm_mini_finetuned': 'roberta_classifier_minilmv2_l12_h384_distilled_finetuned_spam_detection', 'en.classify.roberta.emotion.base': 'roberta_classifier_base_emotion', 'en.classify.roberta.emotion.base.by_Jorgeutd': 'roberta_classifier_sagemaker_base_emotion', 'en.classify.roberta.emotion.by_osiris': 'roberta_classifier_emotion', 'en.classify.roberta.emotion.v2_mini_lm_mini': 'roberta_classifier_minilmv2_l12_h384_emotion', 'en.classify.roberta.emotion.v2_mini_lm_mini.by_philschmid': 'roberta_classifier_minilmv2_l6_h384_emotion', 'en.classify.roberta.environment.': 'roberta_classifier_environmental_claims', 'en.classify.roberta.fakeddit.': 'roberta_classifier_fakeddit', 'en.classify.roberta.fever.': 'roberta_classifier_fever', 'en.classify.roberta.fever.large': 'roberta_classifier_large_snli_mnli_fever_anli_r1_r2_r3_nli', 'en.classify.roberta.finetuned': 'roberta_sequence_classifier_codebert2codebert_finetuned_code_defect_detection', 'en.classify.roberta.finetuned.by_ali2066': 'roberta_classifier_finetuned_sentence_itr0_2e_05_all_01_03_2022_02_53_51', 'en.classify.roberta.glue.': 'roberta_classifier_cuenb_mnli', 'en.classify.roberta.glue.base': 'roberta_classifier_intel_base_mrpc', 'en.classify.roberta.glue.distilled_base': 'roberta_classifier_platzi_distil_base_mrpc_glue_omar_espejel', 'en.classify.roberta.glue.distilled_mini_lm_mini': 'roberta_classifier_minilm_l12_h384_sst2_distilled', 'en.classify.roberta.glue.large': 'roberta_classifier_philschmid_large_sst2', 'en.classify.roberta.glue.v2_mini_lm_mini': 'roberta_classifier_minilmv2_l12_h384_sst2', 'en.classify.roberta.glue.v2_mini_lm_mini_l6_h384_sst2.by_philschmid': 'roberta_classifier_minilmv2_l6_h384_sst2', 
'en.classify.roberta.glue.v2_mini_lm_mini_l6_h768_sst2.by_philschmid': 'roberta_classifier_minilmv2_l6_h768_sst2', 'en.classify.roberta.glue_cola1.base': 'roberta_classifier_base_cola', 'en.classify.roberta.go_emotions.': 'roberta_classifier_emoroberta', 'en.classify.roberta.hate.': 'roberta_classifier_hate_speech_dynabench_r1_target', 'en.classify.roberta.hate.base': 'roberta_classifier_base_frenk_hate', 'en.classify.roberta.hate.distilled': 'roberta_classifier_distil_hatespeech', 'en.classify.roberta.hate.dynabench_r2_target.by_facebook': 'roberta_classifier_hate_speech_dynabench_r2_target', 'en.classify.roberta.hate.dynabench_r3_target.by_facebook': 'roberta_classifier_hate_speech_dynabench_r3_target', 'en.classify.roberta.hate.dynabench_r4_target.by_facebook': 'roberta_classifier_hate_speech_dynabench_r4_target', 'en.classify.roberta.imdb': 'roberta_base_sequence_classifier_imdb', 'en.classify.roberta.imdb.': 'roberta_classifier_autonlp_imdb_demo_hf_16622775', 'en.classify.roberta.imdb.base': 'roberta_classifier_base_imdb', 'en.classify.roberta.imdb.base.by_abhishek': 'roberta_classifier_autonlp_imdb_base_3662644', 'en.classify.roberta.imdb.base_v1.by_sasha': 'roberta_classifier_autotrain_base_imdb_1275248775', 'en.classify.roberta.imdb.base_v2.by_sasha': 'roberta_classifier_autotrain_base_imdb_1275248776', 'en.classify.roberta.imdb.base_v3.by_sasha': 'roberta_classifier_autotrain_base_imdb_1275248777', 'en.classify.roberta.imdb.base_v4.by_sasha': 'roberta_classifier_autotrain_base_imdb_1275248778', 'en.classify.roberta.imdb.base_v5.by_sasha': 'roberta_classifier_autotrain_base_imdb_1275248779', 'en.classify.roberta.imdb.by_kaveh8': 'roberta_classifier_autonlp_imdb_rating_625417974', 'en.classify.roberta.imdb.by_mmcquade11': 'roberta_classifier_autonlp_imdb_test_21134453', 'en.classify.roberta.imdb_sentiment.': 'roberta_classifier_autonlp_imdb_sentiment_classification_31154', 'en.classify.roberta.imdb_sentiment.32d': 
'roberta_classifier_autonlp_imdb_reviews_sentiment_329982', 'en.classify.roberta.imdb_sentiment.by_pierric': 'roberta_classifier_autonlp_my_own_imdb_sentiment_analysis_2131817', 'en.classify.roberta.lang': 'roberta_classifier_codeberta_language_identification', 'en.classify.roberta.lang.by_addy88': 'roberta_classifier_programming_lang_identifier', 'en.classify.roberta.large': 'roberta_classifier_emotion_english_large', 'en.classify.roberta.large.by_alisawuffles': 'roberta_classifier_large_wanli', 'en.classify.roberta.large.by_cestwc': 'roberta_classifier_large', 'en.classify.roberta.large.by_deeppavlov': 'roberta_classifier_large_winogrande', 'en.classify.roberta.large.by_j_hartmann': 'roberta_classifier_purchase_intention_english_large', 'en.classify.roberta.large.by_mcgill_nlp': 'roberta_classifier_large_faithcritic', 'en.classify.roberta.large.by_sam890914': 'roberta_classifier_autonlp_large2_479012819', 'en.classify.roberta.large.by_uploaded by huggingface': 'roberta_classifier_large_mnli', 'en.classify.roberta.large_4h': 'roberta_classifier_autotrain_smm4h_large_clean_874027878', 'en.classify.roberta.large_finetuned': 'roberta_classifier_autonlp_large_finetuned_467612250', 'en.classify.roberta.large_finetuned.by_dbounds': 'roberta_classifier_dbounds_large_finetuned_clinc', 'en.classify.roberta.large_finetuned.by_optimum': 'roberta_classifier_optimum_large_finetuned_clinc', 'en.classify.roberta.large_finetuned.by_philschmid': 'roberta_classifier_philschmid_large_finetuned_clinc', 'en.classify.roberta.large_finetuned.by_yuetian': 'roberta_classifier_large_finetuned_plutchik_emotion', 'en.classify.roberta.large_finetuned_adverse_drug_event': 'roberta_classifier_large_mnli_finetuned_header', 'en.classify.roberta.live_demo_question_intimacy.by_pedropei': 'roberta_classifier_live_demo_question_intimacy', 'en.classify.roberta.mbfc_bias.distilled.by_valurank': 'roberta_classifier_distil_mbfc_bias', 'en.classify.roberta.mbfc_bias_4class.distilled.by_valurank': 
'roberta_classifier_distil_mbfc_bias_4class', 'en.classify.roberta.mnli.distilled_base.by_boychaboy': 'roberta_classifier_mnli_distil_base', 'en.classify.roberta.mnli_200.large.by_veronica320': 'roberta_classifier_spte_large_mnli_200', 'en.classify.roberta.mnli_large.large.by_boychaboy': 'roberta_classifier_mnli_large', 'en.classify.roberta.mnli_misogyny_sexism.tweet.large_4tweets_3e_05_0.05.by_annahaz': 'roberta_classifier_large_mnli_misogyny_sexism_4tweets_3e_05_0.05', 'en.classify.roberta.mnli_misogyny_sexism.tweet.large_4tweets_3e_05_0.055_4.by_annahaz': 'roberta_classifier_large_mnli_misogyny_sexism_4tweets_3e_05_0.055_4', 'en.classify.roberta.mnli_misogyny_sexism.tweet.large_4tweets_3e_05_0.05_singledt.by_annahaz': 'roberta_classifier_large_mnli_misogyny_sexism_4tweets_3e_05_0.05_singledt', 'en.classify.roberta.mnli_misogyny_sexism.tweet.large_4tweets_3e_05_0.07.by_annahaz': 'roberta_classifier_large_mnli_misogyny_sexism_4tweets_3e_05_0.07', 'en.classify.roberta.mrpc.glue.base.by_JeremiahZ': 'roberta_classifier_jeremiahz_base_mrpc', 'en.classify.roberta.neutral_non_neutral.by_osiris': 'roberta_classifier_neutral_non_neutral', 'en.classify.roberta.news.': 'roberta_classifier_autotrain_news_916530070', 'en.classify.roberta.news.by_hamzab': 'roberta_classifier_fake_news_classification', 'en.classify.roberta.news.by_nithiwat': 'roberta_classifier_fake_news_debunker', 'en.classify.roberta.news.distilled_base_finetuned': 'roberta_classifier_distil_base_finetuned_fake_news_english', 'en.classify.roberta.news.distilled_small': 'roberta_classifier_distil_news_small', 'en.classify.roberta.news_sentiment.': 'roberta_classifier_news_sentiment_analysis', 'en.classify.roberta.news_sentiment_twitter.base_finetuned': 'roberta_classifier_twitter_base_sentiment_latest_finetuned_fg_single_sentence_news', 'en.classify.roberta.news_sentiment_twitter.d_base_finetuned': 'roberta_classifier_twitter_base_sentiment_latest_finetuned_fg_concat_sentence_h_news', 
'en.classify.roberta.not_interested.by_aujer': 'roberta_classifier_autotrain_not_interested_1_1213145894', 'en.classify.roberta.not_interested.v2.by_aujer': 'roberta_classifier_not_interested_v0', 'en.classify.roberta.offensive.distilled.by_valurank': 'roberta_classifier_distil_offensive', 'en.classify.roberta.propaganda_2class.distilled.by_valurank': 'roberta_classifier_distil_propaganda_2class', 'en.classify.roberta.proppy.distilled.by_valurank': 'roberta_classifier_distil_proppy', 'en.classify.roberta.pyrxsum.large.by_shiyue': 'roberta_classifier_large_pyrxsum', 'en.classify.roberta.qnli.glue.base.by_JeremiahZ': 'roberta_classifier_qnli_base', 'en.classify.roberta.qqp.glue.base.by_JeremiahZ': 'roberta_classifier_qqp_base', 'en.classify.roberta.question_intimacy.by_pedropei': 'roberta_classifier_question_intimacy', 'en.classify.roberta.realsumm.large.by_shiyue': 'roberta_classifier_large_realsumm', 'en.classify.roberta.realsumm_by_examples_fold1.large.by_shiyue': 'roberta_classifier_large_realsumm_by_examples_fold1', 'en.classify.roberta.realsumm_by_examples_fold2.large.by_shiyue': 'roberta_classifier_large_realsumm_by_examples_fold2', 'en.classify.roberta.realsumm_by_examples_fold3.large.by_shiyue': 'roberta_classifier_large_realsumm_by_examples_fold3', 'en.classify.roberta.realsumm_by_examples_fold4.large.by_shiyue': 'roberta_classifier_large_realsumm_by_examples_fold4', 'en.classify.roberta.realsumm_by_examples_fold5.large.by_shiyue': 'roberta_classifier_large_realsumm_by_examples_fold5', 'en.classify.roberta.realsumm_by_systems_fold1.large.by_shiyue': 'roberta_classifier_large_realsumm_by_systems_fold1', 'en.classify.roberta.realsumm_by_systems_fold2.large.by_shiyue': 'roberta_classifier_large_realsumm_by_systems_fold2', 'en.classify.roberta.realsumm_by_systems_fold3.large.by_shiyue': 'roberta_classifier_large_realsumm_by_systems_fold3', 'en.classify.roberta.realsumm_by_systems_fold4.large.by_shiyue': 'roberta_classifier_large_realsumm_by_systems_fold4', 
'en.classify.roberta.realsumm_by_systems_fold5.large.by_shiyue': 'roberta_classifier_large_realsumm_by_systems_fold5', 'en.classify.roberta.rte.glue.base.by_JeremiahZ': 'roberta_classifier_rte_base', 'en.classify.roberta.security_text.': 'roberta_classifier_autotrain_security_texts_classification_688020754', 'en.classify.roberta.security_text.distilled': 'roberta_classifier_autotrain_security_texts_classification_distil_688220764', 'en.classify.roberta.sentiment.': 'roberta_classifier_autotrain_sentiment_polarity_918130222', 'en.classify.roberta.sentiment.base': 'roberta_classifier_base_indonesian_1.5g_sentiment_analysis_smsa', 'en.classify.roberta.sentiment.base.by_veb': 'roberta_classifier_twitch_base_sentiment_latest', 'en.classify.roberta.sentiment.by_acho0057': 'roberta_classifier_sentiment_analysis_custom', 'en.classify.roberta.sentiment.by_souvikcmsa': 'roberta_classifier_sentiment_analysis', 'en.classify.roberta.sentiment.finetuning_': 'roberta_classifier_finetuning_cardiffnlp_sentiment_model', 'en.classify.roberta.sentiment.large': 'roberta_classifier_sentiment_large_english', 'en.classify.roberta.sentiment.large.by_j_hartmann': 'roberta_classifier_sentiment_large_english_3_classes', 'en.classify.roberta.sentiment.large.by_soleimanian': 'roberta_classifier_financial_large_sentiment', 'en.classify.roberta.sentiment_twitter.': 'roberta_classifier_robbert_twitter_sentiment', 'en.classify.roberta.sentiment_twitter.base': 'roberta_classifier_twitter_base_sentiment_latest', 'en.classify.roberta.sentiment_twitter.base_finetuned': 'roberta_classifier_twitter_base_mar2022_finetuned_sentiment', 'en.classify.roberta.small': 'roberta_classifier_indonesia_emotion_small', 'en.classify.roberta.snli.distilled_base.by_boychaboy': 'roberta_classifier_snli_distil_base', 'en.classify.roberta.snli_large.large.by_boychaboy': 'roberta_classifier_snli_large', 'en.classify.roberta.stop_the_steal_relevancy_analysis_binary.by_sefaozalpadl': 
'roberta_classifier_stop_the_steal_relevancy_analysis_binary', 'en.classify.roberta.tac08.large.by_shiyue': 'roberta_classifier_large_tac08', 'en.classify.roberta.tac08_tac09.large.by_shiyue': 'roberta_classifier_large_tac08_tac09', 'en.classify.roberta.tac09.large.by_shiyue': 'roberta_classifier_large_tac09', 'en.classify.roberta.tweet.': 'roberta_classifier_tweet_offensive_eval', 'en.classify.roberta.tweet.base_128d': 'roberta_classifier_autotrain_basetweeteval_1281048986', 'en.classify.roberta.tweet.base_128d_1281048987.by_sasha': 'roberta_classifier_autotrain_basetweeteval_1281048987', 'en.classify.roberta.tweet.base_128d_1281048988.by_sasha': 'roberta_classifier_autotrain_basetweeteval_1281048988', 'en.classify.roberta.tweet.base_128d_1281048989.by_sasha': 'roberta_classifier_autotrain_basetweeteval_1281048989', 'en.classify.roberta.tweet.base_128d_1281048990.by_sasha': 'roberta_classifier_autotrain_basetweeteval_1281048990', 'en.classify.roberta.tweet.by_keithhorgan': 'roberta_classifier_tweetclimateanalysis', 'en.classify.roberta.tweet.large': 'roberta_classifier_large_mnli_misogyny_sexism_4tweets_2e_05_0.05', 'en.classify.roberta.tweet_sentiment.': 'roberta_classifier_tweet_sentiment_eval', 'en.classify.roberta.tweet_topic_19_multi.by_cardiffnlp': 'roberta_classifier_tweet_topic_19_multi', 'en.classify.roberta.tweet_topic_19_single.by_cardiffnlp': 'roberta_classifier_tweet_topic_19_single', 'en.classify.roberta.tweet_topic_21_multi.by_cardiffnlp': 'roberta_classifier_tweet_topic_21_multi', 'en.classify.roberta.tweet_topic_21_single.by_cardiffnlp': 'roberta_classifier_tweet_topic_21_single', 'en.classify.roberta.twisent_siebert.by_shatabdi': 'roberta_classifier_twisent_siebert', 'en.classify.roberta.twisent_twisent.by_shatabdi': 'roberta_classifier_twisent_twisent', 'en.classify.roberta.twitter.': 'roberta_classifier_stress_twitter', 'en.classify.roberta.twitter.base': 'roberta_classifier_twitter_base_dec2021_rbam_fine_tuned', 
'en.classify.roberta.twitter.base.by_cardiffnlp': 'roberta_classifier_twitter_base_emotion', 'en.classify.roberta.twitter.base.by_maxpe': 'roberta_classifier_twitter_base_jun2022_sem_eval_2018_task_1', 'en.classify.roberta.twitter.base_finetuned': 'roberta_classifier_twitter_base_mar2022_finetuned_emotion', 'en.classify.roberta.twitter.v2_base': 'roberta_classifier_twitter_base_stance_abortionv2', 'en.classify.roberta.v2_mini_lm_mini_finetuned': 'roberta_classifier_minilmv2_l12_h384_finetuned_clinc', 'en.classify.sarcasm': 'classifierdl_use_sarcasm', 'en.classify.sarcasm.use': 'classifierdl_use_sarcasm', 'en.classify.scibert.battery_scibert.cased': 'bert_classifier_batteryscibert_cased_abstract', 'en.classify.scibert.battery_scibert.uncased': 'bert_classifier_batteryscibert_uncased_abstract', 'en.classify.scibert.scibert.cased': 'bert_classifier_scibert_scivocab_cased_pub_section', 'en.classify.scibert.scibert.uncased': 'bert_classifier_scibert_scivocab_uncased_pub_section', 'en.classify.sentiment.imdb.deberta': 'deberta_v3_xsmall_sequence_classifier_imdb', 'en.classify.sentiment.imdb.deberta.base': 'deberta_v3_base_sequence_classifier_imdb', 'en.classify.sentiment.imdb.deberta.large': 'deberta_v3_large_sequence_classifier_imdb', 'en.classify.sentiment.imdb.deberta.small': 'deberta_v3_small_sequence_classifier_imdb', 'en.classify.snips': 'nerdl_snips_100d', 'en.classify.snli.bert.cased_base.by_boychaboy': 'bert_classifier_snli_base_cased', 'en.classify.snli.roberta.base.by_boychaboy': 'roberta_classifier_snli_base', 'en.classify.song_lyrics': 'bert_sequence_classifier_song_lyrics', 'en.classify.spam': 'classifierdl_use_spam', 'en.classify.spam.use': 'classifierdl_use_spam', 'en.classify.sst1.roberta.by_lucianpopa': 'roberta_classifier_autonlp_sst1_529214890', 'en.classify.target_group.bert.base.by_thusken': 'bert_classifier_nb_base_target_group', 'en.classify.token_albert.xlarge_token_classifier_conll03': 'albert_xlarge_token_classifier_conll03', 
'en.classify.token_albert_base_token_classifier_conll03': 'albert_base_token_classifier_conll03', 'en.classify.token_albert_large_token_classifier_conll03': 'albert_large_token_classifier_conll03', 'en.classify.token_bert.classifier_ner_btc': 'bert_token_classifier_ner_btc', 'en.classify.token_bert.conll03': 'bert_base_token_classifier_conll03', 'en.classify.token_bert.few_nerd': 'bert_base_token_classifier_few_nerd', 'en.classify.token_bert.large_conll03': 'bert_large_token_classifier_conll03', 'en.classify.token_bert.large_ontonote': 'bert_large_token_classifier_ontonote', 'en.classify.token_bert.ontonote': 'bert_base_token_classifier_ontonote', 'en.classify.token_distilroberta_base_token_classifier_ontonotes': 'distilroberta_base_token_classifier_ontonotes', 'en.classify.token_longformer.base_token_classifier_conll03': 'longformer_base_token_classifier_conll03', 'en.classify.token_roberta.large_token_classifier_ontonotes': 'roberta_large_token_classifier_ontonotes', 'en.classify.token_roberta_base_token_classifier_conll03': 'roberta_base_token_classifier_conll03', 'en.classify.token_roberta_base_token_classifier_ontonotes': 'roberta_base_token_classifier_ontonotes', 'en.classify.token_roberta_large_token_classifier_conll03': 'roberta_large_token_classifier_conll03', 'en.classify.token_xlnet.large_token_classifier_conll03': 'xlnet_large_token_classifier_conll03', 'en.classify.token_xlnet_base_token_classifier_conll03': 'xlnet_base_token_classifier_conll03', 'en.classify.tone.bert.by_yiyanghkust': 'bert_classifier_finbert_tone', 'en.classify.toxic': 'multiclassifierdl_use_toxic', 'en.classify.toxic.sm': 'multiclassifierdl_use_toxic_sm', 'en.classify.toxicity.bert.uncased_base.by_mohsenfayyaz': 'bert_classifier_base_uncased_toxicity_a', 'en.classify.trec.roberta.by_lucianpopa': 'roberta_classifier_autonlp_trec_classification_522314623', 'en.classify.trec50': 'classifierdl_use_trec50', 'en.classify.trec50.use': 'classifierdl_use_trec50', 'en.classify.trec6': 
'classifierdl_use_trec6', 'en.classify.trec6.use': 'classifierdl_use_trec6', 'en.classify.typos.distilbert': 'distilbert_token_classifier_typo_detector', 'en.classify.user_needs.bert.base.by_thusken': 'bert_classifier_nb_base_user_needs', 'en.classify.xlm_roberta.ag_news': 'xlm_roberta_base_sequence_classifier_ag_news', 'en.classify.xlm_roberta.imdb': 'xlm_roberta_base_sequence_classifier_imdb', 'en.classify.xlmr_roberta.glue.base': 'xlmroberta_classifier_base_mrpc', 'en.classify_image.ALL': 'image_classifier_vit_ALL', 'en.classify_image.CarViT': 'image_classifier_vit_CarViT', 'en.classify_image.Check_Aligned_Teeth': 'image_classifier_vit_Check_Aligned_Teeth', 'en.classify_image.Check_GoodBad_Teeth': 'image_classifier_vit_Check_GoodBad_Teeth', 'en.classify_image.Check_Gum_Teeth': 'image_classifier_vit_Check_Gum_Teeth', 'en.classify_image.Check_Missing_Teeth': 'image_classifier_vit_Check_Missing_Teeth', 'en.classify_image.Infrastructures': 'image_classifier_vit_Infrastructures', 'en.classify_image.Insectodoptera': 'image_classifier_vit_Insectodoptera', 'en.classify_image.PANDA_ViT': 'image_classifier_vit_PANDA_ViT', 'en.classify_image.PanJuOffset_TwoClass': 'image_classifier_vit_PanJuOffset_TwoClass', 'en.classify_image.SDO_VT1': 'image_classifier_vit_SDO_VT1', 'en.classify_image.Teeth_A': 'image_classifier_vit_Teeth_A', 'en.classify_image.Teeth_B': 'image_classifier_vit_Teeth_B', 'en.classify_image.Teeth_C': 'image_classifier_vit_Teeth_C', 'en.classify_image.Test_Model': 'image_classifier_vit_Test_Model', 'en.classify_image.Tomato_Leaf_Classifier': 'image_classifier_vit_Tomato_Leaf_Classifier', 'en.classify_image.VIT_Basic': 'image_classifier_vit_VIT_Basic', 'en.classify_image.ViTFineTuned': 'image_classifier_vit_ViTFineTuned', 'en.classify_image.ViT_FaceMask_Finetuned': 'image_classifier_vit_ViT_FaceMask_Finetuned', 'en.classify_image.Visual_transformer_chihuahua_cookies': 'image_classifier_vit_Visual_transformer_chihuahua_cookies', 'en.classify_image.WEC_types': 
'image_classifier_vit_WEC_types', 'en.classify_image._beans': 'image_classifier_vit__beans', 'en.classify_image._flyswot_test': 'image_classifier_vit__flyswot_test', 'en.classify_image._spectrogram': 'image_classifier_vit__spectrogram', 'en.classify_image.age_classifier': 'image_classifier_vit_age_classifier', 'en.classify_image.airplanes': 'image_classifier_vit_airplanes', 'en.classify_image.ak__base_patch16_224_in21k_image_classification': 'image_classifier_vit_ak__base_patch16_224_in21k_image_classification', 'en.classify_image.amgerindaf': 'image_classifier_vit_amgerindaf', 'en.classify_image.animal_classifier': 'image_classifier_vit_animal_classifier', 'en.classify_image.animal_classifier_huggingface': 'image_classifier_vit_animal_classifier_huggingface', 'en.classify_image.animals_classifier': 'image_classifier_vit_animals_classifier', 'en.classify_image.anomaly': 'image_classifier_vit_anomaly', 'en.classify_image.apes': 'image_classifier_vit_apes', 'en.classify_image.architectural_styles': 'image_classifier_vit_architectural_styles', 'en.classify_image.asl': 'image_classifier_vit_asl', 'en.classify_image.autotrain_cifar10__base': 'image_classifier_vit_autotrain_cifar10__base', 'en.classify_image.autotrain_dog_vs_food': 'image_classifier_vit_autotrain_dog_vs_food', 'en.classify_image.autotrain_fashion_mnist__base': 'image_classifier_vit_autotrain_fashion_mnist__base', 'en.classify_image.baked_goods': 'image_classifier_vit_baked_goods', 'en.classify_image.base_avengers_v1': 'image_classifier_vit_base_avengers_v1', 'en.classify_image.base_beans': 'image_classifier_vit_base_beans', 'en.classify_image.base_beans_demo': 'image_classifier_vit_base_beans_demo', 'en.classify_image.base_beans_demo_v2': 'image_classifier_vit_base_beans_demo_v2', 'en.classify_image.base_beans_demo_v3': 'image_classifier_vit_base_beans_demo_v3', 'en.classify_image.base_beans_demo_v5': 'image_classifier_vit_base_beans_demo_v5', 'en.classify_image.base_cats_vs_dogs': 
'image_classifier_vit_base_cats_vs_dogs', 'en.classify_image.base_cifar10': 'image_classifier_vit_base_cifar10', 'en.classify_image.base_food101': 'image_classifier_vit_base_food101', 'en.classify_image.base_movie_scenes_v1': 'image_classifier_vit_base_movie_scenes_v1', 'en.classify_image.base_mri': 'image_classifier_vit_base_mri', 'en.classify_image.base_patch16_224': 'image_classifier_vit_base_patch16_224', 'en.classify_image.base_patch16_224.by_google': 'image_classifier_vit_base_patch16_224', 'en.classify_image.base_patch16_224_cifar10': 'image_classifier_vit_base_patch16_224_cifar10', 'en.classify_image.base_patch16_224_finetuned_eurosat': 'image_classifier_vit_base_patch16_224_finetuned_eurosat', 'en.classify_image.base_patch16_224_finetuned_kvasirv2_colonoscopy': 'image_classifier_vit_base_patch16_224_finetuned_kvasirv2_colonoscopy', 'en.classify_image.base_patch16_224_finetuned_largerDataSet_docSeperator_more_labels_all_apache2': 'image_classifier_vit_base_patch16_224_finetuned_largerDataSet_docSeperator_more_labels_all_apache2', 'en.classify_image.base_patch16_224_finetuned_pneumothorax': 'image_classifier_vit_base_patch16_224_finetuned_pneumothorax', 'en.classify_image.base_patch16_224_in21k_aidSat': 'image_classifier_vit_base_patch16_224_in21k_aidSat', 'en.classify_image.base_patch16_224_in21k_bantai_v1': 'image_classifier_vit_base_patch16_224_in21k_bantai_v1', 'en.classify_image.base_patch16_224_in21k_classify_4scence': 'image_classifier_vit_base_patch16_224_in21k_classify_4scence', 'en.classify_image.base_patch16_224_in21k_euroSat': 'image_classifier_vit_base_patch16_224_in21k_euroSat', 'en.classify_image.base_patch16_224_in21k_finetuned_cifar10': 'image_classifier_vit_base_patch16_224_in21k_finetuned_cifar10', 'en.classify_image.base_patch16_224_in21k_snacks': 'image_classifier_vit_base_patch16_224_in21k_snacks', 'en.classify_image.base_patch16_224_in21k_ucSat': 'image_classifier_vit_base_patch16_224_in21k_ucSat', 
'en.classify_image.base_patch16_224_recylce_ft': 'image_classifier_vit_base_patch16_224_recylce_ft', 'en.classify_image.base_patch16_384': 'image_classifier_vit_base_patch16_384', 'en.classify_image.base_patch16_384.by_google': 'image_classifier_vit_base_patch16_384', 'en.classify_image.base_patch32_384': 'image_classifier_vit_base_patch32_384', 'en.classify_image.base_patch32_384.by_google': 'image_classifier_vit_base_patch32_384', 'en.classify_image.base_patch32_384_finetuned_eurosat': 'image_classifier_vit_base_patch32_384_finetuned_eurosat', 'en.classify_image.base_xray_pneumonia': 'image_classifier_vit_base_xray_pneumonia', 'en.classify_image.baseball_stadium_foods': 'image_classifier_vit_baseball_stadium_foods', 'en.classify_image.beer_vs_wine': 'image_classifier_vit_beer_vs_wine', 'en.classify_image.beer_whisky_wine_detection': 'image_classifier_vit_beer_whisky_wine_detection', 'en.classify_image.blocks': 'image_classifier_vit_blocks', 'en.classify_image.cifar10': 'image_classifier_vit_cifar10', 'en.classify_image.cifar_10_2': 'image_classifier_vit_cifar_10_2', 'en.classify_image.computer_stuff': 'image_classifier_vit_computer_stuff', 'en.classify_image.croupier_creature_classifier': 'image_classifier_vit_croupier_creature_classifier', 'en.classify_image.deit_base_patch16_224': 'image_classifier_vit_deit_base_patch16_224', 'en.classify_image.deit_base_patch16_224.by_facebook': 'image_classifier_vit_deit_base_patch16_224', 'en.classify_image.deit_flyswot': 'image_classifier_vit_deit_flyswot', 'en.classify_image.deit_small_patch16_224': 'image_classifier_vit_deit_small_patch16_224', 'en.classify_image.deit_small_patch16_224.by_facebook': 'image_classifier_vit_deit_small_patch16_224', 'en.classify_image.deit_tiny_patch16_224': 'image_classifier_vit_deit_tiny_patch16_224', 'en.classify_image.deit_tiny_patch16_224.by_facebook': 'image_classifier_vit_deit_tiny_patch16_224', 'en.classify_image.demo': 'image_classifier_vit_demo', 
'en.classify_image.denver_nyc_paris': 'image_classifier_vit_denver_nyc_paris', 'en.classify_image.diam': 'image_classifier_vit_diam', 'en.classify_image.digital': 'image_classifier_vit_digital', 'en.classify_image.dog': 'image_classifier_vit_dog', 'en.classify_image.dog_breed_classifier': 'image_classifier_vit_dog_breed_classifier', 'en.classify_image.dog_food__base_patch16_224_in21k': 'image_classifier_vit_dog_food__base_patch16_224_in21k', 'en.classify_image.dog_races': 'image_classifier_vit_dog_races', 'en.classify_image.dog_vs_chicken': 'image_classifier_vit_dog_vs_chicken', 'en.classify_image.doggos_lol': 'image_classifier_vit_doggos_lol', 'en.classify_image.dogs': 'image_classifier_vit_dogs', 'en.classify_image.dwarf_goats': 'image_classifier_vit_dwarf_goats', 'en.classify_image.electric_2': 'image_classifier_vit_electric_2', 'en.classify_image.electric_pole_type_classification': 'image_classifier_vit_electric_pole_type_classification', 'en.classify_image.ex_for_evan': 'image_classifier_vit_ex_for_evan', 'en.classify_image.exper1_mesum5': 'image_classifier_vit_exper1_mesum5', 'en.classify_image.exper2_mesum5': 'image_classifier_vit_exper2_mesum5', 'en.classify_image.exper3_mesum5': 'image_classifier_vit_exper3_mesum5', 'en.classify_image.exper4_mesum5': 'image_classifier_vit_exper4_mesum5', 'en.classify_image.exper5_mesum5': 'image_classifier_vit_exper5_mesum5', 'en.classify_image.exper6_mesum5': 'image_classifier_vit_exper6_mesum5', 'en.classify_image.exper7_mesum5': 'image_classifier_vit_exper7_mesum5', 'en.classify_image.exper_batch_16_e4': 'image_classifier_vit_exper_batch_16_e4', 'en.classify_image.exper_batch_16_e8': 'image_classifier_vit_exper_batch_16_e8', 'en.classify_image.exper_batch_32_e4': 'image_classifier_vit_exper_batch_32_e4', 'en.classify_image.exper_batch_32_e8': 'image_classifier_vit_exper_batch_32_e8', 'en.classify_image.exper_batch_8_e4': 'image_classifier_vit_exper_batch_8_e4', 'en.classify_image.exper_batch_8_e8': 
'image_classifier_vit_exper_batch_8_e8', 'en.classify_image.fancy_animales': 'image_classifier_vit_fancy_animales', 'en.classify_image.finetuned_cats_dogs': 'image_classifier_vit_finetuned_cats_dogs', 'en.classify_image.finetuned_chest_xray_pneumonia': 'image_classifier_vit_finetuned_chest_xray_pneumonia', 'en.classify_image.finetuned_eurosat_kornia': 'image_classifier_vit_finetuned_eurosat_kornia', 'en.classify_image.flowers': 'image_classifier_vit_flowers', 'en.classify_image.food': 'image_classifier_vit_food', 'en.classify_image.fruits': 'image_classifier_vit_fruits', 'en.classify_image.garbage_classification': 'image_classifier_vit_garbage_classification', 'en.classify_image.generation_xyz': 'image_classifier_vit_generation_xyz', 'en.classify_image.grain': 'image_classifier_vit_grain', 'en.classify_image.greens': 'image_classifier_vit_greens', 'en.classify_image.gtsrb_model': 'image_classifier_vit_gtsrb_model', 'en.classify_image.hot_dog_or_sandwich': 'image_classifier_vit_hot_dog_or_sandwich', 'en.classify_image.hotdog_not_hotdog': 'image_classifier_vit_hotdog_not_hotdog', 'en.classify_image.housing_categories': 'image_classifier_vit_housing_categories', 'en.classify_image.hugging_geese': 'image_classifier_vit_hugging_geese', 'en.classify_image.huggingpics_package_demo_2': 'image_classifier_vit_huggingpics_package_demo_2', 'en.classify_image.ice_cream': 'image_classifier_vit_ice_cream', 'en.classify_image.iiif_manuscript_': 'image_classifier_vit_iiif_manuscript_', 'en.classify_image.indian_snacks': 'image_classifier_vit_indian_snacks', 'en.classify_image.koala_panda_wombat': 'image_classifier_vit_koala_panda_wombat', 'en.classify_image.lawn_weeds': 'image_classifier_vit_lawn_weeds', 'en.classify_image.llama_alpaca_guanaco_vicuna': 'image_classifier_vit_llama_alpaca_guanaco_vicuna', 'en.classify_image.llama_alpaca_snake': 'image_classifier_vit_llama_alpaca_snake', 'en.classify_image.llama_or_potato': 'image_classifier_vit_llama_or_potato', 
'en.classify_image.llama_or_what': 'image_classifier_vit_llama_or_what', 'en.classify_image.lotr': 'image_classifier_vit_lotr', 'en.classify_image.lucky_model': 'image_classifier_vit_lucky_model', 'en.classify_image.lung_cancer': 'image_classifier_vit_lung_cancer', 'en.classify_image.mit_indoor_scenes': 'image_classifier_vit_mit_indoor_scenes', 'en.classify_image.modelversion01': 'image_classifier_vit_modelversion01', 'en.classify_image.modeversion1_m6_e4': 'image_classifier_vit_modeversion1_m6_e4', 'en.classify_image.modeversion1_m6_e4n': 'image_classifier_vit_modeversion1_m6_e4n', 'en.classify_image.modeversion1_m7_e4': 'image_classifier_vit_modeversion1_m7_e4', 'en.classify_image.modeversion28_7': 'image_classifier_vit_modeversion28_7', 'en.classify_image.modeversion2_m7_e8': 'image_classifier_vit_modeversion2_m7_e8', 'en.classify_image.my_bean_VIT': 'image_classifier_vit_my_bean_VIT', 'en.classify_image.new_exper3': 'image_classifier_vit_new_exper3', 'en.classify_image.new_york_tokyo_london': 'image_classifier_vit_new_york_tokyo_london', 'en.classify_image.occupation_prediction': 'image_classifier_vit_occupation_prediction', 'en.classify_image.opencampus_age_detection': 'image_classifier_vit_opencampus_age_detection', 'en.classify_image.orcs_and_friends': 'image_classifier_vit_orcs_and_friends', 'en.classify_image.oz_fauna': 'image_classifier_vit_oz_fauna', 'en.classify_image.pasta_pizza_ravioli': 'image_classifier_vit_pasta_pizza_ravioli', 'en.classify_image.pasta_shapes': 'image_classifier_vit_pasta_shapes', 'en.classify_image.places': 'image_classifier_vit_places', 'en.classify_image.planes_airlines': 'image_classifier_vit_planes_airlines', 'en.classify_image.planes_trains_automobiles': 'image_classifier_vit_planes_trains_automobiles', 'en.classify_image.platzi__base_beans_omar_espejel': 'image_classifier_vit_platzi__base_beans_omar_espejel', 'en.classify_image.pneumonia_bielefeld_dl_course': 'image_classifier_vit_pneumonia_bielefeld_dl_course', 
'en.classify_image.pneumonia_test_attempt': 'image_classifier_vit_pneumonia_test_attempt', 'en.classify_image.pond': 'image_classifier_vit_pond', 'en.classify_image.pond_image_classification_1': 'image_classifier_vit_pond_image_classification_1', 'en.classify_image.pond_image_classification_10': 'image_classifier_vit_pond_image_classification_10', 'en.classify_image.pond_image_classification_11': 'image_classifier_vit_pond_image_classification_11', 'en.classify_image.pond_image_classification_12': 'image_classifier_vit_pond_image_classification_12', 'en.classify_image.pond_image_classification_2': 'image_classifier_vit_pond_image_classification_2', 'en.classify_image.pond_image_classification_3': 'image_classifier_vit_pond_image_classification_3', 'en.classify_image.pond_image_classification_4': 'image_classifier_vit_pond_image_classification_4', 'en.classify_image.pond_image_classification_5': 'image_classifier_vit_pond_image_classification_5', 'en.classify_image.pond_image_classification_6': 'image_classifier_vit_pond_image_classification_6', 'en.classify_image.pond_image_classification_7': 'image_classifier_vit_pond_image_classification_7', 'en.classify_image.pond_image_classification_8': 'image_classifier_vit_pond_image_classification_8', 'en.classify_image.pond_image_classification_9': 'image_classifier_vit_pond_image_classification_9', 'en.classify_image.puppies_classify': 'image_classifier_vit_puppies_classify', 'en.classify_image.rare_bottle': 'image_classifier_vit_rare_bottle', 'en.classify_image.rare_puppers': 'image_classifier_vit_rare_puppers', 'en.classify_image.rare_puppers2': 'image_classifier_vit_rare_puppers2', 'en.classify_image.rare_puppers3': 'image_classifier_vit_rare_puppers3', 'en.classify_image.rare_puppers_09_04_2021': 'image_classifier_vit_rare_puppers_09_04_2021', 'en.classify_image.rare_puppers_demo': 'image_classifier_vit_rare_puppers_demo', 'en.classify_image.rare_puppers_new_auth': 'image_classifier_vit_rare_puppers_new_auth', 
'en.classify_image.resnet_50_euroSat': 'image_classifier_vit_resnet_50_euroSat', 'en.classify_image.resnet_50_ucSat': 'image_classifier_vit_resnet_50_ucSat', 'en.classify_image.road_good_damaged_condition': 'image_classifier_vit_road_good_damaged_condition', 'en.classify_image.robot2': 'image_classifier_vit_robot2', 'en.classify_image.robot22': 'image_classifier_vit_robot22', 'en.classify_image.rock_challenge_DeiT_solo': 'image_classifier_vit_rock_challenge_DeiT_solo', 'en.classify_image.rock_challenge_DeiT_solo_2': 'image_classifier_vit_rock_challenge_DeiT_solo_2', 'en.classify_image.rock_challenge_ViT_two_by_two': 'image_classifier_vit_rock_challenge_ViT_two_by_two', 'en.classify_image.roomclassifier': 'image_classifier_vit_roomclassifier', 'en.classify_image.roomidentifier': 'image_classifier_vit_roomidentifier', 'en.classify_image.rust_image_classification_1': 'image_classifier_vit_rust_image_classification_1', 'en.classify_image.rust_image_classification_10': 'image_classifier_vit_rust_image_classification_10', 'en.classify_image.rust_image_classification_11': 'image_classifier_vit_rust_image_classification_11', 'en.classify_image.rust_image_classification_12': 'image_classifier_vit_rust_image_classification_12', 'en.classify_image.rust_image_classification_2': 'image_classifier_vit_rust_image_classification_2', 'en.classify_image.rust_image_classification_3': 'image_classifier_vit_rust_image_classification_3', 'en.classify_image.rust_image_classification_4': 'image_classifier_vit_rust_image_classification_4', 'en.classify_image.rust_image_classification_5': 'image_classifier_vit_rust_image_classification_5', 'en.classify_image.rust_image_classification_6': 'image_classifier_vit_rust_image_classification_6', 'en.classify_image.rust_image_classification_7': 'image_classifier_vit_rust_image_classification_7', 'en.classify_image.rust_image_classification_8': 'image_classifier_vit_rust_image_classification_8', 'en.classify_image.rust_image_classification_9': 
'image_classifier_vit_rust_image_classification_9', 'en.classify_image.sea_mammals': 'image_classifier_vit_sea_mammals', 'en.classify_image.shirt_identifier': 'image_classifier_vit_shirt_identifier', 'en.classify_image.simple_kitchen': 'image_classifier_vit_simple_kitchen', 'en.classify_image.skin_type': 'image_classifier_vit_skin_type', 'en.classify_image.snacks': 'image_classifier_vit_snacks', 'en.classify_image.south_indian_foods': 'image_classifier_vit_south_indian_foods', 'en.classify_image.string_instrument_detector': 'image_classifier_vit_string_instrument_detector', 'en.classify_image.taco_or_what': 'image_classifier_vit_taco_or_what', 'en.classify_image.teeth_test': 'image_classifier_vit_teeth_test', 'en.classify_image.teeth_verify': 'image_classifier_vit_teeth_verify', 'en.classify_image.test': 'image_classifier_vit_test', 'en.classify_image.test_model_a': 'image_classifier_vit_test_model_a', 'en.classify_image.tiny__random': 'image_classifier_vit_tiny__random', 'en.classify_image.tiny_patch16_224': 'image_classifier_vit_tiny_patch16_224', 'en.classify_image.trainer_rare_puppers': 'image_classifier_vit_trainer_rare_puppers', 'en.classify_image.upside_down_classifier': 'image_classifier_vit_upside_down_classifier', 'en.classify_image.vc_bantai__withoutAMBI': 'image_classifier_vit_vc_bantai__withoutAMBI', 'en.classify_image.vc_bantai__withoutAMBI_adunest': 'image_classifier_vit_vc_bantai__withoutAMBI_adunest', 'en.classify_image.vc_bantai__withoutAMBI_adunest_trial': 'image_classifier_vit_vc_bantai__withoutAMBI_adunest_trial', 'en.classify_image.vc_bantai__withoutAMBI_adunest_v1': 'image_classifier_vit_vc_bantai__withoutAMBI_adunest_v1', 'en.classify_image.vc_bantai__withoutAMBI_adunest_v2': 'image_classifier_vit_vc_bantai__withoutAMBI_adunest_v2', 'en.classify_image.vc_bantai__withoutAMBI_adunest_v3': 'image_classifier_vit_vc_bantai__withoutAMBI_adunest_v3', 'en.classify_image.violation_classification_bantai_': 
'image_classifier_vit_violation_classification_bantai_', 'en.classify_image.violation_classification_bantai__v100ep': 'image_classifier_vit_violation_classification_bantai__v100ep', 'en.classify_image.violation_classification_bantai__v80ep': 'image_classifier_vit_violation_classification_bantai__v80ep', 'en.classify_image.violation_classification_bantai__withES': 'image_classifier_vit_violation_classification_bantai__withES', 'en.classify_image.vision_transformer_fmri_classification_ft': 'image_classifier_vit_vision_transformer_fmri_classification_ft', 'en.classify_image.vision_transformer_v3': 'image_classifier_vit_vision_transformer_v3', 'en.classify_image.vision_transformers_spain_or_italy_fan': 'image_classifier_vit_vision_transformers_spain_or_italy_fan', 'en.classify_image.vliegmachine': 'image_classifier_vit_vliegmachine', 'en.classify_image.where_am_I_hospital_balcony_hallway_airport_coffee_house': 'image_classifier_vit_where_am_I_hospital_balcony_hallway_airport_coffee_house', 'en.classify_image.where_am_I_hospital_balcony_hallway_airport_coffee_house_apartment_office': 'image_classifier_vit_where_am_I_hospital_balcony_hallway_airport_coffee_house_apartment_office', 'en.classify_image.world_landmarks': 'image_classifier_vit_world_landmarks', 'en.coreference.spanbert': 'spanbert_base_coref', 'en.dep': 'dependency_typed_conllu', 'en.dep.typed': 'dependency_typed_conllu', 'en.dep.typed.conllu': 'dependency_typed_conllu', 'en.dep.untyped': 'dependency_conllu', 'en.dep.untyped.conllu': 'dependency_conllu', 'en.e2e': 'multiclassifierdl_use_e2e', 'en.embed': 'glove_100d', 'en.embed.Bible_roberta_base': 'roberta_embeddings_Bible_roberta_base', 'en.embed.COVID_SciBERT': 'bert_embeddings_COVID_SciBERT', 'en.embed.DiLBERT': 'bert_embeddings_DiLBERT', 'en.embed.FinancialBERT': 'bert_embeddings_FinancialBERT', 'en.embed.SecBERT': 'bert_embeddings_SecBERT', 'en.embed.SecRoBERTa': 'roberta_embeddings_SecRoBERTa', 'en.embed.agriculture_bert_uncased': 
'bert_embeddings_agriculture_bert_uncased', 'en.embed.albert': 'albert_base_uncased', 'en.embed.albert.base_uncased': 'albert_base_uncased', 'en.embed.albert.large_uncased': 'albert_large_uncased', 'en.embed.albert.xlarge_uncased': 'albert_xlarge_uncased', 'en.embed.albert.xxlarge_uncased': 'albert_xxlarge_uncased', 'en.embed.albert_base_v1': 'albert_embeddings_albert_base_v1', 'en.embed.albert_xlarge_v1': 'albert_embeddings_albert_xlarge_v1', 'en.embed.albert_xxlarge_v1': 'albert_embeddings_albert_xxlarge_v1', 'en.embed.bert': 'bert_base_uncased', 'en.embed.bert.base': 'bert_embeddings_v_2021_base', 'en.embed.bert.base_cased': 'bert_base_cased', 'en.embed.bert.base_uncased': 'bert_base_uncased', 'en.embed.bert.base_uncased_legal': 'bert_base_uncased_legal', 'en.embed.bert.by_anferico': 'bert_embeddings_for_patents', 'en.embed.bert.by_beatrice_portelli': 'bert_embeddings_dilbert', 'en.embed.bert.by_law_ai': 'bert_embeddings_incaselawbert', 'en.embed.bert.by_philschmid': 'bert_embeddings_fin_pretrain_yiyanghkust', 'en.embed.bert.cased_base': 'bert_embeddings_jobbert_base_cased', 'en.embed.bert.cased_base.by_ayansinha': 'bert_embeddings_lic_class_scancode_base_cased_l32_1', 'en.embed.bert.cased_base.by_geotrend': 'bert_embeddings_base_en_cased', 'en.embed.bert.cased_base.by_model_attribution_challenge': 'bert_embeddings_model_attribution_challenge_base_cased', 'en.embed.bert.cased_base.by_uploaded by huggingface': 'bert_embeddings_base_cased', 'en.embed.bert.cased_large': 'bert_embeddings_large_cased', 'en.embed.bert.cased_large_whole_word_masking': 'bert_embeddings_large_cased_whole_word_masking', 'en.embed.bert.contracts.large_small_finetuned_legal': 'bert_embeddings_bert_small_finetuned_legal_contracts_larger20_5_1', 'en.embed.bert.contracts.large_small_finetuned_legal.by_muhtasham': 'bert_embeddings_bert_small_finetuned_legal_contracts_larger4010', 'en.embed.bert.contracts.small_finetuned_legal': 'bert_embeddings_bert_small_finetuned_legal_contracts10train10val', 
'en.embed.bert.contracts.uncased_base': 'bert_base_uncased_contracts', 'en.embed.bert.covid_bio_clinical.finetuned': 'bert_embeddings_bioclinicalbert_finetuned_covid_papers', 'en.embed.bert.large': 'bert_embeddings_v_2021_large', 'en.embed.bert.large_cased': 'bert_large_cased', 'en.embed.bert.large_legal_7m': 'bert_embeddings_legalbert_large_1.7m_1', 'en.embed.bert.large_legal_7m.by_pile_of_law': 'bert_embeddings_legalbert_large_1.7m_2', 'en.embed.bert.large_uncased': 'bert_large_uncased', 'en.embed.bert.legal': 'bert_embeddings_inlegalbert', 'en.embed.bert.phs': 'bert_embeddings_phs_bert', 'en.embed.bert.pubmed': 'bert_pubmed', 'en.embed.bert.pubmed.uncased': 'bert_biomed_pubmed_uncased', 'en.embed.bert.pubmed_squad2': 'bert_pubmed_squad2', 'en.embed.bert.small_L10_128': 'small_bert_L10_128', 'en.embed.bert.small_L10_256': 'small_bert_L10_256', 'en.embed.bert.small_L10_512': 'small_bert_L10_512', 'en.embed.bert.small_L10_768': 'small_bert_L10_768', 'en.embed.bert.small_L12_128': 'small_bert_L12_128', 'en.embed.bert.small_L12_256': 'small_bert_L12_256', 'en.embed.bert.small_L12_512': 'small_bert_L12_512', 'en.embed.bert.small_L12_768': 'small_bert_L12_768', 'en.embed.bert.small_L2_128': 'small_bert_L2_128', 'en.embed.bert.small_L2_256': 'small_bert_L2_256', 'en.embed.bert.small_L2_512': 'small_bert_L2_512', 'en.embed.bert.small_L2_768': 'small_bert_L2_768', 'en.embed.bert.small_L4_128': 'small_bert_L4_128', 'en.embed.bert.small_L4_256': 'small_bert_L4_256', 'en.embed.bert.small_L4_512': 'small_bert_L4_512', 'en.embed.bert.small_L4_768': 'small_bert_L4_768', 'en.embed.bert.small_L6_128': 'small_bert_L6_128', 'en.embed.bert.small_L6_256': 'small_bert_L6_256', 'en.embed.bert.small_L6_512': 'small_bert_L6_512', 'en.embed.bert.small_L6_768': 'small_bert_L6_768', 'en.embed.bert.small_L8_128': 'small_bert_L8_128', 'en.embed.bert.small_L8_256': 'small_bert_L8_256', 'en.embed.bert.small_L8_512': 'small_bert_L8_512', 'en.embed.bert.small_L8_768': 'small_bert_L8_768', 
'en.embed.bert.small_finetuned_legal': 'bert_embeddings_bert_small_finetuned_legal_definitions', 'en.embed.bert.small_finetuned_legal.by_muhtasham': 'bert_embeddings_bert_small_finetuned_legal_definitions_longer', 'en.embed.bert.tiny_finetuned_legal': 'bert_embeddings_bert_tiny_finetuned_legal_definitions', 'en.embed.bert.uncased_base': 'bert_embeddings_base_uncased', 'en.embed.bert.uncased_base.by_model_attribution_challenge': 'bert_embeddings_model_attribution_challenge_base_uncased', 'en.embed.bert.uncased_base_finetuned_legal': 'bert_embeddings_legal_bert_base_uncased_finetuned_rramicus', 'en.embed.bert.uncased_base_finetuned_legal.by_hatemestinbejaia': 'bert_embeddings_legal_bert_base_uncased_finetuned_ledgarscotus7', 'en.embed.bert.uncased_large': 'bert_embeddings_large_uncased', 'en.embed.bert.uncased_large_whole_word_masking': 'bert_embeddings_large_uncased_whole_word_masking', 'en.embed.bert.wiki_books': 'bert_wiki_books', 'en.embed.bert.wiki_books_mnli': 'bert_wiki_books_mnli', 'en.embed.bert.wiki_books_qnli': 'bert_wiki_books_qnli', 'en.embed.bert.wiki_books_qqp': 'bert_wiki_books_qqp', 'en.embed.bert.wiki_books_squad2': 'bert_wiki_books_squad2', 'en.embed.bert.wiki_books_sst2': 'bert_wiki_books_sst2', 'en.embed.bert_base_5lang_cased': 'bert_embeddings_bert_base_5lang_cased', 'en.embed.bert_base_en_cased': 'bert_embeddings_bert_base_en_cased', 'en.embed.bert_base_uncased_dstc9': 'bert_embeddings_bert_base_uncased_dstc9', 'en.embed.bert_base_uncased_mnli_sparse_70_unstructured_no_classifier': 'bert_embeddings_bert_base_uncased_mnli_sparse_70_unstructured_no_classifier', 'en.embed.bert_base_uncased_sparse_70_unstructured': 'bert_embeddings_bert_base_uncased_sparse_70_unstructured', 'en.embed.bert_for_patents': 'bert_embeddings_bert_for_patents', 'en.embed.bert_large_cased_whole_word_masking': 'bert_embeddings_bert_large_cased_whole_word_masking', 'en.embed.bert_large_uncased_whole_word_masking': 'bert_embeddings_bert_large_uncased_whole_word_masking', 
'en.embed.bert_political_election2020_twitter_mlm': 'bert_embeddings_bert_political_election2020_twitter_mlm', 'en.embed.biobert': 'biobert_pubmed_base_cased', 'en.embed.biobert.clinical_base_cased': 'biobert_clinical_base_cased', 'en.embed.biobert.discharge_base_cased': 'biobert_discharge_base_cased', 'en.embed.biobert.pmc_base_cased': 'biobert_pmc_base_cased', 'en.embed.biobert.pubmed.cased_base': 'biobert_pubmed_base_cased_v1.2', 'en.embed.biobert.pubmed_large_cased': 'biobert_pubmed_large_cased', 'en.embed.biobert.pubmed_pmc_base_cased': 'biobert_pubmed_pmc_base_cased', 'en.embed.bioformer.cased': 'bert_embeddings_bioformer_cased_v1.0', 'en.embed.chEMBL26_smiles_v2': 'roberta_embeddings_chEMBL26_smiles_v2', 'en.embed.chEMBL_smiles_v1': 'roberta_embeddings_chEMBL_smiles_v1', 'en.embed.chemical_bert_uncased': 'bert_embeddings_chemical_bert_uncased', 'en.embed.childes_bert': 'bert_embeddings_childes_bert', 'en.embed.clinical_pubmed_bert_base_128': 'bert_embeddings_clinical_pubmed_bert_base_128', 'en.embed.clinical_pubmed_bert_base_512': 'bert_embeddings_clinical_pubmed_bert_base_512', 'en.embed.covidbert': 'covidbert_large_uncased', 'en.embed.covidbert.large_uncased': 'covidbert_large_uncased', 'en.embed.crosloengual_bert': 'bert_embeddings_crosloengual_bert', 'en.embed.danbert_small_cased': 'bert_embeddings_danbert_small_cased', 'en.embed.deberta_base_uncased': 'bert_embeddings_deberta_base_uncased', 'en.embed.deberta_v3_base': 'deberta_v3_base', 'en.embed.deberta_v3_large': 'deberta_v3_large', 'en.embed.deberta_v3_small': 'deberta_v3_small', 'en.embed.deberta_v3_xsmall': 'deberta_v3_xsmall', 'en.embed.distil_bert': 'distilbert_embeddings_test_text', 'en.embed.distil_bert.finetuned': 'distilbert_embeddings_finetuned_sarcasm_classification', 'en.embed.distil_bert.uncased_base': 'distilbert_embeddings_base_uncased', 'en.embed.distil_bert.uncased_base_sparse_85_unstructured_pruneofa.by_intel': 'distilbert_embeddings_base_uncased_sparse_85_unstructured_pruneofa', 
'en.embed.distil_bert.uncased_base_sparse_90_unstructured_pruneofa.by_intel': 'distilbert_embeddings_base_uncased_sparse_90_unstructured_pruneofa', 'en.embed.distilbert': 'distilbert_base_cased', 'en.embed.distilbert.base': 'distilbert_base_cased', 'en.embed.distilbert.base.uncased': 'distilbert_base_uncased', 'en.embed.distilbert_base_en_cased': 'distilbert_embeddings_distilbert_base_en_cased', 'en.embed.distilbert_base_uncased_sparse_85_unstructured_pruneofa': 'distilbert_embeddings_distilbert_base_uncased_sparse_85_unstructured_pruneofa', 'en.embed.distilbert_base_uncased_sparse_90_unstructured_pruneofa': 'distilbert_embeddings_distilbert_base_uncased_sparse_90_unstructured_pruneofa', 'en.embed.distilroberta': 'distilroberta_base', 'en.embed.distilroberta_base': 'roberta_embeddings_distilroberta_base', 'en.embed.distilroberta_base_climate_d': 'roberta_embeddings_distilroberta_base_climate_d', 'en.embed.distilroberta_base_climate_d_s': 'roberta_embeddings_distilroberta_base_climate_d_s', 'en.embed.distilroberta_base_climate_f': 'roberta_embeddings_distilroberta_base_climate_f', 'en.embed.distilroberta_base_finetuned_jira_qt_issue_title': 'roberta_embeddings_distilroberta_base_finetuned_jira_qt_issue_title', 'en.embed.distilroberta_base_finetuned_jira_qt_issue_titles_and_bodies': 'roberta_embeddings_distilroberta_base_finetuned_jira_qt_issue_titles_and_bodies', 'en.embed.e': 'bert_biolink_base', 'en.embed.electra': 'electra_small_uncased', 'en.embed.electra.base': 'electra_embeddings_electra_base_generator', 'en.embed.electra.base_uncased': 'electra_base_uncased', 'en.embed.electra.large': 'electra_embeddings_electra_large_generator', 'en.embed.electra.large_uncased': 'electra_large_uncased', 'en.embed.electra.medical': 'electra_medal_acronym', 'en.embed.electra.small': 'electra_embeddings_electra_small_generator', 'en.embed.electra.small_uncased': 'electra_small_uncased', 'en.embed.elmo': 'elmo', 'en.embed.fairlex_ecthr_minilm': 
'roberta_embeddings_fairlex_ecthr_minilm', 'en.embed.fairlex_scotus_minilm': 'roberta_embeddings_fairlex_scotus_minilm', 'en.embed.false_positives_scancode_bert_base_uncased_L8_1': 'bert_embeddings_false_positives_scancode_bert_base_uncased_L8_1', 'en.embed.finbert_pretrain_yiyanghkust': 'bert_embeddings_finbert_pretrain_yiyanghkust', 'en.embed.finest_bert': 'bert_embeddings_finest_bert', 'en.embed.ge': 'bert_biolink_large', 'en.embed.glove': 'glove_100d', 'en.embed.glove.100d': 'glove_100d', 'en.embed.hateBERT': 'bert_embeddings_hateBERT', 'en.embed.legal.osf_lemmatized_legal': 'word2vec_osf_lemmatized_legal', 'en.embed.legal.osf_raw_legal': 'word2vec_osf_raw_legal', 'en.embed.legal.osf_replaced_lemmatized_legal': 'word2vec_osf_replaced_lemmatized_legal', 'en.embed.legal.osf_replaced_raw_legal': 'word2vec_osf_replaced_raw_legal', 'en.embed.legal_bert_base_uncased': 'bert_embeddings_legal_bert_base_uncased', 'en.embed.legal_bert_small_uncased': 'bert_embeddings_legal_bert_small_uncased', 'en.embed.legal_roberta_base': 'roberta_embeddings_legal_roberta_base', 'en.embed.legalbert.legal.by_zlucia': 'bert_embeddings_legalbert', 'en.embed.legalbert.legal.custom.by_zlucia': 'bert_embeddings_custom_legalbert', 'en.embed.lic_class_scancode_bert_base_cased_L32_1': 'bert_embeddings_lic_class_scancode_bert_base_cased_L32_1', 'en.embed.longformer': 'longformer_base_4096', 'en.embed.longformer.base_legal': 'legal_longformer_base', 'en.embed.longformer.clinical': 'clinical_longformer', 'en.embed.longformer.large': 'longformer_large_4096', 'en.embed.muppet_roberta_base': 'roberta_embeddings_muppet_roberta_base', 'en.embed.muppet_roberta_large': 'roberta_embeddings_muppet_roberta_large', 'en.embed.muril_adapted_local': 'bert_embeddings_muril_adapted_local', 'en.embed.netbert': 'bert_embeddings_netbert', 'en.embed.pmc_med_bio_mlm_roberta_large': 'roberta_embeddings_pmc_med_bio_mlm_roberta_large', 'en.embed.pos.uncased_base': 
'bert_embeddings_false_positives_scancode_base_uncased_l8_1', 'en.embed.psych_search': 'bert_embeddings_psych_search', 'en.embed.roberta': 'roberta_base', 'en.embed.roberta.base': 'roberta_base', 'en.embed.roberta.base.by_model_attribution_challenge': 'roberta_embeddings_model_attribution_challenge_base', 'en.embed.roberta.base_finetuned': 'roberta_embeddings_ruperta_base_finetuned_spa_constitution', 'en.embed.roberta.base_legal': 'roberta_embeddings_legal_base', 'en.embed.roberta.cord19.1m': 'roberta_embeddings_cord19_1m7k', 'en.embed.roberta.distilled_base': 'roberta_embeddings_distil_base', 'en.embed.roberta.financial': 'roberta_embeddings_financial', 'en.embed.roberta.large': 'roberta_large', 'en.embed.roberta_pubmed': 'roberta_embeddings_roberta_pubmed', 'en.embed.scibert.cord19_scibert.finetuned': 'bert_embeddings_scibert_scivocab_finetuned_cord19', 'en.embed.scibert.covid_scibert.': 'bert_embeddings_covid_scibert', 'en.embed.sec_bert_base': 'bert_embeddings_sec_bert_base', 'en.embed.sec_bert_num': 'bert_embeddings_sec_bert_num', 'en.embed.sec_bert_sh': 'bert_embeddings_sec_bert_sh', 'en.embed.w2v_cc_300d': 'w2v_cc_300d', 'en.embed.word2vec.gigaword': 'word2vec_gigaword_300', 'en.embed.word2vec.gigaword_wiki': 'word2vec_gigaword_wiki_300', 'en.embed.xlmr_roberta': 'xlmroberta_embeddings_litlat_bert', 'en.embed.xlnet': 'xlnet_base_cased', 'en.embed.xlnet_base_cased': 'xlnet_base_cased', 'en.embed.xlnet_large_cased': 'xlnet_large_cased', 'en.embed_chunk': 'chunk_embeddings', 'en.embed_sentence': 'tfhub_use', 'en.embed_sentence.albert': 'albert_base_uncased', 'en.embed_sentence.bert': 'sent_bert_base_uncased', 'en.embed_sentence.bert.base_uncased_legal': 'sent_bert_base_uncased_legal', 'en.embed_sentence.bert.finetuned': 'sbert_setfit_finetuned_financial_text_classification', 'en.embed_sentence.bert.pubmed': 'sent_bert_pubmed', 'en.embed_sentence.bert.pubmed_squad2': 'sent_bert_pubmed_squad2', 'en.embed_sentence.bert.wiki_books': 'sent_bert_wiki_books', 
'en.embed_sentence.bert.wiki_books_mnli': 'sent_bert_wiki_books_mnli', 'en.embed_sentence.bert.wiki_books_qnli': 'sent_bert_wiki_books_qnli', 'en.embed_sentence.bert.wiki_books_qqp': 'sent_bert_wiki_books_qqp', 'en.embed_sentence.bert.wiki_books_squad2': 'sent_bert_wiki_books_squad2', 'en.embed_sentence.bert.wiki_books_sst2': 'sent_bert_wiki_books_sst2', 'en.embed_sentence.bert_base_cased': 'sent_bert_base_cased', 'en.embed_sentence.bert_base_uncased': 'sent_bert_base_uncased', 'en.embed_sentence.bert_large_cased': 'sent_bert_large_cased', 'en.embed_sentence.bert_large_uncased': 'sent_bert_large_uncased', 'en.embed_sentence.bert_use_cmlm_en_base': 'sent_bert_use_cmlm_en_base', 'en.embed_sentence.bert_use_cmlm_en_large': 'sent_bert_use_cmlm_en_large', 'en.embed_sentence.biobert.clinical_base_cased': 'sent_biobert_clinical_base_cased', 'en.embed_sentence.biobert.discharge_base_cased': 'sent_biobert_discharge_base_cased', 'en.embed_sentence.biobert.pmc_base_cased': 'sent_biobert_pmc_base_cased', 'en.embed_sentence.biobert.pubmed_base_cased': 'sent_biobert_pubmed_base_cased', 'en.embed_sentence.biobert.pubmed_large_cased': 'sent_biobert_pubmed_large_cased', 'en.embed_sentence.biobert.pubmed_pmc_base_cased': 'sent_biobert_pubmed_pmc_base_cased', 'en.embed_sentence.covidbert.large_uncased': 'sent_covidbert_large_uncased', 'en.embed_sentence.distil_roberta.distilled_base': 'sent_distilroberta_base', 'en.embed_sentence.doc2vec': 'doc2vec_gigaword_300', 'en.embed_sentence.doc2vec.gigaword_300': 'doc2vec_gigaword_300', 'en.embed_sentence.doc2vec.gigaword_wiki_300': 'doc2vec_gigaword_wiki_300', 'en.embed_sentence.electra': 'sent_electra_small_uncased', 'en.embed_sentence.electra_base_uncased': 'sent_electra_base_uncased', 'en.embed_sentence.electra_large_uncased': 'sent_electra_large_uncased', 'en.embed_sentence.electra_small_uncased': 'sent_electra_small_uncased', 'en.embed_sentence.roberta.base': 'sent_roberta_base', 'en.embed_sentence.roberta.large': 'sent_roberta_large', 
'en.embed_sentence.small_bert_L10_128': 'sent_small_bert_L10_128', 'en.embed_sentence.small_bert_L10_256': 'sent_small_bert_L10_256', 'en.embed_sentence.small_bert_L10_512': 'sent_small_bert_L10_512', 'en.embed_sentence.small_bert_L10_768': 'sent_small_bert_L10_768', 'en.embed_sentence.small_bert_L12_128': 'sent_small_bert_L12_128', 'en.embed_sentence.small_bert_L12_256': 'sent_small_bert_L12_256', 'en.embed_sentence.small_bert_L12_512': 'sent_small_bert_L12_512', 'en.embed_sentence.small_bert_L12_768': 'sent_small_bert_L12_768', 'en.embed_sentence.small_bert_L2_128': 'sent_small_bert_L2_128', 'en.embed_sentence.small_bert_L2_256': 'sent_small_bert_L2_256', 'en.embed_sentence.small_bert_L2_512': 'sent_small_bert_L2_512', 'en.embed_sentence.small_bert_L2_768': 'sent_small_bert_L2_768', 'en.embed_sentence.small_bert_L4_128': 'sent_small_bert_L4_128', 'en.embed_sentence.small_bert_L4_256': 'sent_small_bert_L4_256', 'en.embed_sentence.small_bert_L4_512': 'sent_small_bert_L4_512', 'en.embed_sentence.small_bert_L4_768': 'sent_small_bert_L4_768', 'en.embed_sentence.small_bert_L6_128': 'sent_small_bert_L6_128', 'en.embed_sentence.small_bert_L6_256': 'sent_small_bert_L6_256', 'en.embed_sentence.small_bert_L6_512': 'sent_small_bert_L6_512', 'en.embed_sentence.small_bert_L6_768': 'sent_small_bert_L6_768', 'en.embed_sentence.small_bert_L8_128': 'sent_small_bert_L8_128', 'en.embed_sentence.small_bert_L8_256': 'sent_small_bert_L8_256', 'en.embed_sentence.small_bert_L8_512': 'sent_small_bert_L8_512', 'en.embed_sentence.small_bert_L8_768': 'sent_small_bert_L8_768', 'en.embed_sentence.tfhub_use': 'tfhub_use', 'en.embed_sentence.tfhub_use.lg': 'tfhub_use_lg', 'en.embed_sentence.use': 'tfhub_use', 'en.embed_sentence.use.lg': 'tfhub_use_lg', 'en.generate.gpt2.large': 'gpt_large', 'en.glove': 'glove_100d', 'en.gpt2': 'gpt2', 'en.gpt2.distilled': 'gpt2_distilled', 'en.gpt2.large': 'gpt_large', 'en.gpt2.medium': 'gpt2_medium', 'en.lemma': 'lemma_lines', 'en.lemma.antbnc': 'lemma_antbnc', 
'en.lemma.atis': 'lemma_atis', 'en.lemma.esl': 'lemma_esl', 'en.lemma.ewt': 'lemma_ewt', 'en.lemma.gum': 'lemma_gum', 'en.lemma.lines': 'lemma_lines', 'en.lemma.partut': 'lemma_partut', 'en.lemma.spacylookup': 'lemma_spacylookup', 'en.ner.airline': 'nerdl_atis_840b_300d', 'en.ner.aspect.airline': 'nerdl_atis_840b_300d', 'en.ner.aspect.atis': 'nerdl_atis_840b_300d', 'en.ner.atis': 'nerdl_atis_840b_300d', 'en.ner.bert.accelerate.by_alexander_learn': 'bert_ner_alexander_learn_bert_finetuned_ner_accelerate', 'en.ner.bert.accelerate.by_artemis13fowl': 'bert_ner_artemis13fowl_bert_finetuned_ner_accelerate', 'en.ner.bert.accelerate.by_peterhsu': 'bert_ner_peterhsu_bert_finetuned_ner_accelerate', 'en.ner.bert.adr.by_ytsai25': 'bert_ner_ytsai25_bert_finetuned_ner_adr', 'en.ner.bert.airi..by_airi': 'bert_ner_airi_bert_finetuned_ner', 'en.ner.bert.ard.by_cwan6830': 'bert_ner_cwan6830_bert_finetuned_ard', 'en.ner.bert.artemis13fowl.by_artemis13fowl': 'bert_ner_artemis13fowl_bert_finetuned_ner', 'en.ner.bert.base': 'bert_ner_far50brbert_base', 'en.ner.bert.base.by_adwayk': 'bert_ner_base_bert_tuned_on_tac2017_as_ner', 'en.ner.bert.base.by_biasedai': 'bert_ner_bert_based_ner', 'en.ner.bert.base.by_brad1141': 'bert_ner_baseline_bertv3', 'en.ner.bert.base.by_leonweber': 'bert_ner_bunsen_base_best', 'en.ner.bert.base.by_nbailab': 'bert_ner_nb_bert_base_ner', 'en.ner.bert.base.by_rexhaif': 'bert_ner_rubert_base_srl_seqlabeling', 'en.ner.bert.base.by_wanjiru': 'bert_ner_ag_based_ner', 'en.ner.bert.base.tcm_0.5.by_ricardo_filho': 'bert_ner_bert_base_tcm_0.5', 'en.ner.bert.base.tcm_0.6.by_ricardo_filho': 'bert_ner_bert_base_tcm_0.6', 'en.ner.bert.base.tcm_0.7.by_ricardo_filho': 'bert_ner_bert_base_tcm_0.7', 'en.ner.bert.base.tcm_0.8.by_ricardo_filho': 'bert_ner_bert_base_tcm_0.8', 'en.ner.bert.base.tcm_no_objeto_0.8.by_ricardo_filho': 'bert_ner_bert_base_tcm_no_objeto_0.8', 'en.ner.bert.base.tcm_teste.by_ricardo_filho': 'bert_ner_bert_base_tcm_teste', 'en.ner.bert.base_finetuned': 
'bert_ner_bert_base_ner_finetuned_ner_isu', 'en.ner.bert.base_finetuned.by_deval': 'bert_ner_deval_bert_base_ner_finetuned_ner', 'en.ner.bert.base_finetuned.by_mcdzwil': 'bert_ner_mcdzwil_bert_base_ner_finetuned_ner', 'en.ner.bert.bc5cdr_bluebert.original_disease': 'bert_ner_original_bluebert_bc5cdr_disease', 'en.ner.bert.bc5cdr_ncbi.disease': 'bert_token_classifier_ncbi_bc5cdr_disease', 'en.ner.bert.bgc_accession.by_maaly': 'bert_ner_bgc_accession', 'en.ner.bert.bio_medical.base': 'bert_ner_mbert_base_biomedical_ner', 'en.ner.bert.biomuppet.by_leonweber': 'bert_ner_biomuppet', 'en.ner.bert.bluebert.': 'bert_ner_wlt_bluebert_linnaeus', 'en.ner.bert.bluebert.512d_modified': 'bert_ner_bionlp13_modified_bluebert_512', 'en.ner.bert.bluebert.original': 'bert_ner_original_bluebert_linnaeus', 'en.ner.bert.bluebert_bc2gm.original': 'bert_ner_original_bluebert_bc2gm', 'en.ner.bert.bluebert_ncbi.': 'bert_ner_orignial_bluebert_ncbi', 'en.ner.bert.bluebert_ncbi.by_ghadeermobasher': 'bert_ner_wlt_bluebert_ncbi', 'en.ner.bert.body_site.by_maaly': 'bert_ner_body_site', 'en.ner.bert.by_abnv15': 'bert_ner_mlma', 'en.ner.bert.by_adamlin': 'bert_ner_adamlin_recipe_tag_model', 'en.ner.bert.by_aleksandar': 'bert_ner_bert_srb_ner_setimes', 'en.ner.bert.by_alexbrandsen': 'bert_ner_archeobertje_ner', 'en.ner.bert.by_alichte': 'bert_ner_tg_relation_model', 'en.ner.bert.by_andrija': 'bert_ner_m_bert_ner', 'en.ner.bert.by_avialfont': 'bert_ner_ner_dummy_model', 'en.ner.bert.by_bigscience_biomedical': 'bert_ner_bigbio_mtl', 'en.ner.bert.by_blckwdw61': 'bert_ner_sysformbatches2acs', 'en.ner.bert.by_browndw': 'bert_ner_docusco_bert', 'en.ner.bert.by_ckauth': 'bert_ner_ck_ner_subgroup', 'en.ner.bert.by_clairev': 'bert_ner_mlma_lab8', 'en.ner.bert.by_connorboyle': 'bert_ner_bert_ner_i2b2', 'en.ner.bert.by_danielmantisnlp': 'bert_ner_autotrain_oms_ner_bi_1044135953', 'en.ner.bert.by_deeq': 'bert_ner_dbert_ner', 'en.ner.bert.by_ehsanyb': 'bert_ner_bert_ehsan_ner_accelerate', 
'en.ner.bert.by_fagner': 'bert_ner_envoy', 'en.ner.bert.by_fgravelaine': 'bert_ner_ner_test', 'en.ner.bert.by_gauravnuti': 'bert_ner_agro_ner', 'en.ner.bert.by_icelab': 'bert_ner_spacebert_cr', 'en.ner.bert.by_justadvancetechonology': 'bert_ner_bert_fine_tuned_medical_insurance_ner', 'en.ner.bert.by_kevinjesse': 'bert_ner_bert_mt4ts', 'en.ner.bert.by_kleinay': 'bert_ner_nominalization_candidate_classifier', 'en.ner.bert.by_kunalr63': 'bert_ner_simple_transformer', 'en.ner.bert.by_lauler': 'bert_ner_deformer', 'en.ner.bert.by_leemeng': 'bert_ner_core_term_ner_v1', 'en.ner.bert.by_lewtun': 'bert_ner_autotrain_acronym_identification_7324788', 'en.ner.bert.by_media1129': 'bert_ner_keyword_tag_model', 'en.ner.bert.by_mirikwa': 'bert_ner_gro_ner_2', 'en.ner.bert.by_myorg123': 'bert_ner_tinparadox_job_search', 'en.ner.bert.by_ncats': 'bert_ner_epiextract4gard_v1', 'en.ner.bert.by_nguyenkhoa2407': 'bert_token_classifier_autotrain_ner_favsbot', 'en.ner.bert.by_nila_yuki': 'bert_ner_final_lab', 'en.ner.bert.by_noahjadallah': 'bert_ner_cause_effect_detection', 'en.ner.bert.by_obi': 'bert_ner_deid_bert_i2b2', 'en.ner.bert.by_pucpr': 'bert_ner_ehelpbertpt', 'en.ner.bert.by_rdchambers': 'bert_ner_bert_finetuned_filler_2', 'en.ner.bert.by_rj3vans': 'bert_ner_ssmnspantagger', 'en.ner.bert.by_rohanvb': 'bert_ner_umlsbert_ner', 'en.ner.bert.by_rostlab': 'bert_ner_prot_bert_bfd_ss3', 'en.ner.bert.by_satyaalmasian': 'bert_ner_temporal_tagger_bert_tokenclassifier', 'en.ner.bert.by_schnell': 'bert_ner_wakaformer', 'en.ner.bert.by_shenzy2': 'bert_token_classifier_autotrain_tk_1181244086', 'en.ner.bert.by_skolkovoinstitute': 'bert_token_classifier_lewip_informal_tagger', 'en.ner.bert.by_superman': 'bert_ner_testingmodel', 'en.ner.bert.by_tartunlp': 'bert_ner_estbert_ner', 'en.ner.bert.by_varsha12': 'bert_ner_bert_dnrti', 'en.ner.bert.by_vera_pro': 'bert_ner_bert_mention_en_vera_pro', 'en.ner.bert.by_wanjiru': 'bert_token_classifier_autotrain_gro_ner', 'en.ner.bert.by_wzkariampuzha': 
'bert_ner_epiextract4gard', 'en.ner.bert.by_yanekyuk': 'bert_ner_bert_keyword_extractor', 'en.ner.bert.by_yihahn': 'bert_ner_ner_2006', 'en.ner.bert.by_ytsai25': 'bert_ner_ytsai25_bert_finetuned_ner', 'en.ner.bert.by_zhuzhusleepearly': 'bert_ner_bert_finetuned', 'en.ner.bert.cased': 'bert_ner_bert_ner_cased_sonar1_nld', 'en.ner.bert.cased.by_yanekyuk': 'bert_token_classifier_cased_keyword_discriminator', 'en.ner.bert.cased_base': 'bert_ner_bert_base_cased_chunking', 'en.ner.bert.cased_base.by_qcri': 'bert_ner_bert_base_cased_sem', 'en.ner.bert.cased_base_original': 'bert_ner_hiner_original_muril_base_cased', 'en.ner.bert.cased_large_finetuned': 'bert_ner_bert_large_cased_finetuned_ner', 'en.ner.bert.cased_multilingual_base': 'bert_ner_bert_base_multilingual_cased_sem_english', 'en.ner.bert.cased_multilingual_base_finetuned': 'bert_ner_bert_base_multilingual_cased_finetuned_sonar_ner', 'en.ner.bert.cased_multilingual_base_finetuned.by_wietsedv': 'bert_ner_bert_base_multilingual_cased_finetuned_udlassy_ner', 'en.ner.bert.chemical_bc5cdr_bluebert.512d_modified': 'bert_ner_bc5cdr_chem_modified_bluebert_512', 'en.ner.bert.chemical_bc5cdr_bluebert.512d_original': 'bert_ner_bc5cdr_chem_original_bluebert_512', 'en.ner.bert.chemical_bc5cdr_bluebert.original': 'bert_ner_bc5cdr_chem_original_bluebert_384', 'en.ner.bert.chemical_bc5cdr_bluebert.original.by_ghadeermobasher': 'bert_ner_original_bluebert_bc5cdr_chemical', 'en.ner.bert.chemical_biored_bluebert.512d_modified': 'bert_ner_modified_bluebert_biored_chem_512_5_30', 'en.ner.bert.chemical_biored_bluebert.512d_original': 'bert_ner_original_bluebert_biored_chem_512_5_30', 'en.ner.bert.chemical_biored_bluebert.original': 'bert_ner_original_bluebert_biored_chem', 'en.ner.bert.chemical_bluebert.512d_modified': 'bert_ner_bc4chemd_chem_modified_bluebert_512', 'en.ner.bert.chemical_bluebert.512d_original': 'bert_ner_bc4chemd_chem_original_bluebert_512', 'en.ner.bert.chemical_bluebert.modified': 
'bert_ner_bc4chemd_chem_modified_bluebert_384', 'en.ner.bert.chemical_bluebert.original': 'bert_ner_bc4chemd_chem_original_bluebert_384', 'en.ner.bert.chemical_bluebert.original.by_ghadeermobasher': 'bert_ner_original_bluebert_bc4chemd', 'en.ner.bert.chemical_bluebert_bionlp13cg.512d_original': 'bert_ner_bionlp13cg_chem_chem_original_bluebert_512', 'en.ner.bert.chemical_bluebert_bionlp13cg.original': 'bert_ner_bionlp13cg_chem_chem_original_bluebert_384', 'en.ner.bert.chemical_pubmed_bc5cdr.modified': 'bert_ner_bc5cdr_chem_modified_pubmed_abstract_3', 'en.ner.bert.chemical_pubmed_bc5cdr_bluebert.uncased_12l_768d_a12a_768d_imbalanced': 'bert_ner_bc5cdr_chemical_imbalanced_bluebert_pubmed_uncased_l_12_h_768_a_12_latest', 'en.ner.bert.chemical_pubmed_bc5cdr_bluebert.uncased_12l_768d_a12a_768d_modified': 'bert_ner_bc5cdr_chem_modified_bluebert_pubmed_uncased_l_12_h_768_a_12_latest', 'en.ner.bert.chemical_pubmed_bc5cdr_full.modified.by_ghadeermobasher': 'bert_ner_bc5cdr_chem_modified_pubmed_full_3', 'en.ner.bert.chemical_pubmed_bc5cdr_latest.modified.by_ghadeermobasher': 'bert_ner_bc5cdr_chem_modified_pubmed_abstract_latest', 'en.ner.bert.chemical_pubmed_bionlp13cg.modified': 'bert_ner_bionlp13cg_chem_modified_pubmedabstract_latest', 'en.ner.bert.chemical_pubmed_craft_bluebert.uncased_12l_768d_a12a_768d_modified': 'bert_ner_craft_chem_modified_bluebert_pubmed_uncased_l_12_h_768_a_12', 'en.ner.bert.chemical_pubmed_craft_bluebert.uncased_12l_768d_a12a_768d_original': 'bert_ner_craft_chem_original_bluebert_pubmed_uncased_l_12_h_768_a_12', 'en.ner.bert.clinical.cased': 'bert_ner_nlp_cic_wfu_clinical_cases_ner_paragraph_tokenized_mbert_cased_fine_tuned', 'en.ner.bert.clinical.cased.by_ajtamayoh': 'bert_ner_nlp_cic_wfu_clinical_cases_ner_mbert_cased_fine_tuned', 'en.ner.bert.clinical.cased_sents_tokenized.by_ajtamayoh': 'bert_ner_nlp_cic_wfu_clinical_cases_ner_sents_tokenized_mbert_cased_fine_tuned', 'en.ner.bert.clinical.uncased_base': 
'bert_ner_bert_base_uncased_clinical_ner', 'en.ner.bert.clinical_chemical_pubmed.modified': 'bert_ner_bc4chemd_modified_pubmed_clinical', 'en.ner.bert.co_imb.by_ghadeermobasher': 'bert_ner_model_co_imb', 'en.ner.bert.codeswitch_nepeng_lid_lince.by_sagorsarker': 'bert_ner_codeswitch_nepeng_lid_lince', 'en.ner.bert.codeswitch_spaeng_lid_lince.by_sagorsarker': 'bert_ner_codeswitch_spaeng_lid_lince', 'en.ner.bert.codeswitch_spaeng_ner_lince.by_sagorsarker': 'bert_ner_codeswitch_spaeng_ner_lince', 'en.ner.bert.col_mod.by_ghadeermobasher': 'bert_ner_model_col_mod', 'en.ner.bert.conll.': 'bert_ner_ner_conll2003', 'en.ner.bert.conll.base': 'bert_ner_bert_base_ner', 'en.ner.bert.conll.by_ramybaly': 'bert_ner_ner_conll2003', 'en.ner.bert.conll.cased': 'bert_ner_bert_ner_cased_conll2002_nld', 'en.ner.bert.conll.cased_base': 'bert_ner_kamalkraj_bert_base_cased_ner_conll2003', 'en.ner.bert.conll.cased_base.by_StivenLancheros': 'bert_ner_mbert_base_cased_ner_conll', 'en.ner.bert.conll.cased_large_finetuned': 'bert_ner_dbmdz_bert_large_cased_finetuned_conll03_english', 'en.ner.bert.conll.cased_large_finetuned.by_imvladikon': 'bert_ner_imvladikon_bert_large_cased_finetuned_conll03_english', 'en.ner.bert.conll.cased_large_tiny_finetuned': 'bert_ner_tiny_dbmdz_bert_large_cased_finetuned_conll03_english', 'en.ner.bert.conll.cased_multilingual_base_finetuned': 'bert_ner_bert_base_multilingual_cased_finetuned_conll2002_ner', 'en.ner.bert.conll.distilled': 'bert_ner_importsmart_bert_to_distilbert_ner', 'en.ner.bert.conll.distilled.by_kushaljoseph': 'bert_ner_kushaljoseph_bert_to_distilbert_ner', 'en.ner.bert.conll.finetuned': 'bert_ner_ajgp_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_AlexanderPeter': 'bert_ner_alexanderpeter_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_Aneela': 'bert_ner_aneela_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_Buntan': 'bert_ner_buntan_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_DaveMSE': 
'bert_ner_davemse_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_Emmanuel': 'bert_ner_emmanuel_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_Ghost1': 'bert_ner_bert_finetuned_ner3', 'en.ner.bert.conll.finetuned.by_JoanTirant': 'bert_ner_joantirant_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_Laure996': 'bert_ner_laure996_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_Mascariddu8': 'bert_ner_mascariddu8_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_Neulvo': 'bert_ner_neulvo_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_Nonzerophilip': 'bert_ner_nonzerophilip_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_RayMelius': 'bert_ner_raymelius_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_SebastianS': 'bert_ner_sebastians_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_Wende': 'bert_ner_wende_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_Yv': 'bert_ner_yv_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_aditya22': 'bert_ner_aditya22_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_alwaysgetbetter': 'bert_ner_alwaysgetbetter_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_amir36': 'bert_ner_amir36_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_animalthemuppet': 'bert_ner_animalthemuppet_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_awilli': 'bert_ner_awilli_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_batya66': 'bert_ner_batya66_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_brjezierski': 'bert_ner_brjezierski_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_buehlpa': 'bert_ner_buehlpa_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_butchland': 'bert_ner_butchland_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_chandrasutrisnotjhong': 'bert_ner_chandrasutrisnotjhong_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_dani_91': 'bert_ner_dani_91_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_datauma': 'bert_ner_datauma_bert_finetuned_ner', 
'en.ner.bert.conll.finetuned.by_dpuccine': 'bert_ner_dpuccine_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_dsghrg': 'bert_ner_dsghrg_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_dshvadskiy': 'bert_ner_dshvadskiy_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_fancyerii': 'bert_ner_fancyerii_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_huggingface_course': 'bert_ner_huggingface_course_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_jatinshah': 'bert_ner_jatinshah_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_jdang': 'bert_ner_jdang_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_kurama': 'bert_ner_kurama_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_kurianbenoy': 'bert_ner_kurianbenoy_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_leander': 'bert_ner_leander_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_lewtun': 'bert_ner_lewtun_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_mateocolina': 'bert_ner_mateocolina_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_mbateman': 'bert_ner_mbateman_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_mdroth': 'bert_ner_mdroth_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_michojan': 'bert_ner_michojan_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_mldev': 'bert_ner_mldev_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_ncduy': 'bert_ner_ncduy_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_peterhsu': 'bert_ner_peterhsu_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_phijve': 'bert_ner_phijve_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_romainlhardy': 'bert_ner_romainlhardy_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_russellc': 'bert_ner_russellc_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_sagerpascal': 'bert_ner_sagerpascal_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_siegelou': 'bert_ner_siegelou_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_spasis': 
'bert_ner_spasis_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_stefan_jo': 'bert_ner_stefan_jo_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_suonbo': 'bert_ner_suonbo_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_tolgahanturker': 'bert_ner_tolgahanturker_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_vikasaeta': 'bert_ner_vikasaeta_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_xkang': 'bert_ner_xkang_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_yannis95': 'bert_ner_yannis95_bert_finetuned_ner', 'en.ner.bert.conll.finetuned.by_ysharma': 'bert_ner_ysharma_bert_finetuned_ner', 'en.ner.bert.conll.finetuned_ubb_conll.by_BeardedJohn': 'bert_ner_bert_finetuned_ner_ubb_conll', 'en.ner.bert.conll.finetuned_ubb_conll_endava_only_misc.by_BeardedJohn': 'bert_ner_bert_finetuned_ner_ubb_conll_endava_only_misc', 'en.ner.bert.conll.finetuned_ubb_endava_conll.by_BeardedJohn': 'bert_ner_bert_finetuned_ner_ubb_endava_conll', 'en.ner.bert.conll.finetuned_v2.by_Wende': 'bert_ner_bert_finetuned_ner1', 'en.ner.bert.conll.large': 'bert_ner_bert_large_ner', 'en.ner.bert.conll.large.by_51la5': 'bert_token_classifier_large_ner', 'en.ner.bert.conll.uncased_large_finetuned': 'bert_ner_bert_large_uncased_finetuned_ner', 'en.ner.bert.conll.v2': 'bert_ner_conll12v2', 'en.ner.bert.conll.v2.by_Xiaoman': 'bert_ner_ner_conll2003_v2', 'en.ner.bert.conll.v3.by_xiaoman': 'bert_ner_ner_conll2003_v3', 'en.ner.bert.conll.v4.by_xiaoman': 'bert_ner_ner_conll2003_v4', 'en.ner.bert.craft_bluebert.512d_modified': 'bert_ner_craft_modified_bluebert_512', 'en.ner.bert.craft_bluebert.512d_original': 'bert_ner_craft_original_bluebert_512', 'en.ner.bert.craft_bluebert.modified': 'bert_ner_craft_modified_bluebert_384', 'en.ner.bert.craft_bluebert.original': 'bert_ner_craft_original_bluebert_384', 'en.ner.bert.custom.by_jiaxin97': 'bert_ner_bert_finetuned_ner_custom', 'en.ner.bert.defector.by_lucifermorninstar011': 'bert_ner_autotrain_defector_ner_multi_847927015', 
'en.ner.bert.degree_major_ner_1000.by_pkushiqiang': 'bert_ner_bert_degree_major_ner_1000', 'en.ner.bert.designtutor.by_shenzy2': 'bert_token_classifier_ner4designtutor', 'en.ner.bert.disease': 'bert_ner_ck_ner_disease', 'en.ner.bert.distilled': 'bert_ner_distilbert_jur', 'en.ner.bert.distilled.by_kaushalkhator': 'bert_ner_kaushalkhator_bert_to_distilbert_ner', 'en.ner.bert.distilled_cased_base_tiny': 'bert_ner_tiny_distilbert_base_cased', 'en.ner.bert.distilled_finetuned': 'bert_ner_distilbert_finetuned_ner', 'en.ner.bert.final_784824206.by_lucifermorningstar011': 'bert_token_classifier_autotrain_final_784824206', 'en.ner.bert.final_784824213.by_lucifermorningstar011': 'bert_token_classifier_autotrain_final_784824213', 'en.ner.bert.finetuned': 'bert_ner_alekseykorshuk_bert_finetuned_ner', 'en.ner.bert.finetuned.by_aalogan': 'bert_ner_aalogan_bert_finetuned_ner', 'en.ner.bert.finetuned.by_anarise1': 'bert_ner_anarise1_bert_finetuned_ner', 'en.ner.bert.finetuned.by_anery': 'bert_ner_anery_bert_finetuned_ner', 'en.ner.bert.finetuned.by_balamurugan1603': 'bert_ner_balamurugan1603_bert_finetuned_ner', 'en.ner.bert.finetuned.by_binay1999': 'bert_ner_binay1999_bert_finetuned_ner', 'en.ner.bert.finetuned.by_brad1141': 'bert_ner_bert_finetuned_comp2', 'en.ner.bert.finetuned.by_canlinzhang': 'bert_ner_canlinzhang_bert_finetuned_ner', 'en.ner.bert.finetuned.by_caotianyu1996': 'bert_ner_bert_finetuned_ner', 'en.ner.bert.finetuned.by_carblacac': 'bert_ner_carblacac_bert_finetuned_ner', 'en.ner.bert.finetuned.by_chanifrusydi': 'bert_ner_chanifrusydi_bert_finetuned_ner', 'en.ner.bert.finetuned.by_dheerajdhanvee': 'bert_ner_dheerajdhanvee_bert_finetuned_ner', 'en.ner.bert.finetuned.by_dizex': 'bert_ner_dizex_bert_finetuned_ner', 'en.ner.bert.finetuned.by_dshvadskiy': 'bert_ner_dshvadskiy_bert_finetuned_ner_accelerate', 'en.ner.bert.finetuned.by_eduardopds': 'bert_ner_eduardopds_bert_finetuned_ner', 'en.ner.bert.finetuned.by_evanz37': 'bert_ner_evanz37_bert_finetuned_ard', 
'en.ner.bert.finetuned.by_ghost1': 'bert_ner_ghost1_bert_finetuned_ner_accelerate', 'en.ner.bert.finetuned.by_hilmluo': 'bert_ner_hilmluo_bert_finetuned_ner', 'en.ner.bert.finetuned.by_hsattar': 'bert_ner_hsattar_bert_finetuned_ner', 'en.ner.bert.finetuned.by_huggingface_course': 'bert_ner_huggingface_course_bert_finetuned_ner_accelerate', 'en.ner.bert.finetuned.by_jameswrbrookes': 'bert_ner_jameswrbrookes_bert_finetuned_ner', 'en.ner.bert.finetuned.by_jimmywu': 'bert_ner_jimmywu_bert_finetuned_ner', 'en.ner.bert.finetuned.by_jjglilleberg': 'bert_ner_jjglilleberg_bert_finetuned_ner', 'en.ner.bert.finetuned.by_joantirant': 'bert_ner_joantirant_bert_finetuned_ner_accelerate', 'en.ner.bert.finetuned.by_jplago': 'bert_ner_jplago_bert_finetuned_ner', 'en.ner.bert.finetuned.by_jrubin01': 'bert_ner_jrubin01_bert_finetuned_ner', 'en.ner.bert.finetuned.by_juancopi81': 'bert_ner_juancopi81_bert_finetuned_ner', 'en.ner.bert.finetuned.by_kakkidaisuki': 'bert_ner_kakkidaisuki_bert_finetuned_ner', 'en.ner.bert.finetuned.by_kalex': 'bert_ner_kalex_bert_finetuned_ner', 'en.ner.bert.finetuned.by_kevinform': 'bert_ner_kevinform_bert_finetuned_ner', 'en.ner.bert.finetuned.by_khan27': 'bert_ner_khan27_bert_finetuned_ner', 'en.ner.bert.finetuned.by_krimo11': 'bert_ner_krimo11_bert_finetuned_ner', 'en.ner.bert.finetuned.by_ksaluja': 'bert_ner_ksaluja_bert_finetuned_ner', 'en.ner.bert.finetuned.by_liyingz': 'bert_ner_liyingz_bert_finetuned_ner', 'en.ner.bert.finetuned.by_lordli': 'bert_ner_lordli_bert_finetuned_ner', 'en.ner.bert.finetuned.by_ludoviciarraga': 'bert_ner_ludoviciarraga_bert_finetuned_ner', 'en.ner.bert.finetuned.by_mascariddu8': 'bert_ner_mascariddu8_bert_finetuned_ner_accelerate', 'en.ner.bert.finetuned.by_mattchurgin': 'bert_ner_mattchurgin_bert_finetuned_ner', 'en.ner.bert.finetuned.by_mbateman': 'bert_ner_mbateman_bert_finetuned_ner_accelerate', 'en.ner.bert.finetuned.by_mdroth': 'bert_ner_mdroth_bert_finetuned_ner_accelerate', 'en.ner.bert.finetuned.by_mordred501': 
'bert_ner_mordred501_bert_finetuned_ner', 'en.ner.bert.finetuned.by_neulvo': 'bert_ner_neulvo_bert_finetuned_ner_accelerate', 'en.ner.bert.finetuned.by_nicholasdino': 'bert_ner_nicholasdino_bert_finetuned_ner', 'en.ner.bert.finetuned.by_nielsr': 'bert_ner_nielsr_bert_finetuned_ner', 'en.ner.bert.finetuned.by_nilavoboral': 'bert_ner_nilavo_bert_finetuned', 'en.ner.bert.finetuned.by_rocketknight1': 'bert_ner_test_bert_finetuned_ner', 'en.ner.bert.finetuned.by_russellc': 'bert_ner_russellc_bert_finetuned_ner_accelerate', 'en.ner.bert.finetuned.by_satwiksstp': 'bert_ner_satwiksstp_bert_finetuned_ner', 'en.ner.bert.finetuned.by_sgrannemann': 'bert_ner_sgrannemann_bert_finetuned_ner', 'en.ner.bert.finetuned.by_shaopeng': 'bert_ner_shaopeng_bert_finetuned_ner', 'en.ner.bert.finetuned.by_silviacamplani': 'bert_ner_silviacamplani_bert_finetuned_ner', 'en.ner.bert.finetuned.by_spasis': 'bert_ner_spasis_bert_finetuned_ner_accelerate', 'en.ner.bert.finetuned.by_ssavla2': 'bert_ner_ssavla2_bert_finetuned_ner', 'en.ner.bert.finetuned.by_syedyusufali': 'bert_ner_syedyusufali_bert_finetuned_ner', 'en.ner.bert.finetuned.by_t_202': 'bert_ner_t_202_bert_finetuned_ner', 'en.ner.bert.finetuned.by_tushar_rishav': 'bert_ner_tushar_rishav_bert_finetuned_ner', 'en.ner.bert.finetuned.by_vanmas': 'bert_ner_vanmas_bert_finetuned_ner', 'en.ner.bert.finetuned.by_vdsouza1': 'bert_ner_vdsouza1_bert_finetuned_ner', 'en.ner.bert.finetuned.by_wende': 'bert_ner_wende_bert_finetuned_ner_accelerate', 'en.ner.bert.finetuned.by_winson': 'bert_ner_winson_bert_finetuned_ner_accelerate', 'en.ner.bert.finetuned.by_xesaad': 'bert_ner_xesaad_bert_finetuned_ner', 'en.ner.bert.finetuned.by_xkang': 'bert_ner_xkang_bert_finetuned_ner_accelerate', 'en.ner.bert.finetuned.by_xzt': 'bert_ner_xzt_bert_finetuned_ner', 'en.ner.bert.finetuned.by_yfu2307': 'bert_ner_yfu2307_bert_finetuned_ner', 'en.ner.bert.finetuned.by_yv': 'bert_ner_yv_bert_finetuned_ner_accelerate', 'en.ner.bert.finetuned_0h': 
'bert_ner_jo0hnd0e_bert_finetuned_ner', 'en.ner.bert.finetuned_adverse_drug_event': 'bert_ner_bert_finetuned_ades_model_1', 'en.ner.bert.foo.by_leonweber': 'bert_ner_foo', 'en.ner.bert.host.by_maaly': 'bert_ner_host', 'en.ner.bert.imb.by_ghadeermobasher': 'bert_ner_model_imb', 'en.ner.bert.imb_1.by_ghadeermobasher': 'bert_ner_model_imb_1', 'en.ner.bert.imb_2.by_ghadeermobasher': 'bert_ner_model_imb_2', 'en.ner.bert.ingredient.': 'bert_ner_keyword_tag_model_10000_9_16_more_ingredient', 'en.ner.bert.ingredient.2000_9_16.by_media1129': 'bert_ner_keyword_tag_model_2000_9_16_more_ingredient', 'en.ner.bert.ingredient.4000_9_16.by_media1129': 'bert_ner_keyword_tag_model_4000_9_16_more_ingredient', 'en.ner.bert.ingredient.6000_9_16.by_media1129': 'bert_ner_keyword_tag_model_6000_9_16_more_ingredient', 'en.ner.bert.ingredient.8000_9_16.by_media1129': 'bert_ner_keyword_tag_model_8000_9_16_more_ingredient', 'en.ner.bert.ingredient.by_test123': 'bert_token_classifier_autonlp_ingredient_pseudo_label_training_ner_29576765', 'en.ner.bert.job_title.by_lucifermorninstar011': 'bert_ner_autotrain_lucifer_job_title_comb_858027260', 'en.ner.bert.keyword_tag_model_2000.by_media1129': 'bert_ner_keyword_tag_model_2000', 'en.ner.bert.keyword_tag_model_2000_9_16.by_media1129': 'bert_ner_keyword_tag_model_2000_9_16', 'en.ner.bert.keyword_tag_model_4000.by_media1129': 'bert_ner_keyword_tag_model_4000', 'en.ner.bert.keyword_tag_model_6000.by_media1129': 'bert_ner_keyword_tag_model_6000', 'en.ner.bert.lab9_1.by_annaeze': 'bert_ner_lab9_1', 'en.ner.bert.lab9_2.by_annaeze': 'bert_ner_lab9_2', 'en.ner.bert.learn.by_alexander_learn': 'bert_ner_alexander_learn_bert_finetuned_ner', 'en.ner.bert.legal': 'bert_ner_legalbert_beneficiary_single', 'en.ner.bert.legal.by_anery': 'bert_ner_legalbert_clause_combined', 'en.ner.bert.lmr_hd_tb.by_rsuwaileh': 'bert_ner_idrisi_lmr_hd_tb', 'en.ner.bert.lmr_hd_tb_partition.by_rsuwaileh': 'bert_ner_idrisi_lmr_hd_tb_partition', 'en.ner.bert.lmr_hd_tl.by_rsuwaileh': 
'bert_ner_idrisi_lmr_hd_tl', 'en.ner.bert.lmr_hd_tl_partition.by_rsuwaileh': 'bert_ner_idrisi_lmr_hd_tl_partition', 'en.ner.bert.male_female.': 'bert_ner_ner_for_female_names', 'en.ner.bert.mutation_recognition_0.by_salvatore': 'bert_ner_bert_finetuned_mutation_recognition_0', 'en.ner.bert.mutation_recognition_1.by_salvatore': 'bert_ner_bert_finetuned_mutation_recognition_1', 'en.ner.bert.mutation_recognition_2.by_salvatore': 'bert_ner_bert_finetuned_mutation_recognition_2', 'en.ner.bert.mutation_recognition_3.by_salvatore': 'bert_ner_bert_finetuned_mutation_recognition_3', 'en.ner.bert.mutation_recognition_4.by_salvatore': 'bert_ner_bert_finetuned_mutation_recognition_4', 'en.ner.bert.name.by_lucifermorninstar011': 'bert_ner_autotrain_lucifer_name_894029080', 'en.ner.bert.ner.by_cwan6830': 'bert_ner_cwan6830_bert_finetuned_ner', 'en.ner.bert.ner.by_jiaxin97': 'bert_ner_jiaxin97_bert_finetuned_ner_adr', 'en.ner.bert.ner.by_lideming7757': 'bert_ner_lideming7757_bert_finetuned_ner', 'en.ner.bert.ner.by_peterhsu': 'bert_ner_tf_bert_finetuned_ner', 'en.ner.bert.nerd.by_ramybaly': 'bert_ner_ner_nerd', 'en.ner.bert.nerd_fine.by_ramybaly': 'bert_ner_ner_nerd_fine', 'en.ner.bert.org.by_ghadeermobasher': 'bert_ner_model_org', 'en.ner.bert.org_1.by_ghadeermobasher': 'bert_ner_model_org_1', 'en.ner.bert.org_2.by_ghadeermobasher': 'bert_ner_model_org_2', 'en.ner.bert.per_loc_org.by_tesemnikov_av': 'bert_ner_ner_rubert_per_loc_org', 'en.ner.bert.peyma.base': 'bert_ner_peyma_ner_bert_base', 'en.ner.bert.prodigy': 'bert_ner_autonlp_prodigy_10_3362554', 'en.ner.bert.protagonist.by_airi': 'bert_ner_bert_finetuned_protagonist', 'en.ner.bert.pubmed_bc5cdr_bluebert.uncased_12l_768d_a12a_768d_disease_imbalanced': 'bert_ner_bc5cdr_disease_imbalanced_bluebert_pubmed_uncased_l_12_h_768_a_12_latest', 'en.ner.bert.pubmed_bc5cdr_bluebert.uncased_12l_768d_a12a_768d_modified_disease': 'bert_ner_bc5cdr_disease_modified_bluebert_pubmed_uncased_l_12_h_768_a_12_latest', 
'en.ner.bert.pubmed_bionlp13cg.modified': 'bert_ner_bionlp13cg_modified_pubmedabstract_latest', 'en.ner.bert.pubmed_bluebert.uncased_12l_768d_a12a_768d_modified': 'bert_ner_bc4_modified_bluebert_pubmed_uncased_l_12_h_768_a_12', 'en.ner.bert.pubmed_bluebert.uncased_12l_768d_a12a_768d_original': 'bert_ner_bc4_original_bluebert_pubmed_uncased_l_12_h_768_a_12', 'en.ner.bert.pubmed_bluebert_bionlp13cg.uncased_12l_768d_a12a_768d_modified': 'bert_ner_bionlp13cg_modified_bluebert_pubmed_uncased_l_12_h_768_a_12_latest', 'en.ner.bert.pubmed_bluebert_bionlp13cg.uncased_12l_768d_a12a_768d_original': 'bert_ner_bionlp13cg_original_bluebert_pubmed_uncased_l_12_h_768_a_12_latest', 'en.ner.bert.rdchambers.by_rdchambers': 'bert_ner_rdchambers_bert_finetuned_ner', 'en.ner.bert.recipe_tag_model.by_media1129': 'bert_ner_media1129_recipe_tag_model', 'en.ner.bert.salvatore.by_salvatore': 'bert_ner_salvatore_bert_finetuned_ner', 'en.ner.bert.sentiment_ingredient.': 'bert_token_classifier_autonlp_ingredient_sentiment_analysis_19126711', 'en.ner.bert.small': 'bert_ner_small', 'en.ner.bert.small.by_narsil': 'bert_ner_small2', 'en.ner.bert.small_finetuned': 'bert_ner_bert_small_finetuned_typo_detection', 'en.ner.bert.sourcerecognition.by_lamine': 'bert_ner_bert_finetuned_ner_sourcerecognition', 'en.ner.bert.sourcerecognition.v2.by_lamine': 'bert_ner_bert_finetuned_ner2', 'en.ner.bert.split_title_org.by_pkushiqiang': 'bert_ner_bert_split_title_org', 'en.ner.bert.tac.by_lideming7757': 'bert_ner_tac_bert_finetuned_ner', 'en.ner.bert.task5_finetuned.by_zhuzhusleepearly': 'bert_ner_bert_task5finetuned', 'en.ner.bert.tele.by_ksaluja': 'bert_ner_autonlp_tele_new_5k_557515810', 'en.ner.bert.tele_red.by_ksaluja': 'bert_ner_autonlp_tele_red_data_model_585716433', 'en.ner.bert.tiny': 'bert_ner_rubert_tiny2_sentence_compression', 'en.ner.bert.tiny.by_hf_internal_testing': 'bert_ner_tiny_bert_for_token_classification', 'en.ner.bert.tiny.by_satyamrajawat1994': 'bert_ner_tinybert_fincorp', 
'en.ner.bert.title_org.by_pkushiqiang': 'bert_ner_bert_title_org', 'en.ner.bert.toxic.by_tesemnikov_av': 'bert_ner_rubert_ner_toxicity', 'en.ner.bert.tweet.base': 'bert_ner_bert_base_tweetner_2020', 'en.ner.bert.tweet.large': 'bert_ner_bert_large_tweetner_2020', 'en.ner.bert.twiner.base.by_overfit': 'bert_ner_twiner_bert_base', 'en.ner.bert.twiner.base_mtl.by_overfit': 'bert_ner_twiner_bert_base_mtl', 'en.ner.bert.ubb_endava.by_beardedjohn': 'bert_ner_bert_finetuned_ner_ubb_endava', 'en.ner.bert.ubb_endava_1.by_beardedjohn': 'bert_ner_bert_finetuned_ner_ubb_endava_1', 'en.ner.bert.ubb_endava_2.by_beardedjohn': 'bert_ner_bert_finetuned_ner_ubb_endava_2', 'en.ner.bert.ubb_endava_only_misc.by_beardedjohn': 'bert_ner_bert_finetuned_ner_ubb_endava_only_misc', 'en.ner.bert.uncased': 'bert_ner_bert_uncased_keyword_discriminator', 'en.ner.bert.uncased.by_yanekyuk': 'bert_ner_bert_uncased_keyword_extractor', 'en.ner.bert.uncased_base': 'bert_ner_bert_base_ner_uncased', 'en.ner.bert.uncased_base.by_ml6team': 'bert_token_classifier_base_uncased_city_country_ner', 'en.ner.bert.uncased_base.kin.by_arnolfokam': 'bert_ner_bert_base_uncased_kin', 'en.ner.bert.uncased_base.pcm.by_arnolfokam': 'bert_ner_bert_base_uncased_pcm', 'en.ner.bert.uncased_finetuned': 'bert_ner_bert_finetuned_ner_uncased', 'en.ner.bert.uncased_large': 'bert_ner_bert_large_uncased_med_ner', 'en.ner.bert.v1.by_ksaluja': 'bert_ner_new_test_model', 'en.ner.bert.v2': 'bert_ner_epiextract4gard_v2', 'en.ner.bert.v2.3000_v2.by_media1129': 'bert_ner_keyword_tag_model_3000_v2', 'en.ner.bert.v2.6000_v2.by_media1129': 'bert_ner_keyword_tag_model_6000_v2', 'en.ner.bert.v2.9000_v2.by_media1129': 'bert_ner_keyword_tag_model_9000_v2', 'en.ner.bert.v2.by_ksaluja': 'bert_ner_new_test_model2', 'en.ner.bert.v2.by_tartunlp': 'bert_ner_estbert_ner_v2', 'en.ner.bert.wikiann.': 'bert_ner_bert_srb_ner', 'en.ner.bert.wikiann.finetuned': 'bert_ner_ravindra001_bert_finetuned_ner', 'en.ner.bert.wikineural.multilingual': 
'bert_ner_mohitsingh_wikineural_multilingual_ner', 'en.ner.bert.wikineural.multilingual.by_Shiva12': 'bert_ner_shiva12_wikineural_multilingual_ner', 'en.ner.bert.wikineural.multilingual.by_Shivanand': 'bert_ner_shivanand_wikineural_multilingual_ner', 'en.ner.bert.wikineural.multilingual.by_Shwetabh': 'bert_ner_shwetabh_wikineural_multilingual_ner', 'en.ner.bert.wikineural.multilingual.by_VikasMani': 'bert_ner_vikasmani_wikineural_multilingual_ner', 'en.ner.bert.wikineural.multilingual.by_Vikings03': 'bert_ner_vikings03_wikineural_multilingual_ner', 'en.ner.bert.wikineural.multilingual.by_Vinspatel4': 'bert_ner_vinspatel4_wikineural_multilingual_ner', 'en.ner.bert.wikineural.multilingual.by_Zainab18': 'bert_ner_zainab18_wikineural_multilingual_ner', 'en.ner.bert.wikineural.multilingual.by_amasi': 'bert_ner_amasi_wikineural_multilingual_ner', 'en.ner.bert.wikineural.multilingual.by_amrita03': 'bert_ner_amrita03_wikineural_multilingual_ner', 'en.ner.bert.wikineural.multilingual.by_ashwathgojo234': 'bert_ner_ashwathgojo234_wikineural_multilingual_ner', 'en.ner.bert.wikineural.multilingual.by_gk07': 'bert_ner_gk07_wikineural_multilingual_ner', 'en.ner.bert.wikineural.multilingual.by_silpa': 'bert_ner_silpa_wikineural_multilingual_ner', 'en.ner.bert_base_cased': 'ner_dl_bert_base_cased', 'en.ner.biobert': 'bert_ner_biobert_genetic_ner', 'en.ner.biobert.512d_modified': 'bert_ner_bionlp13_modified_biobert_512', 'en.ner.biobert.bc2gm.': 'bert_ner_biobert_ner_bc2gm_corpus', 'en.ner.biobert.bc2gm.original': 'bert_ner_original_biobert_bc2gm', 'en.ner.biobert.bc5cdr.cased_base_imbalanced': 'bert_ner_bc5cdr_imbalanced_biobert_base_cased_v1.2', 'en.ner.biobert.bc5cdr.disease_imbalanced': 'bert_ner_bc5cdr_disease_imbalanced_biobert_v1.1', 'en.ner.biobert.bc5cdr.modified_disease': 'bert_ner_bc5cdr_disease_modified_biobert_v1.1', 'en.ner.biobert.bc5cdr.original_disease': 'bert_ner_original_biobert_bc5cdr_disease', 'en.ner.biobert.bionlp13cg.modified': 
'bert_ner_bionlp13cg_modified_biobert_v1.1_latest', 'en.ner.biobert.bionlp13cg.original': 'bert_ner_bionlp13cg_original_biobert_v1.1_latest', 'en.ner.biobert.cased_base_finetuned': 'bert_ner_hossay_biobert_base_cased_v1.2_finetuned_ner', 'en.ner.biobert.chemical.': 'bert_ner_biobert_chemical_ner', 'en.ner.biobert.chemical.512d_modified': 'bert_ner_bc4chemd_chem_modified_biobert_512', 'en.ner.biobert.chemical.512d_original': 'bert_ner_bc4chemd_chem_original_biobert_512', 'en.ner.biobert.chemical.base_imbalanced': 'bert_ner_bc4chemd_imbalanced_biobert_base_casesd_v1.1', 'en.ner.biobert.chemical.modified': 'bert_ner_bc4chemd_chem_modified_biobert_384', 'en.ner.biobert.chemical.modified.by_ghadeermobasher': 'bert_ner_bchem4_modified_biobert_v1', 'en.ner.biobert.chemical.original': 'bert_ner_bc4chemd_original_biobert_384', 'en.ner.biobert.chemical_bc5cdr.512d_modified': 'bert_ner_bc5cdr_chem_modified_biobert_512', 'en.ner.biobert.chemical_bc5cdr.512d_original': 'bert_ner_bc5cdr_chem_original_biobert_512', 'en.ner.biobert.chemical_bc5cdr.cased_base_disease': 'bert_ner_bc5cdr_chemical_disease_balanced_biobert_base_cased_v1.2', 'en.ner.biobert.chemical_bc5cdr.cased_large_modified': 'bert_ner_bc5cdr_chem_modified_biobert_large_cased', 'en.ner.biobert.chemical_bc5cdr.imbalanced': 'bert_ner_bc5cdr_chemical_imbalanced_biobert', 'en.ner.biobert.chemical_bc5cdr.imbalanced.by_ghadeermobasher': 'bert_ner_bc5cdr_chemical_imbalanced_biobert_v1.1_latest', 'en.ner.biobert.chemical_bc5cdr.modified': 'bert_ner_bc5cdr_chem_modified_biobert_384', 'en.ner.biobert.chemical_bc5cdr.modified.by_ghadeermobasher': 'bert_ner_bc5cdr_chem_modified_biobert_v1.1_latest', 'en.ner.biobert.chemical_bc5cdr.original': 'bert_ner_bc5cdr_chem_original_biobert_384', 'en.ner.biobert.chemical_bc5cdr.original.by_ghadeermobasher': 'bert_ner_original_biobert_bc5cdr_chemical', 'en.ner.biobert.chemical_bionlp13cg.512d_original': 'bert_ner_bionlp13cg_chem_chem_original_biobert_512', 
'en.ner.biobert.chemical_bionlp13cg.imbalanced': 'bert_ner_bionlp13cg_chem_imbalanced_biobert', 'en.ner.biobert.chemical_bionlp13cg.large_modified': 'bert_ner_bionlp13cg_chem_modified_biobert_large', 'en.ner.biobert.chemical_bionlp13cg.modified': 'bert_ner_bionlp13cg_chem_modified_biobert_384', 'en.ner.biobert.chemical_bionlp13cg.original': 'bert_ner_bionlp13cg_chem_original_biobert_384', 'en.ner.biobert.chemical_craft.cased_large_modified': 'bert_ner_craft_chem_modified_biobert_large_cased', 'en.ner.biobert.chemical_craft.modified': 'bert_ner_craft_chem_modified_biobert_v1.1', 'en.ner.biobert.chemical_craft.original': 'bert_ner_craft_chem_original_biobert_v1.1', 'en.ner.biobert.craft.512d_modified': 'bert_ner_craft_modified_biobert_512', 'en.ner.biobert.craft.512d_original': 'bert_ner_craft_original_biobert_512', 'en.ner.biobert.craft.cased_base_finetuned': 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_craft', 'en.ner.biobert.craft.modified': 'bert_ner_craft_modified_biobert_384', 'en.ner.biobert.craft.original': 'bert_ner_craft_original_biobert_384', 'en.ner.biobert.finetuned': 'bert_ner_biobert_finetuned_ner', 'en.ner.biobert.finetuned_k.by_krishadow': 'bert_ner_biobert_finetuned_ner_k', 'en.ner.biobert.finetuned_k2.by_krishadow': 'bert_ner_biobert_finetuned_ner_k2', 'en.ner.biobert.mlma.by_adwayk': 'bert_ner_hugging_face_biobert_mlma', 'en.ner.biobert.mlmav3.by_adwayk': 'bert_ner_hugging_face_biobert_mlmav3', 'en.ner.biobert.modified': 'bert_ner_bc4_modified_biobert_v1.1', 'en.ner.biobert.ncbi.': 'bert_ner_wlt_biobert_ncbi', 'en.ner.biobert.ncbi.disease': 'bert_ner_biobert_ner_ncbi_disease', 'en.ner.biobert.ncbi.disease.by_AdwayK': 'bert_ner_biobert_ncbi_disease_ner_tuned_on_tac2017', 'en.ner.biobert.ncbi.disease.by_ugaray96': 'bert_ner_biobert_ncbi_disease_ner', 'en.ner.biobert.ncbi.original': 'bert_ner_original_biobert_ncbi', 'en.ner.biobert.on_adr_as_ner.by_adwayk': 'bert_ner_biobert_on_adr_as_ner', 'en.ner.biobert.original': 
'bert_ner_bc4_original_biobert_v1.1', 'en.ner.biobert.original.by_ghadeermobasher': 'bert_ner_original_biobert_linnaeus', 'en.ner.biobert.pubmed.finetuned': 'bert_ner_biobert_v1.1_pubmed_finetuned_ner', 'en.ner.biobert.pubmed.finetuned.by_fidukm34': 'bert_ner_biobert_v1.1_pubmed_finetuned_ner_finetuned_ner', 'en.ner.biobert.v2': 'bert_ner_hugging_face_biobert_mlmav2', 'en.ner.bioformer.bc2gm.cased': 'bert_ner_bioformer_cased_v1.0_bc2gm', 'en.ner.bioformer.chemical_bionlp13cg.modified': 'bert_ner_bionlp13cg_chem_modified_bioformers', 'en.ner.bioformer.chemical_bionlp13cg.modified.by_ghadeermobasher': 'bert_ner_bionlp13cg_chem_modified_bioformers_2', 'en.ner.bioformer.chemical_craft.modified': 'bert_ner_craft_chem_modified_bioformers', 'en.ner.bioformer.ncbi.cased_disease': 'bert_ner_bioformer_cased_v1.0_ncbi_disease', 'en.ner.camembert': 'camembert_classifier_berties', 'en.ner.camembert.base': 'camembert_base_token_classifier_wikiner', 'en.ner.camembert.base_finetuned': 'camembert_classifier_base_wikipedia_4gb_finetuned_job_ner', 'en.ner.camembert.by_cassandra_themis': 'camembert_classifier_test_tcp_ca_cassandra_themis', 'en.ner.camembert.finetuned': 'camembert_classifier_das22_41_pretrained_finetuned_ref', 'en.ner.camembert.finetuned_das22_42_ref.by_hueynemud': 'camembert_classifier_das22_42_finetuned_ref', 'en.ner.camembert.finetuned_das22_43_pero.by_hueynemud': 'camembert_classifier_das22_43_pretrained_finetuned_pero', 'en.ner.camembert.finetuned_das22_44_pero.by_hueynemud': 'camembert_classifier_das22_44_finetuned_pero', 'en.ner.camembert.large': 'camembert_large_token_classifier_wikiner', 'en.ner.conll.base': 'nerdl_conll_deberta_base', 'en.ner.conll.dl.elmo.': 'nerdl_conll_elmo', 'en.ner.conll.elmo.': 'ner_conll_elmo', 'en.ner.conll.large': 'nerdl_conll_deberta_large', 'en.ner.conll_albert_base_uncased': 'ner_conll_albert_base_uncased', 'en.ner.conll_albert_large_uncased': 'ner_conll_albert_large_uncased', 'en.ner.conll_bert_base_cased': 
'ner_conll_bert_base_cased', 'en.ner.conll_distilbert_base_cased': 'ner_conll_distilbert_base_cased', 'en.ner.conll_longformer_large_4096': 'ner_conll_longformer_large_4096', 'en.ner.conll_roberta_base': 'ner_conll_roberta_base', 'en.ner.conll_roberta_large': 'ner_conll_roberta_large', 'en.ner.conll_xlm_roberta_base': 'ner_conll_xlm_roberta_base', 'en.ner.conll_xlnet_base_cased': 'ner_conll_xlnet_base_cased', 'en.ner.debertav3_base.conll03': 'deberta_v3_base_token_classifier_conll03', 'en.ner.debertav3_base.ontonotes': 'deberta_v3_base_token_classifier_ontonotes', 'en.ner.debertav3_large.conll03': 'deberta_v3_large_token_classifier_conll03', 'en.ner.debertav3_large.ontonotes': 'deberta_v3_large_token_classifier_ontonotes', 'en.ner.debertav3_small.conll03': 'deberta_v3_small_token_classifier_conll03', 'en.ner.debertav3_small.ontonotes': 'deberta_v3_small_token_classifier_ontonotes', 'en.ner.debertav3_xsmall.conll03': 'deberta_v3_xsmall_token_classifier_conll03', 'en.ner.debertav3_xsmall.ontonotes': 'deberta_v3_xsmall_token_classifier_ontonotes', 'en.ner.distil_bert': 'distilbert_ner_autotrain_defector_ner_846726994', 'en.ner.distil_bert.by_couchcat': 'distilbert_ner_ma_ner_v7_distil', 'en.ner.distil_bert.company.by_lucifermorninstar011': 'distilbert_ner_autotrain_luicfer_company_861827409', 'en.ner.distil_bert.conll.base': 'distilbert_base_token_classifier_conll03', 'en.ner.distil_bert.conll.cased_base_finetuned': 'distilbert_ner_distilbert_base_cased_finetuned_conll03_english', 'en.ner.distil_bert.conll.uncased_base_finetuned': 'distilbert_ner_distilbert_base_uncased_finetuned_conll03_english', 'en.ner.distil_bert.few_nerd.base': 'distilbert_base_token_classifier_few_nerd', 'en.ner.distil_bert.job_title.by_lucifermorninstar011': 'distilbert_ner_autotrain_lucifer_job_title_853727204', 'en.ner.distil_bert.keyphrase.': 'distilbert_ner_keyphrase_extraction_distilbert_inspec', 'en.ner.distil_bert.keyphrase.kptimes.by_dedeckerthomas': 
'distilbert_ner_keyphrase_extraction_distilbert_kptimes', 'en.ner.distil_bert.keyphrase.openkp.by_dedeckerthomas': 'distilbert_ner_keyphrase_extraction_distilbert_openkp', 'en.ner.distil_bert.morningstar_job.by_lucifermorninstar011': 'distilbert_ner_autotrain_lucifer_morningstar_job_859227344', 'en.ner.distil_bert.ontonotes.base': 'distilbert_base_token_classifier_ontonotes', 'en.ner.distil_roberta.conll.distilled_base': 'roberta_ner_distilroberta_base_ner_conll2003', 'en.ner.distil_roberta.conll_wikiann.distilled_base': 'roberta_ner_distilroberta_base_ner_wikiann_conll2003_3_class', 'en.ner.distil_roberta.conll_wikiann.distilled_base.by_philschmid': 'roberta_ner_distilroberta_base_ner_wikiann_conll2003_4_class', 'en.ner.distil_roberta.wikiann.distilled_base': 'roberta_ner_distilroberta_base_ner_wikiann', 'en.ner.dl': 'ner_dl', 'en.ner.dl.bert': 'ner_dl_bert', 'en.ner.farbrbert.base.by_giggio': 'bert_ner_farbrbert_base', 'en.ner.farbrbert.base_75.by_giggio': 'bert_ner_far75brbert_base', 'en.ner.fewnerd': 'nerdl_fewnerd_100d', 'en.ner.fewnerd_subentity': 'nerdl_fewnerd_subentity_100d', 'en.ner.glove': 'ner_dl_sentence', 'en.ner.glove.100d': 'ner_dl_sentence', 'en.ner.indo_bert.finetuned': 'bert_ner_chanifrusydi_indobert_finetuned_ner', 'en.ner.indo_bert.finetuned.by_xenergy': 'bert_ner_xenergy_indobert_finetuned_ner', 'en.ner.longformer.conll.large': 'longformer_large_token_classifier_conll03', 'en.ner.mit_movie_complex_bert': 'ner_mit_movie_complex_bert_base_cased', 'en.ner.mit_movie_complex_distilbert': 'ner_mit_movie_complex_distilbert_base_cased', 'en.ner.mit_movie_simple': 'ner_mit_movie_simple_distilbert_base_cased', 'en.ner.movie': 'ner_mit_movie_complex_bert_base_cased', 'en.ner.movie_complex': 'ner_mit_movie_complex_bert_base_cased', 'en.ner.movie_simple': 'ner_mit_movie_complex_bert_base_cased', 'en.ner.ner_chemical_bionlp_bc5cdr_pubmed': 'roberta_ner_ner_chemical_bionlp_bc5cdr_pubmed', 'en.ner.ner_disease_ncbi_bionlp_bc5cdr_pubmed': 
'roberta_ner_ner_disease_ncbi_bionlp_bc5cdr_pubmed', 'en.ner.ner_gene_dna_rna_jnlpba_pubmed': 'roberta_ner_ner_gene_dna_rna_jnlpba_pubmed', 'en.ner.onto': 'onto_100', 'en.ner.onto.bert.cased_base': 'onto_bert_base_cased', 'en.ner.onto.bert.cased_large': 'onto_bert_large_cased', 'en.ner.onto.bert.small_l2_128': 'onto_small_bert_L2_128', 'en.ner.onto.bert.small_l4_256': 'onto_small_bert_L4_256', 'en.ner.onto.bert.small_l4_512': 'onto_small_bert_L4_512', 'en.ner.onto.bert.small_l8_512': 'onto_small_bert_L8_512', 'en.ner.onto.electra.uncased_base': 'onto_electra_base_uncased', 'en.ner.onto.electra.uncased_large': 'onto_electra_large_uncased', 'en.ner.onto.electra.uncased_small': 'onto_electra_small_uncased', 'en.ner.onto.glove.6B_100d': 'onto_100', 'en.ner.onto.glove.840B_300d': 'onto_300', 'en.ner.ontonotes_distilbert_base_cased': 'ner_ontonotes_distilbert_base_cased', 'en.ner.ontonotes_roberta_base': 'ner_ontonotes_roberta_base', 'en.ner.ontonotes_roberta_large': 'ner_ontonotes_roberta_large', 'en.ner.pos': 'bert_pos_codeswitch_spaeng_pos_lince', 'en.ner.pos.128d': 'bert_pos_estbert_morph_128', 'en.ner.pos.4l_weight_decay.by_kktoto': 'bert_pos_4l_weight_decay', 'en.ner.pos.amhariccacopostag.by_mitiku': 'bert_pos_amhariccacopostag', 'en.ner.pos.amharicwicpostag.by_mitiku': 'bert_pos_amharicwicpostag', 'en.ner.pos.amharicwicpostag10tags.by_mitiku': 'bert_pos_amharicwicpostag10tags', 'en.ner.pos.base_finetuned': 'roberta_pos_roberto_base_finetuned_pos', 'en.ner.pos.by_alice_hml': 'bert_pos_mbert_grammatical_error_tagger', 'en.ner.pos.by_alvenir': 'bert_pos_bert_punct_restoration_en_alvenir', 'en.ner.pos.by_kitaev': 'bert_pos_tetra_tag_en_kitaev', 'en.ner.pos.by_proycon': 'roberta_pos_veganuary_pos', 'en.ner.pos.cased': 'bert_pos_bert_pos_cased_deepfrog_nld', 'en.ner.pos.cased.by_proycon': 'roberta_pos_robbert2_pos_cased_deepfrog_nld', 'en.ner.pos.cased_base': 'bert_pos_bert_base_cased_ccg', 'en.ner.pos.cased_base.by_qcri': 'bert_pos_bert_base_cased_pos', 
'en.ner.pos.cased_multilingual_base': 'bert_pos_bert_base_multilingual_cased_pos_english', 'en.ner.pos.cased_multilingual_base.by_qcri': 'bert_pos_bert_base_multilingual_cased_chunking_english', 'en.ner.pos.ccvspantagger.by_rj3vans': 'bert_pos_ccvspantagger', 'en.ner.pos.clnspantagger.by_rj3vans': 'bert_pos_clnspantagger', 'en.ner.pos.cmn1spantagger.by_rj3vans': 'bert_pos_cmn1spantagger', 'en.ner.pos.cmv1spantagger.by_rj3vans': 'bert_pos_cmv1spantagger', 'en.ner.pos.conll.': 'bert_pos_tr_kg_pos_conllu_bert', 'en.ner.pos.conll.finetuned': 'bert_pos_bert_finetuned_conll2003_pos', 'en.ner.pos.conll.finetuned.by_Fredvv': 'bert_pos_bert_finetuned_pos', 'en.ner.pos.estbert_upos.128d.by_tartunlp': 'bert_pos_estbert_upos_128', 'en.ner.pos.estbert_xpos.128d.by_tartunlp': 'bert_pos_estbert_xpos_128', 'en.ner.pos.finetuned': 'bert_pos_bert_finetuned_chunking', 'en.ner.pos.finetuned_accelerate.by_deborah': 'bert_pos_bertimbau_finetuned_pos_accelerate', 'en.ner.pos.finetuned_accelerate2.by_deborah': 'bert_pos_bertimbau_finetuned_pos_accelerate2', 'en.ner.pos.finetuned_accelerate3.by_deborah': 'bert_pos_bertimbau_finetuned_pos_accelerate3', 'en.ner.pos.finetuned_accelerate_5.by_camilag': 'bert_pos_bertimbau_finetuned_pos_accelerate_5', 'en.ner.pos.finetuned_accelerate_6.by_camilag': 'bert_pos_bertimbau_finetuned_pos_accelerate_6', 'en.ner.pos.finetuned_accelerate_7.by_camilag': 'bert_pos_bertimbau_finetuned_pos_accelerate_7', 'en.ner.pos.focal_alpah.tiny.by_kktoto': 'bert_pos_tiny_focal_alpah', 'en.ner.pos.focal_alpah75.tiny.by_kktoto': 'bert_pos_tiny_focal_alpah75', 'en.ner.pos.focal_ckpt.tiny.by_kktoto': 'bert_pos_tiny_focal_ckpt', 'en.ner.pos.focal_v3.tiny.by_kktoto': 'bert_pos_tiny_focal_v3', 'en.ner.pos.kt_punctuator.tiny.by_kktoto': 'bert_pos_tiny_kt_punctuator', 'en.ner.pos.ktoto_punctuator.tiny.by_kktoto': 'bert_pos_tiny_ktoto_punctuator', 'en.ner.pos.lr_kk_kktoto.tiny.by_kktoto': 'bert_pos_tiny_lr_kk_kktoto', 'en.ner.pos.pars_bert.finetuned': 
'bert_pos_parsbert_finetuned_pos', 'en.ner.pos.signtagger.by_rj3vans': 'bert_pos_signtagger', 'en.ner.pos.ssccvspantagger.by_rj3vans': 'bert_pos_ssccvspantagger', 'en.ner.pos.ssccvspantagger.v2.by_rj3vans': 'bert_pos_13.05.2022.ssccvspantagger', 'en.ner.pos.tiny': 'bert_pos_tiny_bb_wd', 'en.ner.pos.tiny.tiny.by_kktoto': 'bert_pos_wwdd_tiny', 'en.ner.pos.toto_punctuator.tiny.by_kktoto': 'bert_pos_tiny_toto_punctuator', 'en.ner.pos.ty_punctuator.by_kktoto': 'bert_pos_ty_punctuator', 'en.ner.pos.uncased_finetuned': 'bert_pos_bert_english_uncased_finetuned_chunk', 'en.ner.pos.uncased_finetuned.by_vblagoje': 'bert_pos_bert_english_uncased_finetuned_pos', 'en.ner.pos.universal_dependencies.base': 'xlmroberta_pos_xlm_roberta_base_english_upos', 'en.ner.pos.universal_dependencies.distilled': 'roberta_pos_distil_slovakbert_upos', 'en.ner.pos.v2_tiny': 'bert_pos_tiny_focal_v2_label', 'en.ner.pos.v2_tiny.by_kktoto': 'bert_pos_tiny_no_focal_v2', 'en.ner.pubmed_bert.chemical_pubmed.': 'bert_ner_bc4_chem_pubmedbert', 'en.ner.pubmed_bert.chemical_pubmed.512d_modified': 'bert_ner_bc5cd_chem_modified_pubmedbert_512', 'en.ner.pubmed_bert.chemical_pubmed.512d_original': 'bert_ner_bc4chemd_chem_original_pubmedbert_512', 'en.ner.pubmed_bert.chemical_pubmed.imbalanced': 'bert_ner_bc4chemd_imbalancedpubmedbert', 'en.ner.pubmed_bert.chemical_pubmed.modified': 'bert_ner_bc4chemd_chem_modified_pubmedbert_384', 'en.ner.pubmed_bert.chemical_pubmed.modified.by_ghadeermobasher': 'bert_ner_bc4chemd_modified_pubmedbert', 'en.ner.pubmed_bert.chemical_pubmed.original': 'bert_ner_bc4chemd_chem_original_pubmedbert_384', 'en.ner.pubmed_bert.chemical_pubmed.original.by_ghadeermobasher': 'bert_ner_original_pubmedbert_bc4chemd', 'en.ner.pubmed_bert.chemical_pubmed_bc5cdr.512d_original': 'bert_ner_bc5cdr_chem_original_pubmedbert_512', 'en.ner.pubmed_bert.chemical_pubmed_bc5cdr.disease': 'bert_ner_bc5cdr_chemical_disease_balanced_sapbert_from_pubmedbert_fulltext', 
'en.ner.pubmed_bert.chemical_pubmed_bc5cdr.disease.by_ghadeermobasher': 'bert_ner_bc5cdr_chemical_disease_balanced_pubmedbert', 'en.ner.pubmed_bert.chemical_pubmed_bc5cdr.imbalanced': 'bert_ner_bc5cdr_chemical_imbalanced_pubmedbert', 'en.ner.pubmed_bert.chemical_pubmed_bc5cdr.modified': 'bert_ner_bc5cdr_chem_modified_pubmedbert_384', 'en.ner.pubmed_bert.chemical_pubmed_bc5cdr.modified.by_ghadeermobasher': 'bert_ner_bc5cdr_chemical_modified_pubmedbert', 'en.ner.pubmed_bert.chemical_pubmed_bc5cdr.original': 'bert_ner_original_pubmedbert_bc5cdr_chemical', 'en.ner.pubmed_bert.chemical_pubmed_bc5cdr.uncased_base_disease': 'bert_ner_bc5cdr_chemical_disease_balanced_biomednlp_pubmedbert_base_uncased_abstract_fulltext', 'en.ner.pubmed_bert.chemical_pubmed_bc5cdr.uncased_base_imbalanced': 'bert_ner_bc5cdr_chem2_imbalanced_biomednlp_pubmedbert_base_uncased_abstract', 'en.ner.pubmed_bert.chemical_pubmed_bc5cdr.uncased_base_imbalanced.by_ghadeermobasher': 'bert_ner_bc5cdr_chemical_imbalanced_pubmedbert_base_uncased_abstract_latest', 'en.ner.pubmed_bert.chemical_pubmed_bc5cdr.uncased_base_modified': 'bert_ner_bc5cdr_chem2_modified_biomednlp_pubmedbert_base_uncased_abstract', 'en.ner.pubmed_bert.chemical_pubmed_bionlp13cg.512d_modified': 'bert_ner_bionlp13cg_chem_modified_pubmedbert_512', 'en.ner.pubmed_bert.chemical_pubmed_bionlp13cg.512d_original': 'bert_ner_bionlp13cg_chem_original_pubmedbert_512', 'en.ner.pubmed_bert.chemical_pubmed_bionlp13cg.imbalanced': 'bert_ner_bionlp13cg_chem_imbalancedpubmedbert', 'en.ner.pubmed_bert.chemical_pubmed_bionlp13cg.modified': 'bert_ner_bionlp13cg_chem_modified_pubmedbert', 'en.ner.pubmed_bert.chemical_pubmed_bionlp13cg.modified_384.by_ghadeermobasher': 'bert_ner_bionlp13cg_chem_modified_pubmedbert_384', 'en.ner.pubmed_bert.chemical_pubmed_bionlp13cg.modified_abstract_3.by_ghadeermobasher': 'bert_ner_bionlp13cg_chem_modified_pubmedbert_abstract_3', 'en.ner.pubmed_bert.chemical_pubmed_bionlp13cg.modified_full_3.by_ghadeermobasher': 
'bert_ner_bionlp13cg_chem_modified_pubmedbert_full_3', 'en.ner.pubmed_bert.chemical_pubmed_biored.128d_32d_modified': 'bert_ner_biored_chem_modified_pubmedbert_128_32', 'en.ner.pubmed_bert.chemical_pubmed_biored.128d_32d_original': 'bert_ner_biored_chem_original_pubmedbert_128_32', 'en.ner.pubmed_bert.chemical_pubmed_biored.128d_modified': 'bert_ner_biored_chem_modified_pubmedbert_128_10', 'en.ner.pubmed_bert.chemical_pubmed_biored.128d_modified_128_20.by_ghadeermobasher': 'bert_ner_biored_chem_modified_pubmedbert_128_20', 'en.ner.pubmed_bert.chemical_pubmed_biored.128d_modified_128_5.by_ghadeermobasher': 'bert_ner_biored_chem_modified_pubmedbert_128_5', 'en.ner.pubmed_bert.chemical_pubmed_biored.128d_original': 'bert_ner_biored_chem_original_pubmedbert_128_10', 'en.ner.pubmed_bert.chemical_pubmed_biored.128d_original_128_20.by_ghadeermobasher': 'bert_ner_biored_chem_original_pubmedbert_128_20', 'en.ner.pubmed_bert.chemical_pubmed_biored.128d_original_128_5.by_ghadeermobasher': 'bert_ner_biored_chem_original_pubmedbert_128_5', 'en.ner.pubmed_bert.chemical_pubmed_biored.256d_modified': 'bert_ner_biored_chem_modified_pubmedbert_256_13', 'en.ner.pubmed_bert.chemical_pubmed_biored.256d_modified_256_40.by_ghadeermobasher': 'bert_ner_biored_chem_modified_pubmedbert_256_40', 'en.ner.pubmed_bert.chemical_pubmed_biored.256d_modified_256_5.by_ghadeermobasher': 'bert_ner_biored_chem_modified_pubmedbert_256_5', 'en.ner.pubmed_bert.chemical_pubmed_biored.256d_original': 'bert_ner_biored_chem_original_pubmedbert_256_13', 'en.ner.pubmed_bert.chemical_pubmed_biored.256d_original_256_40.by_ghadeermobasher': 'bert_ner_biored_chem_original_pubmedbert_256_40', 'en.ner.pubmed_bert.chemical_pubmed_biored.256d_original_256_5.by_ghadeermobasher': 'bert_ner_biored_chem_original_pubmedbert_256_5', 'en.ner.pubmed_bert.chemical_pubmed_biored.32d_modified': 'bert_ner_biored_chem_modified_pubmedbert_320_8', 'en.ner.pubmed_bert.chemical_pubmed_biored.32d_modified.by_ghadeermobasher': 
'bert_ner_biored_chem_modified_pubmedbert_320_8_10', 'en.ner.pubmed_bert.chemical_pubmed_biored.32d_original': 'bert_ner_biored_chem_original_pubmedbert_320_8', 'en.ner.pubmed_bert.chemical_pubmed_biored.512d_modified': 'bert_ner_biored_chem_modified_pubmedbert_512', 'en.ner.pubmed_bert.chemical_pubmed_biored.512d_modified_512_5.by_ghadeermobasher': 'bert_ner_biored_chem_modified_pubmedbert_512_5', 'en.ner.pubmed_bert.chemical_pubmed_biored.512d_modified_512_5_30.by_ghadeermobasher': 'bert_ner_biored_chem_modified_pubmedbert_512_5_30', 'en.ner.pubmed_bert.chemical_pubmed_biored.512d_original': 'bert_ner_biored_chem_original_pubmedbert_512', 'en.ner.pubmed_bert.chemical_pubmed_biored.512d_original_512_5.by_ghadeermobasher': 'bert_ner_biored_chem_original_pubmedbert_512_5', 'en.ner.pubmed_bert.chemical_pubmed_biored.512d_original_512_5_30.by_ghadeermobasher': 'bert_ner_biored_chem_original_pubmedbert_512_5_30', 'en.ner.pubmed_bert.chemical_pubmed_biored.modified': 'bert_ner_biored_chem_modified_pubmedbert_384_5', 'en.ner.pubmed_bert.chemical_pubmed_biored.modified_384_8.by_ghadeermobasher': 'bert_ner_biored_chem_modified_pubmedbert_384_8', 'en.ner.pubmed_bert.chemical_pubmed_biored.modified_384_8_10.by_ghadeermobasher': 'bert_ner_biored_chem_modified_pubmedbert_384_8_10', 'en.ner.pubmed_bert.chemical_pubmed_biored.original': 'bert_ner_biored_chem_original_pubmedbert_384_5', 'en.ner.pubmed_bert.chemical_pubmed_biored.original.by_ghadeermobasher': 'bert_ner_biored_chem_original_pubmedbert_384_8', 'en.ner.pubmed_bert.chemical_pubmed_craft.imbalanced': 'bert_ner_craft_chem_imbalancedpubmedbert', 'en.ner.pubmed_bert.chemical_pubmed_craft.modified': 'bert_ner_craft_chem_modified_pubmedbert', 'en.ner.pubmed_bert.chemical_pubmed_craft.uncased_base_modified': 'bert_ner_craft_chem_modified_biomednlp_pubmedbert_base_uncased_abstract', 'en.ner.pubmed_bert.chemical_pubmed_craft.uncased_base_original': 'bert_ner_craft_chem_original_biomednlp_pubmedbert_base_uncased_abstract', 
'en.ner.pubmed_bert.pubmed.': 'bert_ner_wlt_pubmedbert_linnaeus', 'en.ner.pubmed_bert.pubmed.modified': 'bert_ner_bionlp13_modified_pubmedbert_384', 'en.ner.pubmed_bert.pubmed.original': 'bert_ner_bc4_original_pubmedbert', 'en.ner.pubmed_bert.pubmed.original.by_ghadeermobasher': 'bert_ner_original_pubmedbert_linnaeus', 'en.ner.pubmed_bert.pubmed.small_modified': 'bert_ner_bc4_modified_pubmedbert_small', 'en.ner.pubmed_bert.pubmed.small_original': 'bert_ner_bc4_original_pubmedbert_small', 'en.ner.pubmed_bert.pubmed.uncased_base_modified': 'bert_ner_bc4_modified_biomednlp_pubmedbert_base_uncased_abstract', 'en.ner.pubmed_bert.pubmed.uncased_base_original': 'bert_ner_bc4_original_biomednlp_pubmedbert_base_uncased_abstract', 'en.ner.pubmed_bert.pubmed_bc2gm.': 'bert_ner_wlt_pubmedbert_bc2gm', 'en.ner.pubmed_bert.pubmed_bc2gm.imbalanced': 'bert_ner_bc2gm_gene_imbalancedpubmedbert', 'en.ner.pubmed_bert.pubmed_bc2gm.modified': 'bert_ner_bc2gm_gene_modified_pubmedbert', 'en.ner.pubmed_bert.pubmed_bc2gm.original': 'bert_ner_original_pubmedbert_bc2gm', 'en.ner.pubmed_bert.pubmed_bc5cdr.disease': 'bert_ner_bc5cdr_disease_balancedpubmedbert', 'en.ner.pubmed_bert.pubmed_bc5cdr.imbalanced': 'bert_ner_bc5cdr_imbalanced_pubmedbert', 'en.ner.pubmed_bert.pubmed_bc5cdr.imbalanced.by_ghadeermobasher': 'bert_ner_bc5cdr_imbalanced_sapbert_from_pubmedbert_fulltext', 'en.ner.pubmed_bert.pubmed_bc5cdr.modified_disease': 'bert_ner_bc5cdr_disease_modified_pubmedbert', 'en.ner.pubmed_bert.pubmed_bc5cdr.original_disease': 'bert_ner_original_pubmedbert_bc5cdr_disease', 'en.ner.pubmed_bert.pubmed_bc5cdr.uncased_base_disease_imbalanced': 'bert_ner_bc5cdr_disease_imbalanced_biomednlp_pubmedbert_base_uncased_abstract', 'en.ner.pubmed_bert.pubmed_bc5cdr.uncased_base_modified_disease': 'bert_ner_bc5cdr_disease_modified_biomednlp_pubmedbert_base_uncased_abstract', 'en.ner.pubmed_bert.pubmed_bionlp13cg.original': 'bert_ner_bionlp13cg_original_pubmedbert_abstract_latest', 
'en.ner.pubmed_bert.pubmed_biored.128d_32d_modified': 'bert_ner_biored_dis_modified_pubmedbert_128_32', 'en.ner.pubmed_bert.pubmed_biored.128d_32d_original': 'bert_ner_biored_dis_original_pubmedbert_128_32', 'en.ner.pubmed_bert.pubmed_biored.256d_modified': 'bert_ner_biored_dis_modified_pubmedbert_256_13', 'en.ner.pubmed_bert.pubmed_biored.256d_modified.by_ghadeermobasher': 'bert_ner_biored_dis_modified_pubmedbert_256_5', 'en.ner.pubmed_bert.pubmed_biored.256d_original': 'bert_ner_biored_dis_original_pubmedbert_256_13', 'en.ner.pubmed_bert.pubmed_biored.256d_original.by_ghadeermobasher': 'bert_ner_biored_dis_original_pubmedbert_256_5', 'en.ner.pubmed_bert.pubmed_biored.32d_modified': 'bert_ner_biored_dis_modified_pubmedbert_320_8', 'en.ner.pubmed_bert.pubmed_biored.32d_modified.by_ghadeermobasher': 'bert_ner_biored_dis_modified_pubmedbert_320_8_10', 'en.ner.pubmed_bert.pubmed_biored.32d_original': 'bert_ner_biored_dis_original_pubmedbert_320_8', 'en.ner.pubmed_bert.pubmed_biored.512d_modified': 'bert_ner_biored_cd_modified_pubmedbert_512', 'en.ner.pubmed_bert.pubmed_biored.512d_modified_512.by_ghadeermobasher': 'bert_ner_biored_dis_modified_pubmedbert_512', 'en.ner.pubmed_bert.pubmed_biored.512d_modified_512_5.by_ghadeermobasher': 'bert_ner_biored_dis_modified_pubmedbert_512_5', 'en.ner.pubmed_bert.pubmed_biored.512d_original': 'bert_ner_biored_cd_original_pubmedbert_512', 'en.ner.pubmed_bert.pubmed_biored.512d_original_512.by_ghadeermobasher': 'bert_ner_biored_dis_original_pubmedbert_512', 'en.ner.pubmed_bert.pubmed_biored.512d_original_512_5.by_ghadeermobasher': 'bert_ner_biored_dis_original_pubmedbert_512_5', 'en.ner.pubmed_bert.pubmed_biored.modified': 'bert_ner_biored_dis_modified_pubmedbert_384_5', 'en.ner.pubmed_bert.pubmed_biored.modified_384_8.by_ghadeermobasher': 'bert_ner_biored_dis_modified_pubmedbert_384_8', 'en.ner.pubmed_bert.pubmed_biored.modified_384_8_10.by_ghadeermobasher': 'bert_ner_biored_dis_modified_pubmedbert_384_8_10', 
'en.ner.pubmed_bert.pubmed_biored.original': 'bert_ner_biored_dis_original_pubmedbert_384_5', 'en.ner.pubmed_bert.pubmed_biored.original.by_ghadeermobasher': 'bert_ner_biored_dis_original_pubmedbert_384_8', 'en.ner.pubmed_bert.pubmed_craft.512d_modified': 'bert_ner_craft_modified_pubmedbert_512', 'en.ner.pubmed_bert.pubmed_craft.512d_original': 'bert_ner_craft_original_pubmedbert_512', 'en.ner.pubmed_bert.pubmed_craft.modified': 'bert_ner_craft_modified_pubmedbert_384', 'en.ner.pubmed_bert.pubmed_craft.original': 'bert_ner_craft_original_pubmedbert_384', 'en.ner.pubmed_bert.pubmed_ncbi.original': 'bert_ner_original_pubmedbert_ncbi', 'en.ner.restaurant': 'nerdl_restaurant_100d', 'en.ner.roberta': 'roberta_ner_clulab_roberta_timex_semeval', 'en.ner.roberta.base': 'roberta_ner_mimics_tagging_roberta_base', 'en.ner.roberta.base.by_djagatiya': 'roberta_ner_ner_roberta_base_ontonotesv5_englishv4', 'en.ner.roberta.base.by_ganzorig': 'roberta_ner_ganzorig_roberta_base_ner_demo', 'en.ner.roberta.base.by_omunkhuush': 'roberta_ner_omunkhuush_roberta_base_ner_demo', 'en.ner.roberta.base_finetuned': 'roberta_ner_codebert_base_finetuned_stackoverflow', 'en.ner.roberta.base_finetuned.by_abhishek_shrm': 'roberta_ner_roberta_base_finetuned_beer_ner', 'en.ner.roberta.base_finetuned.by_alekseykorshuk': 'roberta_ner_alekseykorshuk_roberta_base_finetuned_ner', 'en.ner.roberta.base_finetuned.by_mrm8488': 'roberta_ner_codebert_base_finetuned_stackoverflow_ner', 'en.ner.roberta.base_finetuned.by_surrey_nlp': 'roberta_ner_roberta_base_finetuned_abbr', 'en.ner.roberta.base_finetuned_v0.by_gozdi': 'roberta_ner_roberta_base_finetuned_ner_0', 'en.ner.roberta.base_finetuned_v1.by_gozdi': 'roberta_ner_roberta_base_finetuned_ner_1', 'en.ner.roberta.base_finetuned_v2.by_gozdi': 'roberta_ner_roberta_base_finetuned_ner_2', 'en.ner.roberta.base_finetuned_v3.by_gozdi': 'roberta_ner_roberta_base_finetuned_ner_3', 'en.ner.roberta.base_finetuned_v4.by_gozdi': 'roberta_ner_roberta_base_finetuned_ner_4', 
'en.ner.roberta.by_andrija': 'roberta_ner_sroberta_nlp', 'en.ner.roberta.by_egoitz': 'roberta_ner_egoitz_roberta_timex_semeval', 'en.ner.roberta.by_iammartahir': 'roberta_ner_domain_adapted_timex', 'en.ner.roberta.by_icelab': 'roberta_ner_spaceroberta_cr', 'en.ner.roberta.by_nates': 'roberta_ner_ler_roberta', 'en.ner.roberta.by_obi': 'roberta_ner_deid_roberta_i2b2', 'en.ner.roberta.by_recognai': 'roberta_ner_veganuary_ner', 'en.ner.roberta.by_scarlettsun9': 'roberta_ner_autotrain_zuozhuan_1100540141', 'en.ner.roberta.by_vesteinn': 'roberta_ner_icebert_ner', 'en.ner.roberta.by_wolfrage89': 'roberta_ner_company_segment_ner', 'en.ner.roberta.cased': 'roberta_ner_robbert2_ner_cased_sonar1_nld', 'en.ner.roberta.cased.by_proycon': 'roberta_ner_robbert_ner_cased_sonar1_nld', 'en.ner.roberta.codebert_mt4ts.by_kevinjesse': 'roberta_ner_codebert_mt4ts', 'en.ner.roberta.codeberta_mt4ts.by_kevinjesse': 'roberta_ner_codeberta_mt4ts', 'en.ner.roberta.conll.base_finetuned': 'roberta_ner_roberta_base_finetuned_ner_kmeans', 'en.ner.roberta.conll.large_finetuned': 'roberta_ner_romainlhardy_roberta_large_finetuned_ner', 'en.ner.roberta.finetuned': 'roberta_ner_bergurth_icebert_finetuned_ner', 'en.ner.roberta.finetuned.by_eliasbe': 'roberta_ner_eliasbe_icebert_finetuned_ner', 'en.ner.roberta.finetuned.by_orri': 'roberta_ner_orri_icebert_finetuned_ner', 'en.ner.roberta.finetuned.by_thorduragust': 'roberta_ner_thorduragust_icebert_finetuned_ner', 'en.ner.roberta.finetuned_protagonist_english.by_airi': 'roberta_ner_bert_finetuned_protagonist_english', 'en.ner.roberta.finetuned_protagonist_english_pc.by_airi': 'roberta_ner_bert_finetuned_protagonist_english_pc', 'en.ner.roberta.finetuning_': 'roberta_ner_jurisbert_finetuning_ner', 'en.ner.roberta.fnrbt.by_mehari': 'roberta_ner_fnrbt', 'en.ner.roberta.graphcodebert_mt4ts.by_kevinjesse': 'roberta_ner_graphcodebert_mt4ts', 'en.ner.roberta.graphpolygot_mt4ts.by_kevinjesse': 'roberta_ner_graphpolygot_mt4ts', 'en.ner.roberta.keyphrase.': 
'roberta_ner_keyphrase_extraction_kbir_inspec', 'en.ner.roberta.keyphrase.kbir_kpcrowd.by_ml6team': 'roberta_ner_keyphrase_extraction_kbir_kpcrowd', 'en.ner.roberta.keyphrase.kbir_kptimes.by_ml6team': 'roberta_ner_keyphrase_extraction_kbir_kptimes', 'en.ner.roberta.keyphrase.kbir_openkp.by_ml6team': 'roberta_ner_keyphrase_extraction_kbir_openkp', 'en.ner.roberta.keyphrase.kbir_semeval2017.by_ml6team': 'roberta_ner_keyphrase_extraction_kbir_semeval2017', 'en.ner.roberta.large': 'roberta_ner_tner_roberta_large_multiconer_en_asahi417', 'en.ner.roberta.large_finetuned_adverse_drug_event': 'roberta_ner_roberta_large_finetuned_ades_model_2', 'en.ner.roberta.large_legal': 'roberta_ner_roberta_large_legal_act_extraction', 'en.ner.roberta.meds.by_cariai': 'roberta_ner_meds', 'en.ner.roberta.medslabs.by_cariai': 'roberta_ner_medslabs', 'en.ner.roberta.polygot_mt4ts.by_kevinjesse': 'roberta_ner_polygot_mt4ts', 'en.ner.roberta.roberta_mt4ts.by_kevinjesse': 'roberta_ner_roberta_mt4ts', 'en.ner.roberta.troberta.by_mehari': 'roberta_ner_troberta', 'en.ner.roberta.tweet.base': 'roberta_ner_roberta_base_tweetner_2020', 'en.ner.roberta.tweet.base.by_asahi417': 'roberta_ner_tner_roberta_base_tweet_2020', 'en.ner.roberta.tweet.large': 'roberta_ner_roberta_large_tweetner_2020', 'en.ner.roberta.tweet.large.by_asahi417': 'roberta_ner_tner_roberta_large_tweet_2020', 'en.ner.roberta.tweet.tweetner_2020.large.by_tner': 'roberta_ner_bertweet_large_tweetner_2020', 'en.ner.roberta.tweet.tweetner_2020_2021_concat.base.by_tner': 'roberta_ner_roberta_base_tweetner_2020_2021_concat', 'en.ner.roberta.tweet.tweetner_2020_2021_concat.large.by_tner': 'roberta_ner_roberta_large_tweetner_2020_2021_concat', 'en.ner.roberta.tweet.tweetner_2020_2021_continuous.base.by_tner': 'roberta_ner_roberta_base_tweetner_2020_2021_continuous', 'en.ner.roberta.tweet.tweetner_2020_2021_continuous.large.by_tner': 'roberta_ner_roberta_large_tweetner_2020_2021_continuous', 
'en.ner.roberta.tweet.tweetner_2020_selflabel2020_concat.large.by_tner': 'roberta_ner_roberta_large_tweetner_2020_selflabel2020_concat', 'en.ner.roberta.tweet.tweetner_2020_selflabel2020_continuous.large.by_tner': 'roberta_ner_roberta_large_tweetner_2020_selflabel2020_continuous', 'en.ner.roberta.tweet.tweetner_2020_selflabel2021_concat.large.by_tner': 'roberta_ner_roberta_large_tweetner_2020_selflabel2021_concat', 'en.ner.roberta.tweet.tweetner_2020_selflabel2021_continuous.large.by_tner': 'roberta_ner_roberta_large_tweetner_2020_selflabel2021_continuous', 'en.ner.roberta.tweet.tweetner_2021.base.by_tner': 'roberta_ner_roberta_base_tweetner_2021', 'en.ner.roberta.tweet.tweetner_2021.large.by_tner': 'roberta_ner_roberta_large_tweetner_2021', 'en.ner.roberta.tweet.tweetner_random.large.by_tner': 'roberta_ner_roberta_large_tweetner_random', 'en.ner.roberta.tweet.tweetner_selflabel2020.large.by_tner': 'roberta_ner_roberta_large_tweetner_selflabel2020', 'en.ner.roberta.tweet.tweetner_selflabel2021.large.by_tner': 'roberta_ner_roberta_large_tweetner_selflabel2021', 'en.ner.roberta.tweet_twitter.2019_90m_tweetner_2020_2021_concat.base_90m.by_tner': 'roberta_ner_twitter_roberta_base_2019_90m_tweetner_2020_2021_concat', 'en.ner.roberta.tweet_twitter.2019_90m_tweetner_2020_2021_continuous.base_90m.by_tner': 'roberta_ner_twitter_roberta_base_2019_90m_tweetner_2020_2021_continuous', 'en.ner.roberta.tweet_twitter.2019_90m_tweetner_2021.base_90m.by_tner': 'roberta_ner_twitter_roberta_base_2019_90m_tweetner_2021', 'en.ner.roberta.tweet_twitter.2019_90m_tweetner_random.base_90m.by_tner': 'roberta_ner_twitter_roberta_base_2019_90m_tweetner_random', 'en.ner.roberta.tweet_twitter.base': 'roberta_ner_twitter_roberta_base_dec2021_tweetner_2020_2021_continuous', 'en.ner.roberta.tweet_twitter.base_90m': 'roberta_ner_twitter_roberta_base_2019_90m_tweetner_2020', 'en.ner.roberta.tweet_twitter.dec2020_tweetner_2020.base.by_tner': 'roberta_ner_twitter_roberta_base_dec2020_tweetner_2020', 
'en.ner.roberta.tweet_twitter.dec2020_tweetner_2020_2021_concat.base.by_tner': 'roberta_ner_twitter_roberta_base_dec2020_tweetner_2020_2021_concat', 'en.ner.roberta.tweet_twitter.dec2020_tweetner_2020_2021_continuous.base.by_tner': 'roberta_ner_twitter_roberta_base_dec2020_tweetner_2020_2021_continuous', 'en.ner.roberta.tweet_twitter.dec2020_tweetner_2021.base.by_tner': 'roberta_ner_twitter_roberta_base_dec2020_tweetner_2021', 'en.ner.roberta.tweet_twitter.dec2020_tweetner_random.base.by_tner': 'roberta_ner_twitter_roberta_base_dec2020_tweetner_random', 'en.ner.roberta.tweet_twitter.dec2021_tweetner_2020.base.by_tner': 'roberta_ner_twitter_roberta_base_dec2021_tweetner_2020', 'en.ner.roberta.tweet_twitter.dec2021_tweetner_2020_2021_concat.base.by_tner': 'roberta_ner_twitter_roberta_base_dec2021_tweetner_2020_2021_concat', 'en.ner.roberta.tweet_twitter.dec2021_tweetner_2021.base.by_tner': 'roberta_ner_twitter_roberta_base_dec2021_tweetner_2021', 'en.ner.roberta.tweet_twitter.dec2021_tweetner_random.base.by_tner': 'roberta_ner_twitter_roberta_base_dec2021_tweetner_random', 'en.ner.roberta_classics_ner': 'roberta_ner_roberta_classics_ner', 'en.ner.roberta_large_finetuned_abbr': 'roberta_ner_roberta_large_finetuned_abbr', 'en.ner.roberta_large_ner_english': 'roberta_ner_roberta_large_ner_english', 'en.ner.roberta_ticker': 'roberta_ner_roberta_ticker', 'en.ner.scibert.bc5cdr_scibert.cased_imbalanced': 'bert_ner_bc5cdr_imbalanced_scibert_scivocab_cased', 'en.ner.scibert.bc5cdr_scibert.cased_modified_disease': 'bert_ner_bc5cdr_disease_modified_scibert_scivocab_cased', 'en.ner.scibert.bc5cdr_scibert.original_disease': 'bert_ner_original_scibert_bc5cdr_disease', 'en.ner.scibert.bc5cdr_scibert.uncased_disease_imbalanced': 'bert_ner_bc5cdr_disease_imbalanced_scibert_scivocab_uncased', 'en.ner.scibert.bc5cdr_scibert.uncased_modified_disease': 'bert_ner_bc5cdr_disease_modified_scibert_scivocab_uncased', 'en.ner.scibert.chemical_bc5cdr_scibert.512d_modified': 
'bert_ner_bc5cdr_chem_modified_scibert_512', 'en.ner.scibert.chemical_bc5cdr_scibert.512d_original': 'bert_ner_bc5cdr_chem_original_scibert_512', 'en.ner.scibert.chemical_bc5cdr_scibert.cased_disease': 'bert_ner_bc5cdr_chemical_disease_balanced_scibert_scivocab_cased', 'en.ner.scibert.chemical_bc5cdr_scibert.cased_imbalanced': 'bert_ner_bc5cdr_chemical_imbalanced_scibert_scivocab_cased', 'en.ner.scibert.chemical_bc5cdr_scibert.cased_modified': 'bert_ner_bc5cdr_chemical_modified_scibert_scivocab_cased', 'en.ner.scibert.chemical_bc5cdr_scibert.modified': 'bert_ner_bc5cdr_chem_modified_scibert_384', 'en.ner.scibert.chemical_bc5cdr_scibert.original': 'bert_ner_bc5cdr_chem_original_scibert_384', 'en.ner.scibert.chemical_bc5cdr_scibert.original.by_ghadeermobasher': 'bert_ner_original_scibert_bc5cdr_chemical', 'en.ner.scibert.chemical_bc5cdr_scibert.original_t.by_ghadeermobasher': 'bert_ner_original_scibert_bc5cdr_chemical_t', 'en.ner.scibert.chemical_bc5cdr_scibert.original_t1.by_ghadeermobasher': 'bert_ner_original_scibert_bc5cdr_chemical_t1', 'en.ner.scibert.chemical_bc5cdr_scibert.original_t2.by_ghadeermobasher': 'bert_ner_original_scibert_bc5cdr_chemical_t2', 'en.ner.scibert.chemical_bc5cdr_scibert.uncased_imbalanced': 'bert_ner_bc5cdr_chemical_imbalanced_scibert_scivocab_uncased_latest', 'en.ner.scibert.chemical_bc5cdr_scibert.uncased_modified': 'bert_ner_bc5cdr_chem_modified_scibert_scivocab_uncased_latest', 'en.ner.scibert.chemical_craft_scibert.imbalanced': 'bert_ner_craft_chem_imbalanced_scibert', 'en.ner.scibert.chemical_craft_scibert.modified': 'bert_ner_craft_chem_modified_scibert', 'en.ner.scibert.chemical_craft_scibert.uncased_modified': 'bert_ner_craft_chem_modified_scibert_scivocab_uncased', 'en.ner.scibert.chemical_craft_scibert.uncased_original': 'bert_ner_craft_chem_original_scibert_scivocab_uncased', 'en.ner.scibert.chemical_scibert.512d_modified': 'bert_ner_bc4chemd_chem_modified_scibert_512', 'en.ner.scibert.chemical_scibert.512d_original': 
'bert_ner_bc4chemd_chem_original_scibert_512', 'en.ner.scibert.chemical_scibert.cased_imbalanced': 'bert_ner_bc4chemd_imbalancedscibert_scivocab_cased', 'en.ner.scibert.chemical_scibert.cased_modified': 'bert_ner_bc4chemd_modified_scibert_scivocab_cased', 'en.ner.scibert.chemical_scibert.modified': 'bert_ner_bc4chemd_chem_modified_scibert_384', 'en.ner.scibert.chemical_scibert.original': 'bert_ner_bc4chemd_chem_original_scibert_384', 'en.ner.scibert.chemical_scibert.original.by_ghadeermobasher': 'bert_ner_original_scibert_bc4chemd', 'en.ner.scibert.chemical_scibert.original_o.by_ghadeermobasher': 'bert_ner_original_scibert_bc4chemd_o', 'en.ner.scibert.chemical_scibert_bionlp13cg.512d_original': 'bert_ner_bionlp13cg_chem_chem_original_scibert_512', 'en.ner.scibert.chemical_scibert_bionlp13cg.cased_imbalanced': 'bert_ner_bionlp13cg_chem_imbalanced_scibert_scivocab_cased', 'en.ner.scibert.chemical_scibert_bionlp13cg.modified': 'bert_ner_bionlp13cg_chem_modified_scibert', 'en.ner.scibert.chemical_scibert_bionlp13cg.original': 'bert_ner_bionlp13cg_chem_chem_original_scibert_384', 'en.ner.scibert.craft_scibert.512d_modified': 'bert_ner_craft_modified_scibert_512', 'en.ner.scibert.craft_scibert.512d_original': 'bert_ner_craft_original_scibert_512', 'en.ner.scibert.craft_scibert.modified': 'bert_ner_craft_modified_scibert_384', 'en.ner.scibert.craft_scibert.original': 'bert_ner_craft_original_scibert_384', 'en.ner.scibert.scibert.': 'bert_ner_wlt_scibert_linnaeus', 'en.ner.scibert.scibert.512d_modified': 'bert_ner_bionlp13_modified_scibert_512', 'en.ner.scibert.scibert.by_icelab': 'bert_ner_spacescibert_cr', 'en.ner.scibert.scibert.cased': 'bert_ner_scibert_scivocab_cased_sdu21_ai', 'en.ner.scibert.scibert.cased.by_fran_martinez': 'bert_ner_scibert_scivocab_cased_ner_jnlpba', 'en.ner.scibert.scibert.modified': 'bert_ner_bionlp13_modified_scibert_384', 'en.ner.scibert.scibert.original': 'bert_ner_original_scibert_linnaeus', 'en.ner.scibert.scibert.uncased': 
'bert_ner_scibert_scivocab_uncased_sdu21_ai', 'en.ner.scibert.scibert.uncased_modified': 'bert_ner_bc4_modified_scibert_scivocab_uncased', 'en.ner.scibert.scibert.uncased_original': 'bert_ner_bc4_original_scibert_scivocab_uncased', 'en.ner.scibert.scibert.uncased_scivocab_ft_sdu21_ai.by_napsternxg': 'bert_ner_scibert_scivocab_uncased_ft_sdu21_ai', 'en.ner.scibert.scibert.uncased_scivocab_ft_tv_sdu21_ai.by_napsternxg': 'bert_ner_scibert_scivocab_uncased_ft_tv_sdu21_ai', 'en.ner.scibert.scibert.uncased_scivocab_tv_sdu21_ai.by_napsternxg': 'bert_ner_scibert_scivocab_uncased_tv_sdu21_ai', 'en.ner.scibert.scibert_bc2gm.': 'bert_ner_wlt_scibert_bc2gm', 'en.ner.scibert.scibert_bc2gm.cased_imbalanced': 'bert_ner_bc2gm_gene_imbalancedscibert_scivocab_cased', 'en.ner.scibert.scibert_bc2gm.cased_modified': 'bert_ner_bc2gm_gene_modified_scibert_scivocab_cased', 'en.ner.scibert.scibert_bc2gm.original': 'bert_ner_original_scibert_bc2gm', 'en.ner.scibert.scibert_bionlp13cg.original': 'bert_ner_bionlp13cg_original_scibert_latest', 'en.ner.scibert.scibert_bionlp13cg.uncased_modified': 'bert_ner_bionlp13cg_modified_scibert_uncased_latest', 'en.ner.scibert.scibert_ncbi.': 'bert_ner_orignal_scibert_ncbi', 'en.ner.snips': 'classifierdl_use_snips', 'en.ner.span_bert.cased_v2_large_finetuned_adverse_drug_event': 'bert_ner_spanbert_large_cased_finetuned_ade_corpus_v2', 'en.ner.span_bert.conll.cased_base': 'roberta_ner_spanberta_base_cased_ner_conll02', 'en.ner.span_bert.conll.cased_base_finetuned': 'roberta_ner_spanberta_base_cased_ner_conll02_finetuned_ner', 'en.ner.stocks_ticker': 'roberta_token_classifier_ticker', 'en.ner.time': 'roberta_token_classifier_timex_semeval', 'en.ner.xlm_roberta.base': 'xlm_roberta_base_token_classifier_ontonotes', 'en.ner.xlm_roberta.conll.base': 'xlm_roberta_base_token_classifier_conll03', 'en.ner.xlmr_roberta.all_english.uncased_base.by_tner': 'xlmroberta_ner_base_uncased_all_english', 'en.ner.xlmr_roberta.all_english.uncased_large.by_tner': 
'xlmroberta_ner_large_uncased_all_english', 'en.ner.xlmr_roberta.base': 'xlmroberta_ner_base_bionlp2004', 'en.ner.xlmr_roberta.base.by_ai4sec': 'xlmroberta_ner_cyner_base', 'en.ner.xlmr_roberta.base_finetuned': 'xlmroberta_ner_base_finetuned_recipe_all', 'en.ner.xlmr_roberta.bc5cdr.base': 'xlmroberta_ner_base_bc5cdr', 'en.ner.xlmr_roberta.bc5cdr.large': 'xlmroberta_ner_tner_large_bc5cdr', 'en.ner.xlmr_roberta.bc5cdr.uncased_base': 'xlmroberta_ner_base_uncased_bc5cdr', 'en.ner.xlmr_roberta.bc5cdr.uncased_large': 'xlmroberta_ner_large_uncased_bc5cdr', 'en.ner.xlmr_roberta.bionlp2004.large.by_tner': 'xlmroberta_ner_large_bionlp2004', 'en.ner.xlmr_roberta.bionlp2004.uncased_base.by_tner': 'xlmroberta_ner_base_uncased_bionlp2004', 'en.ner.xlmr_roberta.bionlp2004.uncased_large.by_tner': 'xlmroberta_ner_large_uncased_bionlp2004', 'en.ner.xlmr_roberta.conll.base': 'xlmroberta_ner_tner_base_conll2003', 'en.ner.xlmr_roberta.conll.base.by_Yaxin': 'xlmroberta_ner_yaxin_base_conll2003', 'en.ner.xlmr_roberta.conll.large': 'xlmroberta_ner_large_conll2003', 'en.ner.xlmr_roberta.conll.large_finetuned': 'xlmroberta_ner_large_finetuned_conll03_english', 'en.ner.xlmr_roberta.conll.uncased_base': 'xlmroberta_ner_base_uncased_conll2003', 'en.ner.xlmr_roberta.conll.uncased_large': 'xlmroberta_ner_large_uncased_conll2003', 'en.ner.xlmr_roberta.large': 'xlmroberta_ner_tner_large_all_english', 'en.ner.xlmr_roberta.mit_restaurant.uncased_base.by_tner': 'xlmroberta_ner_base_uncased_mit_restaurant', 'en.ner.xlmr_roberta.mit_restaurant.uncased_large.by_tner': 'xlmroberta_ner_large_uncased_mit_restaurant', 'en.ner.xlmr_roberta.multiconer_multi.large.by_asahi417': 'xlmroberta_ner_tner_large_multiconer_multi', 'en.ner.xlmr_roberta.ontonotes5.base.by_asahi417': 'xlmroberta_ner_tner_base_ontonotes5', 'en.ner.xlmr_roberta.ontonotes5.large.by_asahi417': 'xlmroberta_ner_tner_large_ontonotes5', 'en.ner.xlmr_roberta.panx.uncased_large.by_tner': 'xlmroberta_ner_large_uncased_panx_dataset', 
'en.ner.xlmr_roberta.panx_dataset.base.by_tner': 'xlmroberta_ner_base_panx_dataset', 'en.ner.xlmr_roberta.panx_dataset.large.by_tner': 'xlmroberta_ner_large_panx_dataset', 'en.ner.xlmr_roberta.panx_dataset.uncased_base.by_tner': 'xlmroberta_ner_base_uncased_panx_dataset', 'en.ner.xlmr_roberta.tner.base.by_asahi417': 'xlmroberta_ner_tner_base_all_english', 'en.ner.xlmr_roberta.trivia_movie.uncased_base': 'xlmroberta_ner_base_uncased_mit_movie_trivia', 'en.ner.xlmr_roberta.trivia_movie.uncased_large': 'xlmroberta_ner_large_uncased_mit_movie_trivia', 'en.ner.xlmr_roberta.uncased_base': 'xlmroberta_ner_tner_base_uncased_ontonotes5', 'en.ner.xlmr_roberta.uncased_large': 'xlmroberta_ner_tner_large_uncased_ontonotes5', 'en.ner.xlmr_roberta.wnut2017.base.by_tner': 'xlmroberta_ner_base_wnut2017', 'en.ner.xlmr_roberta.wnut2017.large.by_tner': 'xlmroberta_ner_large_wnut2017', 'en.ner.xlmr_roberta.wnut2017.uncased_base.by_tner': 'xlmroberta_ner_base_uncased_wnut2017', 'en.ner.xlmr_roberta.wnut2017.uncased_large.by_tner': 'xlmroberta_ner_large_uncased_wnut2017', 'en.ner.xlmr_roberta.xtreme.base_finetuned': 'xlmroberta_ner_cj_mills_base_finetuned_panx', 'en.ner.xlmr_roberta.xtreme.base_finetuned.by_Neha2608': 'xlmroberta_ner_neha2608_base_finetuned_panx', 'en.ner.xlmr_roberta.xtreme.base_finetuned.by_V3RX2000': 'xlmroberta_ner_v3rx2000_base_finetuned_panx', 'en.ner.xlmr_roberta.xtreme.base_finetuned.by_dkasti': 'xlmroberta_ner_dkasti_base_finetuned_panx', 'en.ner.xlmr_roberta.xtreme.base_finetuned.by_edwardjross': 'xlmroberta_ner_edwardjross_base_finetuned_panx', 'en.ner.xlmr_roberta.xtreme.base_finetuned.by_flood': 'xlmroberta_ner_flood_base_finetuned_panx', 'en.ner.xlmr_roberta.xtreme.base_finetuned.by_haesun': 'xlmroberta_ner_haesun_base_finetuned_panx', 'en.ner.xlmr_roberta.xtreme.base_finetuned.by_iis2009002': 'xlmroberta_ner_iis2009002_base_finetuned_panx', 'en.ner.xlmr_roberta.xtreme.base_finetuned.by_jgriffi': 'xlmroberta_ner_jgriffi_base_finetuned_panx', 
'en.ner.xlmr_roberta.xtreme.base_finetuned.by_lijingxin': 'xlmroberta_ner_lijingxin_base_finetuned_panx', 'en.ner.xlmr_roberta.xtreme.base_finetuned.by_moghis': 'xlmroberta_ner_moghis_base_finetuned_panx', 'en.ner.xlmr_roberta.xtreme.base_finetuned.by_robkayinto': 'xlmroberta_ner_robkayinto_base_finetuned_panx', 'en.ner.xlmr_roberta.xtreme.base_finetuned.by_skr3178': 'xlmroberta_ner_skr3178_base_finetuned_panx', 'en.ner.xlmr_roberta.xtreme.base_finetuned.by_transformersbook': 'xlmroberta_ner_transformersbook_base_finetuned_panx', 'en.ngram': 'ngram', 'en.norm': 'norm', 'en.pos': 'pos_atis', 'en.pos.anc': 'pos_anc', 'en.pos.atis': 'pos_atis', 'en.pos.ewt': 'pos_ewt', 'en.pos.gum': 'pos_gum', 'en.pos.lines': 'pos_lines', 'en.pos.partut': 'pos_partut', 'en.pos.roberta_base_english_upos': 'roberta_pos_roberta_base_english_upos', 'en.pos.roberta_large_english_upos': 'roberta_pos_roberta_large_english_upos', 'en.pos.ud_ewt': 'pos_ud_ewt', 'en.sentiment.imdb.glove': 'sentimentdl_glove_imdb', 'en.sentiment.imdb.use.dl': 'sentimentdl_use_imdb', 'en.sentiment.twitter.dl': 'sentimentdl_use_twitter', 'en.sentiment.twitter.use.dl': 'sentimentdl_use_twitter', 'en.sentiment.vivekn': 'sentiment_vivekn', 'en.span_question.albert': 'albert_base_qa_squad2', 'en.speech2text.accents.wav2vec2.v2.by_willcai': 'asr_wav2vec2_common_voice_accents_3', 'en.speech2text.accents_indian.wav2vec2.v2.by_willcai': 'asr_wav2vec2_common_voice_accents_indian', 'en.speech2text.bilal.wav2vec2.v2.by_roshana': 'asr_wav2vec2_bilal_2022', 'en.speech2text.cetuc_sid_voxforge.wav2vec2.v2_mls0.by_joaoalvarenga': 'asr_wav2vec2_cetuc_sid_voxforge_mls_0', 'en.speech2text.cetuc_sid_voxforge.wav2vec2.v2_mls1.by_joaoalvarenga': 'asr_wav2vec2_cetuc_sid_voxforge_mls_1', 'en.speech2text.coralwav2vec2.v2.by_joaoalvarenga': 'asr_wav2vec2_coral_300ep', 'en.speech2text.dansk.wav2vec2.v2.by_siyam': 'asr_dansk_wav2vec21', 'en.speech2text.dansk.wav2vec2.v2.stt.by_siyam': 'asr_dansk_wav2vec2_stt', 'en.speech2text.wav2vec2': 
'asr_part1', 'en.speech2text.wav2vec2.aa_v2_large_gpu': 'asr_wav2vec2_large_a_gpu', 'en.speech2text.wav2vec2.accents.v2_gpu.by_willcai': 'asr_wav2vec2_common_voice_accents_3_gpu', 'en.speech2text.wav2vec2.aug.v2_gpu.by_roshana': 'asr_wav2vec2_25_1aug_2022_gpu', 'en.speech2text.wav2vec2.bilal.v2_gpu.by_roshana': 'asr_wav2vec2_bilal_2022_gpu', 'en.speech2text.wav2vec2.by_abhishek': 'asr_autonlp_hindi_asr', 'en.speech2text.wav2vec2.by_behroz': 'asr_sp_proj', 'en.speech2text.wav2vec2.by_bilalahmed15': 'asr_urdu_repo', 'en.speech2text.wav2vec2.by_birgermoell': 'asr_liepa_lithuanian', 'en.speech2text.wav2vec2.by_bkh6722': 'asr_bach_arb', 'en.speech2text.wav2vec2.by_buidung2004': 'asr_maialong_model', 'en.speech2text.wav2vec2.by_denden': 'asr_iloko', 'en.speech2text.wav2vec2.by_fractalego': 'asr_personal_speech_text_model', 'en.speech2text.wav2vec2.by_hf_internal_testing': 'asr_processor_with_lm', 'en.speech2text.wav2vec2.by_khady': 'asr_wolof_asr', 'en.speech2text.wav2vec2.by_lilitket': 'asr_20220507_122935', 'en.speech2text.wav2vec2.by_mimi': 'asr_wynehills_mimi_asr', 'en.speech2text.wav2vec2.by_nuwaisir': 'asr_quran_speech_recognizer', 'en.speech2text.wav2vec2.by_sania67': 'asr_fine_tunning_on_cv_dataset', 'en.speech2text.wav2vec2.by_talha': 'asr_urdu_asr', 'en.speech2text.wav2vec2.by_tarakki100': 'asr_sanskrit', 'en.speech2text.wav2vec2.by_ying_tina': 'asr_temp', 'en.speech2text.wav2vec2.crypto.v2_finetuned': 'asr_wav2vec_finetuned_on_cryptocurrency', 'en.speech2text.wav2vec2.crypto.v2_finetuned_gpu': 'asr_wav2vec_finetuned_on_cryptocurrency_gpu', 'en.speech2text.wav2vec2.dansk.v2_gpu.by_siyam': 'asr_dansk_wav2vec21_gpu', 'en.speech2text.wav2vec2.dansk.v2_stt_gpu.by_siyam': 'asr_dansk_wav2vec2_stt_gpu', 'en.speech2text.wav2vec2.distilled_v2': 'asr_distil_wav2vec2', 'en.speech2text.wav2vec2.distilled_v2_gpu': 'asr_distil_wav2vec2_gpu', 'en.speech2text.wav2vec2.finetuned': 'asr_finetuned_audio_transcriber', 'en.speech2text.wav2vec2.gpu': 
'asr_quran_speech_recognizer_gpu', 'en.speech2text.wav2vec2.gpu.by_abhishek': 'asr_autonlp_hindi_asr_gpu', 'en.speech2text.wav2vec2.gpu.by_behroz': 'asr_sp_proj_gpu', 'en.speech2text.wav2vec2.gpu.by_bilalahmed15': 'asr_urdu_repo_gpu', 'en.speech2text.wav2vec2.gpu.by_birgermoell': 'asr_liepa_lithuanian_gpu', 'en.speech2text.wav2vec2.gpu.by_bkh6722': 'asr_bach_arb_gpu', 'en.speech2text.wav2vec2.gpu.by_buidung2004': 'asr_maialong_model_gpu', 'en.speech2text.wav2vec2.gpu.by_denden': 'asr_iloko_gpu', 'en.speech2text.wav2vec2.gpu.by_fractalego': 'asr_personal_speech_text_model_gpu', 'en.speech2text.wav2vec2.gpu.by_hf_internal_testing': 'asr_processor_with_lm_gpu', 'en.speech2text.wav2vec2.gpu.by_joaoalvarenga': 'asr_model_sid_voxforge_cetuc_1_gpu', 'en.speech2text.wav2vec2.gpu.by_khady': 'asr_wolof_asr_gpu', 'en.speech2text.wav2vec2.gpu.by_sania67': 'asr_fine_tunning_on_cv_dataset_gpu', 'en.speech2text.wav2vec2.gpu.by_talha': 'asr_urdu_asr_gpu', 'en.speech2text.wav2vec2.gpu.by_tarakki100': 'asr_sanskrit_gpu', 'en.speech2text.wav2vec2.gpu.by_ying_tina': 'asr_temp_gpu', 'en.speech2text.wav2vec2.gpu.by_zasheza': 'asr_part1_gpu', 'en.speech2text.wav2vec2.gpu.v1.by_niclas': 'asr_model_2_gpu', 'en.speech2text.wav2vec2.gpu.v2.by_niclas': 'asr_model_4_gpu', 'en.speech2text.wav2vec2.gpu.v3.by_niclas': 'asr_models_6_gpu', 'en.speech2text.wav2vec2.gram.v2_base_100h_gpu.by_saahith': 'asr_wav2vec2_base_100h_ngram_gpu', 'en.speech2text.wav2vec2.indian.v2_gpu.by_harveenchadha': 'asr_vakyansh_wav2vec2_indian_english_enm_700_gpu', 'en.speech2text.wav2vec2.indian_accents.v2_gpu.by_willcai': 'asr_wav2vec2_common_voice_accents_indian_gpu', 'en.speech2text.wav2vec2.ksponspeech.v2': 'asr_wav2vec2_ksponspeech', 'en.speech2text.wav2vec2.ksponspeech.v2_gpu': 'asr_wav2vec2_ksponspeech_gpu', 'en.speech2text.wav2vec2.libri.v2_large_960h.by_facebook': 'asr_wav2vec2_large_robust_libri_960h', 'en.speech2text.wav2vec2.maithili.v2.by_harveenchadha': 'asr_vakyansh_wav2vec2_maithili_maim_50', 
'en.speech2text.wav2vec2.maithili.v2_gpu.by_harveenchadha': 'asr_vakyansh_wav2vec2_maithili_maim_50_gpu', 'en.speech2text.wav2vec2.murad.v2_data_gpu.by_mbmmurad': 'asr_wav2vec2_murad_with_some_data_gpu', 'en.speech2text.wav2vec2.murad.v2_gpu.by_mbmmurad': 'asr_wav2vec2_murad_gpu', 'en.speech2text.wav2vec2.ngram.v2_base_100h.by_saahith': 'asr_wav2vec2_base_100h_ngram', 'en.speech2text.wav2vec2.sanskrit.v2.by_harveenchadha': 'asr_vakyansh_wav2vec2_sanskrit_sam_60', 'en.speech2text.wav2vec2.sanskrit.v2_gpu.by_harveenchadha': 'asr_vakyansh_wav2vec2_sanskrit_sam_60_gpu', 'en.speech2text.wav2vec2.telgu.v2_gpu.by_harveenchadha': 'asr_vakyansh_wav2vec2_telugu_tem_100_gpu', 'en.speech2text.wav2vec2.test.v2_base_100h.by_saahith': 'asr_wav2vec2_base_100h_test', 'en.speech2text.wav2vec2.test.v2_base_100h_gpu.by_saahith': 'asr_wav2vec2_base_100h_test_gpu', 'en.speech2text.wav2vec2.timit.v2_base': 'asr_wav2vec2_base_timit_asr', 'en.speech2text.wav2vec2.timit.v2_base.by_patrickvonplaten': 'asr_wav2vec2_base_timit_demo_by_patrickvonplaten', 'en.speech2text.wav2vec2.timit.v2_base.by_prows12': 'asr_wav2vec2_base_timit_demo_test_jong', 'en.speech2text.wav2vec2.timit.v2_base.by_shields': 'asr_wav2vec2_base_20sec_timit_and_dementiabank', 'en.speech2text.wav2vec2.timit.v2_base_32d_colab': 'asr_wav2vec2_base_timit_demo_colab_32_epochs30', 'en.speech2text.wav2vec2.timit.v2_base_32d_colab.by_ying_tina': 'asr_wav2vec2_base_timit_demo_colab_32_epochs50_earlystop', 'en.speech2text.wav2vec2.timit.v2_base_32d_gpu_colab': 'asr_wav2vec2_base_timit_demo_colab_32_epochs30_gpu', 'en.speech2text.wav2vec2.timit.v2_base_32d_gpu_colab.by_ying_tina': 'asr_wav2vec2_base_timit_demo_colab_32_epochs50_earlystop_gpu', 'en.speech2text.wav2vec2.timit.v2_base_adverse_drug_event_colab': 'asr_wav2vec2_base_timit_demo_google_colab_by_adelgalu', 'en.speech2text.wav2vec2.timit.v2_base_adverse_drug_event_gpu_colab': 'asr_wav2vec2_base_timit_demo_google_colab_by_adelgalu_gpu', 
'en.speech2text.wav2vec2.timit.v2_base_colab': 'asr_wav2vec2_base_timit_ali_hasan_colab_ex2', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_MoHai': 'asr_wav2vec2_base_timit_demo_colab_by_mohai', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_Sarahliu186': 'asr_wav2vec2_base_timit_demo_colab_by_sarahliu186', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_ali221000262': 'asr_wav2vec2_base_timit_demo_colab_by_ali221000262', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_anan0329': 'asr_wav2vec2_base_timit_demo_colab_by_anan0329', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_doddle124578': 'asr_wav2vec2_base_timit_demo_colab_1_by_doddle124578', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_gullenasatish': 'asr_wav2vec2_base_timit_demo_colab_by_gullenasatish', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_hady': 'asr_wav2vec2_base_timit_demo_colab_by_hady', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_izzy_lazerson': 'asr_wav2vec2_base_timit_demo_colab_by_izzy_lazerson', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_jessiejohnson': 'asr_wav2vec2_base_timit_demo_colab_by_jessiejohnson', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_nadaAlnada': 'asr_wav2vec2_base_timit_demo_colab_by_nadaalnada', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_ntp0102': 'asr_wav2vec2_base_timit_demo_colab_by_ntp0102', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_patrickvonplaten': 'asr_wav2vec2_base_timit_demo_google_colab_by_patrickvonplaten', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_radhakri119': 'asr_wav2vec2_base_timit_demo_colab_by_radhakri119', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_samantharhay': 'asr_wav2vec2_base_timit_demo_colab_by_samantharhay', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_terri1102': 'asr_wav2vec2_base_timit_demo_colab_by_terri1102', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_testimonial': 'asr_wav2vec2_base_timit_demo_colab_by_testimonial', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_tingtingyuli': 
'asr_wav2vec2_base_timit_demo_colab_by_tingtingyuli', 'en.speech2text.wav2vec2.timit.v2_base_colab.by_ying_tina': 'asr_wav2vec2_base_timit_demo_colab_test', 'en.speech2text.wav2vec2.timit.v2_base_colab0': 'asr_wav2vec2_base_timit_demo_colab0_by_cuzeverynameistaken', 'en.speech2text.wav2vec2.timit.v2_base_colab0.by_hassnain': 'asr_wav2vec2_base_timit_demo_colab0_by_hassnain', 'en.speech2text.wav2vec2.timit.v2_base_colab0.by_sherry7144': 'asr_wav2vec2_base_timit_demo_colab0_by_sherry7144', 'en.speech2text.wav2vec2.timit.v2_base_colab0.by_tahazakir': 'asr_wav2vec2_base_timit_demo_colab0_by_tahazakir', 'en.speech2text.wav2vec2.timit.v2_base_colab1': 'asr_wav2vec2_base_timit_demo_colab1_by_cuzeverynameistaken', 'en.speech2text.wav2vec2.timit.v2_base_colab1.by_sherry7144': 'asr_wav2vec2_base_timit_demo_colab1_by_sherry7144', 'en.speech2text.wav2vec2.timit.v2_base_colab1.by_tahazakir': 'asr_wav2vec2_base_timit_demo_colab1_by_tahazakir', 'en.speech2text.wav2vec2.timit.v2_base_colab10': 'asr_wav2vec2_base_timit_demo_colab10', 'en.speech2text.wav2vec2.timit.v2_base_colab11': 'asr_wav2vec2_base_timit_demo_colab11_by_sameearif88', 'en.speech2text.wav2vec2.timit.v2_base_colab2': 'asr_wav2vec2_base_timit_demo_colab2_by_ahmad573', 'en.speech2text.wav2vec2.timit.v2_base_colab2.by_hassnain': 'asr_wav2vec2_base_timit_demo_colab2_by_hassnain', 'en.speech2text.wav2vec2.timit.v2_base_colab2.by_sameearif88': 'asr_wav2vec2_base_timit_demo_colab2_by_sameearif88', 'en.speech2text.wav2vec2.timit.v2_base_colab2.by_sherry7144': 'asr_wav2vec2_base_timit_demo_colab2_by_sherry7144', 'en.speech2text.wav2vec2.timit.v2_base_colab240': 'asr_wav2vec2_base_timit_demo_colab240', 'en.speech2text.wav2vec2.timit.v2_base_colab3': 'asr_wav2vec2_base_timit_demo_colab3_by_hassnain', 'en.speech2text.wav2vec2.timit.v2_base_colab3.by_sherry7144': 'asr_wav2vec2_base_timit_demo_colab3_by_sherry7144', 'en.speech2text.wav2vec2.timit.v2_base_colab30': 'asr_wav2vec2_base_timit_demo_colab30', 
'en.speech2text.wav2vec2.timit.v2_base_colab4': 'asr_wav2vec2_base_timit_demo_colab4', 'en.speech2text.wav2vec2.timit.v2_base_colab40': 'asr_wav2vec2_base_timit_demo_colab40', 'en.speech2text.wav2vec2.timit.v2_base_colab50': 'asr_wav2vec2_base_timit_demo_colab50', 'en.speech2text.wav2vec2.timit.v2_base_colab51': 'asr_wav2vec2_base_timit_demo_colab51', 'en.speech2text.wav2vec2.timit.v2_base_colab52': 'asr_wav2vec2_base_timit_demo_colab52', 'en.speech2text.wav2vec2.timit.v2_base_colab53': 'asr_wav2vec2_base_timit_demo_colab53_by_hassnain', 'en.speech2text.wav2vec2.timit.v2_base_colab57': 'asr_wav2vec2_base_timit_demo_colab57', 'en.speech2text.wav2vec2.timit.v2_base_colab6': 'asr_wav2vec2_base_timit_demo_colab6_by_hassnain', 'en.speech2text.wav2vec2.timit.v2_base_colab647': 'asr_wav2vec2_base_timit_demo_colab647', 'en.speech2text.wav2vec2.timit.v2_base_colab66': 'asr_wav2vec2_base_timit_demo_colab66', 'en.speech2text.wav2vec2.timit.v2_base_colab7': 'asr_wav2vec2_base_timit_demo_colab7_by_hassnain', 'en.speech2text.wav2vec2.timit.v2_base_colab7.by_sameearif88': 'asr_wav2vec2_base_timit_demo_colab7_by_sameearif88', 'en.speech2text.wav2vec2.timit.v2_base_colab70': 'asr_wav2vec2_base_timit_demo_colab70', 'en.speech2text.wav2vec2.timit.v2_base_colab9': 'asr_wav2vec2_base_timit_demo_colab9', 'en.speech2text.wav2vec2.timit.v2_base_colab90': 'asr_wav2vec2_base_timit_demo_colab90', 'en.speech2text.wav2vec2.timit.v2_base_colab92': 'asr_wav2vec2_base_timit_demo_colab92', 'en.speech2text.wav2vec2.timit.v2_base_colab971': 'asr_wav2vec2_base_timit_demo_colab971', 'en.speech2text.wav2vec2.timit.v2_base_exp1.by_moaiz237': 'asr_wav2vec2_base_timit_moaiz_exp1', 'en.speech2text.wav2vec2.timit.v2_base_exp2.by_moaiz237': 'asr_wav2vec2_base_timit_moaiz_exp2', 'en.speech2text.wav2vec2.timit.v2_base_explast.by_moaiz237': 'asr_wav2vec2_base_timit_moaiz_explast', 'en.speech2text.wav2vec2.timit.v2_base_gpu': 'asr_wav2vec2_base_timit_asr_gpu', 
'en.speech2text.wav2vec2.timit.v2_base_gpu.by_moaiz237': 'asr_wav2vec2_base_timit_moaiz_explast_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu.by_patrickvonplaten': 'asr_wav2vec2_base_timit_demo_by_patrickvonplaten_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu.by_prows12': 'asr_wav2vec2_base_timit_demo_test_jong_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu.by_shields': 'asr_wav2vec2_base_20sec_timit_and_dementiabank_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab': 'asr_wav2vec2_base_timit_ali_hasan_colab_ex2_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab.by_MoHai': 'asr_wav2vec2_base_timit_demo_colab_by_mohai_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab.by_anan0329': 'asr_wav2vec2_base_timit_demo_colab_by_anan0329_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab.by_doddle124578': 'asr_wav2vec2_base_timit_demo_colab_by_doddle124578_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab.by_gullenasatish': 'asr_wav2vec2_base_timit_demo_colab_by_gullenasatish_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab.by_hady': 'asr_wav2vec2_base_timit_demo_colab_by_hady_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab.by_izzy_lazerson': 'asr_wav2vec2_base_timit_demo_colab_by_izzy_lazerson_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab.by_jessiejohnson': 'asr_wav2vec2_base_timit_demo_colab_by_jessiejohnson_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab.by_nadaAlnada': 'asr_wav2vec2_base_timit_demo_colab_by_nadaalnada_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab.by_ntp0102': 'asr_wav2vec2_base_timit_demo_colab_by_ntp0102_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab.by_patrickvonplaten': 'asr_wav2vec2_base_timit_demo_google_colab_by_patrickvonplaten_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab.by_radhakri119': 'asr_wav2vec2_base_timit_demo_colab_by_radhakri119_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab.by_samantharhay': 
'asr_wav2vec2_base_timit_demo_colab_by_samantharhay_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab.by_testimonial': 'asr_wav2vec2_base_timit_demo_colab_by_testimonial_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab.by_tingtingyuli': 'asr_wav2vec2_base_timit_demo_colab_by_tingtingyuli_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab.by_ying_tina': 'asr_wav2vec2_base_timit_demo_colab_test_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab0': 'asr_wav2vec2_base_timit_demo_colab0_by_cuzeverynameistaken_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab0.by_hassnain': 'asr_wav2vec2_base_timit_demo_colab0_by_hassnain_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab0.by_sherry7144': 'asr_wav2vec2_base_timit_demo_colab0_by_sherry7144_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab0.by_tahazakir': 'asr_wav2vec2_base_timit_demo_colab0_by_tahazakir_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab1': 'asr_wav2vec2_base_timit_demo_colab1_by_cuzeverynameistaken_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab1.by_sherry7144': 'asr_wav2vec2_base_timit_demo_colab1_by_sherry7144_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab1.by_tahazakir': 'asr_wav2vec2_base_timit_demo_colab1_by_tahazakir_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab10': 'asr_wav2vec2_base_timit_demo_colab10_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab2': 'asr_wav2vec2_base_timit_demo_colab2_by_ahmad573_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab2.by_sherry7144': 'asr_wav2vec2_base_timit_demo_colab2_by_sherry7144_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab3': 'asr_wav2vec2_base_timit_demo_colab3_by_hassnain_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab4': 'asr_wav2vec2_base_timit_demo_colab4_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab40': 'asr_wav2vec2_base_timit_demo_colab40_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab50': 'asr_wav2vec2_base_timit_demo_colab50_gpu', 
'en.speech2text.wav2vec2.timit.v2_base_gpu_colab51': 'asr_wav2vec2_base_timit_demo_colab51_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab53': 'asr_wav2vec2_base_timit_demo_colab53_by_hassnain_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab6': 'asr_wav2vec2_base_timit_demo_colab6_by_hassnain_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab7': 'asr_wav2vec2_base_timit_demo_colab7_by_hassnain_gpu', 'en.speech2text.wav2vec2.timit.v2_base_gpu_colab9': 'asr_wav2vec2_base_timit_demo_colab9_gpu', 'en.speech2text.wav2vec2.timit.v2_large': 'asr_wav2vec2_large_lv60_timit_asr', 'en.speech2text.wav2vec2.timit.v2_large_gpu': 'asr_wav2vec2_large_lv60_timit_asr_gpu', 'en.speech2text.wav2vec2.timit.v3_base_colab.by_doddle124578': 'asr_wav2vec2_base_timit_demo_colab_2', 'en.speech2text.wav2vec2.timit.v4_base_colab.by_doddle124578': 'asr_wav2vec2_base_timit_demo_colab_3', 'en.speech2text.wav2vec2.timit.v5_base_colab.by_doddle124578': 'asr_wav2vec2_base_timit_demo_colab_by_doddle124578', 'en.speech2text.wav2vec2.v1.by_niclas': 'asr_model_2', 'en.speech2text.wav2vec2.v2': 'asr_asr_with_transformers_wav2vec2', 'en.speech2text.wav2vec2.v2.by_ai4bharat': 'asr_indicwav2vec_v1_bengali', 'en.speech2text.wav2vec2.v2.by_anjulrajendrasharma': 'asr_wav2vec2_indian_english', 'en.speech2text.wav2vec2.v2.by_burakyldrm': 'asr_wav2vec2_burak_v2.1', 'en.speech2text.wav2vec2.v2.by_gngpostalsrvc': 'asr_w2v2_ami', 'en.speech2text.wav2vec2.v2.by_hoangbinhmta99': 'asr_wav2vec_demo', 'en.speech2text.wav2vec2.v2.by_itaihay': 'asr_wav2vec_asr_swbd', 'en.speech2text.wav2vec2.v2.by_mbmmurad': 'asr_wav2vec2_murad_with_some_data', 'en.speech2text.wav2vec2.v2.by_neelan_elucidate_ai': 'asr_wav2vec2_tcrs_runtest', 'en.speech2text.wav2vec2.v2.by_niclas': 'asr_model_4', 'en.speech2text.wav2vec2.v2.by_raffay': 'asr_final_wav2vec2_urdu_asr_project', 'en.speech2text.wav2vec2.v2.by_rattana': 'asr_wav2vec2_thai_asr', 'en.speech2text.wav2vec2.v2.by_roshana': 'asr_wav2vec2_25_1aug_2022', 
'en.speech2text.wav2vec2.v2.by_snehatyagi': 'asr_wav2vec2_test', 'en.speech2text.wav2vec2.v2.by_thunninoi': 'asr_wav2vec2_japanese_hiragana_vtuber', 'en.speech2text.wav2vec2.v2.by_urukhan': 'asr_wav2vec2_russian', 'en.speech2text.wav2vec2.v2_20epoch': 'asr_wav2vec2_bilal_20epoch', 'en.speech2text.wav2vec2.v2_20epoch_gpu': 'asr_wav2vec2_bilal_20epoch_gpu', 'en.speech2text.wav2vec2.v2_ami_gpu.by_gngpostalsrvc': 'asr_w2v2_ami_gpu', 'en.speech2text.wav2vec2.v2_base': 'asr_wav2vec2_base_checkpoint_10', 'en.speech2text.wav2vec2.v2_base.by_cahya': 'asr_wav2vec2_base_test', 'en.speech2text.wav2vec2.v2_base.by_huyue012': 'asr_wav2vec2_base_cynthia_tedlium_2500_v2', 'en.speech2text.wav2vec2.v2_base.by_rjrohit': 'asr_wav2vec2_base_rj_try_5', 'en.speech2text.wav2vec2.v2_base.by_sanchit_gandhi': 'asr_wav2vec2_ctc_earnings22_baseline_5_gram', 'en.speech2text.wav2vec2.v2_base_10000.by_jiobiala24': 'asr_wav2vec2_base_10000', 'en.speech2text.wav2vec2.v2_base_10000_gpu.by_jiobiala24': 'asr_wav2vec2_base_10000_gpu', 'en.speech2text.wav2vec2.v2_base_100h': 'asr_wav2vec2_base_100h_by_facebook', 'en.speech2text.wav2vec2.v2_base_100h.by_gorkemgoknar': 'asr_wav2vec2_base_100h_with_lm_turkish', 'en.speech2text.wav2vec2.v2_base_100h.by_patrickvonplaten': 'asr_wav2vec2_base_100h_with_lm_by_patrickvonplaten', 'en.speech2text.wav2vec2.v2_base_100h.by_saahith': 'asr_wav2vec2_base_100h_with_lm_by_saahith', 'en.speech2text.wav2vec2.v2_base_100h.by_vuiseng9': 'asr_wav2vec2_base_100h_by_vuiseng9', 'en.speech2text.wav2vec2.v2_base_100h_13k_steps.by_patrickvonplaten': 'asr_wav2vec2_base_100h_13k_steps', 'en.speech2text.wav2vec2.v2_base_100h_13ksteps_gpu.by_patrickvonplaten': 'asr_wav2vec2_base_100h_13k_steps_gpu', 'en.speech2text.wav2vec2.v2_base_100h_gpu': 'asr_wav2vec2_base_100h_by_facebook_gpu', 'en.speech2text.wav2vec2.v2_base_100h_gpu.by_gorkemgoknar': 'asr_wav2vec2_base_100h_with_lm_turkish_gpu', 'en.speech2text.wav2vec2.v2_base_100h_gpu.by_patrickvonplaten': 
'asr_wav2vec2_base_100h_with_lm_by_patrickvonplaten_gpu', 'en.speech2text.wav2vec2.v2_base_100h_gpu.by_saahith': 'asr_wav2vec2_base_100h_with_lm_by_saahith_gpu', 'en.speech2text.wav2vec2.v2_base_3percent': 'asr_wav2vec2_base_swbd_turn_eos_long_short_utt_removed_3percent', 'en.speech2text.wav2vec2.v2_base_3percent_gpu': 'asr_wav2vec2_base_swbd_turn_eos_long_short_utt_removed_3percent_gpu', 'en.speech2text.wav2vec2.v2_base_5percent': 'asr_wav2vec2_base_swbd_turn_eos_long_short_utt_removed_5percent', 'en.speech2text.wav2vec2.v2_base_5percent_gpu': 'asr_wav2vec2_base_swbd_turn_eos_long_short_utt_removed_5percent_gpu', 'en.speech2text.wav2vec2.v2_base_960h': 'asr_wav2vec2_base_960h_4_gram', 'en.speech2text.wav2vec2.v2_base_960h.by_facebook': 'asr_wav2vec2_base_960h_by_facebook', 'en.speech2text.wav2vec2.v2_base_960h_gpu': 'asr_wav2vec2_base_960h_by_facebook_gpu', 'en.speech2text.wav2vec2.v2_base_960h_gpu.by_patrickvonplaten': 'asr_wav2vec2_base_960h_4_gram_gpu', 'en.speech2text.wav2vec2.v2_base_checkpoint14.by_jiobiala24': 'asr_wav2vec2_base_checkpoint_14', 'en.speech2text.wav2vec2.v2_base_checkpoint_6.by_jiobiala24': 'asr_wav2vec2_base_checkpoint_6', 'en.speech2text.wav2vec2.v2_base_checkpoint_9.by_jiobiala24': 'asr_wav2vec2_base_checkpoint_9', 'en.speech2text.wav2vec2.v2_base_colab': 'asr_wav2vec2_base_common_voice_second_colab', 'en.speech2text.wav2vec2.v2_base_colab.by_thyagosme': 'asr_wav2vec2_base_demo_colab_by_thyagosme', 'en.speech2text.wav2vec2.v2_base_finetuned': 'asr_wav2vec2_base_finetuned_spgi_speech_dev', 'en.speech2text.wav2vec2.v2_base_finetuned_gpu': 'asr_wav2vec2_base_finetuned_spgi_speech_dev_gpu', 'en.speech2text.wav2vec2.v2_base_gpu': 'asr_wav2vec2_ctc_earnings22_baseline_5_gram_gpu', 'en.speech2text.wav2vec2.v2_base_gpu.by_cahya': 'asr_wav2vec2_base_test_gpu', 'en.speech2text.wav2vec2.v2_base_gpu.by_huyue012': 'asr_wav2vec2_base_cynthia_tedlium_2500_v2_gpu', 'en.speech2text.wav2vec2.v2_base_gpu.by_rjrohit': 'asr_wav2vec2_base_rj_try_5_gpu', 
'en.speech2text.wav2vec2.v2_base_gpu_checkpoint_6.by_jiobiala24': 'asr_wav2vec2_base_checkpoint_6_gpu', 'en.speech2text.wav2vec2.v2_base_gpu_checkpoint_9.by_jiobiala24': 'asr_wav2vec2_base_checkpoint_9_gpu', 'en.speech2text.wav2vec2.v2_base_gpu_colab': 'asr_wav2vec2_base_nptel_demo_colab_gpu', 'en.speech2text.wav2vec2.v2_base_gpu_colab.by_thyagosme': 'asr_wav2vec2_base_demo_colab_by_thyagosme_gpu', 'en.speech2text.wav2vec2.v2_base_gpu_colab.by_zoha': 'asr_wav2vec2_base_common_voice_second_colab_gpu', 'en.speech2text.wav2vec2.v2_colab': 'asr_wav2vec2_vee_demo_colab', 'en.speech2text.wav2vec2.v2_enm700.by_harveenchadha': 'asr_vakyansh_wav2vec2_indian_english_enm_700', 'en.speech2text.wav2vec2.v2_gpu': 'asr_asr_with_transformers_wav2vec2_gpu', 'en.speech2text.wav2vec2.v2_gpu.by_ai4bharat': 'asr_indicwav2vec_v1_bengali_gpu', 'en.speech2text.wav2vec2.v2_gpu.by_anjulrajendrasharma': 'asr_wav2vec2_indian_english_gpu', 'en.speech2text.wav2vec2.v2_gpu.by_burakyldrm': 'asr_wav2vec2_burak_v2.1_gpu', 'en.speech2text.wav2vec2.v2_gpu.by_gngpostalsrvc': 'asr_w2v2_gpu', 'en.speech2text.wav2vec2.v2_gpu.by_hoangbinhmta99': 'asr_wav2vec_demo_gpu', 'en.speech2text.wav2vec2.v2_gpu.by_itaihay': 'asr_wav2vec_asr_swbd_gpu', 'en.speech2text.wav2vec2.v2_gpu.by_joaoalvarenga': 'asr_wav2vec2_coral_300ep_gpu', 'en.speech2text.wav2vec2.v2_gpu.by_neelan_elucidate_ai': 'asr_wav2vec2_tcrs_runtest_gpu', 'en.speech2text.wav2vec2.v2_gpu.by_snehatyagi': 'asr_wav2vec2_test_gpu', 'en.speech2text.wav2vec2.v2_gpu.by_thunninoi': 'asr_wav2vec2_japanese_hiragana_vtuber_gpu', 'en.speech2text.wav2vec2.v2_gpu.by_urukhan': 'asr_wav2vec2_russian_gpu', 'en.speech2text.wav2vec2.v2_gpu_colab': 'asr_wav2vec2_vee_demo_colab_gpu', 'en.speech2text.wav2vec2.v2_large': 'asr_wav2vec2_large_a', 'en.speech2text.wav2vec2.v2_large.by_crossdelenna': 'asr_wav2vec2_large_in_lm', 'en.speech2text.wav2vec2.v2_large.by_jonatasgrosman': 'asr_wav2vec2_large_english', 'en.speech2text.wav2vec2.v2_large.by_leonardvorbeck': 
'asr_wav2vec2_large_robust_ls960', 'en.speech2text.wav2vec2.v2_large.by_prajwals': 'asr_wav2vec2_med_custom_train_large', 'en.speech2text.wav2vec2.v2_large.by_sanchit_gandhi': 'asr_wav2vec2_large_tedlium', 'en.speech2text.wav2vec2.v2_large_10m': 'asr_wav2vec2_large_10min_lv60_self', 'en.speech2text.wav2vec2.v2_large_10m_gpu': 'asr_wav2vec2_large_10min_lv60_self_gpu', 'en.speech2text.wav2vec2.v2_large_300h': 'asr_wav2vec2_large_robust_swbd_300h', 'en.speech2text.wav2vec2.v2_large_300h_gpu': 'asr_wav2vec2_large_robust_swbd_300h_gpu', 'en.speech2text.wav2vec2.v2_large_4500h': 'asr_iwslt_asr_wav2vec_large_4500h', 'en.speech2text.wav2vec2.v2_large_4500h_gpu': 'asr_iwslt_asr_wav2vec_large_4500h_gpu', 'en.speech2text.wav2vec2.v2_large_960h': 'asr_wav2vec2_large_960h', 'en.speech2text.wav2vec2.v2_large_960h.by_gxbag': 'asr_wav2vec2_large_960h_lv60_self_with_wikipedia_lm', 'en.speech2text.wav2vec2.v2_large_960h.by_patrickvonplaten': 'asr_wav2vec2_large_960h_lv60_self_4_gram', 'en.speech2text.wav2vec2.v2_large_960h_gpu': 'asr_wav2vec2_large_960h_gpu', 'en.speech2text.wav2vec2.v2_large_960h_gpu.by_facebook': 'asr_wav2vec2_large_960h_lv60_gpu', 'en.speech2text.wav2vec2.v2_large_960h_gpu.by_gxbag': 'asr_wav2vec2_large_960h_lv60_self_with_wikipedia_lm_gpu', 'en.speech2text.wav2vec2.v2_large_960h_gpu.by_patrickvonplaten': 'asr_wav2vec2_large_960h_lv60_self_4_gram_gpu', 'en.speech2text.wav2vec2.v2_large_960h_lv60.by_facebook': 'asr_wav2vec2_large_960h_lv60', 'en.speech2text.wav2vec2.v2_large_960h_self.by_facebook': 'asr_wav2vec2_large_960h_lv60_self', 'en.speech2text.wav2vec2.v2_large_960h_self_gpu.by_facebook': 'asr_wav2vec2_large_960h_lv60_self_gpu', 'en.speech2text.wav2vec2.v2_large_gpu': 'asr_wav2vec2_large_english_gpu', 'en.speech2text.wav2vec2.v2_large_gpu.by_crossdelenna': 'asr_wav2vec2_large_in_lm_gpu', 'en.speech2text.wav2vec2.v2_large_gpu.by_leonardvorbeck': 'asr_wav2vec2_large_robust_ls960_gpu', 'en.speech2text.wav2vec2.v2_large_gpu.by_sanchit_gandhi': 
'asr_wav2vec2_large_tedlium_gpu', 'en.speech2text.wav2vec2.v2_large_roobust_960h_gpu.by_facebook': 'asr_wav2vec2_large_robust_libri_960h_gpu', 'en.speech2text.wav2vec2.v3.by_niclas': 'asr_models_6', 'en.speech2text.wav2vec2.voxforge.v2_gpu_mls.by_joaoalvarenga': 'asr_wav2vec2_cetuc_sid_voxforge_mls_0_gpu', 'en.speech2text.wav2vec2.voxforge_cetuc.v1.by_joaoalvarenga': 'asr_model_sid_voxforge_cetuc_1', 'en.speech2text.wav2vec2.voxforge_cetuc.v2.by_joaoalvarenga': 'asr_model_sid_voxforge_cetuc_2', 'en.speech2text.wav2vec2.voxpopuli.v2_base': 'asr_wav2vec2_base_10k_voxpopuli', 'en.speech2text.wav2vec2.voxpopuli.v2_base_gpu': 'asr_wav2vec2_base_10k_voxpopuli_gpu', 'en.speech2text.wav2vec_xlsr': 'asr_xlsr_punctuation', 'en.speech2text.wav2vec_xlsr.300m': 'asr_hausa_4_wa2vec_data_aug_xls_r_300m', 'en.speech2text.wav2vec_xlsr.300m_gpu': 'asr_hausa_4_wa2vec_data_aug_xls_r_300m_gpu', 'en.speech2text.wav2vec_xlsr.5h': 'asr_xlsr_53_bemba_5hrs', 'en.speech2text.wav2vec_xlsr.5h_gpu': 'asr_xlsr_53_bemba_5hrs_gpu', 'en.speech2text.wav2vec_xlsr.by_akashpb13': 'asr_central_kurdish_xlsr', 'en.speech2text.wav2vec_xlsr.by_bakhtullah123': 'asr_xls_r_53_english', 'en.speech2text.wav2vec_xlsr.by_sania67': 'asr_fine_tuned_xlsr_english', 'en.speech2text.wav2vec_xlsr.egyptian.v2_large.by_othrif': 'asr_wav2vec2_large_xlsr_egyptian', 'en.speech2text.wav2vec_xlsr.egyptian.v2_large_gpu_egyptian.by_othrif': 'asr_wav2vec2_large_xlsr_egyptian_gpu', 'en.speech2text.wav2vec_xlsr.gender_male_female.v2_gpu_s287_exp': 'asr_exp_w2v2r_xls_r_gender_male_10_female_0_s287_gpu', 'en.speech2text.wav2vec_xlsr.gender_male_female.v2_s287_exp': 'asr_exp_w2v2r_xls_r_gender_male_10_female_0_s287', 'en.speech2text.wav2vec_xlsr.german.v2_300m_gpu.by_aware_ai': 'asr_wav2vec2_xls_r_300m_german_english_gpu', 'en.speech2text.wav2vec_xlsr.gpu': 'asr_central_kurdish_xlsr_gpu', 'en.speech2text.wav2vec_xlsr.gpu.by_boris': 'asr_xlsr_punctuation_gpu', 'en.speech2text.wav2vec_xlsr.gpu.by_sania67': 
'asr_fine_tuned_xlsr_english_gpu', 'en.speech2text.wav2vec_xlsr.hindi.v2_large_300m_colab.by_nimrah': 'asr_wav2vec2_large_xls_r_300m_hindi_home_colab_11', 'en.speech2text.wav2vec_xlsr.kana.v2_large_300m_gpu_colab.by_pinot': 'asr_wav2vec2_large_xls_r_300m_j_kana_colab_gpu', 'en.speech2text.wav2vec_xlsr.kana.v3_large_300m_colab.by_pinot': 'asr_wav2vec2_large_xls_r_300m_j_kana_colab', 'en.speech2text.wav2vec_xlsr.ksponspeech.v2_large': 'asr_wav2vec2_large_xlsr_ksponspeech_1_20', 'en.speech2text.wav2vec_xlsr.ksponspeech.v2_large_gpu': 'asr_wav2vec2_large_xlsr_ksponspeech_1_20_gpu', 'en.speech2text.wav2vec_xlsr.l_v2': 'asr_english_filipino_wav2vec2_l_xls_r_test_03', 'en.speech2text.wav2vec_xlsr.l_v2_300m_gpu_official': 'asr_filipino_wav2vec2_l_xls_r_300m_official_gpu', 'en.speech2text.wav2vec_xlsr.l_v2_300m_official': 'asr_filipino_wav2vec2_l_xls_r_300m_official', 'en.speech2text.wav2vec_xlsr.l_v2_gpu': 'asr_english_filipino_wav2vec2_l_xls_r_test_05_gpu', 'en.speech2text.wav2vec_xlsr.large_v3.by_khalsuu': 'asr_english_filipino_wav2vec2_l_xls_r_test_04', 'en.speech2text.wav2vec_xlsr.large_v4.by_khalsuu': 'asr_english_filipino_wav2vec2_l_xls_r_test_05', 'en.speech2text.wav2vec_xlsr.large_v5.by_khalsuu': 'asr_english_filipino_wav2vec2_l_xls_r_test_06', 'en.speech2text.wav2vec_xlsr.large_v6.by_khalsuu': 'asr_english_filipino_wav2vec2_l_xls_r_test_07', 'en.speech2text.wav2vec_xlsr.large_v7.by_khalsuu': 'asr_english_filipino_wav2vec2_l_xls_r_test_09', 'en.speech2text.wav2vec_xlsr.large_v8_gpu.by_khalsuu': 'asr_english_filipino_wav2vec2_l_xls_r_test_07_gpu', 'en.speech2text.wav2vec_xlsr.large_v9_gpu.by_khalsuu': 'asr_english_filipino_wav2vec2_l_xls_r_test_09_gpu', 'en.speech2text.wav2vec_xlsr.moroccan.v2_large.by_othrif': 'asr_wav2vec2_large_xlsr_moroccan', 'en.speech2text.wav2vec_xlsr.moroccan.v2_large_gpu_moroccan.by_othrif': 'asr_wav2vec2_large_xlsr_moroccan_gpu', 'en.speech2text.wav2vec_xlsr.phon.v2.by_facebook': 'asr_wav2vec2_xlsr_53_phon', 
'en.speech2text.wav2vec_xlsr.phon_babel.v2.by_facebook': 'asr_wav2vec2_xlsr_53_phon_babel', 'en.speech2text.wav2vec_xlsr.phon_babel_gpu.v2_gpu.by_facebook': 'asr_wav2vec2_xlsr_53_phon_babel_gpu', 'en.speech2text.wav2vec_xlsr.phon_gpu.v2_gpu.by_facebook': 'asr_wav2vec2_xlsr_53_phon_gpu', 'en.speech2text.wav2vec_xlsr.phonome.v2_large_300m_gpu_colab.by_pinot': 'asr_wav2vec2_large_xls_r_300m_j_phoneme_colab_gpu', 'en.speech2text.wav2vec_xlsr.phonome.v3_large_300m_colab.by_pinot': 'asr_wav2vec2_large_xls_r_300m_j_phoneme_colab', 'en.speech2text.wav2vec_xlsr.slovakian.v2_large.by_jimregan': 'asr_wav2vec2_large_xlsr_slovakian', 'en.speech2text.wav2vec_xlsr.slovakian.v2_large_gpu.by_jimregan': 'asr_wav2vec2_large_xlsr_slovakian_gpu', 'en.speech2text.wav2vec_xlsr.timit.v2_base': 'asr_wav2vec2_xls_r_timit_tokenizer_base', 'en.speech2text.wav2vec_xlsr.timit.v2_base_gpu': 'asr_wav2vec2_xls_r_timit_tokenizer_base_gpu', 'en.speech2text.wav2vec_xlsr.turkish.v2_large_300m_colab.by_nimrah': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_nimrah', 'en.speech2text.wav2vec_xlsr.turkish.v2_large_300m_colab_9.by_nimrah': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_9', 'en.speech2text.wav2vec_xlsr.turkish.v3_large_300m_colab.by_nimrah': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_4', 'en.speech2text.wav2vec_xlsr.turrkish.v2_300m_gpu.by_emre': 'asr_wav2vec2_xls_r_300m_turkish_tr_med_gpu', 'en.speech2text.wav2vec_xlsr.upper_sorbian.v2_large_gpu.by_jimregan': 'asr_wav2vec2_large_xlsr_upper_sorbian_mixed_gpu', 'en.speech2text.wav2vec_xlsr.upper_sorbian_mixed.v2_large.by_jimregan': 'asr_wav2vec2_large_xlsr_upper_sorbian_mixed', 'en.speech2text.wav2vec_xlsr.urdu.v2_large_300m.by_omar47': 'asr_wav2vec2_large_xls_r_300m_urdu_10', 'en.speech2text.wav2vec_xlsr.urdu.v3_large_300m.by_omar47': 'asr_wav2vec2_large_xls_r_300m_urdu_v2', 'en.speech2text.wav2vec_xlsr.v1gpu.by_bakhtullah123': 'asr_xls_r_53_english_gpu', 'en.speech2text.wav2vec_xlsr.v2': 'asr_wav2vec2_xls_r_1b_english', 
'en.speech2text.wav2vec_xlsr.v2.by_chrisvinsen': 'asr_xlsr_wav2vec2_final', 'en.speech2text.wav2vec_xlsr.v2.by_harshit345': 'asr_xlsr_wav2vec_english', 'en.speech2text.wav2vec_xlsr.v2.by_hrdipto': 'asr_wav2vec2_xls_r_tf_left_right_shuru', 'en.speech2text.wav2vec_xlsr.v2.by_shoubhik': 'asr_wav2vec2_xlsr_bengali_10500', 'en.speech2text.wav2vec_xlsr.v2.by_slplab': 'asr_wav2vec2_xlsr50k_english_phoneme', 'en.speech2text.wav2vec_xlsr.v2_300m': 'asr_wav2vec2_xls_r_300m', 'en.speech2text.wav2vec_xlsr.v2_300m.by_aware_ai': 'asr_wav2vec2_xls_r_300m_german_english', 'en.speech2text.wav2vec_xlsr.v2_300m.by_comodoro': 'asr_wav2vec2_xls_r_300m_cv8', 'en.speech2text.wav2vec_xlsr.v2_300m.by_emre': 'asr_wav2vec2_xls_r_300m_turkish_tr_med', 'en.speech2text.wav2vec_xlsr.v2_300m.by_hrdipto': 'asr_wav2vec2_xls_r_300m_bangla_command_generated_data_finetune', 'en.speech2text.wav2vec_xlsr.v2_300m.by_kongkeaouch': 'asr_wav2vec2_xls_r_300m_kh', 'en.speech2text.wav2vec_xlsr.v2_300m.by_malay_huggingface': 'asr_wav2vec2_xls_r_300m_mixed_by_malay_huggingface', 'en.speech2text.wav2vec_xlsr.v2_300m.by_shoubhik': 'asr_wav2vec2_xls_r_300m_hindi_lm', 'en.speech2text.wav2vec_xlsr.v2_300m_cv8.by_emre': 'asr_wav2vec2_xls_r_300m_am_cv8_v1', 'en.speech2text.wav2vec_xlsr.v2_300m_gpu': 'asr_wav2vec2_xls_r_300m_cv8_gpu', 'en.speech2text.wav2vec_xlsr.v2_300m_gpu.by_aware_ai': 'asr_wav2vec2_xls_r_300m_gpu', 'en.speech2text.wav2vec_xlsr.v2_300m_gpu.by_hrdipto': 'asr_wav2vec2_xls_r_300m_bangla_command_generated_data_finetune_gpu', 'en.speech2text.wav2vec_xlsr.v2_300m_gpu.by_kongkeaouch': 'asr_wav2vec2_xls_r_300m_kh_gpu', 'en.speech2text.wav2vec_xlsr.v2_300m_gpu.by_malay_huggingface': 'asr_wav2vec2_xls_r_300m_mixed_by_malay_huggingface_gpu', 'en.speech2text.wav2vec_xlsr.v2_300m_gpu.by_shoubhik': 'asr_wav2vec2_xls_r_300m_hindi_lm_gpu', 'en.speech2text.wav2vec_xlsr.v2_300m_gpu.german.by_aware_ai': 'asr_wav2vec2_xls_r_300m_german_gpu', 'en.speech2text.wav2vec_xlsr.v2_300m_gpu_cv8.by_emre': 
'asr_wav2vec2_xls_r_300m_am_cv8_v1_gpu', 'en.speech2text.wav2vec_xlsr.v2_40m': 'asr_wav2vec2_xlsr50k_korean_phoneme_aihub_40m', 'en.speech2text.wav2vec_xlsr.v2_40m_gpu': 'asr_wav2vec2_xlsr50k_korean_phoneme_aihub_40m_gpu', 'en.speech2text.wav2vec_xlsr.v2_base_colab': 'asr_xlsr_wav2vec2_base_commonvoice_demo_colab_4', 'en.speech2text.wav2vec_xlsr.v2_base_colab.by_chrisvinsen': 'asr_xlsr_wav2vec2_base_commonvoice_demo_colab_6', 'en.speech2text.wav2vec_xlsr.v2_base_gpu_colab': 'asr_xlsr_wav2vec2_base_commonvoice_demo_colab_4_gpu', 'en.speech2text.wav2vec_xlsr.v2_gpu': 'asr_wav2vec2_xls_r_1b_english_gpu', 'en.speech2text.wav2vec_xlsr.v2_gpu.by_chrisvinsen': 'asr_xlsr_wav2vec2_final_gpu', 'en.speech2text.wav2vec_xlsr.v2_gpu.by_harshit345': 'asr_xlsr_wav2vec_english_gpu', 'en.speech2text.wav2vec_xlsr.v2_gpu.by_hrdipto': 'asr_wav2vec2_xls_r_tf_left_right_shuru_gpu', 'en.speech2text.wav2vec_xlsr.v2_gpu.by_slplab': 'asr_wav2vec2_xlsr50k_english_phoneme_gpu', 'en.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xls_r_thai_test', 'en.speech2text.wav2vec_xlsr.v2_large.by_arbml': 'asr_wav2vec2_large_xlsr_53_arabic_egyptian_by_arbml', 'en.speech2text.wav2vec_xlsr.v2_large.by_cpierse': 'asr_wav2vec2_large_xlsr_53_irish', 'en.speech2text.wav2vec_xlsr.v2_large.by_fleek': 'asr_wav2vec_large_xlsr_korean', 'en.speech2text.wav2vec_xlsr.v2_large.by_gary109': 'asr_ai_light_dance_singing2_wav2vec2_large_xlsr_53_5gram_v4_2', 'en.speech2text.wav2vec_xlsr.v2_large.by_jonatasgrosman': 'asr_wav2vec2_large_xlsr_53_english_by_jonatasgrosman', 'en.speech2text.wav2vec_xlsr.v2_large.by_lgris': 'asr_wav2vec2_large_xlsr_coraa_portuguese_cv8', 'en.speech2text.wav2vec_xlsr.v2_large.by_logicbloke': 'asr_wav2vec2_large_xlsr_53_arabic_by_logicbloke', 'en.speech2text.wav2vec_xlsr.v2_large.by_patrickvonplaten': 'asr_wav2vec2_large_xlsr_turkish_demo', 'en.speech2text.wav2vec_xlsr.v2_large.by_sharonibejih': 'asr_wav2vec2_large_xlsr_sermon', 'en.speech2text.wav2vec_xlsr.v2_large.by_tyoc213': 
'asr_wav2vec2_large_xlsr_nahuatl', 'en.speech2text.wav2vec_xlsr.v2_large.by_voidful': 'asr_wav2vec2_large_xlsr_53_gpt', 'en.speech2text.wav2vec_xlsr.v2_large_300m': 'asr_wav2vec2_large_xls_r_300m_cantonese', 'en.speech2text.wav2vec_xlsr.v2_large_300m.by_abdulqahar47': 'asr_wav2vec2_large_xls_r_300m_hausa_v1.2', 'en.speech2text.wav2vec_xlsr.v2_large_300m.by_aditya3107': 'asr_wav2vec2_large_xls_r_300m_ie_cv_la_as', 'en.speech2text.wav2vec_xlsr.v2_large_300m.by_lilitket': 'asr_wav2vec2_large_xls_r_300m_hyam_batch4', 'en.speech2text.wav2vec_xlsr.v2_large_300m.by_msaudtahir': 'asr_wav2vec2_large_xls_r_300m_urdu_proj', 'en.speech2text.wav2vec_xlsr.v2_large_300m.by_pavle_tsotskolauri': 'asr_wav2vec2_large_xls_r_300m_georgian_v0.6', 'en.speech2text.wav2vec_xlsr.v2_large_300m.by_raphaelkalandadze': 'asr_wav2vec2_large_xls_r_300m_georgian_large', 'en.speech2text.wav2vec_xlsr.v2_large_300m_6h_colab': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_vai6hav', 'en.speech2text.wav2vec_xlsr.v2_large_300m_6h_gpu_colab': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_vai6hav_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_colab': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_chaitanya97', 'en.speech2text.wav2vec_xlsr.v2_large_300m_colab.by_emeson77': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_emeson77', 'en.speech2text.wav2vec_xlsr.v2_large_300m_colab.by_fitods': 'asr_wav2vec2_large_xls_r_300m_guarani_colab', 'en.speech2text.wav2vec_xlsr.v2_large_300m_colab.by_izzy_lazerson': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_izzy_lazerson', 'en.speech2text.wav2vec_xlsr.v2_large_300m_colab.by_krirk': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_krirk', 'en.speech2text.wav2vec_xlsr.v2_large_300m_colab.by_li666': 'asr_wav2vec2_large_xls_r_300m_cn_colab', 'en.speech2text.wav2vec_xlsr.v2_large_300m_colab.by_lilitket': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_lilitket', 'en.speech2text.wav2vec_xlsr.v2_large_300m_colab.by_masapasa': 
'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_masapasa', 'en.speech2text.wav2vec_xlsr.v2_large_300m_colab.by_patrickvonplaten': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_patrickvonplaten', 'en.speech2text.wav2vec_xlsr.v2_large_300m_colab.by_robertodtg': 'asr_wav2vec2_large_xls_r_300m_colab_by_robertodtg', 'en.speech2text.wav2vec_xlsr.v2_large_300m_colab.by_satyamatury': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_satyamatury', 'en.speech2text.wav2vec_xlsr.v2_large_300m_colab.by_shacharm': 'asr_wav2vec2_large_xls_r_300m_english_colab', 'en.speech2text.wav2vec_xlsr.v2_large_300m_colab.by_tonyalves': 'asr_wav2vec2_large_xls_r_300m_colab_by_tonyalves', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu': 'asr_wav2vec2_large_xls_r_300m_cantonese_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu.by_abdulqahar47': 'asr_wav2vec2_large_xls_r_300m_hausa_v1.2_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu.by_aditya3107': 'asr_wav2vec2_large_xls_r_300m_ie_cv_la_as_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu.by_lilitket': 'asr_wav2vec2_large_xls_r_300m_hyam_batch4_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu.by_msaudtahir': 'asr_wav2vec2_large_xls_r_300m_urdu_proj_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu.by_omar47': 'asr_wav2vec2_large_xls_r_300m_urdu_10_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu.by_raphaelkalandadze': 'asr_wav2vec2_large_xls_r_300m_georgian_large_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu_colab': 'asr_wav2vec2_large_xls_r_300m_colab_by_robertodtg_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu_colab.by_chaitanya97': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_chaitanya97_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu_colab.by_emeson77': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_emeson77_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu_colab.by_fitods': 'asr_wav2vec2_large_xls_r_300m_guarani_colab_gpu', 
'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu_colab.by_izzy_lazerson': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_izzy_lazerson_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu_colab.by_li666': 'asr_wav2vec2_large_xls_r_300m_cn_colab_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu_colab.by_nimrah': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_nimrah_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu_colab.by_patrickvonplaten': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_patrickvonplaten_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu_colab.by_satyamatury': 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_satyamatury_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu_colab.by_shacharm': 'asr_wav2vec2_large_xls_r_300m_english_colab_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_gpu_colab.by_tonyalves': 'asr_wav2vec2_large_xls_r_300m_colab_by_tonyalves_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_300m_lr4.by_lilitket': 'asr_wav2vec2_large_xls_r_300m_hyam_batch4_lr4', 'en.speech2text.wav2vec_xlsr.v2_large_300m_lr8.by_lilitket': 'asr_wav2vec2_large_xls_r_300m_hyam_batch4_lr8', 'en.speech2text.wav2vec_xlsr.v2_large_colab': 'asr_wav2vec2_large_xlsr_53_demo_colab_by_project2you', 'en.speech2text.wav2vec_xlsr.v2_large_colab.by_rafiulrumy': 'asr_wav2vec2_large_xlsr_hindi_demo_colab_by_rafiulrumy', 'en.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_english_by_jonatasgrosman_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_gpu.by_arbml': 'asr_wav2vec2_large_xlsr_53_arabic_egyptian_by_arbml_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_gpu.by_cpierse': 'asr_wav2vec2_large_xlsr_53_irish_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_gpu.by_fleek': 'asr_wav2vec_large_xlsr_korean_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_gpu.by_gary109': 'asr_ai_light_dance_singing2_wav2vec2_large_xlsr_53_5gram_v4_2_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_gpu.by_logicbloke': 'asr_wav2vec2_large_xlsr_53_arabic_by_logicbloke_gpu', 
'en.speech2text.wav2vec_xlsr.v2_large_gpu.by_patrickvonplaten': 'asr_wav2vec2_large_xlsr_turkish_demo_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_gpu.by_sharonibejih': 'asr_wav2vec2_large_xlsr_sermon_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_gpu.by_tyoc213': 'asr_wav2vec2_large_xlsr_nahuatl_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_gpu.by_voidful': 'asr_wav2vec2_large_xlsr_53_gpt_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_gpu_colab': 'asr_wav2vec2_large_xlsr_53_demo_colab_by_project2you_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_small_300m': 'asr_wav2vec2_large_xls_r_300m_guarani_small_by_jhonparra18', 'en.speech2text.wav2vec_xlsr.v2_large_small_300m.by_tomascufaro': 'asr_wav2vec2_large_xls_r_300m_spanish_small_by_tomascufaro', 'en.speech2text.wav2vec_xlsr.v2_large_small_300m_gpu': 'asr_wav2vec2_large_xls_r_300m_spanish_small_by_tomascufaro_gpu', 'en.speech2text.wav2vec_xlsr.v2_large_small_300m_gpu.by_jhonparra18': 'asr_wav2vec2_large_xls_r_300m_guarani_small_by_jhonparra18_gpu', 'en.speech2text.wav2vec_xlsr.v2_multilingual': 'asr_wav2vec2_xlsr_multilingual_53', 'en.speech2text.wav2vec_xlsr.v2_multilingual_gpu': 'asr_wav2vec2_xlsr_multilingual_53_gpu', 'en.speech2text.wav2vec_xlsr.v2gpu.by_bakhtullah123': 'asr_xlsr_training_gpu', 'en.spell.norvig': 'spellcheck_norvig', 'en.stem': 'stemmer', 'en.stopwords': 'stopwords_iso', 'en.stopwords.iso': 'stopwords_iso', 'en.t5': 'google_t5_small_ssm_nq', 'en.t5.active_to_passive_styletransfer': 't5_active_to_passive_styletransfer', 'en.t5.base': 't5_base', 'en.t5.formal_to_informal_styletransfer': 't5_formal_to_informal_styletransfer', 'en.t5.grammar_error_corrector': 't5_grammar_error_corrector', 'en.t5.informal_to_formal_styletransfer': 't5_informal_to_formal_styletransfer', 'en.t5.passive_to_active_styletransfer': 't5_passive_to_active_styletransfer', 'en.t5.small': 't5_small', 'en.t5.small.generation': 't5_question_generation_small', 'en.t5.wikiSQL': 't5_small_wikiSQL', 'en.tokenize': 'spark_nlp_tokenizer', 
'en.toxic': 'multiclassifierdl_use_toxic', 'en.use': 'tfhub_use', 'en.xlnet': 'xlnet_base_cased', 'en.yake': 'yake', 'roberta': 'distilroberta_base'}, 'eo': {'eo.embed.w2v_cc_300d': 'w2v_cc_300d', 'eo.ner.pos.small': 'roberta_pos_esperberto_small_pos', 'eo.stopwords': 'stopwords_eo'}, 'es': { 'es.answer_question.bert.cased_base_finetuned': 'bert_qa_bert_base_spanish_wwm_cased_finetuned_qa_tar', 'es.answer_question.bert.cased_multilingual_base': 'bert_qa_multilingual_bert_base_cased_spanish', 'es.answer_question.bert.mlqa.cased_base_finetuned': 'bert_qa_bert_base_spanish_wwm_cased_finetuned_qa_mlqa', 'es.answer_question.bert.mlqa.uncased_base_finetuned': 'bert_qa_bert_base_spanish_wwm_uncased_finetuned_qa_mlqa', 'es.answer_question.bert.sqac.cased_base_finetuned': 'bert_qa_bert_base_spanish_wwm_cased_finetuned_qa_sqac', 'es.answer_question.bert.sqac.uncased_base_finetuned': 'bert_qa_bert_base_spanish_wwm_uncased_finetuned_qa_sqac', 'es.answer_question.bert.squad_es.uncased_base_finetuned': 'bert_qa_base_spanish_wwm_uncased_finetuned_squad', 'es.answer_question.bert.uncased_base_finetuned': 'bert_qa_bert_base_spanish_wwm_uncased_finetuned_qa_tar', 'es.answer_question.distil_bert.base_uncased': 'distilbert_qa_distillbert_base_spanish_uncased_finetuned_qa_tar', 'es.answer_question.mlqa.distil_bert.base_uncased': 'distilbert_qa_distillbert_base_spanish_uncased_finetuned_qa_mlqa', 'es.answer_question.roberta.base_finetuned': 'roberta_qa_mrm8488_base_bne_finetuned_s_c', 'es.answer_question.roberta.base_finetuned.by_nlp_en_es': 'roberta_qa_nlp_en_es_base_bne_finetuned_s_c', 'es.answer_question.roberta.sqac.base': 'roberta_qa_base_spanish_s_c', 'es.answer_question.roberta.sqac.base.by_bsc_temu': 'roberta_qa_bsc_temu_base_bne_s_c', 'es.answer_question.roberta.sqac.base.by_plantl_gob_es': 'roberta_qa_plantl_gob_es_base_bne_s_c', 'es.answer_question.roberta.sqac.large': 'roberta_qa_bsc_temu_large_bne_s_c', 'es.answer_question.roberta.sqac.large.by_plantl_gob_es': 
'roberta_qa_plantl_gob_es_large_bne_s_c', 'es.answer_question.roberta.sqac.large_finetuned': 'roberta_qa_bertin_large_finetuned_s_c', 'es.answer_question.roberta.squad.base_finetuned': 'roberta_qa_ruperta_base_finetuned_squadv1', 'es.answer_question.roberta.squad_es.base': 'roberta_qa_base_bne_squad2', 'es.answer_question.roberta.squad_es.base.by_jamarju': 'roberta_qa_base_bne_squad_2.0', 'es.answer_question.roberta.squad_es.base_adverse_drug_event': 'roberta_qa_base_spanish_squades', 'es.answer_question.roberta.squad_es.large': 'roberta_qa_large_bne_squad_2.0', 'es.answer_question.roberta.squadv2.v2_base_finetuned': 'roberta_qa_ruperta_base_finetuned_squadv2', 'es.answer_question.sqac.bert.base': 'bert_qa_beto_base_spanish_sqac', 'es.answer_question.sqac.bert.base_cased': 'bert_qa_bert_base_spanish_wwm_cased_finetuned_sqac', 'es.answer_question.sqac.distil_bert.base_uncased': 'distilbert_qa_distillbert_base_spanish_uncased_finetuned_qa_sqac', 'es.answer_question.sqac.roberta.base': 'roberta_qa_PlanTL_GOB_ES_roberta_base_bne_sqac', 'es.answer_question.sqac.roberta.base.by_BSC-TeMU': 'roberta_qa_BSC_TeMU_roberta_base_bne_sqac', 'es.answer_question.sqac.roberta.base.by_IIC': 'roberta_qa_roberta_base_spanish_sqac', 'es.answer_question.sqac.roberta.base.by_PlanTL-GOB-ES': 'roberta_qa_PlanTL_GOB_ES_roberta_base_bne_sqac', 'es.answer_question.sqac.roberta.base.by_mrm8488': 'roberta_qa_mrm8488_roberta_base_bne_finetuned_sqac', 'es.answer_question.sqac.roberta.base.by_nlp-en-es': 'roberta_qa_nlp_en_es_roberta_base_bne_finetuned_sqac', 'es.answer_question.sqac.roberta.large': 'roberta_qa_BSC_TeMU_roberta_large_bne_sqac', 'es.answer_question.sqac.roberta.large.by_BSC-TeMU': 'roberta_qa_BSC_TeMU_roberta_large_bne_sqac', 'es.answer_question.sqac.roberta.large.by_PlanTL-GOB-ES': 'roberta_qa_PlanTL_GOB_ES_roberta_large_bne_sqac', 'es.answer_question.sqac.roberta.large.by_nlp-en-es': 'roberta_qa_bertin_large_finetuned_sqac', 'es.answer_question.squad.electra.small': 
'electra_qa_electricidad_small_finetuned_squadv1', 'es.answer_question.squad.roberta.base': 'roberta_qa_roberta_base_spanish_squades', 'es.answer_question.squad.roberta.base.by_IIC': 'roberta_qa_roberta_base_spanish_squades', 'es.answer_question.squad.roberta.base.by_jamarju': 'roberta_qa_roberta_base_bne_squad_2.0_es_jamarju', 'es.answer_question.squad.roberta.base_4096.by_mrm8488': 'roberta_qa_longformer_base_4096_spanish_finetuned_squad', 'es.answer_question.squad.roberta.large': 'roberta_qa_roberta_large_fine_tuned_squad_es_stevemobs', 'es.answer_question.squad.roberta.large.by_jamarju': 'roberta_qa_roberta_large_bne_squad_2.0_es_jamarju', 'es.answer_question.squad.roberta.large.by_stevemobs': 'roberta_qa_roberta_large_fine_tuned_squad_es_stevemobs', 'es.answer_question.squad.ruperta.base.by_mrm8488': 'roberta_qa_RuPERTa_base_finetuned_squadv1', 'es.answer_question.squad_sqac.bert.base_cased': 'bert_qa_bert_base_spanish_wwm_cased_finetuned_sqac_finetuned_squad', 'es.answer_question.squadv2.bert.base_cased': 'bert_qa_bert_base_spanish_wwm_cased_finetuned_squad2_es_MMG', 'es.answer_question.squadv2.bert.base_cased.by_MMG': 'bert_qa_bert_base_spanish_wwm_cased_finetuned_squad2_es_MMG', 'es.answer_question.squadv2.bert.base_cased.by_mrm8488': 'bert_qa_bert_base_spanish_wwm_cased_finetuned_spa_squad2_es_mrm8488', 'es.answer_question.squadv2.bert.distilled_base_cased': 'bert_qa_distill_bert_base_spanish_wwm_cased_finetuned_spa_squad2_es_mrm8488', 'es.answer_question.squadv2.electra.small': 'electra_qa_biomedtra_small_es_squad2', 'es.answer_question.squadv2.roberta.base': 'roberta_qa_roberta_base_bne_squad2_hackathon_pln', 'es.answer_question.squadv2.roberta.base_v2': 'roberta_qa_RuPERTa_base_finetuned_squadv2', 'es.answer_question.squadv2_bio_medical.roberta.base': 'roberta_qa_roberta_base_biomedical_es_squad2_hackathon_pln', 'es.answer_question.squadv2_clinical_bio_medical.roberta.base': 'roberta_qa_roberta_base_biomedical_clinical_es_squad2_hackathon_pln', 
'es.answer_question.squadv2_sqac.bert.base_cased.by_MMG': 'bert_qa_bert_base_spanish_wwm_cased_finetuned_sqac_finetuned_squad2_es_MMG', 'es.answer_question.squadv2_sqac.bert.base_cased_spa.by_MMG': 'bert_qa_bert_base_spanish_wwm_cased_finetuned_spa_squad2_es_finetuned_sqac', 'es.answer_question.squadv2_sqac.bert.base_cased_v2.by_MMG': 'bert_qa_bert_base_spanish_wwm_cased_finetuned_squad2_es_finetuned_sqac', 'es.answer_question.xlm_roberta.base': 'xlm_roberta_qa_xlm_roberta_base_spanish', 'es.answer_question.xlm_roberta.multilingual_large': 'xlm_roberta_qa_xlm_roberta_large_qa_multilingual_finedtuned_ru_ru_AlexKay', 'es.answer_question.xlmr_roberta.squad.base': 'xlm_roberta_qa_xlmr_base_texas_squad_es_es_saattrupdan', 'es.classify.3class_paragraphs.roberta.by_hackathon_pln_es': 'roberta_classifier_readability_es_3class_paragraphs', 'es.classify.3class_sentences.roberta.by_hackathon_pln_es': 'roberta_classifier_readability_es_3class_sentences', 'es.classify.bert': 'bert_classifier_autonlp_spanish_songs_202661', 'es.classify.bert.hate.': 'bert_classifier_dehatebert_mono_spanish', 'es.classify.bert.news.': 'bert_classifier_spanish_news_classification_headlines', 'es.classify.bert.news.by_m47labs': 'bert_classifier_spanish_news_classification_headlines_untrained', 'es.classify.bert.sentiment_hate.finetuned': 'bert_classifier_dehate_mono_spanish_finetuned_sentiments_reviews_politicos', 'es.classify.bert.tiny_finetuned': 'bert_sequence_classifier_spanish_tinybert_betito_finetuned_mnli', 'es.classify.bert.xnli.tiny_finetuned': 'bert_sequence_classifier_spanish_tinybert_betito_finetuned_xnli', 'es.classify.beto_bert': 'bert_sequence_classifier_beto_emotion_analysis', 'es.classify.beto_bert.by_finiteautomata': 'bert_classifier_beto_emotion_analysis', 'es.classify.beto_bert.sentiment': 'beto_sentiment', 'es.classify.beto_bert.sentiment.by_finiteautomata': 'bert_classifier_beto_sentiment_analysis', 'es.classify.beto_bert.sentiment_analysis': 
'bert_sequence_classifier_beto_sentiment_analysis', 'es.classify.electra.amazon.small_finetuned': 'electra_classifier_mrm8488_electricidad_small_finetuned_amazon_review_classification', 'es.classify.electra.go_emotions.base_finetuned': 'electra_classifier_electricidad_base_finetuned_go_emotions', 'es.classify.hate_contextualized.bert.by_finiteautomata': 'bert_classifier_contextualized_hate_speech', 'es.classify.hate_non_contextualized.bert.by_finiteautomata': 'bert_classifier_non_contextualized_hate_speech', 'es.classify.readability_paragraphs.roberta.by_hackathon_pln_es': 'roberta_classifier_readability_es_paragraphs', 'es.classify.readability_sentences.roberta.by_hackathon_pln_es': 'roberta_classifier_readability_es_sentences', 'es.classify.roberta': 'roberta_jurisbert_clas_art_convencion_americana_dh', 'es.classify.roberta.amazon.base_finetuned': 'roberta_classifier_base_bne_finetuned_amazon_reviews_multi_finetuned_amazon_reviews_multi', 'es.classify.roberta.base': 'roberta_sequence_classifier_bsc_base_spanish_diagnostics', 'es.classify.roberta.base.by_maxpe': 'roberta_classifier_bertin_base_spanish_sem_eval_2018_task_1', 'es.classify.roberta.base_finetuned': 'roberta_classifier_base_bne_finetuned_cyberbullying_spanish', 'es.classify.roberta.by_alexhf90': 'roberta_classifier_clasificacion_sentimientos', 'es.classify.roberta.by_gagandeepkundi': 'roberta_classifier_latam_question_quality', 'es.classify.roberta.by_prompsit': 'roberta_classifier_paraphrase', 'es.classify.roberta.catalonia_independence.base_finetuned': 'roberta_classifier_base_bne_finetuned_catalonia_independence_detector', 'es.classify.roberta.hate.base_finetuned': 'roberta_classifier_base_bne_finetuned_hate_speech_offensive_spanish', 'es.classify.roberta.news.': 'roberta_classifier_fake_news_detection_spanish', 'es.classify.roberta.pawsx_xtreme.base_finetuned': 'roberta_sequence_classifier_ruperta_base_finetuned_pawsx', 'es.classify.roberta.sentiment.': 'roberta_classifier_bne_sentiment_analysis', 
'es.classify.roberta.sentiment.base': 'roberta_classifier_bertin_base_sentiment_analysis', 'es.classify.roberta.sentiment.base.by_edumunozsala': 'roberta_classifier_ruperta_base_sentiment_analysis', 'es.classify.roberta.twitter.': 'roberta_classifier_detect_acoso_twitter', 'es.classify.roberta.xnli.base': 'roberta_classifier_bertin_base_xnli', 'es.classify.sentiment.': 'classifierdl_bert_sentiment', 'es.classify.token_bert.spanish_ner': 'bert_token_classifier_spanish_ner', 'es.classify.tratados.roberta.by_hackathon_pln_es': 'roberta_jurisbert_class_tratados_internacionales_sistema_universal', 'es.classify.xlmr_roberta.twitter.': 'xlmroberta_classifier_twitter_emotion', 'es.embed.RoBERTalex': 'roberta_embeddings_RoBERTalex', 'es.embed.RuPERTa_base': 'roberta_embeddings_RuPERTa_base', 'es.embed.alberti_bert_base_multilingual_cased': 'bert_embeddings_alberti_bert_base_multilingual_cased', 'es.embed.bert.base_cased': 'bert_base_cased', 'es.embed.bert.base_legal': 'legalectra_base', 'es.embed.bert.base_uncased': 'bert_base_uncased', 'es.embed.bert.cased_base': 'bert_embeddings_base_es_cased', 'es.embed.bert.cased_base.by_dccuchile': 'bert_embeddings_base_spanish_wwm_cased', 'es.embed.bert.small_legal': 'legalectra_small', 'es.embed.bert.uncased_base': 'bert_embeddings_base_spanish_wwm_uncased', 'es.embed.bert_base_5lang_cased': 'bert_embeddings_bert_base_5lang_cased', 'es.embed.bert_base_es_cased': 'bert_embeddings_bert_base_es_cased', 'es.embed.bertin_base_gaussian': 'roberta_embeddings_bertin_base_gaussian', 'es.embed.bertin_base_gaussian_exp_512seqlen': 'roberta_embeddings_bertin_base_gaussian_exp_512seqlen', 'es.embed.bertin_base_random': 'roberta_embeddings_bertin_base_random', 'es.embed.bertin_base_random_exp_512seqlen': 'roberta_embeddings_bertin_base_random_exp_512seqlen', 'es.embed.bertin_base_stepwise': 'roberta_embeddings_bertin_base_stepwise', 'es.embed.bertin_base_stepwise_exp_512seqlen': 'roberta_embeddings_bertin_base_stepwise_exp_512seqlen', 
'es.embed.bertin_roberta_base_spanish': 'roberta_embeddings_bertin_roberta_base_spanish', 'es.embed.bertin_roberta_large_spanish': 'roberta_embeddings_bertin_roberta_large_spanish', 'es.embed.beto_gn_base_cased': 'bert_embeddings_beto_gn_base_cased', 'es.embed.distilbert_base_es_cased': 'distilbert_embeddings_distilbert_base_es_cased', 'es.embed.distilbert_base_es_multilingual_cased': 'distilbert_embeddings_distilbert_base_es_multilingual_cased', 'es.embed.dpr_spanish_passage_encoder_allqa_base': 'bert_embeddings_dpr_spanish_passage_encoder_allqa_base', 'es.embed.dpr_spanish_passage_encoder_squades_base': 'bert_embeddings_dpr_spanish_passage_encoder_squades_base', 'es.embed.dpr_spanish_question_encoder_allqa_base': 'bert_embeddings_dpr_spanish_question_encoder_allqa_base', 'es.embed.dpr_spanish_question_encoder_squades_base': 'bert_embeddings_dpr_spanish_question_encoder_squades_base', 'es.embed.electra.base': 'electra_embeddings_electricidad_base_generator', 'es.embed.jurisbert': 'roberta_embeddings_jurisbert', 'es.embed.legal.cbow.cased_d100': 'word2vec_cbow_legal_d100_cased', 'es.embed.legal.cbow.cased_d300': 'word2vec_cbow_legal_d300_cased', 'es.embed.legal.cbow.cased_d50': 'word2vec_cbow_legal_d50_cased', 'es.embed.legal.cbow.uncased_d100': 'word2vec_cbow_legal_d100_uncased', 'es.embed.legal.cbow.uncased_d300': 'word2vec_cbow_legal_d300_uncased', 'es.embed.legal.cbow.uncased_d50': 'word2vec_cbow_legal_d50_uncased', 'es.embed.legal.skipgram.cased_d100': 'word2vec_skipgram_legal_d100_cased', 'es.embed.legal.skipgram.cased_d300': 'word2vec_skipgram_legal_d300_cased', 'es.embed.legal.skipgram.cased_d50': 'word2vec_skipgram_legal_d50_cased', 'es.embed.legal.skipgram.uncased_d100': 'word2vec_skipgram_legal_d100_uncased', 'es.embed.legal.skipgram.uncased_d300': 'word2vec_skipgram_legal_d300_uncased', 'es.embed.legal.skipgram.uncased_d50': 'word2vec_skipgram_legal_d50_uncased', 'es.embed.longformer.base_legal': 'longformer_legal_base_8192', 
'es.embed.longformer.legal': 'longformer_legal_embeddings', 'es.embed.mlm_spanish_roberta_base': 'roberta_embeddings_mlm_spanish_roberta_base', 'es.embed.roberta_base_bne': 'roberta_embeddings_roberta_base_bne', 'es.embed.roberta_large_bne': 'roberta_embeddings_roberta_large_bne', 'es.embed.w2v_cc_300d': 'w2v_cc_300d', 'es.embed_sentence.bert.base_cased': 'sent_bert_base_cased', 'es.embed_sentence.bert.base_uncased': 'sent_bert_base_uncased', 'es.lemma': 'lemma_spacylookup', 'es.lemma.ancora': 'lemma_ancora', 'es.lemma.gsd': 'lemma_gsd', 'es.lemma.spacylookup': 'lemma_spacylookup', 'es.ner': 'wikiner_6B_100', 'es.ner.NER_LAW_MONEY4': 'roberta_ner_NER_LAW_MONEY4', 'es.ner.RuPERTa_base_finetuned_ner': 'roberta_ner_RuPERTa_base_finetuned_ner', 'es.ner.bert': 'bert_ner_anglicisms_spanish_mbert', 'es.ner.bert.by_luch0247': 'bert_token_classifier_autotrain_lucy_alicorp_1356152290', 'es.ner.bert.by_nymiz': 'bert_ner_eus_es_nymiz', 'es.ner.bert.cased': 'bert_ner_spanish_cased_finedtuned', 'es.ner.bert.cased_finetuned': 'bert_ner_bert_spanish_cased_finetuned_ner', 'es.ner.bert.clinical.uncased_base_finetuned': 'bert_ner_bert_base_spanish_wwm_uncased_finetuned_clinical', 'es.ner.bert.uncased_base_finetuned': 'bert_ner_bert_base_spanish_wwm_uncased_finetuned_ner_medical', 'es.ner.bert.uncased_tiny_finetuned': 'bert_ner_tinybert_spanish_uncased_finetuned_ner', 'es.ner.beto_bert': 'bert_ner_meddocan_beto_ner', 'es.ner.beto_bert.by_nazagara': 'bert_token_classifier_ner_fine_tuned_beto', 'es.ner.bsc_bio_ehr_es_cantemist': 'roberta_ner_bsc_bio_ehr_es_cantemist', 'es.ner.bsc_bio_ehr_es_pharmaconer': 'roberta_ner_bsc_bio_ehr_es_pharmaconer', 'es.ner.pos': 'roberta_pos_veganuary_pos', 'es.ner.pos.cased_finetuned': 'bert_pos_bert_spanish_cased_finetuned_pos', 'es.ner.pos.cased_finetuned.by_mrm8488': 'bert_pos_spanish_cased_finetuned_pos_16_tags', 'es.ner.pos.cased_finetuned_snytax.by_mrm8488': 'bert_pos_bert_spanish_cased_finetuned_pos_syntax', 
'es.ner.pos.cased_finetuned_v2.by_mrm8488': 'bert_pos_bert_spanish_cased_finetuned_pos_16_tags', 'es.ner.roberta': 'roberta_token_classifier_bne_capitel_ner', 'es.ner.roberta.bio_clinical.finetuned': 'roberta_ner_finetuned_bioclinical', 'es.ner.roberta.clinical.base': 'roberta_ner_nlp_cic_wfu_clinical_cases_ner_sents_tokenized_bertin_roberta_base_spanish_fine_tuned', 'es.ner.roberta.clinical_bio_medical_craft.base_augmented_finetuned': 'roberta_ner_roberta_base_biomedical_clinical_es_finetuned_ner_craft_augmentedtransfer_es', 'es.ner.roberta.clinical_bio_medical_craft.base_augmented_finetuned.by_StivenLancheros': 'roberta_ner_roberta_base_biomedical_clinical_es_finetuned_ner_craft_augmented_es', 'es.ner.roberta.clinical_bio_medical_craft.base_finetuned': 'roberta_ner_roberta_base_biomedical_clinical_es_finetuned_ner_craft_en_es_stivenlancheros', 'es.ner.roberta.clinical_bio_medical_craft.base_finetuned.by_StivenLancheros': 'roberta_ner_roberta_base_biomedical_clinical_es_finetuned_ner_concat_craft_es_stivenlancheros', 'es.ner.roberta.disease': 'roberta_ner_spanish_disease_finder', 'es.ner.roberta.finetuning_': 'roberta_ner_jurisbert_finetuning_ner', 'es.ner.roberta_base_bne_capitel_ner': 'roberta_ner_roberta_base_bne_capitel_ner', 'es.ner.roberta_base_bne_capitel_ner_plus': 'roberta_ner_roberta_base_bne_capitel_ner_plus', 'es.ner.roberta_large_bne_capitel_ner': 'roberta_ner_roberta_large_bne_capitel_ner', 'es.ner.wikiner': 'wikiner_6B_100', 'es.ner.wikiner.glove.6B_100': 'wikiner_6B_100', 'es.ner.wikiner.glove.6B_300': 'wikiner_6B_300', 'es.ner.wikiner.glove.840B_300': 'wikiner_840B_300', 'es.ner.xlmr_roberta.base': 'xlmroberta_ner_base_panx_dataset', 'es.ner.xlmr_roberta.large': 'xlmroberta_ner_xlm_roberta_large_ner_spanish', 'es.ner.xlmr_roberta.large.by_tner': 'xlmroberta_ner_large_panx_dataset', 'es.pos': 'pos_ancora', 'es.pos.RuPERTa_base_finetuned_pos': 'roberta_pos_RuPERTa_base_finetuned_pos', 'es.pos.ancora': 'pos_ancora', 'es.pos.gsd': 'pos_gsd', 
'es.pos.roberta_base_bne_capitel_pos': 'roberta_pos_roberta_base_bne_capitel_pos', 'es.pos.roberta_large_bne_capitel_pos': 'roberta_pos_roberta_large_bne_capitel_pos', 'es.pos.ud_gsd': 'pos_ud_gsd', 'es.stopwords': 'stopwords_iso', 'es.stopwords_es': 'stopwords_es'}, 'et': {'et.answer_question.xlm_roberta': 'xlm_roberta_qa_xlm_roberta_est_qa', 'et.embed.camembert': 'camembert_embeddings_est_roberta', 'et.embed.w2v_cc_300d': 'w2v_cc_300d', 'et.lemma': 'lemma_ewt', 'et.lemma.edt': 'lemma_edt', 'et.lemma.ewt': 'lemma_ewt', 'et.ner.camembert': 'camembert_classifier_est_roberta_hist_ner', 'et.pos': 'pos_edt', 'et.pos.ewt': 'pos_ewt', 'et.stopwords': 'stopwords_iso'}, 'eu': { 'eu.answer_question.ixam_bert.squad.finetuned': 'bert_qa_ixambert_finetuned_squad_eu_marcbrun', 'eu.answer_question.ixam_bert.squad.finetuned.by_MarcBrun': 'bert_qa_ixambert_finetuned_squad_eu_en_marcbrun', 'eu.embed.roberta': 'roberta_embeddings_robasqu', 'eu.embed.w2v_cc_300d': 'w2v_cc_300d', 'eu.lemma': 'lemma_bdt', 'eu.lemma.bdt': 'lemma_bdt', 'eu.pos': 'pos_ud_bdt', 'eu.pos.bdt': 'pos_bdt', 'eu.stopwords': 'stopwords_iso', 'eu.stopwords.iso': 'stopwords_iso'}, 'fa': {'fa.answer_question.bert': 'bert_qa_bert_fa_qa_v1', 'fa.answer_question.bert.base': 'bert_qa_bert_base_fa_qa', 'fa.answer_question.bert.by_alirezabaneshi': 'bert_qa_testpersianqa', 'fa.answer_question.bert.by_newsha': 'bert_qa_pquad', 'fa.answer_question.bert.pars_bert.finetuned': 'bert_qa_parsbert_finetuned_persianqa', 'fa.answer_question.bert.pars_bert.uncased_base_finetuned': 'bert_qa_base_parsbert_uncased_finetuned_perqa', 'fa.answer_question.bert.squad_pars_bert.uncased_base_finetuned': 'bert_qa_base_parsbert_uncased_finetuned_squad', 'fa.answer_question.bert.v2.by_newsha': 'bert_qa_pquad_2', 'fa.answer_question.xlm_roberta.large': 'xlm_roberta_qa_xlm_roberta_large_fa_qa', 'fa.answer_question.xlmr_roberta.large': 'xlmroberta_qa_xlmr_large', 'fa.classify.bert.news.uncased_base': 
'bert_classifier_bert_fa_base_uncased_clf_persiannews', 'fa.classify.bert.sentiment.deepsentipers_multi.uncased_base.by_HooshvareLab': 'bert_classifier_bert_fa_base_uncased_sentiment_deepsentipers_multi', 'fa.classify.bert.sentiment.digikala.uncased_base.by_HooshvareLab': 'bert_classifier_bert_fa_base_uncased_sentiment_digikala', 'fa.classify.bert.sentiment.snappfood.uncased_base.by_HooshvareLab': 'bert_classifier_bert_fa_base_uncased_sentiment_snappfood', 'fa.classify.bert.sentiment.uncased_base': 'bert_classifier_bert_fa_base_uncased_sentiment_deepsentipers_binary', 'fa.classify.bert.sentiment_twitter.': 'bert_classifier_pars_fa_sentiment_twitter', 'fa.classify.bert.uncased_base': 'bert_classifier_bert_fa_base_uncased_clf_digimag', 'fa.classify.bert.uncased_base.by_demoversion': 'bert_classifier_bert_fa_base_uncased_haddad_wikinli', 'fa.classify.bert.uncased_base.farstail.by_m3hrdadfi': 'bert_classifier_bert_fa_base_uncased_farstail', 'fa.classify.bert.uncased_base.wikinli.by_m3hrdadfi': 'bert_classifier_bert_fa_base_uncased_wikinli', 'fa.classify.token_bert.parsbert_armanner': 'bert_token_classifier_parsbert_armanner', 'fa.classify.token_bert.parsbert_ner': 'bert_token_classifier_parsbert_ner', 'fa.classify.token_bert.parsbert_peymaner': 'bert_token_classifier_parsbert_peymaner', 'fa.classify.token_roberta_token_classifier_zwnj_base_ner': 'roberta_token_classifier_zwnj_base_ner', 'fa.embed': 'persian_w2v_cc_300d', 'fa.embed.albert': 'albert_embeddings_albert_fa_base_v2', 'fa.embed.albert_fa_zwnj_base_v2': 'albert_embeddings_albert_fa_zwnj_base_v2', 'fa.embed.bert.base': 'bert_embeddings_fa_zwnj_base', 'fa.embed.bert.uncased_base': 'bert_embeddings_fa_base_uncased', 'fa.embed.distilbert_fa_zwnj_base': 'distilbert_embeddings_distilbert_fa_zwnj_base', 'fa.embed.roberta_fa_zwnj_base': 'roberta_embeddings_roberta_fa_zwnj_base', 'fa.embed.word2vec': 'persian_w2v_cc_300d', 'fa.embed.word2vec.300d': 'persian_w2v_cc_300d', 'fa.lemma': 'lemma', 'fa.lemma.perdt': 
'lemma_perdt', 'fa.lemma.seraji': 'lemma_seraji', 'fa.ner': 'personer_cc_300d', 'fa.ner.bert.arman.uncased_base': 'bert_ner_bert_fa_base_uncased_ner_arman', 'fa.ner.bert.base': 'bert_ner_bert_fa_zwnj_base_ner', 'fa.ner.bert.pars_bert.uncased_base': 'bert_ner_bert_base_parsbert_ner_uncased', 'fa.ner.bert.pars_bert_arman.uncased_base': 'bert_ner_bert_base_parsbert_armanner_uncased', 'fa.ner.bert.pars_bert_peyma.uncased_base': 'bert_ner_bert_base_parsbert_peymaner_uncased', 'fa.ner.bert.peyma.uncased_base': 'bert_ner_bert_fa_base_uncased_ner_peyma', 'fa.ner.distil_bert': 'distilbert_token_classifier_persian_ner', 'fa.ner.person': 'personer_cc_300d', 'fa.ner.person.cc_300d': 'personer_cc_300d', 'fa.ner.roberta_fa_zwnj_base_ner': 'roberta_ner_roberta_fa_zwnj_base_ner', 'fa.ner.xlmr_roberta.arman_xtreme.base_finetuned': 'xlmroberta_ner_base_finetuned_arman', 'fa.ner.xlmr_roberta.peyma.base_finetuned': 'xlmroberta_ner_base_finetuned_peyma', 'fa.pos': 'pos_perdt', 'fa.pos.perdt': 'pos_perdt', 'fa.pos.seraji': 'pos_seraji', 'fa.speech2text.wav2vec2.v2_gpu_s117_exp': 'asr_exp_w2v2t_pretraining_s117_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s165_vp_exp': 'asr_exp_w2v2t_vp_s165_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s168_exp': 'asr_exp_w2v2t_wav2vec2_s168_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s198_vp_exp': 'asr_exp_w2v2t_vp_s198_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s224_vp_exp': 'asr_exp_w2v2t_vp_s224_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s282_vp_exp': 'asr_exp_w2v2t_vp_s282_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s28_exp': 'asr_exp_w2v2t_pretraining_s28_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s321_exp': 'asr_exp_w2v2t_wav2vec2_s321_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s339_vp_exp': 'asr_exp_w2v2t_vp_s339_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s376_vp_exp': 'asr_exp_w2v2t_vp_s376_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s407_vp_exp': 'asr_exp_w2v2t_vp_100k_s407_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s419_vp_exp': 'asr_exp_w2v2t_vp_s419_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s533_vp_exp': 
'asr_exp_w2v2t_vp_s533_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s555_vp_exp': 'asr_exp_w2v2t_vp_s555_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s650_exp': 'asr_exp_w2v2t_pretraining_s650_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s689_vp_exp': 'asr_exp_w2v2t_vp_s689_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s738_vp_exp': 'asr_exp_w2v2t_vp_s738_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s749_vp_exp': 'asr_exp_w2v2t_vp_s749_gpu', 'fa.speech2text.wav2vec2.v2_gpu_s88_vp_exp': 'asr_exp_w2v2t_vp_100k_s88_gpu', 'fa.speech2text.wav2vec2.v2_s165_vp_exp': 'asr_exp_w2v2t_vp_s165', 'fa.speech2text.wav2vec2.v2_s168_exp': 'asr_exp_w2v2t_wav2vec2_s168', 'fa.speech2text.wav2vec2.v2_s198_vp_exp': 'asr_exp_w2v2t_vp_s198', 'fa.speech2text.wav2vec2.v2_s224_vp_exp': 'asr_exp_w2v2t_vp_s224', 'fa.speech2text.wav2vec2.v2_s282_vp_exp': 'asr_exp_w2v2t_vp_s282', 'fa.speech2text.wav2vec2.v2_s321_exp': 'asr_exp_w2v2t_wav2vec2_s321', 'fa.speech2text.wav2vec2.v2_s339_vp_exp': 'asr_exp_w2v2t_vp_s339', 'fa.speech2text.wav2vec2.v2_s376_vp_exp': 'asr_exp_w2v2t_vp_s376', 'fa.speech2text.wav2vec2.v2_s407_vp_exp': 'asr_exp_w2v2t_vp_100k_s407', 'fa.speech2text.wav2vec2.v2_s419_vp_exp': 'asr_exp_w2v2t_vp_s419', 'fa.speech2text.wav2vec2.v2_s533_vp_exp': 'asr_exp_w2v2t_vp_s533', 'fa.speech2text.wav2vec2.v2_s555_vp_exp': 'asr_exp_w2v2t_vp_s555', 'fa.speech2text.wav2vec2.v2_s689_vp_exp': 'asr_exp_w2v2t_vp_s689', 'fa.speech2text.wav2vec2.v2_s738_vp_exp': 'asr_exp_w2v2t_vp_s738', 'fa.speech2text.wav2vec2.v2_s749_vp_exp': 'asr_exp_w2v2t_vp_s749', 'fa.speech2text.wav2vec2.v2_s88_vp_exp': 'asr_exp_w2v2t_vp_100k_s88', 'fa.speech2text.wav2vec_xlsr': 'asr_xlsr_lm', 'fa.speech2text.wav2vec_xlsr.gpu': 'asr_xlsr_lm_gpu', 'fa.speech2text.wav2vec_xlsr.v2_gpu_s116_exp': 'asr_exp_w2v2t_xlsr_53_s116_gpu', 'fa.speech2text.wav2vec_xlsr.v2_gpu_s204_exp': 'asr_exp_w2v2t_xlsr_53_s204_gpu', 'fa.speech2text.wav2vec_xlsr.v2_gpu_s356_exp': 'asr_exp_w2v2t_xlsr_53_s356_gpu', 'fa.speech2text.wav2vec_xlsr.v2_gpu_s44_exp': 'asr_exp_w2v2t_xls_r_s44_gpu', 
'fa.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_persian', 'fa.speech2text.wav2vec_xlsr.v2_large_3h': 'asr_wav2vec2_large_xlsr_persian_v3_by_m3hrdadfi', 'fa.speech2text.wav2vec_xlsr.v2_large_3h_gpu': 'asr_wav2vec2_large_xlsr_persian_v3_by_m3hrdadfi_gpu', 'fa.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_persian_gpu', 'fa.speech2text.wav2vec_xlsr.v2_large_gpu.by_m3hrdadfi': 'asr_wav2vec2_large_xlsr_persian_gpu', 'fa.speech2text.wav2vec_xlsr.v2_s116_exp': 'asr_exp_w2v2t_xlsr_53_s116', 'fa.speech2text.wav2vec_xlsr.v2_s204_exp': 'asr_exp_w2v2t_xlsr_53_s204', 'fa.speech2text.wav2vec_xlsr.v2_s356_exp': 'asr_exp_w2v2t_xlsr_53_s356', 'fa.speech2text.wav2vec_xlsr.v2_s44_exp': 'asr_exp_w2v2t_xls_r_s44', 'fa.stopwords': 'stopwords_iso', 'fa.stopwords.iso': 'stopwords_iso'}, 'fi': {'fi.answer_question.roberta': 'roberta_qa_addi_fi_roberta', 'fi.answer_question.roberta.squad.finetuned': 'roberta_qa_fin_roberta_v1_finetuned_squad', 'fi.answer_question.xlm_roberta': 'xlm_roberta_qa_ADDI_FI_XLM_R', 'fi.classify.bert.sentiment.': 'bert_classifier_finbert_finnsentiment', 'fi.embed.bert.cased_base': 'bert_embeddings_base_finnish_cased_v1', 'fi.embed.bert.uncased_base': 'bert_embeddings_base_finnish_uncased_v1', 'fi.embed.w2v_cc_300d': 'w2v_cc_300d', 'fi.embed_sentence.bert': 'bert_base_finnish_uncased', 'fi.embed_sentence.bert.cased': 'bert_base_finnish_cased', 'fi.embed_sentence.bert.uncased': 'bert_base_finnish_uncased', 'fi.lemma': 'lemma_tdt', 'fi.lemma.ftb': 'lemma_ftb', 'fi.lemma.tdt': 'lemma_tdt', 'fi.ner': 'wikiner_6B_100', 'fi.ner.6B_100d': 'finnish_ner_6B_100', 'fi.ner.6B_300d': 'finnish_ner_6B_300', 'fi.ner.840B_300d': 'finnish_ner_840B_300', 'fi.ner.xlmr_roberta.base': 'xlmroberta_ner_base_fin', 'fi.ner.xlmr_roberta.large': 'xlmroberta_ner_large_fin', 'fi.ner.xlmr_roberta.uncased_base': 'xlmroberta_ner_base_uncased_fin', 'fi.ner.xlmr_roberta.uncased_large': 'xlmroberta_ner_large_uncased_fin', 'fi.pos': 'pos_tdt', 'fi.pos.ftb': 
'pos_ftb', 'fi.pos.tdt': 'pos_tdt', 'fi.pos.ud_tdt': 'pos_ud_tdt', 'fi.speech2text.wav2vec2.voxpopuli.v2_base': 'asr_wav2vec2_base_10k_voxpopuli', 'fi.speech2text.wav2vec2.voxpopuli.v2_base_finetuned': 'asr_wav2vec2_base_voxpopuli_v2_finetuned', 'fi.speech2text.wav2vec2.voxpopuli.v2_base_finetuned_gpu': 'asr_wav2vec2_base_voxpopuli_v2_finetuned_gpu', 'fi.speech2text.wav2vec2.voxpopuli.v2_base_gpu': 'asr_wav2vec2_base_10k_voxpopuli_gpu', 'fi.speech2text.wav2vec2.voxpopuli.v2_large': 'asr_wav2vec2_large_uralic_voxpopuli_v2_finnish', 'fi.speech2text.wav2vec2.voxpopuli.v2_large_gpu': 'asr_wav2vec2_large_uralic_voxpopuli_v2_finnish_gpu', 'fi.speech2text.wav2vec_xlsr.by_finnish_nlp': 'asr_wav2vec2_xlsr_1b_finnish_lm_by_finnish_nlp', 'fi.speech2text.wav2vec_xlsr.v2': 'asr_wav2vec2_xlsr_1b_finnish', 'fi.speech2text.wav2vec_xlsr.v2.by_aapot': 'asr_wav2vec2_xlsr_1b_finnish_lm_v2_by_aapot', 'fi.speech2text.wav2vec_xlsr.v2.by_finnish_nlp': 'asr_wav2vec2_xlsr_1b_finnish_lm_v2_by_finnish_nlp', 'fi.speech2text.wav2vec_xlsr.v2.by_rasmus': 'asr_wav2vec2_xlsr_train_aug_biglm_1b', 'fi.speech2text.wav2vec_xlsr.v2_300m': 'asr_wav2vec2_xlsr_300m_finnish', 'fi.speech2text.wav2vec_xlsr.v2_300m.by_aapot': 'asr_wav2vec2_xlsr_300m_finnish_lm_by_aapot', 'fi.speech2text.wav2vec_xlsr.v2_300m.by_finnish_nlp': 'asr_wav2vec2_xlsr_300m_finnish_lm_by_finnish_nlp', 'fi.speech2text.wav2vec_xlsr.v2_gpu': 'asr_wav2vec2_xlsr_1b_finnish_gpu', 'fi.speech2text.wav2vec_xlsr.v2_gpu.by_aapot': 'asr_wav2vec2_xlsr_1b_finnish_lm_v2_by_aapot_gpu', 'fi.speech2text.wav2vec_xlsr.v2_gpu.by_finnish_nlp': 'asr_wav2vec2_xlsr_1b_finnish_lm_v2_by_finnish_nlp_gpu', 'fi.speech2text.wav2vec_xlsr.v2_gpu.by_rasmus': 'asr_wav2vec2_xlsr_train_aug_biglm_1b_gpu', 'fi.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_finnish_by_tommi', 'fi.speech2text.wav2vec_xlsr.v2_large.by_aapot': 'asr_wav2vec2_large_xlsr_53_finnish_by_aapot', 'fi.speech2text.wav2vec_xlsr.v2_large.by_birgermoell': 'asr_wav2vec2_large_xlsr_finnish', 
'fi.speech2text.wav2vec_xlsr.v2_large.by_jonatasgrosman': 'asr_wav2vec2_large_xlsr_53_finnish_by_jonatasgrosman', 'fi.speech2text.wav2vec_xlsr.v2_large.by_vasilis': 'asr_wav2vec2_large_xlsr_53_finnish_by_vasilis', 'fi.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_finnish_by_tommi_gpu', 'fi.speech2text.wav2vec_xlsr.v2_large_gpu.by_aapot': 'asr_wav2vec2_large_xlsr_53_finnish_by_aapot_gpu', 'fi.speech2text.wav2vec_xlsr.v2_large_gpu.by_birgermoell': 'asr_wav2vec2_large_xlsr_finnish_gpu', 'fi.speech2text.wav2vec_xlsr.v2_large_gpu.by_jonatasgrosman': 'asr_wav2vec2_large_xlsr_53_finnish_by_jonatasgrosman_gpu', 'fi.speech2text.wav2vec_xlsr.v2_large_gpu.by_vasilis': 'asr_wav2vec2_large_xlsr_53_finnish_by_vasilis_gpu', 'fi.speech2text.wav2vec_xlsrby_aapot': 'asr_wav2vec2_xlsr_1b_finnish_lm_by_aapot', 'fi.stopwords': 'stopwords_iso', 'fi.stopwords.iso': 'stopwords_iso'}, 'fo': {'fo.lemma': 'lemma_farpahc', 'fo.lemma.farpahc': 'lemma_farpahc', 'fo.pos': 'pos_farpahc'}, 'fon': {'fon.speech2text.wav2vec_xlsr': 'asr_fonxlsr', 'fon.speech2text.wav2vec_xlsr.gpu': 'asr_fonxlsr_gpu'}, 'fr': { 'fr.answer_question.camembert.fquad':'camembert_base_qa_fquad', 'fr.answer_question.roberta': 'roberta_qa_addi_fr_roberta', 'fr.answer_question.roberta.by_gantenbein': 'roberta_qa_addi_fr_xlm_r', 'fr.answer_question.squad.xlmr_roberta.base': 'xlm_roberta_qa_xlmr_base_texas_squad_fr_fr_saattrupdan', 'fr.classify.allocine': 'mdeberta_v3_base_sequence_classifier_allocine', 'fr.classify.bert.hate.': 'bert_classifier_dehatebert_mono_french', 'fr.classify.bert.multi2convai.': 'bert_classifier_multi2convai_corona', 'fr.classify.bert.multi2convai.by_inovex': 'bert_classifier_multi2convai_quality', 'fr.classify.bert.multi2convai.v2.by_inovex': 'bert_classifier_multi2convai_quality_fr_mbert', 'fr.classify.bert.multilingual': 'bert_multilingual_sequence_classifier_allocine', 'fr.classify.camembert.base': 'camembert_base_sequence_classifier_allocine', 'fr.classify.camembert.large': 
'camembert_large_sequence_classifier_allocine', 'fr.classify.distilbert_sequence.allocine': 'distilbert_multilingual_sequence_classifier_allocine', 'fr.classify.sentiment.bert': 'classifierdl_bert_sentiment', 'fr.classify.xlm_roberta.allocine': 'xlm_roberta_base_sequence_classifier_allocine', 'fr.embed.albert': 'albert_embeddings_fralbert_base', 'fr.embed.bert.cased_base': 'bert_embeddings_base_fr_cased', 'fr.embed.bert_5lang_cased': 'bert_embeddings_bert_base_5lang_cased', 'fr.embed.bert_base_fr_cased': 'bert_embeddings_bert_base_fr_cased', 'fr.embed.camembert': 'camembert_embeddings_dummy', 'fr.embed.camembert.91m_generic': 'camembert_embeddings_generic_model_r91m', 'fr.embed.camembert.adverse_drug_event_generic': 'camembert_embeddings_adeimousa_generic_model', 'fr.embed.camembert.base': 'camembert_embeddings_dataikunlp_camembert_base', 'fr.embed.camembert.by_ebtihal': 'camembert_embeddings_arbertmo', 'fr.embed.camembert.by_ghani_25': 'camembert_embeddings_summfinfr', 'fr.embed.camembert.by_hueynemud': 'camembert_embeddings_das22_10_camembert_pretrained', 'fr.embed.camembert.by_jodsa': 'camembert_embeddings_camembert_mlm', 'fr.embed.camembert.distilled_base': 'camembert_embeddings_distilcamembert_base', 'fr.embed.camembert.generic': 'camembert_embeddings_doyyingface_generic_model', 'fr.embed.camembert.generic.by_adam1224': 'camembert_embeddings_adam1224_generic_model', 'fr.embed.camembert.generic.by_aliasdasd': 'camembert_embeddings_aliasdasd_generic_model', 'fr.embed.camembert.generic.by_ankitkupadhyay': 'camembert_embeddings_ankitkupadhyay_generic_model', 'fr.embed.camembert.generic.by_codingjacob': 'camembert_embeddings_codingjacob_generic_model', 'fr.embed.camembert.generic.by_cylee': 'camembert_embeddings_cylee_generic_model', 'fr.embed.camembert.generic.by_devtrent': 'camembert_embeddings_devtrent_generic_model', 'fr.embed.camembert.generic.by_dianeshan': 'camembert_embeddings_dianeshan_generic_model', 'fr.embed.camembert.generic.by_edge2992': 
'camembert_embeddings_edge2992_generic_model', 'fr.embed.camembert.generic.by_eduardopds': 'camembert_embeddings_eduardopds_generic_model', 'fr.embed.camembert.generic.by_elliotsmith': 'camembert_embeddings_elliotsmith_generic_model', 'fr.embed.camembert.generic.by_elusive_magnolia': 'camembert_embeddings_elusive_magnolia_generic_model', 'fr.embed.camembert.generic.by_ericchchiu': 'camembert_embeddings_ericchchiu_generic_model', 'fr.embed.camembert.generic.by_fjluque': 'camembert_embeddings_fjluque_generic_model', 'fr.embed.camembert.generic.by_gulabpatel': 'camembert_embeddings_new_generic_model', 'fr.embed.camembert.generic.by_h4d35': 'camembert_embeddings_h4d35_generic_model', 'fr.embed.camembert.generic.by_hackertec': 'camembert_embeddings_generic2', 'fr.embed.camembert.generic.by_hasanmurad': 'camembert_embeddings_hasanmurad_generic_model', 'fr.embed.camembert.generic.by_hasanmuradbuet': 'camembert_embeddings_hasanmuradbuet_generic_model', 'fr.embed.camembert.generic.by_henrywang': 'camembert_embeddings_henrywang_generic_model', 'fr.embed.camembert.generic.by_jcai1': 'camembert_embeddings_jcai1_generic_model', 'fr.embed.camembert.generic.by_joe8zhang': 'camembert_embeddings_joe8zhang_generic_model', 'fr.embed.camembert.generic.by_jonathansum': 'camembert_embeddings_jonathansum_generic_model', 'fr.embed.camembert.generic.by_juliencarbonnell': 'camembert_embeddings_juliencarbonnell_generic_model', 'fr.embed.camembert.generic.by_katrin_kc': 'camembert_embeddings_katrin_kc_generic_model', 'fr.embed.camembert.generic.by_katster': 'camembert_embeddings_katster_generic_model', 'fr.embed.camembert.generic.by_kaushikacharya': 'camembert_embeddings_kaushikacharya_generic_model', 'fr.embed.camembert.generic.by_leisa': 'camembert_embeddings_leisa_generic_model', 'fr.embed.camembert.generic.by_lewtun': 'camembert_embeddings_lewtun_generic_model', 'fr.embed.camembert.generic.by_lijingxin': 'camembert_embeddings_lijingxin_generic_model', 
'fr.embed.camembert.generic.by_linyi': 'camembert_embeddings_linyi_generic_model', 'fr.embed.camembert.generic.by_mbateman': 'camembert_embeddings_mbateman_generic_model', 'fr.embed.camembert.generic.by_mohammadrea76': 'camembert_embeddings_mohammadrea76_generic_model', 'fr.embed.camembert.generic.by_myx4567': 'camembert_embeddings_myx4567_generic_model', 'fr.embed.camembert.generic.by_osanseviero': 'camembert_embeddings_generic_model_test', 'fr.embed.camembert.generic.by_peterhsu': 'camembert_embeddings_peterhsu_generic_model', 'fr.embed.camembert.generic.by_pgperrone': 'camembert_embeddings_pgperrone_generic_model', 'fr.embed.camembert.generic.by_safik': 'camembert_embeddings_safik_generic_model', 'fr.embed.camembert.generic.by_sebu': 'camembert_embeddings_sebu_generic_model', 'fr.embed.camembert.generic.by_seyfullah': 'camembert_embeddings_seyfullah_generic_model', 'fr.embed.camembert.generic.by_sonny': 'camembert_embeddings_sonny_generic_model', 'fr.embed.camembert.generic.by_tnagata': 'camembert_embeddings_tnagata_generic_model', 'fr.embed.camembert.generic.by_tpanza': 'camembert_embeddings_tpanza_generic_model', 'fr.embed.camembert.generic.by_wangst': 'camembert_embeddings_wangst_generic_model', 'fr.embed.camembert.generic.by_weipeng': 'camembert_embeddings_weipeng_generic_model', 'fr.embed.camembert.generic.by_xkang': 'camembert_embeddings_xkang_generic_model', 'fr.embed.camembert.generic.by_yancong': 'camembert_embeddings_yancong_generic_model', 'fr.embed.camembert.generic.by_ysharma': 'camembert_embeddings_ysharma_generic_model_2', 'fr.embed.camembert.generic.by_zhenghuabin': 'camembert_embeddings_zhenghuabin_generic_model', 'fr.embed.camembert.generic_v2.by_fjluque': 'camembert_embeddings_fjluque_generic_model2', 'fr.embed.camembert.generic_v2.by_hackertec': 'camembert_embeddings_hackertec_generic', 'fr.embed.camembert.generic_v2.by_lijingxin': 'camembert_embeddings_lijingxin_generic_model_2', 'fr.embed.camembert.generic_v2.by_osanseviero': 
'camembert_embeddings_osanseviero_generic_model', 'fr.embed.camembert.generic_v2.by_peterhsu': 'camembert_embeddings_tf_generic_model', 'fr.embed.camembert.tweet.base': 'camembert_embeddings_bertweetfr_base', 'fr.embed.camembert_base': 'camembert_base', 'fr.embed.camembert_base_ccnet': 'camembert_base_ccnet', 'fr.embed.camembert_ccnet4g': 'camembert_base_ccnet_4gb', 'fr.embed.camembert_large': 'camembert_large', 'fr.embed.camembert_oscar_4g': 'camembert_base_oscar_4gb', 'fr.embed.camembert_wiki_4g': 'camembert_base_wikipedia_4gb', 'fr.embed.distilbert': 'distilbert_embeddings_distilbert_base_fr_cased', 'fr.embed.electra.cased_base': 'electra_embeddings_electra_base_french_europeana_cased_generator', 'fr.embed.french_roberta': 'roberta_embeddings_french_roberta', 'fr.embed.roberta_base_wechsel_french': 'roberta_embeddings_roberta_base_wechsel_french', 'fr.embed.w2v_cc_300d': 'w2v_cc_300d', 'fr.embed.word2vec_wac_200': 'word2vec_wac_200', 'fr.embed.word2vec_wiki_1000': 'word2vec_wiki_1000', 'fr.lemma': 'lemma_ftb', 'fr.lemma.ftb': 'lemma_ftb', 'fr.lemma.gsd': 'lemma_gsd', 'fr.lemma.parisstories': 'lemma_parisstories', 'fr.lemma.partut': 'lemma_partut', 'fr.lemma.rhapsodie': 'lemma_rhapsodie', 'fr.lemma.sequoia': 'lemma_sequoia', 'fr.lemma.spacylookup': 'lemma_spacylookup', 'fr.ner': 'wikiner_840B_300', 'fr.ner.bert': 'bert_ner_bert_mention_fr_vera_pro', 'fr.ner.camembert': 'camembert_classifier_magbert_ner', 'fr.ner.camembert.antilles.': 'camembert_classifier_poet', 'fr.ner.camembert.by_jean_baptiste': 'camembert_classifier_ner', 'fr.ner.camembert.fquad.': 'camembert_classifier_squadfr_fquad_piaf_answer_extraction', 'fr.ner.camembert.with_dates.by_jean_baptiste': 'camembert_classifier_ner_with_dates', 'fr.ner.pos': 'bert_pos_french_postag_model', 'fr.ner.pos.antilles.': 'camembert_classifier_pos_french', 'fr.ner.wikiner': 'wikiner_840B_300', 'fr.ner.wikiner.glove.6B_300': 'wikiner_6B_300', 'fr.ner.wikiner.glove.840B_300': 'wikiner_840B_300', 
'fr.ner.xlmr_roberta.xtreme.base_finetuned': 'xlmroberta_ner_cj_mills_base_finetuned_panx', 'fr.ner.xlmr_roberta.xtreme.base_finetuned.by_Neha2608': 'xlmroberta_ner_neha2608_base_finetuned_panx', 'fr.ner.xlmr_roberta.xtreme.base_finetuned.by_V3RX2000': 'xlmroberta_ner_v3rx2000_base_finetuned_panx', 'fr.ner.xlmr_roberta.xtreme.base_finetuned.by_dkasti': 'xlmroberta_ner_dkasti_base_finetuned_panx', 'fr.ner.xlmr_roberta.xtreme.base_finetuned.by_edwardjross': 'xlmroberta_ner_edwardjross_base_finetuned_panx', 'fr.ner.xlmr_roberta.xtreme.base_finetuned.by_flood': 'xlmroberta_ner_flood_base_finetuned_panx', 'fr.ner.xlmr_roberta.xtreme.base_finetuned.by_haesun': 'xlmroberta_ner_haesun_base_finetuned_panx', 'fr.ner.xlmr_roberta.xtreme.base_finetuned.by_iis2009002': 'xlmroberta_ner_iis2009002_base_finetuned_panx', 'fr.ner.xlmr_roberta.xtreme.base_finetuned.by_jgriffi': 'xlmroberta_ner_jgriffi_base_finetuned_panx', 'fr.ner.xlmr_roberta.xtreme.base_finetuned.by_lijingxin': 'xlmroberta_ner_lijingxin_base_finetuned_panx', 'fr.ner.xlmr_roberta.xtreme.base_finetuned.by_moghis': 'xlmroberta_ner_moghis_base_finetuned_panx', 'fr.ner.xlmr_roberta.xtreme.base_finetuned.by_olpa': 'xlmroberta_ner_xml_roberta_base_finetuned_panx', 'fr.ner.xlmr_roberta.xtreme.base_finetuned.by_robkayinto': 'xlmroberta_ner_robkayinto_base_finetuned_panx', 'fr.ner.xlmr_roberta.xtreme.base_finetuned.by_skr3178': 'xlmroberta_ner_skr3178_base_finetuned_panx', 'fr.ner.xlmr_roberta.xtreme.base_finetuned.by_transformersbook': 'xlmroberta_ner_transformersbook_base_finetuned_panx', 'fr.pos': 'pos_sequoia', 'fr.pos.gsd': 'pos_gsd', 'fr.pos.parisstories': 'pos_parisstories', 'fr.pos.partut': 'pos_partut', 'fr.pos.rhapsodie': 'pos_rhapsodie', 'fr.pos.sequoia': 'pos_sequoia', 'fr.pos.ud_gsd': 'pos_ud_gsd', 'fr.speech2text.wav2vec2.gender_male_female.v2_gpu_s156_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_10_female_0_s156_gpu', 'fr.speech2text.wav2vec2.gender_male_female.v2_gpu_s21_vp_exp': 
'asr_exp_w2v2r_vp_100k_gender_male_5_female_5_s21_gpu', 'fr.speech2text.wav2vec2.gender_male_female.v2_gpu_s255_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_2_female_8_s255_gpu', 'fr.speech2text.wav2vec2.gender_male_female.v2_gpu_s3_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_2_female_8_s3_gpu', 'fr.speech2text.wav2vec2.gender_male_female.v2_gpu_s400_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_0_female_10_s400_gpu', 'fr.speech2text.wav2vec2.gender_male_female.v2_gpu_s428_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_8_female_2_s428_gpu', 'fr.speech2text.wav2vec2.gender_male_female.v2_gpu_s469_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_0_female_10_s469_gpu', 'fr.speech2text.wav2vec2.gender_male_female.v2_gpu_s474_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_5_female_5_s474_gpu', 'fr.speech2text.wav2vec2.gender_male_female.v2_gpu_s500_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_8_female_2_s500_gpu', 'fr.speech2text.wav2vec2.gender_male_female.v2_gpu_s626_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_10_female_0_s626_gpu', 'fr.speech2text.wav2vec2.gender_male_female.v2_gpu_s714_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_10_female_0_s714_gpu', 'fr.speech2text.wav2vec2.gender_male_female.v2_gpu_s722_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_5_female_5_s722_gpu', 'fr.speech2text.wav2vec2.gender_male_female.v2_gpu_s911_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_8_female_2_s911_gpu', 'fr.speech2text.wav2vec2.gender_male_female.v2_gpu_s934_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_0_female_10_s934_gpu', 'fr.speech2text.wav2vec2.gender_male_female.v2_s156_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_10_female_0_s156', 'fr.speech2text.wav2vec2.gender_male_female.v2_s21_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_5_female_5_s21', 'fr.speech2text.wav2vec2.gender_male_female.v2_s255_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_2_female_8_s255', 'fr.speech2text.wav2vec2.gender_male_female.v2_s3_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_2_female_8_s3', 
'fr.speech2text.wav2vec2.gender_male_female.v2_s400_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_0_female_10_s400', 'fr.speech2text.wav2vec2.gender_male_female.v2_s428_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_8_female_2_s428', 'fr.speech2text.wav2vec2.gender_male_female.v2_s469_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_0_female_10_s469', 'fr.speech2text.wav2vec2.gender_male_female.v2_s474_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_5_female_5_s474', 'fr.speech2text.wav2vec2.gender_male_female.v2_s500_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_8_female_2_s500', 'fr.speech2text.wav2vec2.gender_male_female.v2_s626_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_10_female_0_s626', 'fr.speech2text.wav2vec2.gender_male_female.v2_s714_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_10_female_0_s714', 'fr.speech2text.wav2vec2.gender_male_female.v2_s722_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_5_female_5_s722', 'fr.speech2text.wav2vec2.gender_male_female.v2_s911_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_8_female_2_s911', 'fr.speech2text.wav2vec2.gender_male_female.v2_s934_vp_exp': 'asr_exp_w2v2r_vp_100k_gender_male_0_female_10_s934', 'fr.speech2text.wav2vec2.v2_gpu_s169_vp_exp': 'asr_exp_w2v2t_vp_s169_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s179_vp_exp': 'asr_exp_w2v2t_vp_s179_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s203_vp_exp': 'asr_exp_w2v2t_vp_s203_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s208_exp': 'asr_exp_w2v2t_pretraining_s208_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s227_exp': 'asr_exp_w2v2t_wav2vec2_s227_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s251_exp': 'asr_exp_w2v2t_r_wav2vec2_s251_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s271_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_france_10_belgium_0_s271_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s281_vp_exp': 'asr_exp_w2v2t_vp_s281_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s320_vp_exp': 'asr_exp_w2v2t_vp_s320_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s438_vp_exp': 'asr_exp_w2v2t_vp_s438_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s456_exp': 
'asr_exp_w2v2t_r_wav2vec2_s456_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s459_exp': 'asr_exp_w2v2t_r_wav2vec2_s459_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s509_vp_exp': 'asr_exp_w2v2t_vp_100k_s509_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s596_vp_exp': 'asr_exp_w2v2t_vp_s596_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s607_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_france_5_belgium_5_s607_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s688_vp_exp': 'asr_exp_w2v2t_vp_100k_s688_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s709_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_france_2_belgium_8_s709_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s766_exp': 'asr_exp_w2v2t_pretraining_s766_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s809_exp': 'asr_exp_w2v2t_wav2vec2_s809_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s870_exp': 'asr_exp_w2v2t_wav2vec2_s870_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s875_vp_exp': 'asr_exp_w2v2t_vp_s875_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s877_vp_exp': 'asr_exp_w2v2t_vp_s877_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s878_vp_exp': 'asr_exp_w2v2t_vp_s878_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s929_exp': 'asr_exp_w2v2t_pretraining_s929_gpu', 'fr.speech2text.wav2vec2.v2_gpu_s973_vp_exp': 'asr_exp_w2v2t_vp_100k_s973_gpu', 'fr.speech2text.wav2vec2.v2_s169_vp_exp': 'asr_exp_w2v2t_vp_s169', 'fr.speech2text.wav2vec2.v2_s179_vp_exp': 'asr_exp_w2v2t_vp_s179', 'fr.speech2text.wav2vec2.v2_s203_vp_exp': 'asr_exp_w2v2t_vp_s203', 'fr.speech2text.wav2vec2.v2_s227_exp': 'asr_exp_w2v2t_wav2vec2_s227', 'fr.speech2text.wav2vec2.v2_s251_exp': 'asr_exp_w2v2t_r_wav2vec2_s251', 'fr.speech2text.wav2vec2.v2_s271_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_france_10_belgium_0_s271', 'fr.speech2text.wav2vec2.v2_s281_vp_exp': 'asr_exp_w2v2t_vp_s281', 'fr.speech2text.wav2vec2.v2_s320_vp_exp': 'asr_exp_w2v2t_vp_s320', 'fr.speech2text.wav2vec2.v2_s438_vp_exp': 'asr_exp_w2v2t_vp_s438', 'fr.speech2text.wav2vec2.v2_s456_exp': 'asr_exp_w2v2t_r_wav2vec2_s456', 'fr.speech2text.wav2vec2.v2_s459_exp': 'asr_exp_w2v2t_r_wav2vec2_s459', 
'fr.speech2text.wav2vec2.v2_s509_vp_exp': 'asr_exp_w2v2t_vp_100k_s509', 'fr.speech2text.wav2vec2.v2_s596_vp_exp': 'asr_exp_w2v2t_vp_s596', 'fr.speech2text.wav2vec2.v2_s607_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_france_5_belgium_5_s607', 'fr.speech2text.wav2vec2.v2_s688_vp_exp': 'asr_exp_w2v2t_vp_100k_s688', 'fr.speech2text.wav2vec2.v2_s709_vp_exp': 'asr_exp_w2v2r_vp_100k_accent_france_2_belgium_8_s709', 'fr.speech2text.wav2vec2.v2_s809_exp': 'asr_exp_w2v2t_wav2vec2_s809', 'fr.speech2text.wav2vec2.v2_s870_exp': 'asr_exp_w2v2t_wav2vec2_s870', 'fr.speech2text.wav2vec2.v2_s875_vp_exp': 'asr_exp_w2v2t_vp_s875', 'fr.speech2text.wav2vec2.v2_s877_vp_exp': 'asr_exp_w2v2t_vp_s877', 'fr.speech2text.wav2vec2.v2_s878_vp_exp': 'asr_exp_w2v2t_vp_s878', 'fr.speech2text.wav2vec2.v2_s973_vp_exp': 'asr_exp_w2v2t_vp_100k_s973', 'fr.speech2text.wav2vec2.voxpopuli.v2_base_gpu': 'asr_wav2vec2_base_10k_voxpopuli_gpu', 'fr.speech2text.wav2vec2.voxpopuli.v2_large': 'asr_wav2vec2_large_voxpopuli_french', 'fr.speech2text.wav2vec2.voxpopuli.v2_large_gpu': 'asr_wav2vec2_large_voxpopuli_french_gpu', 'fr.speech2text.wav2vec_xlsr.gender_male_female.v2_gpu_s295_exp': 'asr_exp_w2v2r_xls_r_gender_male_2_female_8_s295_gpu', 'fr.speech2text.wav2vec_xlsr.gender_male_female.v2_gpu_s559_exp': 'asr_exp_w2v2r_xls_r_gender_male_10_female_0_s559_gpu', 'fr.speech2text.wav2vec_xlsr.gender_male_female.v2_gpu_s755_exp': 'asr_exp_w2v2r_xls_r_gender_male_8_female_2_s755_gpu', 'fr.speech2text.wav2vec_xlsr.gender_male_female.v2_s295_exp': 'asr_exp_w2v2r_xls_r_gender_male_2_female_8_s295', 'fr.speech2text.wav2vec_xlsr.gender_male_female.v2_s559_exp': 'asr_exp_w2v2r_xls_r_gender_male_10_female_0_s559', 'fr.speech2text.wav2vec_xlsr.gender_male_female.v2_s755_exp': 'asr_exp_w2v2r_xls_r_gender_male_8_female_2_s755', 'fr.speech2text.wav2vec_xlsr.v2_300m_gpu': 'asr_wav2vec2_xls_r_300m_gpu', 'fr.speech2text.wav2vec_xlsr.v2_gpu': 'asr_wav2vec2_xls_r_1b_french_by_bhuang_gpu', 'fr.speech2text.wav2vec_xlsr.v2_gpu_s250_exp': 
'asr_exp_w2v2t_xls_r_s250_gpu', 'fr.speech2text.wav2vec_xlsr.v2_gpu_s286_exp': 'asr_exp_w2v2t_xlsr_53_s286_gpu', 'fr.speech2text.wav2vec_xlsr.v2_gpu_s539_exp': 'asr_exp_w2v2t_xlsr_53_s539_gpu', 'fr.speech2text.wav2vec_xlsr.v2_gpu_s800_exp': 'asr_exp_w2v2t_xlsr_53_s800_gpu', 'fr.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_french', 'fr.speech2text.wav2vec_xlsr.v2_large.by_facebook': 'asr_wav2vec2_large_xlsr_53_french_by_facebook', 'fr.speech2text.wav2vec_xlsr.v2_large.by_ilyes': 'asr_wav2vec2_large_xlsr_53_french_by_ilyes', 'fr.speech2text.wav2vec_xlsr.v2_large.by_jonatasgrosman': 'asr_wav2vec2_large_xlsr_53_french_by_jonatasgrosman', 'fr.speech2text.wav2vec_xlsr.v2_large.by_nhut': 'asr_wav2vec2_large_xlsr_french', 'fr.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_french_gpu', 'fr.speech2text.wav2vec_xlsr.v2_large_gpu.by_facebook': 'asr_wav2vec2_large_xlsr_53_french_by_facebook_gpu', 'fr.speech2text.wav2vec_xlsr.v2_large_gpu.by_ilyes': 'asr_wav2vec2_large_xlsr_53_french_by_ilyes_gpu', 'fr.speech2text.wav2vec_xlsr.v2_large_gpu.by_jonatasgrosman': 'asr_wav2vec2_large_xlsr_53_french_by_jonatasgrosman_gpu', 'fr.speech2text.wav2vec_xlsr.v2_large_gpu.by_nhut': 'asr_wav2vec2_large_xlsr_french_gpu', 'fr.speech2text.wav2vec_xlsr.v2_s250_exp': 'asr_exp_w2v2t_xls_r_s250', 'fr.speech2text.wav2vec_xlsr.v2_s286_exp': 'asr_exp_w2v2t_xlsr_53_s286', 'fr.speech2text.wav2vec_xlsr.v2_s539_exp': 'asr_exp_w2v2t_xlsr_53_s539', 'fr.speech2text.wav2vec_xlsr.v2_s800_exp': 'asr_exp_w2v2t_xlsr_53_s800', 'fr.stopwords': 'stopwords_iso', 'fr.stopwords.iso': 'stopwords_iso'}, 'fro': {'fro.lemma': 'lemma_srcmf', 'fro.lemma.srcmf': 'lemma_srcmf', 'fro.pos.srcmf': 'pos_srcmf'}, 'frr': {'frr.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'fy': {'fy.embed.bert.cased_base': 'bert_embeddings_base_dutch_cased_frisian', 'fy.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'ga': {'ga.lemma': 'lemma', 'ga.lemma.idt': 'lemma_idt', 'ga.ner.bert.wikiann.cased_base_finetuned': 
'bert_ner_bert_base_irish_cased_v1_finetuned_ner', 'ga.ner.roberta.wikiann.finetuned': 'roberta_ner_bertreach_finetuned_ner', 'ga.pos': 'pos_idt', 'ga.pos.idt': 'pos_idt', 'ga.stopwords': 'stopwords_iso', 'ga.stopwords.iso': 'stopwords_iso'}, 'gam': { 'gam.answer_question.roberta.cuad.base_finetuned': 'roberta_qa_roberta_base_finetuned_cuad_gam'}, 'gd': {'gd.embed.w2v_cc_300d': 'w2v_cc_300d', 'gd.lemma': 'lemma_arcosg', 'gd.pos.arcosg': 'pos_arcosg'}, 'gl': {'gl.embed.roberta': 'roberta_embeddings_robertinh', 'gl.embed.w2v_cc_300d': 'w2v_cc_300d', 'gl.lemma': 'lemma_treegal', 'gl.lemma.ctg': 'lemma_ctg', 'gl.lemma.treegal': 'lemma_treegal', 'gl.pos': 'pos_ud_treegal', 'gl.pos.ctg': 'pos_ctg', 'gl.pos.treegal': 'pos_treegal', 'gl.stopwords': 'stopwords_gl'}, 'gom': {'gom.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'got': {'got.lemma': 'lemma_proiel', 'got.lemma.proiel': 'lemma_proiel', 'got.pos.proiel': 'pos_proiel'}, 'grc': {'grc.lemma': 'lemma_proiel', 'grc.lemma.perseus': 'lemma_perseus', 'grc.lemma.proiel': 'lemma_proiel', 'grc.lemma.spacylookup': 'lemma_spacylookup', 'grc.pos': 'pos_proiel', 'grc.pos.perseus': 'pos_perseus', 'grc.stopwords': 'stopwords_iso'}, 'gu': {'gu.detect_sentence': 'sentence_detector_dl', 'gu.embed.RoBERTa_hindi_guj_san': 'roberta_embeddings_RoBERTa_hindi_guj_san', 'gu.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_gpu', 'gu.stopwords': 'stopwords_iso'}, 'gv': {'gv.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'ha': { 'ha.embed.bert.cased_multilingual_base_finetuned': 'bert_embeddings_base_multilingual_cased_finetuned_hausa', 'ha.embed.bert.cased_multilingual_base_finetuned.by_davlan': 'bert_embeddings_base_multilingual_cased_finetuned_swahili', 'ha.embed.xlm_roberta': 'xlm_roberta_base_finetuned_hausa', 'ha.embed_sentence.xlm_roberta': 'sent_xlm_roberta_base_finetuned_hausa', 'ha.ner.xlmr_roberta.base_finetuned': 'xlmroberta_ner_xlm_roberta_base_finetuned_hausa_finetuned_ner_hausa', 'ha.ner.xlmr_roberta.base_finetuned_hausa.by_mbeukman': 
'xlmroberta_ner_xlm_roberta_base_finetuned_ner_hausa', 'ha.ner.xlmr_roberta.base_finetuned_swahilis.by_mbeukman': 'xlmroberta_ner_xlm_roberta_base_finetuned_swahili_finetuned_ner_hausa', 'ha.speech2text.wav2vec_xlsr': 'asr_hausa_xlsr', 'ha.speech2text.wav2vec_xlsr.gpu': 'asr_hausa_xlsr_gpu', 'ha.stopwords': 'stopwords_ha'}, 'he': {'he.answer_question.squad.bert': 'bert_qa_hebert_finetuned_hebrew_squad', 'he.classify.bert': 'bert_classifier_descriptive', 'he.classify.bert.sentiment.': 'bert_classifier_he_sentiment_analysis', 'he.embed': 'hebrew_cc_300d', 'he.embed.bert.base': 'bert_embeddings_onlplab_aleph_base', 'he.embed.bert.legal': 'bert_embeddings_legal_hebert', 'he.embed.bert.legal.by_avichr': 'bert_embeddings_legal_hebert_ft', 'he.embed.cbow_300d': 'hebrew_cc_300d', 'he.embed.glove': 'hebrew_cc_300d', 'he.lemma': 'lemma', 'he.lemma.htb': 'lemma_htb', 'he.ner': 'hebrewner_cc_300d', 'he.ner.bert': 'bert_ner_hebert_ner', 'he.ner.bert.finetuned': 'bert_token_classifier_aleph_finetuned_metaphor_detection', 'he.ner.cc_300d': 'hebrewner_cc_300d', 'he.pos': 'pos_ud_htb', 'he.pos.htb': 'pos_htb', 'he.pos.ud_htb': 'pos_ud_htb', 'he.stopwords': 'stopwords_iso', 'he.stopwords.iso': 'stopwords_iso'}, 'hi': {'hi.answer_question.bert.cased_large': 'bert_qa_muril_large_cased_hita_qa', 'hi.answer_question.bert.cased_multilingual_base': 'bert_qa_multilingual_bert_base_cased_hindi', 'hi.answer_question.bert.mlqa.finetuned': 'bert_qa_mbert_finetuned_mlqa_dev', 'hi.answer_question.bert.squad.finetuned': 'bert_qa_muril_finetuned_squadv1', 'hi.answer_question.bert.squadv2.large': 'bert_qa_muril_large_squad2', 'hi.answer_question.xlm_roberta': 'xlm_roberta_qa_autonlp_hindi_question_answering_23865268', 'hi.answer_question.xlm_roberta.base': 'xlm_roberta_qa_xlm_roberta_base_hindi', 'hi.classify.roberta': 'roberta_classifier_autotrain_citizen_nlu_hindi_1370952776', 'hi.classify.xlmr_roberta.news.': 'xlmroberta_classifier_autonlp_fake_news_detection_system_29906863', 'hi.embed': 
'hindi_cc_300d', 'hi.embed.RoBERTa_hindi_guj_san': 'roberta_embeddings_RoBERTa_hindi_guj_san', 'hi.embed.bert': 'bert_embeddings_indic_transformers', 'hi.embed.bert.cased_base': 'bert_embeddings_base_hi_cased', 'hi.embed.bert_hi_cased': 'bert_embeddings_bert_base_hi_cased', 'hi.embed.distil_bert': 'distilbert_embeddings_indic_transformers', 'hi.embed.distilbert_base_hi_cased': 'distilbert_embeddings_distilbert_base_hi_cased', 'hi.embed.indic_transformers_hi_bert': 'bert_embeddings_indic_transformers_hi_bert', 'hi.embed.indic_transformers_hi_distilbert': 'distilbert_embeddings_indic_transformers_hi_distilbert', 'hi.embed.indic_transformers_hi_roberta': 'roberta_embeddings_indic_transformers_hi_roberta', 'hi.embed.muril_adapted_local': 'bert_embeddings_muril_adapted_local', 'hi.embed.roberta': 'roberta_embeddings_hindi', 'hi.embed.roberta.by_neuralspace_reverie': 'roberta_embeddings_indic_transformers', 'hi.embed.xlmr_roberta': 'xlmroberta_embeddings_indic_transformers_hi_xlmroberta', 'hi.lemma': 'lemma', 'hi.lemma.hdtb': 'lemma_hdtb', 'hi.ner': 'bert_hi_en_ner', 'hi.ner.bert': 'bert_token_classifier_hi_en_ner', 'hi.ner.bert.by_l3cube_pune': 'bert_ner_hing_bert_lid', 'hi.ner.bert.by_sagorsarker': 'bert_ner_codeswitch_hineng_ner_lince', 'hi.ner.bert.ner_codeswitch_hineng_lid_lince': 'bert_ner_codeswitch_hineng_lid_lince', 'hi.ner.pos': 'bert_pos_codeswitch_hineng_pos_lince', 'hi.ner.xlmr_roberta.large_original': 'xlmroberta_ner_hiner_original_large', 'hi.pos': 'pos_ud_hdtb', 'hi.pos.hdtb': 'pos_hdtb', 'hi.speech2text.wav2vec2': 'asr_cdac_hindispeechrecognition', 'hi.speech2text.wav2vec2.by_harveenchadha': 'asr_hindi_model_with_lm_vakyansh', 'hi.speech2text.wav2vec2.by_swayam01': 'asr_hindi_clsril_100', 'hi.speech2text.wav2vec2.gpu': 'asr_cdac_hindispeechrecognition_gpu', 'hi.speech2text.wav2vec2.gpu.by_harveenchadha': 'asr_hindi_model_with_lm_vakyansh_gpu', 'hi.speech2text.wav2vec2.gpu.by_swayam01': 'asr_hindi_clsril_100_gpu', 'hi.speech2text.wav2vec2.v2': 
'asr_indicwav2vec_hindi', 'hi.speech2text.wav2vec2.v2.by_harveenchadha': 'asr_vakyansh_wav2vec2_hindi_him_4200', 'hi.speech2text.wav2vec2.v2_base': 'asr_hindi_base_wav2vec2', 'hi.speech2text.wav2vec2.v2_base_gpu': 'asr_hindi_base_wav2vec2_gpu', 'hi.speech2text.wav2vec2.v2_gpu': 'asr_indicwav2vec_hindi_gpu', 'hi.speech2text.wav2vec2.v2_gpu.by_harveenchadha': 'asr_vakyansh_wav2vec2_hindi_him_4200_gpu', 'hi.speech2text.wav2vec2.v2_large': 'asr_hindi_large_wav2vec2', 'hi.speech2text.wav2vec2.v2_large_gpu': 'asr_hindi_large_wav2vec2_gpu', 'hi.speech2text.wav2vec_xlsr': 'asr_xls_r_ab_test_by_priyajay', 'hi.speech2text.wav2vec_xlsr.300m': 'asr_xls_r_300m_prod', 'hi.speech2text.wav2vec_xlsr.300m.by_yaswanth': 'asr_xls_r_300m_yaswanth_hindi2', 'hi.speech2text.wav2vec_xlsr.300m_gpu': 'asr_xls_r_300m_prod_gpu', 'hi.speech2text.wav2vec_xlsr.300m_gpu.by_yaswanth': 'asr_xls_r_300m_yaswanth_hindi2_gpu', 'hi.speech2text.wav2vec_xlsr.by_kapilkd13': 'asr_xls_r_test', 'hi.speech2text.wav2vec_xlsr.gpu': 'asr_xls_r_ab_test_by_priyajay_gpu', 'hi.speech2text.wav2vec_xlsr.gpu.by_priyajay': 'asr_xls_r_test_gpu', 'hi.speech2text.wav2vec_xlsr.large': 'asr_wave2vec2_large_xlsr_hindi', 'hi.speech2text.wav2vec_xlsr.large_gpu': 'asr_wave2vec2_large_xlsr_hindi_gpu', 'hi.speech2text.wav2vec_xlsr.v2_300m': 'asr_wav2vec2_xls_r_300m_final', 'hi.speech2text.wav2vec_xlsr.v2_300m.by_legolastheelf': 'asr_wav2vec2_xls_r_lm_300m', 'hi.speech2text.wav2vec_xlsr.v2_300m_gpu': 'asr_wav2vec2_xls_r_300m_final_gpu', 'hi.speech2text.wav2vec_xlsr.v2_300m_gpu.by_legolastheelf': 'asr_wav2vec2_xls_r_lm_300m_gpu', 'hi.speech2text.wav2vec_xlsr.v2_gpu': 'asr_xlsr_53_wav2vec_gpu', 'hi.speech2text.wav2vec_xlsr.v2_large_300m': 'asr_wav2vec2_large_xls_r_300m_cv7', 'hi.speech2text.wav2vec_xlsr.v2_large_300m.by_ravirajoshi': 'asr_wav2vec2_large_xls_r_300m_hindi_by_ravirajoshi', 'hi.speech2text.wav2vec_xlsr.v2_large_300m.by_reichenbach': 'asr_wav2vec2_large_xls_r_300m_by_reichenbach', 
'hi.speech2text.wav2vec_xlsr.v2_large_300m.by_saitomar': 'asr_wav2vec2_large_xls_r_300m_hindi_kaggle', 'hi.speech2text.wav2vec_xlsr.v2_large_300m_cv8.by_drishtisharma': 'asr_wav2vec2_large_xls_r_300m_cv8', 'hi.speech2text.wav2vec_xlsr.v2_large_300m_d3.by_drishtisharma': 'asr_wav2vec2_large_xls_r_300m_d3', 'hi.speech2text.wav2vec_xlsr.v2_large_300m_d3_gpu': 'asr_wav2vec2_large_xls_r_300m_d3_gpu', 'hi.speech2text.wav2vec_xlsr.v2_large_300m_gpu': 'asr_wav2vec2_large_xls_r_300m_cv7_gpu', 'hi.speech2text.wav2vec_xlsr.v2_large_300m_gpu.by_ravirajoshi': 'asr_wav2vec2_large_xls_r_300m_hindi_by_ravirajoshi_gpu', 'hi.speech2text.wav2vec_xlsr.v2_large_300m_gpu.by_reichenbach': 'asr_wav2vec2_large_xls_r_300m_by_reichenbach_gpu', 'hi.speech2text.wav2vec_xlsr.v2_large_300m_gpu.by_saitomar': 'asr_wav2vec2_large_xls_r_300m_hindi_kaggle_gpu', 'hi.speech2text.wav2vec_xlsr.v2_large_300m_gpu_cv8.by_drishtisharma': 'asr_wav2vec2_large_xls_r_300m_cv8_gpu', 'hi.speech2text.wav2vec_xlsr.v2_large_300m_gpu_wx1.by_drishtisharma': 'asr_wav2vec2_large_xls_r_300m_wx1_gpu', 'hi.speech2text.wav2vec_xlsr.v2_large_300m_wx1.by_drishtisharma': 'asr_wav2vec2_large_xls_r_300m_wx1', 'hi.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_hindi_gpu', 'hi.stopwords': 'stopwords_iso', 'hi.stopwords.iso': 'stopwords_iso'}, 'hif': {'hif.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'hr': {'hr.classify.bert.hate.': 'bert_classifier_bcms_ic_frenk_hate', 'hr.classify.bert.multi2convai.': 'bert_classifier_multi2convai_logistics', 'hr.embed.w2v_cc_300d': 'w2v_cc_300d', 'hr.lemma': 'lemma_spacylookup', 'hr.lemma.set': 'lemma_set', 'hr.lemma.spacylookup': 'lemma_spacylookup', 'hr.pos.set': 'pos_set', 'hr.stopwords': 'stopwords_iso'}, 'hsb': {'hsb.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'hu': { 'hu.answer_question.squad.bert': 'bert_qa_huBert_fine_tuned_hungarian_squadv1', 'hu.lemma': 'lemma_spacylookup', 'hu.lemma.spacylookup': 'lemma_spacylookup', 'hu.lemma.szeged': 'lemma_szeged', 'hu.ner.bert': 
'bert_ner_named_entity_recognition_nerkor_hubert_hungarian', 'hu.ner.bert.by_fdominik98': 'bert_ner_ner_hu_model_2021', 'hu.ner.bert.cased_base': 'bert_ner_bert_base_hu_cased_ner', 'hu.ner.bert.cased_base.by_akdeniz27': 'bert_ner_bert_base_hungarian_cased_ner', 'hu.pos': 'pos_ud_szeged', 'hu.pos.szeged': 'pos_szeged', 'hu.pos.ud_szeged': 'pos_ud_szeged', 'hu.speech2text.wav2vec2.voxpopuli.v2_base_gpu': 'asr_wav2vec2_base_10k_voxpopuli_gpu', 'hu.speech2text.wav2vec_xlsr': 'asr_xlsr_hungarian', 'hu.speech2text.wav2vec_xlsr.gpu': 'asr_xlsr_hungarian_gpu', 'hu.speech2text.wav2vec_xlsr.l_v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_hungarian_by_anton_l_gpu', 'hu.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_hungarian_by_anton_l', 'hu.speech2text.wav2vec_xlsr.v2_large.by_birgermoell': 'asr_wav2vec2_large_xlsr_hungarian', 'hu.speech2text.wav2vec_xlsr.v2_large.by_jonatasgrosman': 'asr_wav2vec2_large_xlsr_53_hungarian_by_jonatasgrosman', 'hu.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_hungarian_by_jonatasgrosman_gpu', 'hu.speech2text.wav2vec_xlsr.v2_large_gpu.by_birgermoell': 'asr_wav2vec2_large_xlsr_hungarian_gpu', 'hu.speech2text.wav2vec_xlsr.v2_large_gpu.by_gchhablani': 'asr_wav2vec2_large_xlsr_gpu', 'hu.stopwords': 'stopwords_iso', 'hu.stopwords.iso': 'stopwords_iso'}, 'hy': {'hy.embed.w2v_cc_300d': 'w2v_cc_300d', 'hy.lemma': 'lemma_armtdp', 'hy.lemma.armtdp': 'lemma_armtdp', 'hy.pos': 'pos_ud_armtdp', 'hy.pos.armtdp': 'pos_armtdp', 'hy.stopwords': 'stopwords_iso', 'hy.stopwords.iso': 'stopwords_iso'}, 'hyw': {'hyw.lemma': 'lemma_armtdp', 'hyw.pos': 'pos_armtdp'}, 'id': {'id.answer_question.bert.lang': 'bert_qa_firmanindolanguagemodel', 'id.answer_question.bert.tydiqa.base': 'bert_qa_base_indonesian_tydiqa', 'id.answer_question.indo_bert': 'bert_qa_Indobert_QA', 'id.classify.bert.sentiment.base': 'bert_classifier_bert_base_indonesian_1.5g_sentiment_analysis_smsa', 'id.classify.roberta': 'roberta_classifier_indo_indonli', 
'id.classify.roberta.base': 'roberta_classifier_indonesian_base_emotion', 'id.classify.roberta.base.by_w11wo': 'roberta_classifier_indonesian_base_indonli', 'id.classify.roberta.by_akahana': 'roberta_classifier_indonesia_emotion', 'id.classify.roberta.sentiment.': 'roberta_classifier_indonesia_sentiment', 'id.classify.roberta.sentiment.base': 'roberta_classifier_indonesian_base_sentiment', 'id.classify.roberta.sentiment.base.by_ayameRushia': 'roberta_classifier_base_indonesian_sentiment_analysis_smsa', 'id.classify.roberta.sentiment.by_sahri': 'roberta_classifier_indonesiasentiment', 'id.detect_sentence': 'sentence_detector_dl', 'id.embed.bert.base': 'bert_embeddings_base_indonesian_1.5g', 'id.embed.bert.base_522m': 'bert_embeddings_base_indonesian_522m', 'id.embed.distilbert': 'distilbert_embeddings_distilbert_base_indonesian', 'id.embed.indo_roberta_small': 'roberta_embeddings_indo_roberta_small', 'id.embed.indonesian_roberta_base': 'roberta_embeddings_indonesian_roberta_base', 'id.embed.indonesian_roberta_large': 'roberta_embeddings_indonesian_roberta_large', 'id.embed.roberta.base_522m': 'roberta_embeddings_base_indonesian_522m', 'id.embed.roberta.small': 'roberta_embeddings_indo_small', 'id.embed.roberta_base_indonesian_522M': 'roberta_embeddings_roberta_base_indonesian_522M', 'id.lemma': 'lemma_gsd', 'id.lemma.csui': 'lemma_csui', 'id.lemma.gsd': 'lemma_gsd', 'id.lemma.spacylookup': 'lemma_spacylookup', 'id.ner': 'xlm_roberta_large_token_classification_ner', 'id.ner.bert.base': 'bert_ner_bert_base_indonesian_ner', 'id.ner.bert.xtreme.base': 'bert_ner_bert_base_ft_ner_xtreme_id_sultannn', 'id.ner.pos': 'roberta_token_classifier_pos_tagger', 'id.ner.pos.base': 'roberta_ner_indonesian_roberta_base_bapos_tagger', 'id.ner.xlmr_roberta': 'xlmroberta_ner_shopee', 'id.ner.xlmr_roberta.base': 'xlmroberta_ner_base_indonesian', 'id.ner.xlmr_roberta.large': 'xlmroberta_ner_large_indonesian', 'id.pos': 'pos_csui', 'id.pos.csui': 'pos_csui', 'id.pos.gsd': 'pos_gsd', 
'id.pos.indonesian_roberta_base_posp_tagger': 'roberta_pos_indonesian_roberta_base_posp_tagger', 'id.speech2text.wav2vec2.v2': 'asr_wav2vec2_from_scratch_finetune_dummy', 'id.speech2text.wav2vec2.v2_gpu': 'asr_wav2vec2_from_scratch_finetune_dummy_gpu', 'id.speech2text.wav2vec2.v2_gpu_s156_exp': 'asr_exp_w2v2t_wav2vec2_s156_gpu', 'id.speech2text.wav2vec2.v2_gpu_s226_exp': 'asr_exp_w2v2t_wav2vec2_s226_gpu', 'id.speech2text.wav2vec2.v2_gpu_s417_exp': 'asr_exp_w2v2t_wav2vec2_s417_gpu', 'id.speech2text.wav2vec2.v2_gpu_s615_vp_exp': 'asr_exp_w2v2t_vp_100k_s615_gpu', 'id.speech2text.wav2vec2.v2_gpu_s764_vp_exp': 'asr_exp_w2v2t_vp_100k_s764_gpu', 'id.speech2text.wav2vec2.v2_gpu_s842_vp_exp': 'asr_exp_w2v2t_vp_100k_s842_gpu', 'id.speech2text.wav2vec2.v2_s156_exp': 'asr_exp_w2v2t_wav2vec2_s156', 'id.speech2text.wav2vec2.v2_s226_exp': 'asr_exp_w2v2t_wav2vec2_s226', 'id.speech2text.wav2vec2.v2_s417_exp': 'asr_exp_w2v2t_wav2vec2_s417', 'id.speech2text.wav2vec2.v2_s615_vp_exp': 'asr_exp_w2v2t_vp_100k_s615', 'id.speech2text.wav2vec2.v2_s764_vp_exp': 'asr_exp_w2v2t_vp_100k_s764', 'id.speech2text.wav2vec2.v2_s842_vp_exp': 'asr_exp_w2v2t_vp_100k_s842', 'id.speech2text.wav2vec_xlsr': 'asr_xlsr_indonesia', 'id.speech2text.wav2vec_xlsr.gpu': 'asr_xlsr_indonesia_gpu', 'id.speech2text.wav2vec_xlsr.v2': 'asr_wav2vec2_xlsr_53_common_voice_indonesian', 'id.speech2text.wav2vec_xlsr.v2_300m': 'asr_fine_tune_wav2vec2_xls_r_300m_indonesia', 'id.speech2text.wav2vec_xlsr.v2_300m.by_wikidepia': 'asr_wav2vec2_xls_r_300m_indonesian', 'id.speech2text.wav2vec_xlsr.v2_300m_gpu': 'asr_fine_tune_wav2vec2_xls_r_300m_indonesia_gpu', 'id.speech2text.wav2vec_xlsr.v2_300m_gpu.by_wikidepia': 'asr_wav2vec2_xls_r_300m_indonesian_gpu', 'id.speech2text.wav2vec_xlsr.v2_gpu': 'asr_wav2vec2_xlsr_53_common_voice_indonesian_gpu', 'id.speech2text.wav2vec_xlsr.v2_gpu_s449_exp': 'asr_exp_w2v2t_xlsr_53_s449_gpu', 'id.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_indonesian_artificial', 
'id.speech2text.wav2vec_xlsr.v2_large.by_cahya': 'asr_wav2vec2_large_xlsr_indonesian_by_cahya', 'id.speech2text.wav2vec_xlsr.v2_large.by_galuh': 'asr_wav2vec2_large_xlsr_indonesian_by_galuh', 'id.speech2text.wav2vec_xlsr.v2_large.by_indonesian_nlp': 'asr_wav2vec2_large_xlsr_indonesian_by_indonesian_nlp', 'id.speech2text.wav2vec_xlsr.v2_large_300m': 'asr_wav2vec2_large_xls_r_300m_indonesian', 'id.speech2text.wav2vec_xlsr.v2_large_300m_gpu': 'asr_wav2vec2_large_xls_r_300m_indonesian_gpu', 'id.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_indonesian_artificial_gpu', 'id.speech2text.wav2vec_xlsr.v2_large_gpu.by_cahya': 'asr_wav2vec2_large_xlsr_indonesian_by_cahya_gpu', 'id.speech2text.wav2vec_xlsr.v2_large_gpu.by_galuh': 'asr_wav2vec2_large_xlsr_indonesian_by_galuh_gpu', 'id.speech2text.wav2vec_xlsr.v2_large_gpu.by_indonesian_nlp': 'asr_wav2vec2_large_xlsr_indonesian_by_indonesian_nlp_gpu', 'id.speech2text.wav2vec_xlsr.v2_s449_exp': 'asr_exp_w2v2t_xlsr_53_s449', 'id.stopwords': 'stopwords_iso', 'id.stopwords.iso': 'stopwords_iso'}, 'ig': {'ig.embed.xlm_roberta': 'xlm_roberta_base_finetuned_igbo', 'ig.embed_sentence.xlm_roberta': 'sent_xlm_roberta_base_finetuned_igbo', 'ig.ner.distil_bert.cased_multilingual_base': 'distilbert_ner_distilbert_base_multilingual_cased_masakhaner', 'ig.ner.xlmr_roberta.base_finetuned': 'xlmroberta_ner_xlm_roberta_base_finetuned_igbo_finetuned_ner_igbo', 'ig.ner.xlmr_roberta.base_finetuned_igbo.by_mbeukman': 'xlmroberta_ner_xlm_roberta_base_finetuned_ner_igbo', 'ig.ner.xlmr_roberta.base_finetuned_swahili.by_mbeukman': 'xlmroberta_ner_xlm_roberta_base_finetuned_swahili_finetuned_ner_igbo', 'ig.ner.xlmr_roberta.wikiann.base': 'xlmroberta_ner_xlm_roberta_base_wikiann_ner'}, 'is': {'is.answer_question.roberta': 'roberta_qa_icebert', 'is.answer_question.squad.roberta': 'roberta_qa_icebert_texas_squad_is_saattrupdan', 'is.answer_question.squad.xlmr_roberta.base': 'xlm_roberta_qa_xlmr_base_texas_squad_is_is_saattrupdan', 
'is.answer_question.xlmr_roberta': 'xlm_roberta_qa_XLMr_ENIS_QA_Is', 'is.classify.roberta.finetuned': 'roberta_classifier_icebert_finetuned_grouped', 'is.lemma': 'lemma_icepahc', 'is.lemma.modern': 'lemma_modern', 'is.ner': 'roberta_token_classifier_icelandic_ner', 'is.ner.bert': 'bert_ner_icelandic_ner_bert', 'is.ner.distil_bert': 'distilbert_token_classifier_typo_detector', 'is.ner.roberta': 'roberta_ner_icelandic_ner_roberta', 'is.pos': 'pos_icepahc', 'is.pos.modern': 'pos_modern', 'is.stopwords': 'stopwords_iso'}, 'it': {'it.answer_question.roberta': 'roberta_qa_addi_it_roberta', 'it.answer_question.squad.bert': 'bert_qa_bert_italian_finedtuned_squadv1_it_alfa', 'it.answer_question.squad.bert.base_uncased': 'bert_qa_bert_base_italian_uncased_squad_it_antoniocappiello', 'it.answer_question.squad.bert.xxl_cased': 'bert_qa_squad_xxl_cased_hub1', 'it.answer_question.xlm_roberta': 'xlm_roberta_qa_ADDI_IT_XLM_R', 'it.classify.bert': 'bert_classifier_autonlp_bank_transaction_classification_5521155', 'it.classify.bert.by_m47labs': 'bert_classifier_it_iptc', 'it.classify.bert.hate.': 'bert_classifier_dehatebert_mono_italian', 'it.classify.bert.multi2convai.': 'bert_classifier_multi2convai_corona', 'it.classify.bert.multi2convai.quality.by_inovex': 'bert_classifier_multi2convai_qualy', 'it.classify.bert.multi2convai.quality_v2.by_inovex': 'bert_classifier_multi2convai_quality_it_mbert', 'it.classify.bert.sentiment.cased_base': 'bert_classifier_bert_base_italian_cased_sentiment', 'it.classify.bert.vaccine.': 'bert_classifier_vaccine_topic', 'it.classify.distil_bert': 'distilbert_sequence_classifier_cross_encoder_distilbert_it_efederici', 'it.classify.sentiment': 'bert_sequence_classifier_sentiment', 'it.embed.BERTino': 'distilbert_embeddings_BERTino', 'it.embed.bert': 'bert_base_italian_cased', 'it.embed.bert.cased_base': 'bert_embeddings_base_it_cased', 'it.embed.bert.cased_base.by_dbmdz': 'bert_embeddings_base_italian_cased', 'it.embed.bert.cased_xxl_base': 
'bert_embeddings_base_italian_xxl_cased', 'it.embed.bert.uncased': 'bert_base_italian_uncased', 'it.embed.bert.uncased_base': 'bert_embeddings_base_italian_uncased', 'it.embed.bert.uncased_xxl_base': 'bert_embeddings_base_italian_xxl_uncased', 'it.embed.bert_base_italian_xxl_cased': 'bert_embeddings_bert_base_italian_xxl_cased', 'it.embed.bert_base_italian_xxl_uncased': 'bert_embeddings_bert_base_italian_xxl_uncased', 'it.embed.bert_it_cased': 'bert_embeddings_bert_base_it_cased', 'it.embed.camembert.cased': 'camembert_embeddings_umberto_commoncrawl_cased_v1', 'it.embed.camembert.uncased': 'camembert_embeddings_umberto_wikipedia_uncased_v1', 'it.embed.chefberto_italian_cased': 'bert_embeddings_chefberto_italian_cased', 'it.embed.distil_bert': 'distilbert_embeddings_bertino', 'it.embed.distilbert_base_it_cased': 'distilbert_embeddings_distilbert_base_it_cased', 'it.embed.electra.cased_xxl_base': 'electra_embeddings_electra_base_italian_xxl_cased_generator', 'it.embed.hseBert_it_cased': 'bert_embeddings_hseBert_it_cased', 'it.embed.wineberto_italian_cased': 'bert_embeddings_wineberto_italian_cased', 'it.embed.word2vec': 'w2v_cc_300d', 'it.lemma': 'lemma_twittiro', 'it.lemma.dxc': 'lemma_dxc', 'it.lemma.isdt': 'lemma_isdt', 'it.lemma.partut': 'lemma_partut', 'it.lemma.postwita': 'lemma_postwita', 'it.lemma.spacylookup': 'lemma_spacylookup', 'it.lemma.twittiro': 'lemma_twittiro', 'it.lemma.vit': 'lemma_vit', 'it.ner': 'wikiner_840B_300', 'it.ner.pos.xtreme.cased_finetuned': 'bert_pos_bert_italian_cased_finetuned_pos', 'it.ner.wikiner.glove.6B_300': 'wikiner_6B_300', 'it.ner.xlmr_roberta.xtreme.base_finetuned': 'xlmroberta_ner_xlm_roberta_base_finetuned_panx_ner', 'it.ner.xlmr_roberta.xtreme.base_finetuned.by_Neha2608': 'xlmroberta_ner_neha2608_base_finetuned_panx', 'it.ner.xlmr_roberta.xtreme.base_finetuned.by_V3RX2000': 'xlmroberta_ner_v3rx2000_base_finetuned_panx', 'it.ner.xlmr_roberta.xtreme.base_finetuned.by_cj_mills': 'xlmroberta_ner_cj_mills_base_finetuned_panx', 
'it.ner.xlmr_roberta.xtreme.base_finetuned.by_dkasti': 'xlmroberta_ner_dkasti_base_finetuned_panx', 'it.ner.xlmr_roberta.xtreme.base_finetuned.by_edwardjross': 'xlmroberta_ner_edwardjross_base_finetuned_panx', 'it.ner.xlmr_roberta.xtreme.base_finetuned.by_flood': 'xlmroberta_ner_flood_base_finetuned_panx', 'it.ner.xlmr_roberta.xtreme.base_finetuned.by_haesun': 'xlmroberta_ner_haesun_base_finetuned_panx', 'it.ner.xlmr_roberta.xtreme.base_finetuned.by_iis2009002': 'xlmroberta_ner_iis2009002_base_finetuned_panx', 'it.ner.xlmr_roberta.xtreme.base_finetuned.by_jgriffi': 'xlmroberta_ner_jgriffi_base_finetuned_panx', 'it.ner.xlmr_roberta.xtreme.base_finetuned.by_lijingxin': 'xlmroberta_ner_lijingxin_base_finetuned_panx', 'it.ner.xlmr_roberta.xtreme.base_finetuned.by_moghis': 'xlmroberta_ner_moghis_base_finetuned_panx', 'it.ner.xlmr_roberta.xtreme.base_finetuned.by_robkayinto': 'xlmroberta_ner_robkayinto_base_finetuned_panx', 'it.ner.xlmr_roberta.xtreme.base_finetuned.by_skr3178': 'xlmroberta_ner_skr3178_base_finetuned_panx', 'it.ner.xlmr_roberta.xtreme.base_finetuned.by_transformersbook': 'xlmroberta_ner_transformersbook_base_finetuned_panx', 'it.pos': 'pos_partut', 'it.pos.isdt': 'pos_isdt', 'it.pos.partut': 'pos_partut', 'it.pos.postwita': 'pos_postwita', 'it.pos.twittiro': 'pos_twittiro', 'it.pos.ud_isdt': 'pos_ud_isdt', 'it.pos.vit': 'pos_vit', 'it.speech2text.wav2vec2.v2_gpu_s149_vp_exp': 'asr_exp_w2v2t_vp_100k_s149_gpu', 'it.speech2text.wav2vec2.v2_gpu_s149_vp_exp.by_jonatasgrosman': 'asr_exp_w2v2t_vp_s149_gpu', 'it.speech2text.wav2vec2.v2_gpu_s1_vp_exp': 'asr_exp_w2v2t_vp_s1_gpu', 'it.speech2text.wav2vec2.v2_gpu_s211_exp': 'asr_exp_w2v2t_wav2vec2_s211_gpu', 'it.speech2text.wav2vec2.v2_gpu_s222_vp_exp': 'asr_exp_w2v2t_vp_s222_gpu', 'it.speech2text.wav2vec2.v2_gpu_s27_vp_exp': 'asr_exp_w2v2t_vp_s27_gpu', 'it.speech2text.wav2vec2.v2_gpu_s317_exp': 'asr_exp_w2v2t_r_wav2vec2_s317_gpu', 'it.speech2text.wav2vec2.v2_gpu_s324_vp_exp': 'asr_exp_w2v2t_vp_s324_gpu', 
'it.speech2text.wav2vec2.v2_gpu_s335_vp_exp': 'asr_exp_w2v2t_vp_s335_gpu', 'it.speech2text.wav2vec2.v2_gpu_s33_vp_exp': 'asr_exp_w2v2t_vp_s33_gpu', 'it.speech2text.wav2vec2.v2_gpu_s358_vp_exp': 'asr_exp_w2v2t_vp_100k_s358_gpu', 'it.speech2text.wav2vec2.v2_gpu_s411_vp_exp': 'asr_exp_w2v2t_vp_s411_gpu', 'it.speech2text.wav2vec2.v2_gpu_s449_vp_exp': 'asr_exp_w2v2t_vp_100k_s449_gpu', 'it.speech2text.wav2vec2.v2_gpu_s496_vp_exp': 'asr_exp_w2v2t_vp_s496_gpu', 'it.speech2text.wav2vec2.v2_gpu_s557_vp_exp': 'asr_exp_w2v2t_vp_s557_gpu', 'it.speech2text.wav2vec2.v2_gpu_s578_exp': 'asr_exp_w2v2t_r_wav2vec2_s578_gpu', 'it.speech2text.wav2vec2.v2_gpu_s579_vp_exp': 'asr_exp_w2v2t_vp_s579_gpu', 'it.speech2text.wav2vec2.v2_gpu_s609_exp': 'asr_exp_w2v2t_wav2vec2_s609_gpu', 'it.speech2text.wav2vec2.v2_gpu_s615_exp': 'asr_exp_w2v2t_pretraining_s615_gpu', 'it.speech2text.wav2vec2.v2_gpu_s646_exp': 'asr_exp_w2v2t_r_wav2vec2_s646_gpu', 'it.speech2text.wav2vec2.v2_gpu_s692_exp': 'asr_exp_w2v2t_wav2vec2_s692_gpu', 'it.speech2text.wav2vec2.v2_gpu_s764_exp': 'asr_exp_w2v2t_pretraining_s764_gpu', 'it.speech2text.wav2vec2.v2_gpu_s791_vp_exp': 'asr_exp_w2v2t_vp_s791_gpu', 'it.speech2text.wav2vec2.v2_gpu_s821_vp_exp': 'asr_exp_w2v2t_vp_s821_gpu', 'it.speech2text.wav2vec2.v2_gpu_s842_exp': 'asr_exp_w2v2t_pretraining_s842_gpu', 'it.speech2text.wav2vec2.v2_gpu_s878_vp_exp': 'asr_exp_w2v2t_vp_s878_gpu', 'it.speech2text.wav2vec2.v2_gpu_s965_vp_exp': 'asr_exp_w2v2t_vp_s965_gpu', 'it.speech2text.wav2vec2.v2_s149_vp_exp': 'asr_exp_w2v2t_vp_100k_s149', 'it.speech2text.wav2vec2.v2_s149_vp_exp.by_jonatasgrosman': 'asr_exp_w2v2t_vp_s149', 'it.speech2text.wav2vec2.v2_s211_exp': 'asr_exp_w2v2t_wav2vec2_s211', 'it.speech2text.wav2vec2.v2_s222_vp_exp': 'asr_exp_w2v2t_vp_s222', 'it.speech2text.wav2vec2.v2_s27_vp_exp': 'asr_exp_w2v2t_vp_s27', 'it.speech2text.wav2vec2.v2_s317_exp': 'asr_exp_w2v2t_r_wav2vec2_s317', 'it.speech2text.wav2vec2.v2_s324_vp_exp': 'asr_exp_w2v2t_vp_s324', 
'it.speech2text.wav2vec2.v2_s335_vp_exp': 'asr_exp_w2v2t_vp_s335', 'it.speech2text.wav2vec2.v2_s358_vp_exp': 'asr_exp_w2v2t_vp_100k_s358', 'it.speech2text.wav2vec2.v2_s411_vp_exp': 'asr_exp_w2v2t_vp_s411', 'it.speech2text.wav2vec2.v2_s449_vp_exp': 'asr_exp_w2v2t_vp_100k_s449', 'it.speech2text.wav2vec2.v2_s496_vp_exp': 'asr_exp_w2v2t_vp_s496', 'it.speech2text.wav2vec2.v2_s557_vp_exp': 'asr_exp_w2v2t_vp_s557', 'it.speech2text.wav2vec2.v2_s578_exp': 'asr_exp_w2v2t_r_wav2vec2_s578', 'it.speech2text.wav2vec2.v2_s579_vp_exp': 'asr_exp_w2v2t_vp_s579', 'it.speech2text.wav2vec2.v2_s609_exp': 'asr_exp_w2v2t_wav2vec2_s609', 'it.speech2text.wav2vec2.v2_s646_exp': 'asr_exp_w2v2t_r_wav2vec2_s646', 'it.speech2text.wav2vec2.v2_s692_exp': 'asr_exp_w2v2t_wav2vec2_s692', 'it.speech2text.wav2vec2.v2_s791_vp_exp': 'asr_exp_w2v2t_vp_s791', 'it.speech2text.wav2vec2.v2_s821_vp_exp': 'asr_exp_w2v2t_vp_s821', 'it.speech2text.wav2vec2.v2_s965_vp_exp': 'asr_exp_w2v2t_vp_s965', 'it.speech2text.wav2vec2.voxpopuli.base': 'asr_voxpopuli_base_2_5_gram_doc4lm', 'it.speech2text.wav2vec2.voxpopuli.base_gpu': 'asr_voxpopuli_base_2_5_gram_doc4lm_gpu', 'it.speech2text.wav2vec2.voxpopuli.v2_base_gpu': 'asr_wav2vec2_base_10k_voxpopuli_gpu', 'it.speech2text.wav2vec_xlsr.v2_300m': 'asr_wav2vec2_xls_r_300m_italian_robust', 'it.speech2text.wav2vec_xlsr.v2_300m_gpu': 'asr_wav2vec2_xls_r_300m_italian_gpu', 'it.speech2text.wav2vec_xlsr.v2_300m_gpu.by_dbdmg': 'asr_wav2vec2_xls_r_300m_italian_robust_gpu', 'it.speech2text.wav2vec_xlsr.v2_gpu_s226_exp': 'asr_exp_w2v2t_xls_r_s226_gpu', 'it.speech2text.wav2vec_xlsr.v2_gpu_s237_exp': 'asr_exp_w2v2t_xlsr_53_s237_gpu', 'it.speech2text.wav2vec_xlsr.v2_gpu_s387_exp': 'asr_exp_w2v2t_xlsr_53_s387_gpu', 'it.speech2text.wav2vec_xlsr.v2_gpu_s417_exp': 'asr_exp_w2v2t_xls_r_s417_gpu', 'it.speech2text.wav2vec_xlsr.v2_gpu_s79_exp': 'asr_exp_w2v2t_xlsr_53_s79_gpu', 'it.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_italian', 'it.speech2text.wav2vec_xlsr.v2_large_gpu': 
'asr_wav2vec2_large_xlsr_gpu', 'it.speech2text.wav2vec_xlsr.v2_large_gpu.by_joaoalvarenga': 'asr_wav2vec2_large_xlsr_italian_gpu', 'it.speech2text.wav2vec_xlsr.v2_s226_exp': 'asr_exp_w2v2t_xls_r_s226', 'it.speech2text.wav2vec_xlsr.v2_s237_exp': 'asr_exp_w2v2t_xlsr_53_s237', 'it.speech2text.wav2vec_xlsr.v2_s387_exp': 'asr_exp_w2v2t_xlsr_53_s387', 'it.speech2text.wav2vec_xlsr.v2_s417_exp': 'asr_exp_w2v2t_xls_r_s417', 'it.stopwords': 'stopwords_iso', 'it.stopwords.iso': 'stopwords_iso'}, 'ja': { 'ja.answer_question.bert.base_whole_word_masking': 'bert_qa_base_japanese_whole_word_masking_tes', 'ja.answer_question.wikipedia.bert.base': 'bert_qa_base_japanese_wikipedia_ud_head', 'ja.answer_question.wikipedia.bert.large': 'bert_qa_large_japanese_wikipedia_ud_head', 'ja.classify.bert': 'bert_classifier_autotrain_iine_classification10_737422470', 'ja.classify.bert.joy.': 'bert_classifier_autonlp_wrime_joy_only_117396', 'ja.classify.bert.news.': 'bert_classifier_autotrain_livedoor_news_722922024', 'ja.classify.bert.news.by_jurader': 'bert_classifier_autotrain_livedoor_news_732022289', 'ja.classify.bert.sentiment.': 'bert_sequence_classifier_japanese_sentiment', 'ja.classify.bert.sentiment.59362.by_abhishek': 'bert_classifier_autonlp_japanese_sentiment_59362', 'ja.classify.bert.sentiment.59363.by_abhishek': 'bert_classifier_autonlp_japanese_sentiment_59363', 'ja.classify.bert.v2_base': 'bert_classifier_bert_base_japanese_v2_wrime_fine_tune', 'ja.classify.token_bert.classifier_ner_ud_gsd': 'bert_token_classifier_ner_ud_gsd', 'ja.embed.albert_base_japanese_v1': 'albert_embeddings_albert_base_japanese_v1', 'ja.embed.bert.base': 'bert_base_japanese', 'ja.embed.bert.base_whole_word_masking': 'bert_embeddings_base_japanese_char_whole_word_masking', 'ja.embed.bert.base_whole_word_masking.by_cl_tohoku': 'bert_embeddings_base_japanese_whole_word_masking', 'ja.embed.bert.cased_base': 'bert_embeddings_base_ja_cased', 'ja.embed.bert.large': 'bert_embeddings_large_japanese', 
'ja.embed.bert.large.by_cl_tohoku': 'bert_embeddings_large_japanese_char', 'ja.embed.bert.v2_base': 'bert_embeddings_base_japanese_char_v2', 'ja.embed.bert.v2_base.by_cl_tohoku': 'bert_embeddings_base_japanese_v2', 'ja.embed.bert.wiki.base.by_cl_tohoku': 'bert_embeddings_base_japanese', 'ja.embed.bert.wiki.base_char.by_cl_tohoku': 'bert_embeddings_base_japanese_char', 'ja.embed.bert_base_ja_cased': 'bert_embeddings_bert_base_ja_cased', 'ja.embed.bert_base_japanese_basic_char_v2': 'bert_embeddings_bert_base_japanese_basic_char_v2', 'ja.embed.bert_base_japanese_char': 'bert_embeddings_bert_base_japanese_char', 'ja.embed.bert_base_japanese_char_extended': 'bert_embeddings_bert_base_japanese_char_extended', 'ja.embed.bert_base_japanese_char_v2': 'bert_embeddings_bert_base_japanese_char_v2', 'ja.embed.bert_base_japanese_char_whole_word_masking': 'bert_embeddings_bert_base_japanese_char_whole_word_masking', 'ja.embed.bert_base_japanese_v2': 'bert_embeddings_bert_base_japanese_v2', 'ja.embed.bert_base_japanese_whole_word_masking': 'bert_embeddings_bert_base_japanese_whole_word_masking', 'ja.embed.bert_large_japanese': 'bert_embeddings_bert_large_japanese', 'ja.embed.bert_large_japanese_char': 'bert_embeddings_bert_large_japanese_char', 'ja.embed.bert_large_japanese_char_extended': 'bert_embeddings_bert_large_japanese_char_extended', 'ja.embed.bert_small_japanese': 'bert_embeddings_bert_small_japanese', 'ja.embed.bert_small_japanese_fin': 'bert_embeddings_bert_small_japanese_fin', 'ja.embed.distilbert_base_ja_cased': 'distilbert_embeddings_distilbert_base_ja_cased', 'ja.embed.electra.base': 'electra_embeddings_electra_base_japanese_generator', 'ja.embed.electra.small': 'electra_embeddings_electra_small_japanese_fin_generator', 'ja.embed.electra.small.by_cinnamon': 'electra_embeddings_electra_small_japanese_generator', 'ja.embed.electra.small_paper_japanese_fin_generator.small.by_izumi_lab': 'electra_embeddings_electra_small_paper_japanese_fin_generator', 
'ja.embed.electra.small_paper_japanese_generator.small.by_izumi_lab': 'electra_embeddings_electra_small_paper_japanese_generator', 'ja.embed.glove.cc_300d': 'japanese_cc_300d', 'ja.lemma': 'lemma', 'ja.lemma.gsd': 'lemma_gsd', 'ja.lemma.gsdluw': 'lemma_gsdluw', 'ja.ner': 'ner_ud_gsd_glove_840B_300d', 'ja.ner.base': 'ner_ud_gsd_bert_base_japanese', 'ja.ner.bert': 'bert_token_classifier_ner_japanese', 'ja.ner.pos.universal_dependencies.base': 'bert_pos_bert_base_japanese_luw_upos', 'ja.ner.pos.universal_dependencies.base.by_KoichiYasuoka': 'bert_pos_bert_base_japanese_unidic_luw_upos', 'ja.ner.pos.universal_dependencies.large': 'bert_pos_bert_large_japanese_luw_upos', 'ja.ner.pos.universal_dependencies_luw_upos.large.by_KoichiYasuoka': 'bert_pos_bert_large_japanese_unidic_luw_upos', 'ja.ner.pos.universal_dependencies_unidic_luw_upos.base.by_KoichiYasuoka': 'bert_pos_bert_base_japanese_upos', 'ja.ner.pos.universal_dependencies_upos.large.by_KoichiYasuoka': 'bert_pos_bert_large_japanese_upos', 'ja.ner.ud_gsd': 'ner_ud_gsd_glove_840B_300d', 'ja.ner.ud_gsd.glove_840B_300D': 'ner_ud_gsd_glove_840B_300d', 'ja.ner.ud_gsd_cc_300d': 'ner_ud_gsd_cc_300d', 'ja.ner.ud_gsd_xlm_roberta_base': 'ner_ud_gsd_xlm_roberta_base', 'ja.ner.xlmr_roberta.base': 'xlmroberta_ner_base_panx_dataset', 'ja.ner.xlmr_roberta.large': 'xlmroberta_ner_large_panx_dataset', 'ja.pos': 'pos_gsd', 'ja.pos.gsd': 'pos_gsd', 'ja.pos.gsdluw': 'pos_gsdluw', 'ja.pos.ud_gsd': 'pos_ud_gsd', 'ja.segment_words': 'wordseg_gsd_ud', 'ja.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_japanese_hiragana', 'ja.stopwords': 'stopwords_iso', 'ja.stopwords.iso': 'stopwords_iso'}, 'jv': {'jv.embed.bert.imdb_javanese.small': 'bert_embeddings_javanese_small_imdb', 'jv.embed.bert.small': 'bert_embeddings_javanese_small', 'jv.embed.distil_bert.imdb_javanese.small': 'distilbert_embeddings_javanese_small_imdb', 'jv.embed.distil_bert.small': 'distilbert_embeddings_javanese_small', 'jv.embed.distilbert': 
'distilbert_embeddings_javanese_distilbert_small', 'jv.embed.javanese_bert_small': 'bert_embeddings_javanese_bert_small', 'jv.embed.javanese_bert_small_imdb': 'bert_embeddings_javanese_bert_small_imdb', 'jv.embed.javanese_distilbert_small_imdb': 'distilbert_embeddings_javanese_distilbert_small_imdb', 'jv.embed.javanese_roberta_small': 'roberta_embeddings_javanese_roberta_small', 'jv.embed.javanese_roberta_small_imdb': 'roberta_embeddings_javanese_roberta_small_imdb', 'jv.embed.roberta.imdb_javanese.small': 'roberta_embeddings_javanese_small_imdb', 'jv.embed.roberta.small': 'roberta_embeddings_javanese_small'}, 'ka': {'ka.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'kab': {'kab.speech2text.wav2vec_xlsr': 'asr_kabyle_xlsr', 'kab.speech2text.wav2vec_xlsr.gpu': 'asr_kabyle_xlsr_gpu'}, 'kin': {'kin.ner.bert.uncased_base': 'bert_ner_mbert_base_uncased_kin', 'kin.ner.bert.uncased_base.by_arnolfokam': 'bert_ner_mbert_base_uncased_ner_kin', 'kin.ner.roberta.base': 'roberta_ner_roberta_base_kin'}, 'kn': {'kn.classify.xlmr_roberta': 'xlmroberta_classifier_deoffxlmr_mono_kannada', 'kn.embed.KNUBert': 'roberta_embeddings_KNUBert', 'kn.embed.KanBERTo': 'roberta_embeddings_KanBERTo', 'kn.stopwords': 'stopwords_iso', 'ml.detect_sentence': 'sentence_detector_dl'}, 'ko': {'ko.answer_question.bert': 'bert_qa_komrc_train', 'ko.answer_question.bert.by_taekyoon': 'bert_qa_neg_komrc_train', 'ko.answer_question.bert.klue.cased_multilingual_base_finetuned': 'bert_qa_bert_base_multilingual_cased_finetuned_klue', 'ko.answer_question.bert.klue.v2_finetuned': 'bert_qa_kobert_finetuned_klue_v2', 'ko.answer_question.bert.korquad.cased_multilingual_base': 'bert_qa_bert_base_multilingual_cased_korquad', 'ko.answer_question.bert.korquad.cased_multilingual_base.by_eliza_dukim': 'bert_qa_bert_base_multilingual_cased_korquad_v1', 'ko.answer_question.bert.squad.base_finetuned': 'bert_qa_kcbert_base_finetuned_squad', 'ko.answer_question.bert.squad.finetuned': 'bert_qa_kobert_finetuned_squad_kor_v1', 
'ko.answer_question.electra': 'electra_qa_long', 'ko.answer_question.klue.bert.base': 'bert_qa_ainize_klue_bert_base_mrc', 'ko.answer_question.klue.bert.base.by_ainize': 'bert_qa_ainize_klue_bert_base_mrc', 'ko.answer_question.klue.bert.base.by_bespin-global': 'bert_qa_bespin_global_klue_bert_base_mrc', 'ko.answer_question.klue.bert.base_aihub.by_bespin-global': 'bert_qa_klue_bert_base_aihub_mrc', 'ko.answer_question.klue.electra.base': 'electra_qa_klue_mrc_base', 'ko.answer_question.klue.electra.base.by_obokkkk': 'electra_qa_base_v3_discriminator_finetuned_klue_v4', 'ko.answer_question.klue.electra.base.by_seongju': 'electra_qa_klue_mrc_base', 'ko.answer_question.korquad.electra.base': 'electra_qa_base_v3_finetuned_korquad', 'ko.answer_question.korquad.electra.base_v2.by_monologg': 'electra_qa_base_v2_finetuned_korquad', 'ko.answer_question.korquad.electra.base_v2_384.by_monologg': 'electra_qa_base_v2_finetuned_korquad_384', 'ko.answer_question.korquad.electra.small': 'electra_qa_small_v3_finetuned_korquad', 'ko.classify.bert': 'bert_classifier_kor_unsmile', 'ko.classify.bert.by_dobbytk': 'bert_classifier_letr_sol_profanity_filter', 'ko.classify.bert.cased_multilingual_base': 'bert_classifier_bert_base_multilingual_cased_nsmc', 'ko.classify.bert.hate.': 'bert_classifier_hatescore_korean_hate_speech', 'ko.classify.electra': 'electra_classifier_ko_senti_1', 'ko.classify.electra.base': 'electra_classifier_beep_kc_base_bias', 'ko.classify.electra.base.by_beomi': 'electra_classifier_beep_ko_base_v3_discriminator_bias', 'ko.classify.electra.base.by_jminj': 'electra_classifier_ko_base_bad_sentence', 'ko.classify.electra.base.by_mindlogic': 'electra_classifier_mindlogic_ko_ai_citizen_base', 'ko.classify.electra.base.by_monologg': 'electra_classifier_ko_base_bias', 'ko.classify.electra.base.kc.by_jminj': 'electra_classifier_kc_base_bad_sentence', 'ko.classify.electra.base_finetuned': 'electra_classifier_ko_base_finetuned_nsmc', 'ko.classify.electra.by_jaecheol': 
'electra_classifier_nsmc_ko_test_model', 'ko.classify.electra.by_searle_j': 'electra_classifier_kote_for_easygoing_people', 'ko.classify.electra.gender.base': 'electra_classifier_ko_base_gender_bias', 'ko.classify.electra.gender.base.by_monologg': 'electra_classifier_ko_base_v3_gender_bias', 'ko.classify.electra.hate.': 'electra_classifier_korean_hatespeech', 'ko.classify.electra.hate.base': 'electra_classifier_beep_kc_base_hate', 'ko.classify.electra.hate.base.by_beomi': 'electra_classifier_beep_ko_base_v3_discriminator_hate', 'ko.classify.electra.hate.base.by_monologg': 'electra_classifier_ko_base_v3_hate_speech', 'ko.classify.electra.hate.by_beomi': 'electra_classifier_korean_hatespeech_multilabel', 'ko.classify.electra.sentiment.base': 'electra_classifier_ko_base_v3_generalized_sentiment_analysis', 'ko.classify.electra.sentiment.base_finetuned': 'electra_classifier_ko_base_finetuned_sentiment', 'ko.classify.electra.sentiment.small_finetuned': 'electra_classifier_ko_small_finetuned_sentiment', 'ko.classify.electra.small_finetuned': 'electra_classifier_ko_small_finetuned_intent_cls', 'ko.classify.electra.small_finetuned.by_monologg': 'electra_classifier_ko_small_finetuned_nsmc', 'ko.classify.electra.tunib.base.by_jminj': 'electra_classifier_tunib_base_bad_sentence', 'ko.classify.electra_v3.base.by_monologg': 'electra_classifier_ko_base_v3_bias', 'ko.classify.roberta.base': 'roberta_classifier_soongsil_bert_base_apeach', 'ko.classify.roberta.base.by_jason9693': 'roberta_classifier_soongsilbert_base_beep', 'ko.classify.roberta.small': 'roberta_classifier_soongsil_bert_small_apeach', 'ko.embed.KR_FinBert': 'bert_embeddings_KR_FinBert', 'ko.embed.bert': 'bert_embeddings_bert_base', 'ko.embed.bert.base': 'bert_embeddings_kor_base', 'ko.embed.bert_base_v1_sports': 'bert_embeddings_bert_base_v1_sports', 'ko.embed.bert_kor_base': 'bert_embeddings_bert_kor_base', 'ko.embed.dbert': 'bert_embeddings_dbert', 'ko.embed.electra': 'electra_embeddings_kr_electra_generator', 
'ko.embed.electra.base': 'electra_embeddings_finance_koelectra_base_generator', 'ko.embed.electra.by_deeq': 'electra_embeddings_delectra_generator', 'ko.embed.electra.small': 'electra_embeddings_finance_koelectra_small_generator', 'ko.embed.electra.small.by_monologg': 'electra_embeddings_koelectra_small_generator', 'ko.embed.electra.v2_base': 'electra_embeddings_koelectra_base_v2_generator', 'ko.embed.koelelectra.base.by_monologg': 'electra_embeddings_koelectra_base_generator', 'ko.embed.koelelectra.base_v3.by_monologg': 'electra_embeddings_koelectra_base_v3_generator', 'ko.embed.roberta_ko_small': 'roberta_embeddings_roberta_ko_small', 'ko.lemma': 'lemma_gsd', 'ko.lemma.gsd': 'lemma_gsd', 'ko.lemma.kaist': 'lemma_kaist', 'ko.ner': 'ner_kmou_glove_840B_300d', 'ko.ner.kmou': 'ner_kmou_glove_840B_300d', 'ko.ner.kmou.glove_840B_300d': 'ner_kmou_glove_840B_300d', 'ko.ner.xlmr_roberta.base': 'xlmroberta_ner_base_panx_dataset', 'ko.ner.xlmr_roberta.large': 'xlmroberta_ner_large_panx_dataset', 'ko.pos': 'pos_gsd', 'ko.pos.gsd': 'pos_gsd', 'ko.pos.kaist': 'pos_kaist', 'ko.pos.ud_kaist': 'pos_ud_kaist', 'ko.segment_words': 'wordseg_kaist_ud', 'ko.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_korean', 'ko.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_korean_gpu', 'ko.stopwords': 'stopwords_iso'}, 'kr': {'kr.speech2text.wav2vec_xlsr.v2': 'asr_wav2vec2_xlsr_korean_senior', 'kr.speech2text.wav2vec_xlsr.v2_gpu': 'asr_wav2vec2_xlsr_korean_senior_gpu'}, 'ku': {'ku.speech2text.wav2vec_xlsr': 'asr_xlsr_kurmanji_kurdish', 'ku.speech2text.wav2vec_xlsr.gpu': 'asr_xlsr_kurmanji_kurdish_gpu'}, 'ky': {'ky.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_kyrgyz', 'ky.speech2text.wav2vec_xlsr.v2_large.by_adilism': 'asr_wav2vec2_large_xlsr_kyrgyz_by_adilism', 'ky.speech2text.wav2vec_xlsr.v2_large.by_iarfmoose': 'asr_wav2vec2_large_xlsr_kyrgyz_by_iarfmoose', 'ky.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_kyrgyz_gpu', 
'ky.speech2text.wav2vec_xlsr.v2_large_gpu.by_adilism': 'asr_wav2vec2_large_xlsr_kyrgyz_by_adilism_gpu', 'ky.speech2text.wav2vec_xlsr.v2_large_gpu.by_iarfmoose': 'asr_wav2vec2_large_xlsr_kyrgyz_by_iarfmoose_gpu', 'ky.stopwords': 'stopwords_iso'}, 'la': {'la.embed.bert': 'bert_embeddings_cicero_similis', 'la.lemma': 'lemma_proiel', 'la.lemma.ittb': 'lemma_ittb', 'la.lemma.llct': 'lemma_llct', 'la.lemma.perseus': 'lemma_perseus', 'la.lemma.proiel': 'lemma_proiel', 'la.lemma.udante': 'lemma_udante', 'la.pos': 'pos_perseus', 'la.pos.ittb': 'pos_ittb', 'la.pos.llct': 'pos_llct', 'la.pos.perseus': 'pos_perseus', 'la.pos.proiel': 'pos_proiel', 'la.pos.udante': 'pos_udante', 'la.speech2text.wav2vec2.v2_base': 'asr_wav2vec2_base_latin', 'la.speech2text.wav2vec2.v2_base_gpu': 'asr_wav2vec2_base_latin_gpu', 'la.stopwords': 'stopwords_la'}, 'lb': {'lb.embed.w2v_cc_300d': 'w2v_cc_300d', 'lb.lemma': 'lemma_spacylookup', 'lb.stopwords': 'stopwords_iso'}, 'lg': {'lg.embed.xlm_roberta': 'xlm_roberta_base_finetuned_luganda', 'lg.embed_sentence.xlm_roberta': 'sent_xlm_roberta_base_finetuned_luganda', 'lg.ner.xlmr_roberta.base_finetuned': 'xlmroberta_ner_base_finetuned_luganda', 'lg.ner.xlmr_roberta.base_finetuned_luganda.by_mbeukman': 'xlmroberta_ner_base_finetuned_luganda_finetuned_luganda', 'lg.ner.xlmr_roberta.base_finetuned_swahili.by_mbeukman': 'xlmroberta_ner_base_finetuned_swahili_finetuned_luganda', 'lg.speech2text.wav2vec2.v2': 'asr_wav2vec2_luganda_by_birgermoell', 'lg.speech2text.wav2vec2.v2.by_cahya': 'asr_wav2vec2_luganda_by_cahya', 'lg.speech2text.wav2vec2.v2.by_indonesian_nlp': 'asr_wav2vec2_luganda_by_indonesian_nlp', 'lg.speech2text.wav2vec2.v2_gpu': 'asr_wav2vec2_luganda_by_birgermoell_gpu', 'lg.speech2text.wav2vec2.v2_gpu.by_cahya': 'asr_wav2vec2_luganda_by_cahya_gpu', 'lg.speech2text.wav2vec2.v2_gpu.by_indonesian_nlp': 'asr_wav2vec2_luganda_by_indonesian_nlp_gpu', 'lg.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_luganda', 
'lg.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_luganda_gpu', 'lg.speech2text.wav2vec_xlsr.v2_multilingual': 'asr_wav2vec2_xlsr_multilingual_56', 'lg.speech2text.wav2vec_xlsr.v2_multilingual_gpu': 'asr_wav2vec2_xlsr_multilingual_56_gpu'}, 'lij': {'lij.stopwords': 'stopwords_iso'}, 'lmo': {'lmo.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'lo': {'lo.ner.pos.base': 'roberta_pos_lao_roberta_base_pos_tagger'}, 'lou': {'lou.embed.xlm_roberta': 'xlm_roberta_base_finetuned_luo'}, 'lt': {'lt.embed.bert.cased_base': 'bert_embeddings_base_lt_cased', 'lt.embed.w2v_cc_300d': 'w2v_cc_300d', 'lt.lemma': 'lemma_spacylookup', 'lt.lemma.alksnis': 'lemma_alksnis', 'lt.lemma.spacylookup': 'lemma_spacylookup', 'lt.pos.alksnis': 'pos_alksnis', 'lt.speech2text.wav2vec2': 'asr_common_voice_lithuanian_fairseq', 'lt.speech2text.wav2vec2.gpu': 'asr_common_voice_lithuanian_fairseq_gpu', 'lt.speech2text.wav2vec2.v2': 'asr_wav2vec2_common_voice_lithuanian', 'lt.speech2text.wav2vec2.v2.by_seccily': 'asr_wav2vec_lite', 'lt.speech2text.wav2vec2.v2_gpu': 'asr_wav2vec2_common_voice_lithuanian_gpu', 'lt.speech2text.wav2vec2.v2_gpu.by_seccily': 'asr_wav2vec_lite_gpu', 'lt.speech2text.wav2vec_xlsr.l_v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_lithuanian_by_anton_l_gpu', 'lt.speech2text.wav2vec_xlsr.v2_gpu': 'asr_wav2vec2_xlsr_lithuanian_gpu', 'lt.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_lithuanian_by_deividasm', 'lt.speech2text.wav2vec_xlsr.v2_large.by_anton_l': 'asr_wav2vec2_large_xlsr_53_lithuanian_by_anton_l', 'lt.speech2text.wav2vec_xlsr.v2_large.by_dundar': 'asr_wav2vec2_large_xlsr_53_lithuanian_by_dundar', 'lt.speech2text.wav2vec_xlsr.v2_large.by_m3hrdadfi': 'asr_wav2vec2_large_xlsr_lithuanian', 'lt.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_lithuanian_by_deividasm_gpu', 'lt.speech2text.wav2vec_xlsr.v2_large_gpu.by_dundar': 'asr_wav2vec2_large_xlsr_53_lithuanian_by_dundar_gpu', 'lt.speech2text.wav2vec_xlsr.v2_large_gpu.by_m3hrdadfi': 
'asr_wav2vec2_large_xlsr_lithuanian_gpu', 'lt.stopwords': 'stopwords_iso'}, 'lu': {'lu.embed.bert.medium': 'bert_embeddings_medium_luxembourgish'}, 'luo': { 'luo.ner.xlmr_roberta.base_finetuned': 'xlmroberta_ner_base_finetuned_luo_finetuned_ner', 'luo.ner.xlmr_roberta.base_finetuned_ner.by_mbeukman': 'xlmroberta_ner_base_finetuned_ner', 'luo.ner.xlmr_roberta.base_finetuned_swahili.by_mbeukman': 'xlmroberta_ner_base_finetuned_swahili_finetuned_ner'}, 'lv': {'lv.lemma': 'lemma', 'lv.lemma.lvtb': 'lemma_lvtb', 'lv.pos': 'pos_lvtb', 'lv.pos.lvtb': 'pos_lvtb', 'lv.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_latvian', 'lv.speech2text.wav2vec_xlsr.v2_large.by_jimregan': 'asr_wav2vec2_large_xlsr_latvian', 'lv.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xls_r_1b_common_voice7_gpu', 'lv.speech2text.wav2vec_xlsr.v2_large_gpu.by_anton_l': 'asr_wav2vec2_large_xlsr_53_latvian_gpu', 'lv.speech2text.wav2vec_xlsr.v2_large_gpu.by_jimregan': 'asr_wav2vec2_large_xlsr_latvian_gpu', 'lv.stopwords': 'stopwords_iso', 'lv.stopwords.iso': 'stopwords_iso'}, 'lwt': { 'lwt.answer_question.distil_roberta.squadv2.distilled_base_finetuned': 'roberta_qa_distilroberta_base_finetuned_squad2_lwt', 'lwt.answer_question.roberta.squadv2.base_finetuned': 'roberta_qa_roberta_base_finetuned_squad2_lwt'}, 'lzh': {'lzh.lemma': 'lemma_kyoto', 'lzh.lemma.kyoto': 'lemma_kyoto', 'lzh.pos.kyoto': 'pos_kyoto'}, 'mai': {'mai.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'mg': {'mg.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'min': {'min.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'mk': {'mk.embed.w2v_cc_300d': 'w2v_cc_300d', 'mk.lemma': 'lemma_spacylookup', 'mk.stopwords': 'stopwords_iso'}, 'ml': {'ml.classify.roberta.news.': 'roberta_classifier_malayalam_news', 'ml.classify.xlmr_roberta': 'xlmroberta_classifier_deoffxlmr_mono_malyalam', 'ml.detect_sentence': 'sentence_detector_dl', 'ml.embed.w2v_cc_300d': 'w2v_cc_300d', 'ml.stopwords': 'stopwords_iso'}, 'mn': {'mn.embed.w2v_cc_300d': 'w2v_cc_300d', 
'mn.ner.roberta.base': 'roberta_ner_roberta_base_ner', 'mn.ner.roberta.base.by_bayartsogt': 'roberta_ner_bayartsogt_roberta_base_ner_demo', 'mn.ner.roberta.base.by_buyandelger': 'roberta_ner_buyandelger_roberta_base_ner_demo', 'mn.speech2text.wav2vec_xlsr.l_v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_mongolian_by_anton_l_gpu', 'mn.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_mongolian_by_anton_l', 'mn.speech2text.wav2vec_xlsr.v2_large.by_manandey': 'asr_wav2vec2_large_xlsr_mongolian_by_manandey', 'mn.speech2text.wav2vec_xlsr.v2_large.by_tugstugi': 'asr_wav2vec2_large_xlsr_53_mongolian_by_tugstugi', 'mn.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_mongolian_by_tugstugi_gpu', 'mn.speech2text.wav2vec_xlsr.v2_large_gpu.by_manandey': 'asr_wav2vec2_large_xlsr_mongolian_by_manandey_gpu'}, 'mr': {'mr.detect_sentence': 'sentence_detector_dl', 'mr.embed.albert': 'albert_embeddings_marathi_albert', 'mr.embed.albert_v2': 'albert_embeddings_marathi_albert_v2', 'mr.embed.distil_bert': 'distilbert_embeddings_marathi', 'mr.embed.distilbert': 'distilbert_embeddings_marathi_distilbert', 'mr.embed.marathi_bert': 'bert_embeddings_marathi_bert', 'mr.embed.muril_adapted_local': 'bert_embeddings_muril_adapted_local', 'mr.embed.xlmr_roberta': 'xlmroberta_embeddings_marathi_roberta', 'mr.lemma': 'lemma_ufal', 'mr.lemma.ufal': 'lemma_ufal', 'mr.ner.bert': 'bert_ner_marathi_ner', 'mr.pos': 'pos_ufal', 'mr.pos.ufal': 'pos_ufal', 'mr.stopwords': 'stopwords_iso', 'mr.stopwords.iso': 'stopwords_iso'}, 'ms': {'ms.answer_question.bert.squad.base': 'bert_qa_squad_ms_bert_base', 'ms.embed.albert': 'albert_embeddings_albert_large_bahasa_cased', 'ms.embed.albert_base_bahasa_cased': 'albert_embeddings_albert_base_bahasa_cased', 'ms.embed.albert_tiny_bahasa_cased': 'albert_embeddings_albert_tiny_bahasa_cased', 'ms.embed.bert': 'bert_embeddings_melayubert', 'ms.embed.distil_bert.small': 'distilbert_embeddings_malaysian_small', 'ms.embed.distilbert': 
'distilbert_embeddings_malaysian_distilbert_small', 'ms.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'mt': {'mt.embed.camembert': 'camembert_embeddings_camembert_aux_amandes', 'mt.embed.w2v_cc_300d': 'w2v_cc_300d', 'mt.lemma': 'lemma_mudt', 'mt.lemma.mudt': 'lemma_mudt', 'mt.pos': 'pos_mudt'}, 'mwl': {'mwl.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'my': {'my.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'myv': {'myv.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'mzn': {'mzn.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'nah': {'nah.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'nap': {'nap.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'nb': {'nb.classify.bert.large': 'bert_classifier_nb_large_user_needs', 'nb.lemma': 'lemma_spacylookup', 'nb.lemma.spacylookup': 'lemma_spacylookup', 'nb.pos': 'pos_ud_bokmaal', 'nb.stopwords': 'stopwords_iso'}, 'nds': {'nds.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'ne': {'ne.detect_sentence': 'sentence_detector_dl', 'ne.embed.w2v_cc_300d': 'w2v_cc_300d', 'ne.stopwords': 'stopwords_iso'}, 'new': {'new.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'nl': { 'nl.answer_question.bert.tydiqa.multilingual': 'bert_qa_part_2_bert_multilingual_dutch_model_e1', 'nl.answer_question.squadv2.bert.multilingual_base_cased': 'bert_qa_bert_base_multilingual_cased_finetuned_dutch_squad2', 'nl.classify.bert': 'bert_classifier_republic', 'nl.classify.bert.cased_base': 'bert_classifier_bert_base_dutch_cased_hebban_reviews', 'nl.classify.bert.cased_multilingual_base': 'bert_classifier_bert_base_multilingual_cased_hebban_reviews', 'nl.classify.bert.frombertje2_dadialog02.by_jeska': 'bert_classifier_vaccinchatsentenceclassifierdutch_frombertje2_dadialog02', 'nl.classify.bert.frombertje2_dadialogqonly.by_jeska': 'bert_classifier_vaccinchatsentenceclassifierdutch_frombertje2_dadialogqonly', 'nl.classify.bert.frombertje2_dadialogqonly09.by_jeska': 'bert_classifier_vaccinchatsentenceclassifierdutch_frombertje2_dadialogqonly09', 'nl.classify.bert.frombertjedial.by_jeska': 'bert_classifier_vaccinchatsentenceclassifierdutch_frombertjedial', 
'nl.classify.bert.news.finetuned': 'bert_classifier_dutch_news_clf_finetuned', 'nl.classify.bert.sentiment.cased_base_finetuned': 'bert_classifier_base_dutch_cased_finetuned_sentiment', 'nl.classify.distil_bert.cased_base': 'distilbert_sequence_classifier_distilbert_base_dutch_cased_toxic_comments', 'nl.classify.roberta': 'roberta_classifier_autonlp_savesome_631818261', 'nl.classify.roberta.base': 'roberta_classifier_robbert_dutch_base_toxic_comments', 'nl.classify.roberta.sentiment.': 'roberta_classifier_robbertje_merged_dutch_sentiment', 'nl.classify.roberta.sentiment.v2': 'roberta_classifier_robbert_v2_dutch_sentiment', 'nl.classify.roberta.sentiment_twitter.': 'roberta_classifier_robbert_twitter_sentiment_custom', 'nl.classify.roberta.v2_base': 'roberta_classifier_robbert_v2_dutch_base_hebban_reviews', 'nl.embed': 'dutch_cc_300d', 'nl.embed.bert': 'bert_base_dutch_cased', 'nl.embed.bert.base_cased': 'bert_base_cased', 'nl.embed.bert.cased_base': 'bert_embeddings_base_dutch_cased', 'nl.embed.bert.cased_base.by_geotrend': 'bert_embeddings_base_nl_cased', 'nl.embed.distilbert_base_cased': 'distilbert_embeddings_distilbert_base_nl_cased', 'nl.embed.robbert_v2_dutch_base': 'roberta_embeddings_robbert_v2_dutch_base', 'nl.embed.robbertje_1_gb_bort': 'roberta_embeddings_robbertje_1_gb_bort', 'nl.embed.robbertje_1_gb_merged': 'roberta_embeddings_robbertje_1_gb_merged', 'nl.embed.robbertje_1_gb_non_shuffled': 'roberta_embeddings_robbertje_1_gb_non_shuffled', 'nl.embed.robbertje_1_gb_shuffled': 'roberta_embeddings_robbertje_1_gb_shuffled', 'nl.embed.roberta': 'roberta_embeddings_medroberta.nl', 'nl.embed.roberta.conll.v2_base': 'roberta_embeddings_pdelobelle_robbert_v2_dutch_base', 'nl.embed.w2v_cc_300d': 'w2v_cc_300d', 'nl.embed_sentence.bert.base_cased': 'sent_bert_base_cased', 'nl.lemma': 'lemma_spacylookup', 'nl.lemma.lassysmall': 'lemma_lassysmall', 'nl.lemma.spacylookup': 'lemma_spacylookup', 'nl.lemma_alpino': 'lemma_alpino', 'nl.ner': 'wikiner_6B_100', 
'nl.ner.bert': 'bert_token_classifier_dutch_udlassy_ner', 'nl.ner.bert.cased_base_finetuned': 'bert_ner_bert_base_dutch_cased_finetuned_sonar_ner', 'nl.ner.bert.cased_base_finetuned.by_wietsedv': 'bert_ner_bert_base_dutch_cased_finetuned_udlassy_ner', 'nl.ner.bert.cased_multilingual_base': 'bert_ner_bert_base_multilingual_cased_ner_hrl', 'nl.ner.bert.conll.cased_base_finetuned': 'bert_ner_bert_base_dutch_cased_finetuned_conll2002_ner', 'nl.ner.bert.wikineural.multilingual': 'bert_ner_wikineural_multilingual_ner', 'nl.ner.distil_bert.cased_multilingual_base': 'distilbert_ner_distilbert_base_multilingual_cased_ner_hrl', 'nl.ner.distil_bert.conll.cased_multilingual_base_finetuned': 'distilbert_ner_distilbert_base_multilingual_cased_finetuned_conll2003_ner', 'nl.ner.pos': 'roberta_pos_veganuary_pos', 'nl.ner.pos.cased_base': 'bert_pos_bert_base_dutch_cased_upos_alpino', 'nl.ner.pos.cased_base.by_gronlp': 'bert_pos_bert_base_dutch_cased_upos_alpino_gronings', 'nl.ner.pos.cased_base_finetuned': 'bert_pos_bert_base_dutch_cased_finetuned_lassysmall_pos', 'nl.ner.pos.cased_base_finetuned.by_wietsedv': 'bert_pos_bert_base_dutch_cased_finetuned_udlassy_pos', 'nl.ner.roberta.conll.v2': 'roberta_ner_robbert_v2_dutch_ner', 'nl.ner.wikiner': 'wikiner_6B_300', 'nl.ner.wikiner.glove.6B_300': 'wikiner_6B_300', 'nl.ner.wikiner.glove.840B_300': 'wikiner_840B_300', 'nl.ner.xlmr_roberta.base': 'xlmroberta_ner_xlm_roberta_base_ner_hrl', 'nl.ner.xlmr_roberta.base.by_ml6team': 'xlmroberta_ner_xlm_roberta_base_nl_emoji_ner', 'nl.ner.xlmr_roberta.large': 'xlmroberta_ner_xlm_roberta_large_ner_hrl', 'nl.pos': 'pos_ud_alpino', 'nl.pos.alpino': 'pos_alpino', 'nl.pos.fullstop_dutch_punctuation_prediction': 'roberta_pos_fullstop_dutch_punctuation_prediction', 'nl.pos.lassysmall': 'pos_lassysmall', 'nl.pos.ud_alpino': 'pos_ud_alpino', 'nl.speech2text.wav2vec2.v2': 'asr_wav2vec2_common_voice_demo', 'nl.speech2text.wav2vec2.v2_gpu': 'asr_wav2vec2_common_voice_demo_gpu', 
'nl.speech2text.wav2vec2.v2_gpu_s379_exp': 'asr_exp_w2v2t_wav2vec2_s379_gpu', 'nl.speech2text.wav2vec2.v2_gpu_s408_vp_exp': 'asr_exp_w2v2t_vp_100k_s408_gpu', 'nl.speech2text.wav2vec2.v2_gpu_s461_exp': 'asr_exp_w2v2t_pretraining_s461_gpu', 'nl.speech2text.wav2vec2.v2_gpu_s510_vp_exp': 'asr_exp_w2v2t_vp_s510_gpu', 'nl.speech2text.wav2vec2.v2_gpu_s607_vp_exp': 'asr_exp_w2v2t_vp_s607_gpu', 'nl.speech2text.wav2vec2.v2_gpu_s703_vp_exp': 'asr_exp_w2v2t_vp_s703_gpu', 'nl.speech2text.wav2vec2.v2_gpu_s721_exp': 'asr_exp_w2v2t_wav2vec2_s721_gpu', 'nl.speech2text.wav2vec2.v2_gpu_s754_exp': 'asr_exp_w2v2t_wav2vec2_s754_gpu', 'nl.speech2text.wav2vec2.v2_gpu_s772_vp_exp': 'asr_exp_w2v2t_vp_100k_s772_gpu', 'nl.speech2text.wav2vec2.v2_gpu_s899_vp_exp': 'asr_exp_w2v2t_vp_100k_s899_gpu', 'nl.speech2text.wav2vec2.v2_large': 'asr_wav2vec2_dutch_large_cgn', 'nl.speech2text.wav2vec2.v2_large_3h': 'asr_wav2vec2_dutch_large_cgn_3hrs', 'nl.speech2text.wav2vec2.v2_large_3h.by_bartelds': 'asr_wav2vec2_large_cgn_3hrs', 'nl.speech2text.wav2vec2.v2_large_3h_gpu': 'asr_wav2vec2_dutch_large_cgn_3hrs_gpu', 'nl.speech2text.wav2vec2.v2_large_3h_gpu.by_bartelds': 'asr_wav2vec2_large_cgn_3hrs_gpu', 'nl.speech2text.wav2vec2.v2_large_gpu': 'asr_wav2vec2_dutch_large_cgn_gpu', 'nl.speech2text.wav2vec2.v2_s379_exp': 'asr_exp_w2v2t_wav2vec2_s379', 'nl.speech2text.wav2vec2.v2_s408_vp_exp': 'asr_exp_w2v2t_vp_100k_s408', 'nl.speech2text.wav2vec2.v2_s510_vp_exp': 'asr_exp_w2v2t_vp_s510', 'nl.speech2text.wav2vec2.v2_s607_vp_exp': 'asr_exp_w2v2t_vp_s607', 'nl.speech2text.wav2vec2.v2_s703_vp_exp': 'asr_exp_w2v2t_vp_s703', 'nl.speech2text.wav2vec2.v2_s721_exp': 'asr_exp_w2v2t_wav2vec2_s721', 'nl.speech2text.wav2vec2.v2_s754_exp': 'asr_exp_w2v2t_wav2vec2_s754', 'nl.speech2text.wav2vec2.v2_s772_vp_exp': 'asr_exp_w2v2t_vp_100k_s772', 'nl.speech2text.wav2vec2.v2_s899_vp_exp': 'asr_exp_w2v2t_vp_100k_s899', 'nl.speech2text.wav2vec2.voxpopuli.v2_base_gpu': 'asr_wav2vec2_base_10k_voxpopuli_gpu', 
'nl.speech2text.wav2vec_xlsr.300m': 'asr_xlsr300m_7.0_lm', 'nl.speech2text.wav2vec_xlsr.300m.by_iskaj': 'asr_xlsr300m_8.0', 'nl.speech2text.wav2vec_xlsr.300m_gpu': 'asr_xlsr300m_7.0_lm_gpu', 'nl.speech2text.wav2vec_xlsr.300m_gpu.by_iskaj': 'asr_xlsr300m_8.0_gpu', 'nl.speech2text.wav2vec_xlsr.v2_gpu_s799_exp': 'asr_exp_w2v2t_xlsr_53_s799_gpu', 'nl.speech2text.wav2vec_xlsr.v2_gpu_s948_exp': 'asr_exp_w2v2t_xlsr_53_s948_gpu', 'nl.speech2text.wav2vec_xlsr.v2_gpu_s972_exp': 'asr_exp_w2v2t_xlsr_53_s972_gpu', 'nl.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_dutch', 'nl.speech2text.wav2vec_xlsr.v2_large.by_jonatasgrosman': 'asr_wav2vec2_large_xlsr_53_dutch_by_jonatasgrosman', 'nl.speech2text.wav2vec_xlsr.v2_large.by_nithinholla': 'asr_wav2vec2_large_xlsr_53_dutch_by_nithinholla', 'nl.speech2text.wav2vec_xlsr.v2_large.by_simonsr': 'asr_wav2vec2_large_xlsr_dutch', 'nl.speech2text.wav2vec_xlsr.v2_large.by_wietsedv': 'asr_wav2vec2_large_xlsr_53_dutch_by_wietsedv', 'nl.speech2text.wav2vec_xlsr.v2_large.dutch.by_facebook': 'asr_wav2vec2_large_xlsr_53_dutch_by_facebook', 'nl.speech2text.wav2vec_xlsr.v2_large.polish.by_facebook': 'asr_wav2vec2_large_xlsr_53_polish_by_facebook', 'nl.speech2text.wav2vec_xlsr.v2_large_300m_gpu': 'asr_wav2vec2_large_xls_r_300m_gpu', 'nl.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_dutch_gpu', 'nl.speech2text.wav2vec_xlsr.v2_large_gpu.by_jonatasgrosman': 'asr_wav2vec2_large_xlsr_53_dutch_by_jonatasgrosman_gpu', 'nl.speech2text.wav2vec_xlsr.v2_large_gpu.by_nithinholla': 'asr_wav2vec2_large_xlsr_53_dutch_by_nithinholla_gpu', 'nl.speech2text.wav2vec_xlsr.v2_large_gpu.by_simonsr': 'asr_wav2vec2_large_xlsr_dutch_gpu', 'nl.speech2text.wav2vec_xlsr.v2_large_gpu.by_wietsedv': 'asr_wav2vec2_large_xlsr_53_dutch_by_wietsedv_gpu', 'nl.speech2text.wav2vec_xlsr.v2_large_gpu.dutch.by_facebook': 'asr_wav2vec2_large_xlsr_53_dutch_by_facebook_gpu', 'nl.speech2text.wav2vec_xlsr.v2_large_gpu.polish.by_facebook': 
'asr_wav2vec2_large_xlsr_53_polish_by_facebook_gpu', 'nl.speech2text.wav2vec_xlsr.v2_s799_exp': 'asr_exp_w2v2t_xlsr_53_s799', 'nl.speech2text.wav2vec_xlsr.v2_s948_exp': 'asr_exp_w2v2t_xlsr_53_s948', 'nl.speech2text.wav2vec_xlsr.v2_s972_exp': 'asr_exp_w2v2t_xlsr_53_s972', 'nl.stopwords': 'stopwords_iso'}, 'nn': {'nn.embed.w2v_cc_300d': 'w2v_cc_300d', 'nn.pos': 'pos_ud_nynorsk', 'nn.pos.ud_nynorsk': 'pos_ud_nynorsk'}, 'no': {'no.embed.bert': 'bert_embeddings_norbert', 'no.embed.bert.by_ltgoslo': 'bert_embeddings_norbert2', 'no.embed.bert.cased_base': 'bert_embeddings_base_no_cased', 'no.embed.w2v_cc_300d': 'w2v_cc_300d', 'no.lemma': 'lemma_nynorsk', 'no.lemma.bokmaal': 'lemma_bokmaal', 'no.lemma.nynorsk': 'lemma_nynorsk', 'no.lemma.nynorsklia': 'lemma_nynorsklia', 'no.ner.bert.wikiann.cased_multilingual_base_finetuned': 'bert_token_classifier_base_multilingual_cased_finetuned_norsk_ner', 'no.ner.norne': 'norne_6B_100', 'no.ner.norne.100d': 'norne_6B_100', 'no.ner.norne.glove.6B_300': 'norne_6B_300', 'no.ner.norne.glove.840B_300': 'norne_840B_300', 'no.pos': 'pos_bokmaal', 'no.pos.bokmaal': 'pos_bokmaal', 'no.pos.nynorsk': 'pos_nynorsk', 'no.pos.nynorsklia': 'pos_nynorsklia'}, 'nso': {'nso.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'oc': {'oc.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'or': {'or.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'orv': {'orv.lemma': 'lemma_torot', 'orv.lemma.rnc': 'lemma_rnc', 'orv.lemma.torot': 'lemma_torot', 'orv.pos.rnc': 'pos_rnc', 'orv.pos.torot': 'pos_torot'}, 'os': {'os.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'pa': {'pa.detect_sentence': 'sentence_detector_dl', 'pa.embed.muril_adapted_local': 'bert_embeddings_muril_adapted_local', 'pa.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'pcm': {'pcm.embed.xlm_roberta': 'xlm_roberta_base_finetuned_naija', 'pcm.embed_sentence.xlm_roberta': 'sent_xlm_roberta_base_finetuned_naija', 'pcm.lemma.nsc': 'lemma_nsc', 'pcm.ner.bert.uncased_base': 'bert_ner_mbert_base_uncased_ner_pcm', 'pcm.ner.bert.uncased_base.by_arnolfokam': 
'bert_ner_mbert_base_uncased_pcm', 'pcm.ner.roberta.base': 'roberta_ner_roberta_base_pcm', 'pcm.ner.xlmr_roberta.base_finetuned': 'xlmroberta_ner_base_finetuned_naija', 'pcm.ner.xlmr_roberta.base_finetuned_naija.by_mbeukman': 'xlmroberta_ner_base_finetuned_naija_finetuned_naija', 'pcm.ner.xlmr_roberta.base_finetuned_swahili.by_mbeukman': 'xlmroberta_ner_base_finetuned_swahili_finetuned_naija', 'pcm.pos.nsc': 'pos_nsc'}, 'pfl': {'pfl.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'pl': { 'pl.answer_question.squad.bert.multilingual_base_cased': 'bert_qa_bert_base_multilingual_cased_finetuned_polish_squad1', 'pl.answer_question.squadv2.bert.multilingual_base_cased': 'bert_qa_bert_base_multilingual_cased_finetuned_polish_squad2', 'pl.classify.bert.hate.': 'bert_classifier_dehatebert_mono_polish', 'pl.classify.bert.multi2convai.': 'bert_classifier_multi2convai_logistics', 'pl.embed.bert.cased_base': 'bert_embeddings_base_pl_cased', 'pl.embed.distilbert_base_cased': 'distilbert_embeddings_distilbert_base_pl_cased', 'pl.embed.w2v_cc_300d': 'w2v_cc_300d', 'pl.lemma': 'lemma_lfg', 'pl.lemma.lfg': 'lemma_lfg', 'pl.lemma.pdb': 'lemma_pdb', 'pl.ner': 'wikiner_6B_100', 'pl.ner.wikiner': 'wikiner_6B_100', 'pl.ner.wikiner.glove.6B_100': 'wikiner_6B_100', 'pl.ner.wikiner.glove.6B_300': 'wikiner_6B_300', 'pl.ner.wikiner.glove.840B_300': 'wikiner_840B_300', 'pl.pos': 'pos_ud_lfg', 'pl.pos.flg': 'pos_lfg', 'pl.pos.pdb': 'pos_pdb', 'pl.pos.ud_lfg': 'pos_ud_lfg', 'pl.stopwords': 'stopwords_iso', 'pl.stopwords.iso': 'stopwords_iso'}, 'pms': {'pms.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'pnb': {'pnb.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'ps': {'ps.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'pt': { 'pt.answer_question.distil_bert.squad.finetuned': 'distilbert_qa_finetuned_squad', 'pt.answer_question.squad.bert.base_cased': 'bert_qa_bert_base_portuguese_cased_finetuned_squad_v1_pt_mrm8488', 'pt.answer_question.squad.bert.base_cased.by_mrm8488': 'bert_qa_bert_base_portuguese_cased_finetuned_squad_v1_pt_mrm8488', 
'pt.answer_question.squad.bert.base_cased.by_pierreguillou': 'bert_qa_bert_base_cased_squad_v1.1_portuguese', 'pt.answer_question.squad.bert.large_cased': 'bert_qa_bert_large_cased_squad_v1.1_portuguese', 'pt.answer_question.squad.biobert': 'bert_qa_bioBERTpt_squad_v1.1_portuguese', 'pt.answer_question.squad.distil_bert': 'distilbert_qa_multi_finedtuned_squad', 'pt.bert': 'bert_portuguese_base_cased', 'pt.bert.cased': 'bert_portuguese_base_cased', 'pt.bert.cased.large': 'bert_portuguese_large_cased', 'pt.classify.bert': 'bert_classifier_paraphrase', 'pt.classify.bert.hate.': 'bert_classifier_dehatebert_mono_portugese', 'pt.embed.BR_BERTo': 'roberta_embeddings_BR_BERTo', 'pt.embed.bert.cased_base': 'bert_embeddings_base_portuguese_cased', 'pt.embed.bert.cased_base.by_geotrend': 'bert_embeddings_base_pt_cased', 'pt.embed.bert_base_cased_pt_lenerbr': 'bert_embeddings_bert_base_cased_pt_lenerbr', 'pt.embed.bert_base_gl_cased': 'bert_embeddings_bert_base_gl_cased', 'pt.embed.bert_base_portuguese_cased': 'bert_embeddings_bert_base_portuguese_cased', 'pt.embed.bert_base_portuguese_cased_finetuned_peticoes': 'bert_embeddings_bert_base_portuguese_cased_finetuned_peticoes', 'pt.embed.bert_base_portuguese_cased_finetuned_tcu_acordaos': 'bert_embeddings_bert_base_portuguese_cased_finetuned_tcu_acordaos', 'pt.embed.bert_base_pt_cased': 'bert_embeddings_bert_base_pt_cased', 'pt.embed.bert_large_cased_pt_lenerbr': 'bert_embeddings_bert_large_cased_pt_lenerbr', 'pt.embed.bert_large_portuguese_cased': 'bert_embeddings_bert_large_portuguese_cased', 'pt.embed.bert_small_gl_cased': 'bert_embeddings_bert_small_gl_cased', 'pt.embed.biobert': 'bert_embeddings_biobertpt_all', 'pt.embed.biobert.by_pucpr': 'bert_embeddings_biobertpt_bio', 'pt.embed.biobert.clinical.by_pucpr': 'bert_embeddings_biobertpt_clin', 'pt.embed.distilbert_base_cased': 'distilbert_embeddings_distilbert_base_pt_cased', 'pt.embed.gs_all': 'biobert_embeddings_all', 'pt.embed.gs_biomedical': 
'biobert_embeddings_biomedical', 'pt.embed.gs_clinical': 'biobert_embeddings_clinical', 'pt.embed.w2v_cc_300d': 'w2v_cc_300d', 'pt.embed_sentence.bert.base_legal': 'sbert_legal_bertimbau_base_tsdae_sts', 'pt.embed_sentence.bert.cased_large_legal': 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.1', 'pt.embed_sentence.bert.large_legal': 'sbert_legal_bertimbau_large_gpl_sts', 'pt.embed_sentence.bert.legal.cased_large_mlm_sts_v0.10.by_stjiris': 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.10', 'pt.embed_sentence.bert.legal.cased_large_mlm_sts_v0.2.by_stjiris': 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.2', 'pt.embed_sentence.bert.legal.cased_large_mlm_sts_v0.3.by_stjiris': 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.3', 'pt.embed_sentence.bert.legal.cased_large_mlm_sts_v0.4.by_stjiris': 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.4', 'pt.embed_sentence.bert.legal.cased_large_mlm_sts_v0.5.by_stjiris': 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.5', 'pt.embed_sentence.bert.legal.cased_large_mlm_sts_v0.7.by_stjiris': 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.7', 'pt.embed_sentence.bert.legal.cased_large_mlm_sts_v0.8.by_stjiris': 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.8', 'pt.embed_sentence.bert.legal.cased_large_mlm_sts_v0.9.by_stjiris': 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.9', 'pt.embed_sentence.bert.legal.cased_large_mlm_sts_v1.0.by_stjiris': 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v1.0', 'pt.embed_sentence.bert.legal.cased_large_mlm_v0.11_gpl_nli_sts_v0.by_stjiris': 'sbert_bert_large_portuguese_cased_legal_mlm_v0.11_gpl_nli_sts_v0', 'pt.embed_sentence.bert.legal.cased_large_mlm_v0.11_gpl_nli_sts_v1.by_stjiris': 'sbert_bert_large_portuguese_cased_legal_mlm_v0.11_gpl_nli_sts_v1', 'pt.embed_sentence.bert.legal.cased_large_mlm_v0.11_nli_sts_v0.by_stjiris': 'sbert_bert_large_portuguese_cased_legal_mlm_v0.11_nli_sts_v0', 
'pt.embed_sentence.bert.legal.cased_large_mlm_v0.11_nli_sts_v1.by_stjiris': 'sbert_bert_large_portuguese_cased_legal_mlm_v0.11_nli_sts_v1', 'pt.embed_sentence.bert.legal.cased_large_mlm_v0.11_sts_v0.by_stjiris': 'sbert_bert_large_portuguese_cased_legal_mlm_v0.11_sts_v0', 'pt.embed_sentence.bert.legal.cased_large_mlm_v0.11_sts_v1.by_stjiris': 'sbert_bert_large_portuguese_cased_legal_mlm_v0.11_sts_v1', 'pt.embed_sentence.bert.v2_base_legal': 'sbert_legal_bertimbau_sts_base_ma_v2', 'pt.embed_sentence.bert.v2_large_legal': 'sbert_legal_bertimbau_large_tsdae_sts_v2', 'pt.embed_sentence.bertimbau.legal.assin.base.by_rufimelo': 'sbert_legal_bertimbau_sts_base_ma', 'pt.embed_sentence.bertimbau.legal.assin2.base.by_rufimelo': 'sbert_legal_bertimbau_sts_base', 'pt.embed_sentence.bertimbau.legal.large_sts_by_rufimelo': 'sbert_legal_bertimbau_sts_large', 'pt.embed_sentence.bertimbau.legal.large_sts_ma.by_rufimelo': 'sbert_legal_bertimbau_sts_large_ma', 'pt.embed_sentence.bertimbau.legal.large_sts_ma_v3.by_rufimelo': 'sbert_legal_bertimbau_sts_large_ma_v3', 'pt.embed_sentence.bertimbau.legal.large_tsdae_sts.by_rufimelo': 'sbert_legal_bertimbau_large_tsdae_sts', 'pt.embed_sentence.bertimbau.legal.large_tsdae_sts_v4.by_rufimelo': 'sbert_legal_bertimbau_large_tsdae_sts_v4', 'pt.embed_sentence.bertimbau.legal.large_tsdae_v4_gpl_sts.by_rufimelo': 'sbert_legal_bertimbau_large_tsdae_v4_gpl_sts', 'pt.embed_sentence.bertimbau.legal.v2_large_sts_v2.by_rufimelo': 'sbert_legal_bertimbau_sts_large_v2', 'pt.embed_sentence.bertimbau.legal.v2_large_v2_sts.by_rufimelo': 'sbert_legal_bertimbau_large_v2_sts', 'pt.lemma': 'lemma_bosque', 'pt.lemma.bosque': 'lemma_bosque', 'pt.lemma.gsd': 'lemma_gsd', 'pt.lemma.spacylookup': 'lemma_spacylookup', 'pt.ner': 'wikiner_6B_100', 'pt.ner.bert': 'bert_ner_satellite_instrument_bert_ner', 'pt.ner.bert.base': 'bert_ner_bert_base_pt_archive', 'pt.ner.bert.base.by_luciano': 'bert_ner_bertimbau_base_lener_br_luciano', 'pt.ner.bert.by_dominguesm': 
'bert_token_classifier_restore_punctuation_ptbr', 'pt.ner.bert.cased_base': 'bert_ner_ner_bert_base_cased_pt_lenerbr', 'pt.ner.bert.clinical.': 'bert_token_classifier_clinicalnerpt_diagnostic', 'pt.ner.bert.clinical.disease': 'bert_token_classifier_clinicalnerpt_disease', 'pt.ner.bert.clinical_chemical.': 'bert_token_classifier_clinicalnerpt_chemical', 'pt.ner.bert.clinical_disorder.by_pucpr': 'bert_token_classifier_clinicalnerpt_disorder', 'pt.ner.bert.clinical_finding.by_pucpr': 'bert_token_classifier_clinicalnerpt_finding', 'pt.ner.bert.large': 'bert_ner_bertimbau_large_lener_br_luciano', 'pt.ner.bert.news.': 'bert_ner_ner_news_portuguese', 'pt.ner.biobert': 'bert_token_classifier_biobertpt_clin_tempclinbr', 'pt.ner.pos': 'bert_pos_autonlp_pos_tag_bosque', 'pt.ner.satellite_instrument_roberta_NER': 'roberta_ner_satellite_instrument_roberta_NER', 'pt.ner.wikiner.glove.6B_100': 'wikiner_6B_100', 'pt.ner.wikiner.glove.6B_300': 'wikiner_6B_300', 'pt.ner.wikiner.glove.840B_300': 'wikiner_840B_300', 'pt.pos': 'pos_ud_bosque', 'pt.pos.bosque': 'pos_bosque', 'pt.pos.gsd': 'pos_gsd', 'pt.pos.ud_bosque': 'pos_ud_bosque', 'pt.speech2text.wav2vec2.v2_gpu_s250_exp': 'asr_exp_w2v2t_wav2vec2_s250_gpu', 'pt.speech2text.wav2vec2.v2_gpu_s515_exp': 'asr_exp_w2v2t_wav2vec2_s515_gpu', 'pt.speech2text.wav2vec2.v2_gpu_s645_vp_exp': 'asr_exp_w2v2t_vp_100k_s645_gpu', 'pt.speech2text.wav2vec2.v2_gpu_s660_vp_exp': 'asr_exp_w2v2t_vp_100k_s660_gpu', 'pt.speech2text.wav2vec2.v2_gpu_s69_vp_exp': 'asr_exp_w2v2t_vp_100k_s69_gpu', 'pt.speech2text.wav2vec2.v2_gpu_s859_exp': 'asr_exp_w2v2t_wav2vec2_s859_gpu', 'pt.speech2text.wav2vec2.v2_s250_exp': 'asr_exp_w2v2t_wav2vec2_s250', 'pt.speech2text.wav2vec2.v2_s515_exp': 'asr_exp_w2v2t_wav2vec2_s515', 'pt.speech2text.wav2vec2.v2_s645_vp_exp': 'asr_exp_w2v2t_vp_100k_s645', 'pt.speech2text.wav2vec2.v2_s660_vp_exp': 'asr_exp_w2v2t_vp_100k_s660', 'pt.speech2text.wav2vec2.v2_s69_vp_exp': 'asr_exp_w2v2t_vp_100k_s69', 'pt.speech2text.wav2vec2.v2_s859_exp': 
'asr_exp_w2v2t_wav2vec2_s859', 'pt.speech2text.wav2vec2.voxpopuli.v2_large': 'asr_wav2vec2_large_100k_voxpopuli_tts_dataset_portuguese', 'pt.speech2text.wav2vec2.voxpopuli.v2_large_gpu': 'asr_wav2vec2_large_100k_voxpopuli_tts_dataset_portuguese_gpu', 'pt.speech2text.wav2vec_xlsr': 'asr_bp500_xlsr', 'pt.speech2text.wav2vec_xlsr.commonvoice10.by_lgris': 'asr_bp_commonvoice10_xlsr', 'pt.speech2text.wav2vec_xlsr.commonvoice10.gpu.by_lgris': 'asr_bp_commonvoice10_xlsr_gpu', 'pt.speech2text.wav2vec_xlsr.commonvoice100.by_lgris': 'asr_bp_commonvoice100_xlsr', 'pt.speech2text.wav2vec_xlsr.commonvoice100.gpu.by_lgris': 'asr_bp_commonvoice100_xlsr_gpu', 'pt.speech2text.wav2vec_xlsr.gpu': 'asr_bp500_xlsr_gpu', 'pt.speech2text.wav2vec_xlsr.lapsbm1.by_lgris': 'asr_bp_lapsbm1_xlsr', 'pt.speech2text.wav2vec_xlsr.lapsbm1.gpu.by_lgris': 'asr_bp_lapsbm1_xlsr_gpu', 'pt.speech2text.wav2vec_xlsr.sidi10.by_lgris': 'asr_bp_sid10_xlsr', 'pt.speech2text.wav2vec_xlsr.sidi10.gpu.by_lgris': 'asr_bp_sid10_xlsr_gpu', 'pt.speech2text.wav2vec_xlsr.tedx100.by_lgris': 'asr_bp_tedx100_xlsr', 'pt.speech2text.wav2vec_xlsr.tedx100.gpu.by_lgris': 'asr_bp_tedx100_xlsr_gpu', 'pt.speech2text.wav2vec_xlsr.v2_400h': 'asr_wav2vec2_xls_r_cv7_from_bp400h', 'pt.speech2text.wav2vec_xlsr.v2_400h_gpu': 'asr_wav2vec2_xls_r_cv7_from_bp400h_gpu', 'pt.speech2text.wav2vec_xlsr.v2_gpu': 'asr_wav2vec2_xls_r_1b_portuguese_coraa_3_gpu', 'pt.speech2text.wav2vec_xlsr.v2_gpu_s454_exp': 'asr_exp_w2v2t_xlsr_53_s454_gpu', 'pt.speech2text.wav2vec_xlsr.v2_gpu_s677_exp': 'asr_exp_w2v2t_xlsr_53_s677_gpu', 'pt.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_coraa_portuguese_cv7', 'pt.speech2text.wav2vec_xlsr.v2_large_300m_gpu': 'asr_wav2vec2_large_xls_r_300m_gpu', 'pt.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_coraa_portuguese_cv7_gpu', 'pt.speech2text.wav2vec_xlsr.v2_s454_exp': 'asr_exp_w2v2t_xlsr_53_s454', 'pt.speech2text.wav2vec_xlsr.v2_s677_exp': 'asr_exp_w2v2t_xlsr_53_s677', 
'pt.speech2text.wav2vec_xlsr.voxforge1.by_lgris': 'asr_bp_voxforge1_xlsr', 'pt.speech2text.wav2vec_xlsr.voxforge1.gpu.by_lgris': 'asr_bp_voxforge1_xlsr_gpu', 'pt.stopwords': 'stopwords_iso', 'pt.stopwords.iso': 'stopwords_iso'}, 'qhe': {'qhe.lemma': 'lemma_hiencs', 'qhe.pos': 'pos_hiencs', 'qhe.pos.hiencs': 'pos_hiencs'}, 'qtd': {'qtd.lemma': 'lemma_sagt', 'qtd.lemma.sagt': 'lemma_sagt', 'qtd.pos': 'pos_sagt'}, 'qu': {'qu.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'rm': {'rm.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'ro': {'ro.embed.ALR_BERT': 'albert_embeddings_ALR_BERT', 'ro.embed.bert.cased_base': 'bert_base_cased', 'ro.embed.bert.cased_base.by_geotrend': 'bert_embeddings_base_ro_cased', 'ro.embed.distilbert_base_cased': 'distilbert_embeddings_distilbert_base_ro_cased', 'ro.embed.w2v_cc_300d': 'w2v_cc_300d', 'ro.lemma': 'lemma_spacylookup', 'ro.lemma.nonstandard': 'lemma_nonstandard', 'ro.lemma.rrt': 'lemma_rrt', 'ro.lemma.simonero': 'lemma_simonero', 'ro.lemma.spacylookup': 'lemma_spacylookup', 'ro.pos': 'pos_nonstandard', 'ro.pos.nonstandard': 'pos_nonstandard', 'ro.pos.rrt': 'pos_rrt', 'ro.pos.simonero': 'pos_simonero', 'ro.pos.ud_rrt': 'pos_ud_rrt', 'ro.speech2text.wav2vec2.v2': 'asr_romanian_wav2vec2', 'ro.speech2text.wav2vec2.v2_gpu': 'asr_romanian_wav2vec2_gpu', 'ro.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_romanian_by_anton_l', 'ro.speech2text.wav2vec_xlsr.v2_large.by_gmihaila': 'asr_wav2vec2_large_xlsr_53_romanian_by_gmihaila', 'ro.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_romanian_by_gmihaila_gpu', 'ro.stopwords': 'stopwords_iso', 'ro.stopwords.iso': 'stopwords_iso'}, 'ru': {'ru.answer_question.bert': 'bert_qa_deep_pavlov_full', 'ru.answer_question.bert.pavalov.by_ruselkomp': 'bert_qa_deep_pavlov_full_2', 'ru.answer_question.bert.squad.large_finetuned': 'bert_qa_ruselkomp_sbert_large_nlu_ru_finetuned_squad_full', 'ru.answer_question.bert.squad.large_finetuned.by_Timur1984': 
'bert_qa_timur1984_sbert_large_nlu_ru_finetuned_squad_full', 'ru.answer_question.bert.tes.by_ruselkomp': 'bert_qa_sber_full_tes', 'ru.answer_question.distil_bert': 'distilbert_qa_model_QA_5_epoch_RU', 'ru.answer_question.xlm_roberta.multilingual_large': 'xlm_roberta_qa_xlm_roberta_large_qa_multilingual_finedtuned_ru_ru_alexkay', 'ru.classify.bert': 'bert_classifier_rubertconv_toxic_clf', 'ru.classify.bert.base': 'bert_classifier_response_quality_base', 'ru.classify.bert.base.by_rexhaif': 'bert_classifier_ru_base_srl', 'ru.classify.bert.base.by_skolkovoinstitute': 'bert_classifier_rubert_base_corruption_detector', 'ru.classify.bert.base.by_tinkoff_ai': 'bert_classifier_response_toxicity_base', 'ru.classify.bert.by_skolkovoinstitute': 'bert_classifier_russian_toxicity', 'ru.classify.bert.cased_base': 'bert_classifier_rubert_base_cased_dp_paraphrase_detection', 'ru.classify.bert.sentiment.': 'bert_sequence_classifier_rubert_sentiment', 'ru.classify.bert.sentiment.cased_base': 'bert_classifier_rubert_base_cased_sentiment', 'ru.classify.bert.sentiment.cased_base.by_Tatyana': 'bert_classifier_rubert_base_cased_sentiment_new', 'ru.classify.bert.sentiment.tiny': 'bert_classifier_rubert_tiny_sentiment_balanced', 'ru.classify.bert.sentiment_medicine.cased_base.by_blanchefort': 'bert_classifier_rubert_base_cased_sentiment_med', 'ru.classify.bert.tiny': 'bert_classifier_response_quality_tiny', 'ru.classify.bert.tiny.by_aniemore': 'bert_classifier_rubert_tiny2_russian_emotion_detection', 'ru.classify.bert.tiny.by_chgk13': 'bert_classifier_tiny_russian_toxic', 'ru.classify.emotion.bert.tiny.by_cointegrated': 'bert_classifier_rubert_tiny2_cedr_emotion_detection', 'ru.classify.sentiment.bert.sentiment.cased_base.by_blanchefort': 'bert_classifier_rubert_base_cased_sentiment_rusentiment', 'ru.classify.sentiment.reviews.bert.sentiment.cased_base.by_blanchefort': 'bert_classifier_rubert_base_cased_sentiment_rurewiews', 'ru.classify.toxic': 'bert_sequence_classifier_toxicity', 
'ru.classify.toxic.bert.tiny.by_cointegrated': 'bert_classifier_rubert_tiny_toxicity', 'ru.detect_sentence': 'sentence_detector_dl', 'ru.embed.bert.cased_base': 'bert_embeddings_base_ru_cased', 'ru.embed.bert_base_ru_cased': 'bert_embeddings_bert_base_ru_cased', 'ru.embed.distilbert_base_cased': 'distilbert_embeddings_distilbert_base_ru_cased', 'ru.embed.roberta_base_russian_v0': 'roberta_embeddings_roberta_base_russian_v0', 'ru.embed.ruRoberta_large': 'roberta_embeddings_ruRoberta_large', 'ru.embed.w2v_cc_300d': 'w2v_cc_300d', 'ru.lemma': 'lemma_gsd', 'ru.lemma.gsd': 'lemma_gsd', 'ru.lemma.spacylookup': 'lemma_spacylookup', 'ru.lemma.syntagrus': 'lemma_syntagrus', 'ru.lemma.taiga': 'lemma_taiga', 'ru.ner': 'wikiner_6B_100', 'ru.ner.bert': 'bert_ner_labse_ner_nerel', 'ru.ner.bert.by_ilyagusev': 'bert_ner_rubertconv_toxic_editor', 'ru.ner.pos.universal_dependencies.base': 'bert_pos_bert_base_russian_upos', 'ru.ner.wikiner': 'wikiner_6B_100', 'ru.ner.wikiner.glove.6B_100': 'wikiner_6B_100', 'ru.ner.wikiner.glove.6B_300': 'wikiner_6B_300', 'ru.ner.wikiner.glove.840B_300': 'wikiner_840B_300', 'ru.ner.xlmr_roberta.base': 'xlmroberta_ner_base_panx_dataset', 'ru.ner.xlmr_roberta.large': 'xlmroberta_ner_large_panx_dataset', 'ru.pos': 'pos_syntagrus', 'ru.pos.gsd': 'pos_gsd', 'ru.pos.syntagrus': 'pos_syntagrus', 'ru.pos.taiga': 'pos_taiga', 'ru.pos.ud_gsd': 'pos_ud_gsd', 'ru.stopwords': 'stopwords_iso', 'ru.stopwords.iso': 'stopwords_iso'}, 'rw': {'rw.embed.xlm_roberta': 'xlm_roberta_base_finetuned_kinyarwanda', 'rw.embed_sentence.xlm_roberta': 'sent_xlm_roberta_base_finetuned_kinyarwanda', 'rw.ner.xlmr_roberta.base_finetuned': 'xlmroberta_ner_base_finetuned_kinyarwanda_finetuned_ner_kinyarwand', 'rw.ner.xlmr_roberta.base_finetuned_kinyarwand.by_mbeukman': 'xlmroberta_ner_base_finetuned_ner_kinyarwand', 'rw.ner.xlmr_roberta.base_finetuned_swahili.by_mbeukman': 'xlmroberta_ner_base_finetuned_swahili_finetuned_ner_kinyarwand'}, 'sa': {'sa.embed.w2v_cc_300d': 'w2v_cc_300d', 
'sa.lemma': 'lemma_vedic', 'sa.pos': 'pos_vedic', 'sa.stopwords': 'stopwords_iso'}, 'sae': { 'sae.answer_question.roberta.squad.base': 'roberta_qa_sae_roberta_base_squad'}, 'sah': {'sah.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'sc': {'sc.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'scn': {'scn.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'sco': {'sco.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'sd': {'sd.detect_sentence': 'sentence_detector_dl', 'sd.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'sg': { 'sg.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_swiss_german', 'sg.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_swiss_german_gpu'}, 'sh': {'sh.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'si': {'si.answer_question.bert.base': 'bert_qa_bert_base_sinhala_qa', 'si.embed.roberta': 'roberta_embeddings_sinhalaberto', 'si.embed.w2v_cc_300d': 'w2v_cc_300d', 'si.stopwords': 'stopwords_iso'}, 'sk': {'sk.embed.bert': 'bert_embeddings_fernet_cc', 'sk.embed.roberta': 'roberta_embeddings_slovakbert', 'sk.embed.roberta.news.': 'roberta_embeddings_fernet_news', 'sk.embed.w2v_cc_300d': 'w2v_cc_300d', 'sk.lemma': 'lemma_snk', 'sk.lemma.snk': 'lemma_snk', 'sk.ner.bert.wikiann.': 'bert_token_classifier_fernet_cc_sk_ner', 'sk.ner.pos': 'roberta_pos_veganuary_pos', 'sk.ner.pos.universal_dependencies.': 'roberta_pos_slovakbert_pos', 'sk.ner.roberta.wikiann.distilled': 'roberta_ner_distil_slovakbert_ner', 'sk.pos': 'pos_ud_snk', 'sk.pos.snk': 'pos_snk', 'sk.pos.ud_snk': 'pos_ud_snk', 'sk.stopwords': 'stopwords_iso', 'sk.stopwords.iso': 'stopwords_iso'}, 'sl': {'sl.embed.camembert': 'camembert_embeddings_sloberta', 'sl.embed.w2v_cc_300d': 'w2v_cc_300d', 'sl.lemma': 'lemma_sst', 'sl.lemma.ssj': 'lemma_ssj', 'sl.lemma.sst': 'lemma_sst', 'sl.pos': 'pos_sst', 'sl.pos.ssj': 'pos_ssj', 'sl.pos.sst': 'pos_sst', 'sl.stopwords': 'stopwords_iso', 'sl.stopwords.iso': 'stopwords_iso'}, 'sme': {'sme.lemma': 'lemma_giella', 'sme.pos': 'pos_giella'}, 'so': {'so.detect_sentence': 'sentence_detector_dl', 
'so.embed.w2v_cc_300d': 'w2v_cc_300d', 'so.stopwords': 'stopwords_so'}, 'sq': { 'sq.answer_question.bert.uncased_base': 'bert_qa_bert_base_uncased_fiqa_flm_sq_flit', 'sq.embed.w2v_cc_300d': 'w2v_cc_300d', 'sq.ner.bert.cased_base': 'bert_ner_mbert_base_albanian_cased_ner', 'sq.ner.bert.wikiann.cased_multilingual_base_finetuned': 'bert_token_classifier_base_multilingual_cased_finetuned_albanian_ner', 'sq.stopwords': 'stopwords_iso'}, 'sr': {'sr.embed.w2v_cc_300d': 'w2v_cc_300d', 'sr.lemma': 'lemma_spacylookup', 'sr.lemma.set': 'lemma_set', 'sr.lemma.spacylookup': 'lemma_spacylookup', 'sr.pos': 'pos_set', 'sr.pos.set': 'pos_set', 'sr.stopwords': 'stopwords_iso', 'sr.stopwords.iso': 'stopwords_iso'}, 'st': {'st.ner.roberta.tweet.large': 'roberta_ner_tner_roberta_large_tweet_st_2020', 'st.ner.roberta.tweet.large.by_asahi417': 'roberta_ner_tner_roberta_large_tweet_st_asahi417', 'st.stopwords': 'stopwords_st'}, 'su': {'su.classify.bert.base': 'bert_classifier_sundanese_base_emotion', 'su.classify.roberta.base': 'roberta_classifier_sundanese_base_emotion', 'su.embed.sundanese_roberta_base': 'roberta_embeddings_sundanese_roberta_base', 'su.embed.w2v_cc_300d': 'w2v_cc_300d', 'su.speech2text.wav2vec2.v2': 'asr_wav2vec2_indonesian_javanese_sundanese', 'su.speech2text.wav2vec2.v2_gpu': 'asr_wav2vec2_indonesian_javanese_sundanese_gpu', 'su.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_sundanese', 'su.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_sundanese_gpu'}, 'sv': { 'sv.answer_question.bert.squad.cased': 'bert_qa_bert_multi_cased_squad_sv_marbogusz', 'sv.answer_question.bert.squad.cased_base': 'bert_qa_bert_base_swedish_cased_squad_experimental', 'sv.answer_question.bert.squad.cased_base.by_KBLab': 'bert_qa_base_swedish_cased_squad_experimental', 'sv.answer_question.squadv2.bert.base': 'bert_qa_bert_base_swedish_squad2', 'sv.answer_question.xlmr_roberta.large': 'xlm_roberta_qa_xlmr_large_qa_sv_sv_m3hrdadfi', 
'sv.classify.bert.sentiment.cased_base': 'bert_classifier_base_swedish_cased_sentiment', 'sv.classify.token_bert.swedish_ner': 'bert_token_classifier_swedish_ner', 'sv.embed.bert.base_cased': 'bert_base_cased', 'sv.embed.bert.cased_base': 'bert_embeddings_kb_base_swedish_cased', 'sv.embed.bert.cased_base.by_kblab': 'bert_embeddings_kblab_base_swedish_cased', 'sv.embed.bert.distilled_cased': 'bert_embeddings_kb_distilled_cased', 'sv.embed.w2v_cc_300d': 'w2v_cc_300d', 'sv.embed_sentence.bert.base_cased': 'sent_bert_base_cased', 'sv.lemma': 'lemma_lines', 'sv.lemma.lines': 'lemma_lines', 'sv.lemma.spacylookup': 'lemma_spacylookup', 'sv.lemma.talbanken': 'lemma_talbanken', 'sv.ner': 'swedish_ner_6B_100', 'sv.ner.6B_100': 'swedish_ner_6B_100', 'sv.ner.6B_300': 'swedish_ner_6B_300', 'sv.ner.840B_300': 'swedish_ner_840B_300', 'sv.ner.bert': 'bert_ner_swedish_ner', 'sv.ner.bert.base': 'bert_token_classifier_base_swedish_lowermix_reallysimple_ner', 'sv.ner.bert.cased_base': 'bert_ner_kb_bert_base_swedish_cased_ner', 'sv.ner.bert.cased_base.by_kblab': 'bert_ner_kblab_bert_base_swedish_cased_ner', 'sv.ner.bert.cased_base.neriob.by_kblab': 'bert_ner_bert_base_swedish_cased_neriob', 'sv.ner.bert.finetuned': 'bert_ner_bert_finetuned_ner_swedish_test', 'sv.ner.bert.finetuned.by_nonzerophilip': 'bert_ner_bert_finetuned_ner_swedish_test_numb_2', 'sv.ner.bert.large_finetuned': 'bert_ner_bert_finetuned_ner_swedish_test_large_set', 'sv.ner.bert.sentiment.': 'bert_ner_swedish_sentiment_fear_targets', 'sv.ner.bert.sentiment.by_recordedfuture': 'bert_ner_swedish_sentiment_violence_targets', 'sv.ner.bert.small_finetuned': 'bert_ner_bert_finetuned_ner_swedish_small_set_health_and_standart', 'sv.ner.pos.cased_base': 'bert_pos_bert_base_swedish_cased_pos', 'sv.pos': 'pos_ud_tal', 'sv.pos.lines': 'pos_lines', 'sv.pos.talbanken': 'pos_talbanken', 'sv.pos.ud_tal': 'pos_ud_tal', 'sv.speech2text.wav2vec2': 'asr_lm_swedish', 'sv.speech2text.wav2vec2.by_marma': 'asr_test_by_marma', 
'sv.speech2text.wav2vec2.gpu': 'asr_test_by_marma_gpu', 'sv.speech2text.wav2vec2.v2': 'asr_wav2vec2_swedish_common_voice', 'sv.speech2text.wav2vec2.v2_large': 'asr_wav2vec2_large_voxrex_swedish_4gram', 'sv.speech2text.wav2vec2.v2_large_gpu': 'asr_wav2vec2_large_voxrex_swedish_4gram_gpu', 'sv.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_welsh', 'sv.speech2text.wav2vec_xlsr.v2_large.by_marma': 'asr_wav2vec2_large_xlsr_swedish', 'sv.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_welsh_gpu', 'sv.speech2text.wav2vec_xlsr.v2_large_gpu.by_marma': 'asr_wav2vec2_large_xlsr_swedish_gpu', 'sv.stopwords': 'stopwords_iso', 'sv.stopwords.iso': 'stopwords_iso'}, 'sw': { 'sw.answer_question.tydiqa.xlm_roberta.base': 'xlm_roberta_qa_afriberta_base_finetuned_tydiqa', 'sw.classify.bert.news.': 'bert_classifier_bert_swahili_news_classification', 'sw.classify.sentiment.': 'classifierdl_xlm_roberta_sentiment', 'sw.embed.bert.cased_base': 'bert_embeddings_base_sw_cased', 'sw.embed.w2v_cc_300d': 'w2v_cc_300d', 'sw.embed.xlm_roberta': 'xlm_roberta_base_finetuned_swahili', 'sw.embed_sentence.xlm_roberta': 'sent_xlm_roberta_base_finetuned_swahili', 'sw.ner.bert.uncased_base': 'bert_ner_bert_base_uncased_swa', 'sw.ner.xlmr_roberta.base_finetuned': 'xlmroberta_ner_base_finetuned_amharic_finetuned_ner_swahili', 'sw.ner.xlmr_roberta.base_finetuned_hausa.by_mbeukman': 'xlmroberta_ner_base_finetuned_hausa_finetuned_ner_swahili', 'sw.ner.xlmr_roberta.base_finetuned_igbo.by_mbeukman': 'xlmroberta_ner_base_finetuned_igbo_finetuned_ner_swahili', 'sw.ner.xlmr_roberta.base_finetuned_kinyarwanda.by_mbeukman': 'xlmroberta_ner_base_finetuned_kinyarwanda_finetuned_ner_swahili', 'sw.ner.xlmr_roberta.base_finetuned_luganda.by_mbeukman': 'xlmroberta_ner_base_finetuned_luganda_finetuned_ner_swahili', 'sw.ner.xlmr_roberta.base_finetuned_luo.by_mbeukman': 'xlmroberta_ner_base_finetuned_luo_finetuned_ner_swahili', 'sw.ner.xlmr_roberta.base_finetuned_naija.by_mbeukman': 
'xlmroberta_ner_base_finetuned_naija_finetuned_ner_swahili', 'sw.ner.xlmr_roberta.base_finetuned_swahili.by_mbeukman': 'xlmroberta_ner_base_finetuned_ner_swahili', 'sw.ner.xlmr_roberta.base_finetuned_swahili.v2.by_mbeukman': 'xlmroberta_ner_base_finetuned_swahili_finetuned_ner_swahili', 'sw.ner.xlmr_roberta.base_finetuned_wolof.by_mbeukman': 'xlmroberta_ner_base_finetuned_wolof_finetuned_ner_swahili', 'sw.ner.xlmr_roberta.base_finetuned_yoruba.by_mbeukman': 'xlmroberta_ner_base_finetuned_yoruba_finetuned_ner_swahili', 'sw.stopwords': 'stopwords_sw'}, 'swa': {'swa.ner.bert.uncased_base': 'bert_ner_mbert_base_uncased_ner_swa', 'swa.ner.bert.uncased_base.by_arnolfokam': 'bert_ner_mbert_base_uncased_swa', 'swa.ner.roberta.base': 'roberta_ner_roberta_base_swa'}, 'ta': { 'ta.answer_question.bert.chaii.cased_multilingual_base_finetuned': 'bert_qa_bert_base_multilingual_cased_finetuned_chaii', 'ta.answer_question.squad.xlm_roberta': 'xlm_roberta_qa_xlm_roberta_squad_tamil', 'ta.classify.xlmr_roberta': 'xlmroberta_classifier_deoffxlmr_mono_tamil', 'ta.detect_sentence': 'sentence_detector_dl', 'ta.embed.muril_adapted_local': 'bert_embeddings_muril_adapted_local', 'ta.embed.w2v_cc_300d': 'w2v_cc_300d', 'ta.lemma': 'lemma', 'ta.lemma.ttb': 'lemma_ttb', 'ta.pos': 'pos_ttb', 'ta.stopwords': 'stopwords_iso'}, 'te': {'te.detect_sentence': 'sentence_detector_dl', 'te.embed.bert': 'bert_embeddings_indic_transformers', 'te.embed.distil_bert': 'distilbert_embeddings_indic_transformers', 'te.embed.distilbert': 'distilbert_uncased', 'te.embed.indic_transformers_te_bert': 'bert_embeddings_indic_transformers_te_bert', 'te.embed.indic_transformers_te_roberta': 'roberta_embeddings_indic_transformers_te_roberta', 'te.embed.muril_adapted_local': 'bert_embeddings_muril_adapted_local', 'te.embed.roberta': 'roberta_embeddings_indic_transformers', 'te.embed.telugu_bertu': 'bert_embeddings_telugu_bertu', 'te.embed.w2v_cc_300d': 'w2v_cc_300d', 'te.embed.xlmr_roberta': 
'xlmroberta_embeddings_indic_transformers_te_xlmroberta', 'te.lemma': 'lemma_mtg', 'te.pos': 'pos_mtg', 'te.stopwords': 'stopwords_iso'}, 'tg': {'tg.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'th': { 'th.answer_question.bert.multilingual_base_cased': 'bert_qa_bert_base_multilingual_cased_finetune_qa', 'th.answer_question.squadv2.xlm_roberta.base': 'xlm_roberta_qa_thai_xlm_roberta_base_squad2', 'th.answer_question.xquad.multi_lingual_bert.base': 'bert_qa_xquad_th_mbert_base', 'th.answer_question.xquad_squad.bert.cased': 'bert_qa_thai_bert_multi_cased_finetuned_xquadv1_finetuned_squad', 'th.classify.sentiment': 'sentiment_jager_use', 'th.embed.bert.cased_base': 'bert_embeddings_base_th_cased', 'th.embed.distilbert_base_cased': 'distilbert_embeddings_distilbert_base_th_cased', 'th.embed.w2v_cc_300d': 'w2v_cc_300d', 'th.ner.lst20.glove_840B_300D': 'ner_lst20_glove_840B_300d', 'th.ner.pos.universal_dependencies.base': 'bert_pos_bert_base_thai_upos', 'th.pos': 'pos_lst20', 'th.segment_words': 'wordseg_best', 'th.sentiment': 'sentiment_jager_use', 'th.stopwords': 'stopwords_iso', 'th.stopwords.iso': 'stopwords_iso'}, 'ti': {'ti.classify.roberta': 'roberta_classifier_ti_geezswitch', 'ti.classify.roberta.sentiment.': 'roberta_classifier_ti_sentiment', 'ti.ner.pos': 'roberta_pos_veganuary_pos', 'ti.stopwords': 'stopwords_iso'}, 'tk': {'tk.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'tl': { 'tl.embed.electra.cased_base': 'electra_embeddings_electra_tagalog_base_cased_generator', 'tl.embed.electra.cased_small': 'electra_embeddings_electra_tagalog_small_cased_generator', 'tl.embed.electra.uncased_base': 'electra_embeddings_electra_tagalog_base_uncased_generator', 'tl.embed.electra.uncased_small': 'electra_embeddings_electra_tagalog_small_uncased_generator', 'tl.embed.roberta.base': 'roberta_embeddings_tagalog_base', 'tl.embed.roberta.large': 'roberta_embeddings_tagalog_large', 'tl.embed.roberta_tagalog_base': 'roberta_embeddings_roberta_tagalog_base', 'tl.embed.roberta_tagalog_large': 
'roberta_embeddings_roberta_tagalog_large', 'tl.embed.w2v_cc_300d': 'w2v_cc_300d', 'tl.lemma': 'lemma_spacylookup', 'tl.stopwords': 'stopwords_iso'}, 'tn': {'tn.embed.roberta': 'roberta_embeddings_tswanabert', 'tn.stopwords': 'stopwords_iso'}, 'tr': {'tr.answer_question.bert': 'bert_qa_bert_turkish_question_answering', 'tr.distilbert.zero_shot_classifier.multinli': 'distilbert_base_zero_shot_classifier_turkish_cased_multinli', 'tr.distilbert.zero_shot_classifier.allnli': 'distilbert_base_zero_shot_classifier_turkish_cased_allnli', 'tr.distilbert.zero_shot_classifier.snli': 'distilbert_base_zero_shot_classifier_turkish_cased_snli', 'tr.answer_question.bert.base_uncased': 'bert_qa_loodos_bert_base_uncased_QA_fine_tuned', 'tr.answer_question.bert.by_lserinol': 'bert_qa_bert_turkish_question_answering', 'tr.answer_question.bert.by_yunusemreemik': 'bert_qa_logo_qna_model', 'tr.answer_question.bert.cased_base_128d_finetuned_epochs_3': 'bert_qa_base_turkish_128k_cased_finetuned_lr_2e_05_epochs_3', 'tr.answer_question.bert.cased_base_finetuned_epochs_3': 'bert_qa_bert_base_turkish_cased_finetuned_lr_2e_05_epochs_3', 'tr.answer_question.bert.distilled': 'bert_qa_distilbert_tr_q_a', 'tr.answer_question.bert.onwhol': 'bert_qa_modelonwhol', 'tr.answer_question.bert.squadv2.cased_v2': 'bert_qa_question_answering_cased_squadv2', 'tr.answer_question.bert.squadv2.uncased_v2': 'bert_qa_question_answering_uncased_squadv2', 'tr.answer_question.bert.tquad.': 'bert_qa_modelontquad', 'tr.answer_question.bert.tquad.cased_base_128d_finetuned_epochs_1': 'bert_qa_base_turkish_128k_cased_tquad2_finetuned_lr_2e_05_epochs_1', 'tr.answer_question.bert.tquad.cased_base_128d_finetuned_epochs_3': 'bert_qa_base_turkish_128k_cased_tquad2_finetuned_lr_2e_05_epochs_3', 'tr.answer_question.electra': 'electra_qa_turkish', 'tr.answer_question.electra.small_uncased': 'electra_qa_small_turkish_uncased_discriminator_finetuned', 'tr.answer_question.squad.bert.base': 'bert_qa_bert_base_turkish_squad', 
'tr.answer_question.squad.electra': 'electra_qa_enelpi_squad', 'tr.answer_question.squad.electra.base': 'electra_qa_base_discriminator_finetuned_squadv1', 'tr.answer_question.squadv2.electra.base_v2': 'electra_qa_base_discriminator_finetuned_squadv2', 'tr.answer_question.xlm_roberta': 'xlm_roberta_qa_XLM_Turkish', 'tr.classify.bert': 'bert_classifier_autotrain_sentanaly_1016134101', 'tr.classify.bert.base': 'bert_classifier_bert_base_turkish_bullying', 'tr.classify.bert.by_akoksal': 'bert_classifier_bounti', 'tr.classify.bert.by_gurkan08': 'bert_classifier_gurkan08_turkish_text_classification', 'tr.classify.bert.by_hemekci': 'bert_classifier_off_detection_turkish', 'tr.classify.bert.by_savasy': 'bert_classifier_savasy_turkish_text_classification', 'tr.classify.bert.multi2convai.': 'bert_classifier_multi2convai_logistics', 'tr.classify.bert.news.': 'bert_classifier_interpress_turkish_news_classification', 'tr.classify.bert.sentiment.': 'bert_sequence_classifier_turkish_sentiment', 'tr.classify.bert.sentiment.by_emre': 'bert_classifier_turkish_sentiment_analysis', 'tr.classify.bert.sentiment.by_gurkan08': 'bert_classifier_turkish_product_comment_sentiment_classification', 'tr.classify.bert.sentiment.cased_base': 'bert_classifier_bert_base_turkish_sentiment_cased', 'tr.classify.bert.tweet_sentiment.': 'bert_classifier_sentiment_tweets', 'tr.classify.bert.uncased': 'bert_classifier_lex_textclassification_turkish_uncased', 'tr.classify.cyberbullying': 'classifierdl_berturk_cyberbullying', 'tr.classify.distil_bert.cased_base': 'distilbert_sequence_classifier_distilbert_base_turkish_cased_emotion', 'tr.classify.electra.base': 'electra_classifier_qd_dialog_base_turkish', 'tr.classify.electra.base.by_izzet': 'electra_classifier_qd_quora_base_turkish', 'tr.classify.electra.sentiment.': 'electra_classifier_turkish_sentiment_analysis', 'tr.classify.electra.tweet.base': 'electra_classifier_qd_tweet_base_turkish', 'tr.classify.news': 'classifierdl_bert_news', 
'tr.classify.sentiment.': 'classifierdl_use_sentiment', 'tr.classify.token_bert.turkish_ner': 'bert_token_classifier_turkish_ner', 'tr.embed.bert': 'bert_base_turkish_cased', 'tr.embed.bert.cased_base': 'bert_embeddings_base_tr_cased', 'tr.embed.bert.uncased': 'bert_base_turkish_uncased', 'tr.embed.bert_cased': 'bert_embeddings_bert_base_tr_cased', 'tr.embed.distilbert_base_cased': 'distilbert_embeddings_distilbert_base_tr_cased', 'tr.embed.electra.cased_base': 'electra_embeddings_electra_base_turkish_mc4_cased_generator', 'tr.embed.electra.uncased_base': 'electra_embeddings_electra_base_turkish_mc4_uncased_generator', 'tr.embed.w2v_cc_300d': 'w2v_cc_300d', 'tr.lemma': 'lemma_penn', 'tr.lemma.atis': 'lemma_atis', 'tr.lemma.boun': 'lemma_boun', 'tr.lemma.framenet': 'lemma_framenet', 'tr.lemma.imst': 'lemma_imst', 'tr.lemma.kenet': 'lemma_kenet', 'tr.lemma.penn': 'lemma_penn', 'tr.lemma.spacylookup': 'lemma_spacylookup', 'tr.lemma.tourism': 'lemma_tourism', 'tr.ner': 'turkish_ner_840B_300', 'tr.ner.bert': 'turkish_ner_bert', 'tr.ner.bert.128d': 'bert_ner_berturk_128k_keyword_discriminator', 'tr.ner.bert.by_busecarik': 'bert_token_classifier_berturk_sunlp_ner_turkish', 'tr.ner.bert.by_gurkan08': 'bert_ner_turkish_ner', 'tr.ner.bert.cased': 'bert_token_classifier_berturk_cased_keyword_discriminator', 'tr.ner.bert.cased.by_alierenak': 'bert_token_classifier_berturk_cased_ner', 'tr.ner.bert.cased_base': 'bert_ner_bert_base_turkish_cased_ner', 'tr.ner.bert.cased_base.by_beyhan': 'bert_ner_bert_base_turkish_ner_cased_pretrained', 'tr.ner.bert.cased_base.by_savasy': 'bert_ner_bert_base_turkish_ner_cased', 'tr.ner.bert.cased_base.by_winvoker': 'bert_ner_bert_base_turkish_cased_ner_tf', 'tr.ner.bert.keyword_extractor': 'bert_ner_berturk_keyword_extractor', 'tr.ner.bert.lang': 'bert_ner_offlangdetectionturkish', 'tr.ner.bert.loodos.by_busecarik': 'bert_token_classifier_loodos_sunlp_ner_turkish', 'tr.ner.bert.uncased': 'bert_ner_berturk_uncased_keyword_discriminator', 
'tr.ner.bert.uncased.by_yanekyuk': 'bert_ner_berturk_uncased_keyword_extractor', 'tr.ner.xlm_roberta': 'xlm_roberta_base_token_classifier_ner', 'tr.ner.xlmr_roberta.base': 'xlmroberta_ner_xlm_roberta_base_turkish_ner', 'tr.pos': 'pos_boun', 'tr.pos.atis': 'pos_atis', 'tr.pos.boun': 'pos_boun', 'tr.pos.framenet': 'pos_framenet', 'tr.pos.imst': 'pos_imst', 'tr.pos.kenet': 'pos_kenet', 'tr.pos.penn': 'pos_penn', 'tr.pos.tourism': 'pos_tourism', 'tr.pos.ud_imst': 'pos_ud_imst', 'tr.stopwords': 'stopwords_iso', 'tr.stopwords.iso': 'stopwords_iso'}, 'tt': {'tt.embed.w2v_cc_300d': 'w2v_cc_300d', 'tt.speech2text.wav2vec_xlsr.l_v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_tatar_by_anton_l_gpu', 'tt.speech2text.wav2vec_xlsr.v2_gpu': 'asr_wav2vec2_xlsr_tatar_gpu', 'tt.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_tatar_by_anton_l', 'tt.speech2text.wav2vec_xlsr.v2_large.by_crang': 'asr_wav2vec2_large_xlsr_53_tatar_by_crang', 'tt.speech2text.wav2vec_xlsr.v2_large_300m': 'asr_wav2vec2_large_xls_r_300m_tatar', 'tt.speech2text.wav2vec_xlsr.v2_large_300m_gpu': 'asr_wav2vec2_large_xls_r_300m_tatar_gpu', 'tt.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_tatar_by_crang_gpu', 'tt.speech2text.wav2vec_xlsr.v2_large_small': 'asr_wav2vec2_large_xlsr_53_W2V2_TATAR_SMALL', 'tt.speech2text.wav2vec_xlsr.v2_large_small_gpu': 'asr_wav2vec2_large_xlsr_53_w2v2_tatar_small_gpu', 'tt.stopwords': 'stopwords_iso'}, 'tw': {'tw.speech2text.wav2vec_xlsr.v2': 'asr_wav2vec2large_xlsr_akan'}, 'ug': {'ug.embed.w2v_cc_300d': 'w2v_cc_300d', 'ug.lemma': 'lemma_udt', 'ug.lemma.udt': 'lemma_udt', 'ug.pos.udt': 'pos_udt'}, 'uk': {'uk.answer_question.xlmr_roberta': 'xlmroberta_qa_ukrainian', 'uk.detect_sentence': 'sentence_detector_dl', 'uk.embed.bert.cased_base': 'bert_embeddings_base_uk_cased', 'uk.embed.distilbert_base_cased': 'distilbert_embeddings_distilbert_base_uk_cased', 'uk.embed.ukr_roberta_base': 'roberta_embeddings_ukr_roberta_base', 'uk.embed.w2v_cc_300d': 
'w2v_cc_300d', 'uk.embed.xlmr_roberta.base': 'xlmroberta_embeddings_xlm_roberta_base', 'uk.lemma': 'lemma', 'uk.lemma.iu': 'lemma_iu', 'uk.ner.pos': 'xlmroberta_pos_uk_morph', 'uk.ner.xlmr_roberta': 'xlmroberta_ner_uk_ner', 'uk.pos': 'pos_ud_iu', 'uk.pos.bert_base_slavic_cyrillic_upos': 'bert_pos_bert_base_slavic_cyrillic_upos', 'uk.pos.bert_large_slavic_cyrillic_upos': 'bert_pos_bert_large_slavic_cyrillic_upos', 'uk.pos.iu': 'pos_iu', 'uk.pos.ud_iu': 'pos_ud_iu', 'uk.stopwords': 'stopwords_iso'}, 'unk': { 'unk.ner.bert': 'bert_token_classifier_autotrain_medicaltokenclassification_1279048948'}, 'ur': { 'ur.classify.distilbert_sequence.imdb': 'distilbert_base_sequence_classifier_imdb', 'ur.classify.fakenews': 'classifierdl_urduvec_fakenews', 'ur.classify.news': 'classifierdl_bert_news', 'ur.classify.sentiment.imdb': 'mdeberta_v3_base_sequence_classifier_imdb', 'ur.embed': 'urduvec_140M_300d', 'ur.embed.bert.cased_base': 'bert_embeddings_base_ur_cased', 'ur.embed.bert_cased': 'bert_embeddings_bert_base_ur_cased', 'ur.embed.distilbert_base_cased': 'distilbert_embeddings_distilbert_base_ur_cased', 'ur.embed.glove.300d': 'urduvec_140M_300d', 'ur.embed.muril_adapted_local': 'bert_embeddings_muril_adapted_local', 'ur.embed.roberta_urdu_small': 'roberta_embeddings_roberta_urdu_small', 'ur.embed.urdu_vec_140M_300d': 'urduvec_140M_300d', 'ur.embed.w2v_cc_300d': 'w2v_cc_300d', 'ur.lemma': 'lemma_udtb', 'ur.lemma.spacylookup': 'lemma_spacylookup', 'ur.lemma.udtb': 'lemma_udtb', 'ur.ner': 'uner_mk_140M_300d', 'ur.ner.mk_140M_300d': 'uner_mk_140M_300d', 'ur.pos': 'pos_udtb', 'ur.pos.ud_udtb': 'pos_ud_udtb', 'ur.pos.udtb': 'pos_udtb', 'ur.sentiment': 'sentimentdl_urduvec_imdb', 'ur.stopwords': 'stopwords_iso'}, 'uz': {'uz.classify.bert.news.': 'bert_classifier_uzbek_news_category', 'uz.embed.w2v_cc_300d': 'w2v_cc_300d', 'uz.speech2text.wav2vec2': 'asr_uzbek_stt', 'uz.speech2text.wav2vec2.gpu': 'asr_uzbek_stt_gpu'}, 'vec': {'vec.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'vi': 
{'vi.answer_question.bert': 'bert_qa_bert_qa_vi_nvkha', 'vi.answer_question.bert.cased_multilingual_base': 'bert_qa_multilingual_bert_base_cased_vietnamese', 'vi.answer_question.bert.uncased_base_finetuned': 'bert_qa_bert_base_uncased_finetuned_vi_infovqa', 'vi.answer_question.xlm_roberta.large': 'xlm_roberta_qa_xlm_roberta_large_vi_qa', 'vi.classify.sentiment.distilled': 'classifierdl_distilbert_sentiment', 'vi.embed.bert.cased_base': 'bert_embeddings_base_vi_cased', 'vi.embed.bert_cased': 'bert_embeddings_bert_base_vi_cased', 'vi.embed.distilbert.cased': 'distilbert_base_cased', 'vi.embed.w2v_cc_300d': 'w2v_cc_300d', 'vi.lemma': 'lemma', 'vi.lemma.vtb': 'lemma_vtb', 'vi.pos': 'pos_vtb', 'vi.speech2text.wav2vec2.v2_base_160h': 'asr_wav2vec2_base_vietnamese_160h', 'vi.speech2text.wav2vec2.v2_base_160h_gpu': 'asr_wav2vec2_base_vietnamese_160h_gpu', 'vi.speech2text.wav2vec2.v2_base_250h': 'asr_wav2vec2_base_vietnamese_250h', 'vi.speech2text.wav2vec2.v2_base_250h_gpu': 'asr_wav2vec2_base_vietnamese_250h_gpu', 'vi.speech2text.wav2vec2.v2_base_gpu': 'asr_wav2vec2_base_vietnamese_gpu', 'vi.speech2text.wav2vec_xlsr.cord19.v2_large': 'asr_wav2vec2_large_xlsr_vietnamese_by_ontocord', 'vi.speech2text.wav2vec_xlsr.cord19.v2_large_gpu': 'asr_wav2vec2_large_xlsr_vietnamese_by_ontocord_gpu', 'vi.speech2text.wav2vec_xlsr.v2': 'asr_fine_tune_xlsr_wav2vec2_speech2text_vietnamese', 'vi.speech2text.wav2vec_xlsr.v2_gpu': 'asr_fine_tune_xlsr_wav2vec2_speech2text_vietnamese_gpu', 'vi.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_53_vietnamese_by_anuragshas', 'vi.speech2text.wav2vec_xlsr.v2_large.by_cuongld': 'asr_wav2vec2_large_xlsr_vietnamese_by_cuongld', 'vi.speech2text.wav2vec_xlsr.v2_large.by_nhut': 'asr_wav2vec2_large_xlsr_vietnamese_by_nhut', 'vi.speech2text.wav2vec_xlsr.v2_large.by_not_tanh': 'asr_wav2vec2_large_xlsr_53_vietnamese_by_not_tanh', 'vi.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_53_vietnamese_by_anuragshas_gpu', 
'vi.speech2text.wav2vec_xlsr.v2_large_gpu.by_cuongld': 'asr_wav2vec2_large_xlsr_vietnamese_by_cuongld_gpu', 'vi.speech2text.wav2vec_xlsr.v2_large_gpu.by_nhut': 'asr_wav2vec2_large_xlsr_vietnamese_by_nhut_gpu', 'vi.speech2text.wav2vec_xlsr.v2_large_gpu.by_not_tanh': 'asr_wav2vec2_large_xlsr_53_vietnamese_by_not_tanh_gpu', 'vi.stopwords': 'stopwords_iso'}, 'vls': {'vls.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'vn': { 'vn.answer_question.xlm_roberta.base': 'xlm_roberta_qa_xlm_roberta_base_vietnamese'}, 'vo': {'vo.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'wa': {'wa.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'war': {'war.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'wo': {'wo.embed.xlm_roberta': 'xlm_roberta_base_finetuned_wolof', 'wo.embed_sentence.xlm_roberta': 'sent_xlm_roberta_base_finetuned_wolof', 'wo.lemma.wtb': 'lemma_wtb', 'wo.ner.xlmr_roberta.base_finetuned': 'xlmroberta_ner_base_finetuned_ner_wolof', 'wo.ner.xlmr_roberta.base_finetuned_swahili.by_mbeukman': 'xlmroberta_ner_base_finetuned_swahili_finetuned_ner_wolof', 'wo.ner.xlmr_roberta.base_finetuned_wolof.by_mbeukman': 'xlmroberta_ner_base_finetuned_wolof_finetuned_ner_wolof', 'wo.pos': 'pos_wtb', 'wo.speech2text.wav2vec_xlsr.v2_300m': 'asr_av2vec2_xls_r_300m_wolof_lm', 'wo.speech2text.wav2vec_xlsr.v2_300m_gpu': 'asr_av2vec2_xls_r_300m_wolof_lm_gpu'}, 'xmf': {'xmf.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'xx': {'xx.aav.marian.translate_to.en': 'opus_mt_aav_en', 'xx.aed.marian.translate_to.es': 'opus_mt_es_aed', 'xx.af.marian.translate_to.de': 'opus_mt_de_af', 'xx.af.marian.translate_to.en': 'opus_mt_af_en', 'xx.af.marian.translate_to.eo': 'opus_mt_eo_af', 'xx.af.marian.translate_to.es': 'opus_mt_es_af', 'xx.af.marian.translate_to.fi': 'opus_mt_fi_af', 'xx.af.marian.translate_to.fr': 'opus_mt_fr_af', 'xx.af.marian.translate_to.nl': 'opus_mt_nl_af', 'xx.af.marian.translate_to.ru': 'opus_mt_ru_af', 'xx.af.marian.translate_to.sv': 'opus_mt_sv_af', 'xx.af.translate_to.ru': 'opus_tatoeba_af_ru', 'xx.afa.marian.translate_to.afa': 
'opus_mt_afa_afa', 'xx.afa.marian.translate_to.en': 'opus_mt_afa_en', 'xx.alv.marian.translate_to.en': 'opus_mt_alv_en', 'xx.answer_question.bert.mlqa.finetuned.extra_support_ar_hi': 'bert_qa_mbert_finetuned_mlqa_ar_hi_dev', 'xx.answer_question.bert.mlqa.finetuned.extra_support_de_hi': 'bert_qa_mbert_finetuned_mlqa_de_hi_dev', 'xx.answer_question.bert.mlqa.finetuned.extra_support_es_hi': 'bert_qa_mbert_finetuned_mlqa_es_hi_dev', 'xx.answer_question.bert.mlqa.finetuned.extra_support_hi_en': 'bert_qa_mbert_finetuned_mlqa_en_hi_dev', 'xx.answer_question.bert.mlqa.finetuned.extra_support_vi_hi': 'bert_qa_mbert_finetuned_mlqa_vi_hi_dev', 'xx.answer_question.bert.mlqa.finetuned.extra_support_zh_hi': 'bert_qa_mbert_finetuned_mlqa_zh_hi_dev', 'xx.answer_question.bert.mlqa.finetuned.extra_support_zh_hi_en': 'bert_qa_mbert_finetuned_mlqa_en_zh_hi_dev', 'xx.answer_question.bert.squad.cased_multilingual_base_finetuned': 'bert_qa_paul_vinh_bert_base_multilingual_cased_finetuned_squad', 'xx.answer_question.bert.squad.cased_multilingual_base_finetuned.by_salti': 'bert_qa_salti_bert_base_multilingual_cased_finetuned_squad', 'xx.answer_question.bert.squad.cased_multilingual_base_finetuned.by_vanichandna': 'bert_qa_bert_base_multilingual_cased_finetuned_squadv1', 'xx.answer_question.bert.squad.extra_support_es_de_en': 'bert_qa_squad_mbert_en_de_es_model', 'xx.answer_question.bert.squad.extra_support_zh_de_en_es_vi': 'bert_qa_squad_mbert_en_de_es_vi_zh_model', 'xx.answer_question.bert.squad.uncased_multilingual_base_finetuned': 'bert_qa_bert_base_multilingual_uncased_finetuned_squad', 'xx.answer_question.bert.tydiqa.3lang': 'bert_qa_3lang', 'xx.answer_question.bert.uncased_base': 'bert_qa_base_multi_uncased', 'xx.answer_question.bert.xquad.multilingual_base': 'bert_qa_bert_base_multilingual_xquad', 'xx.answer_question.chaii.xlm_roberta': 'xlm_roberta_qa_xlm_roberta_qa_chaii', 'xx.answer_question.distil_bert.en_de_es_tuned.by_ZYW': 'distilbert_qa_en_de_es_model', 
'xx.answer_question.distil_bert.en_de_tuned.by_ZYW': 'distilbert_qa_en_de_model', 'xx.answer_question.distil_bert.vi_zh_es_tuned.by_ZYW': 'distilbert_qa_en_de_vi_zh_es_model', 'xx.answer_question.roberta': 'roberta_qa_ft_lr_cu_leolin12345', 'xx.answer_question.squad.distil_bert._en_de_es_vi_zh_tuned.by_ZYW': 'distilbert_qa_squad_en_de_es_vi_zh_model', 'xx.answer_question.squad.distil_bert.en_de_es_tuned.by_ZYW': 'distilbert_qa_squad_en_de_es_model', 'xx.answer_question.tydiqa.bert': 'bert_qa_telugu_bertu_tydiqa', 'xx.answer_question.tydiqa.multi_lingual_bert': 'bert_qa_Part_1_mBERT_Model_E1', 'xx.answer_question.xlm_roberta.distilled': 'xlm_roberta_qa_distill_xlm_mrc', 'xx.answer_question.xquad.bert.cased': 'bert_qa_bert_multi_cased_finetuned_xquadv1', 'xx.answer_question.xquad.bert.uncased': 'bert_qa_bert_multi_uncased_finetuned_xquadv1', 'xx.answer_question.xquad_tydiqa.bert.cased': 'bert_qa_bert_multi_cased_finedtuned_xquad_tydiqa_goldp', 'xx.ar.marian.translate_to.de': 'opus_mt_de_ar', 'xx.ar.marian.translate_to.el': 'opus_mt_el_ar', 'xx.ar.marian.translate_to.en': 'opus_mt_ar_en', 'xx.ar.marian.translate_to.es': 'opus_mt_es_ar', 'xx.ar.marian.translate_to.fr': 'opus_mt_fr_ar', 'xx.ar.marian.translate_to.he': 'opus_mt_he_ar', 'xx.ar.marian.translate_to.it': 'opus_mt_it_ar', 'xx.ar.marian.translate_to.ja': 'opus_mt_ja_ar', 'xx.ar.marian.translate_to.pl': 'opus_mt_pl_ar', 'xx.ar.marian.translate_to.ru': 'opus_mt_ru_ar', 'xx.ar.marian.translate_to.tr': 'opus_mt_tr_ar', 'xx.art.marian.translate_to.en': 'opus_mt_art_en', 'xx.ase.marian.translate_to.de': 'opus_mt_de_ase', 'xx.ase.marian.translate_to.en': 'opus_mt_ase_en', 'xx.ase.marian.translate_to.es': 'opus_mt_es_ase', 'xx.ase.marian.translate_to.fr': 'opus_mt_fr_ase', 'xx.ase.marian.translate_to.sv': 'opus_mt_sv_ase', 'xx.az.marian.translate_to.en': 'opus_mt_az_en', 'xx.az.marian.translate_to.tr': 'opus_mt_tr_az', 'xx.bat.marian.translate_to.en': 'opus_mt_bat_en', 'xx.bcl.marian.translate_to.de': 
'opus_mt_de_bcl', 'xx.bcl.marian.translate_to.en': 'opus_mt_bcl_en', 'xx.bcl.marian.translate_to.es': 'opus_mt_es_bcl', 'xx.bcl.marian.translate_to.fi': 'opus_mt_fi_bcl', 'xx.bcl.marian.translate_to.fr': 'opus_mt_fr_bcl', 'xx.bcl.marian.translate_to.sv': 'opus_mt_sv_bcl', 'xx.bem.marian.translate_to.en': 'opus_mt_bem_en', 'xx.bem.marian.translate_to.fi': 'opus_mt_fi_bem', 'xx.bem.marian.translate_to.fr': 'opus_mt_fr_bem', 'xx.bem.marian.translate_to.sv': 'opus_mt_sv_bem', 'xx.ber.marian.translate_to.en': 'opus_mt_ber_en', 'xx.ber.marian.translate_to.es': 'opus_mt_es_ber', 'xx.ber.marian.translate_to.fr': 'opus_mt_fr_ber', 'xx.bg.marian.translate_to.de': 'opus_mt_de_bg', 'xx.bg.marian.translate_to.en': 'opus_mt_bg_en', 'xx.bg.marian.translate_to.eo': 'opus_mt_eo_bg', 'xx.bg.marian.translate_to.es': 'opus_mt_es_bg', 'xx.bg.marian.translate_to.fi': 'opus_mt_fi_bg', 'xx.bg.marian.translate_to.fr': 'opus_mt_fr_bg', 'xx.bg.marian.translate_to.it': 'opus_mt_it_bg', 'xx.bg.marian.translate_to.ja': 'opus_mt_ja_bg', 'xx.bg.marian.translate_to.ru': 'opus_mt_ru_bg', 'xx.bg.marian.translate_to.sv': 'opus_mt_sv_bg', 'xx.bg.marian.translate_to.uk': 'opus_mt_uk_bg', 'xx.bg.marian.translate_to.zh': 'opus_mt_zh_bg', 'xx.bi.marian.translate_to.de': 'opus_mt_de_bi', 'xx.bi.marian.translate_to.en': 'opus_mt_bi_en', 'xx.bi.marian.translate_to.es': 'opus_mt_es_bi', 'xx.bi.marian.translate_to.fr': 'opus_mt_fr_bi', 'xx.bi.marian.translate_to.sv': 'opus_mt_sv_bi', 'xx.bn.marian.translate_to.en': 'opus_mt_bn_en', 'xx.bnt.marian.translate_to.en': 'opus_mt_bnt_en', 'xx.bzs.marian.translate_to.de': 'opus_mt_de_bzs', 'xx.bzs.marian.translate_to.en': 'opus_mt_bzs_en', 'xx.bzs.marian.translate_to.es': 'opus_mt_es_bzs', 'xx.bzs.marian.translate_to.fi': 'opus_mt_fi_bzs', 'xx.bzs.marian.translate_to.fr': 'opus_mt_fr_bzs', 'xx.bzs.marian.translate_to.sv': 'opus_mt_sv_bzs', 'xx.ca.marian.translate_to.de': 'opus_mt_de_ca', 'xx.ca.marian.translate_to.en': 'opus_mt_ca_en', 'xx.ca.marian.translate_to.es': 
'opus_mt_es_ca', 'xx.ca.marian.translate_to.fr': 'opus_mt_fr_ca', 'xx.ca.marian.translate_to.it': 'opus_mt_it_ca', 'xx.ca.marian.translate_to.nl': 'opus_mt_nl_ca', 'xx.ca.marian.translate_to.pt': 'opus_mt_pt_ca', 'xx.ca.marian.translate_to.uk': 'opus_mt_uk_ca', 'xx.cau.marian.translate_to.en': 'opus_mt_cau_en', 'xx.ccs.marian.translate_to.en': 'opus_mt_ccs_en', 'xx.ceb.marian.translate_to.en': 'opus_mt_ceb_en', 'xx.ceb.marian.translate_to.es': 'opus_mt_es_ceb', 'xx.ceb.marian.translate_to.fi': 'opus_mt_fi_ceb', 'xx.ceb.marian.translate_to.fr': 'opus_mt_fr_ceb', 'xx.ceb.marian.translate_to.sv': 'opus_mt_sv_ceb', 'xx.cel.marian.translate_to.en': 'opus_mt_cel_en', 'xx.chk.marian.translate_to.en': 'opus_mt_chk_en', 'xx.chk.marian.translate_to.sv': 'opus_mt_sv_chk', 'xx.classify.bert': 'bert_classifier_arabic_ner_ace', 'xx.classify.bert.by_meghanabhange': 'bert_classifier_hinglish_class', 'xx.classify.bert.by_mlkorra': 'bert_classifier_obgv_gder', 'xx.classify.bert.by_ychennlp': 'bert_classifier_arabic_relation_extraction', 'xx.classify.bert.cased_base.extra_support_es_en': 'bert_classifier_bert_base_en_es_codemix_cased', 'xx.classify.bert.cased_base.extra_support_hi_en': 'bert_classifier_bert_base_en_hi_codemix_cased', 'xx.classify.bert.lang': 'bert_classifier_tanglish_offensive_language_identification', 'xx.classify.bert.news.': 'bert_classifier_english_news_classification_headlines', 'xx.classify.bert.news.by_m47labs': 'bert_classifier_italian_news_classification_headlines', 'xx.classify.bert.sentiment.': 'bert_classifier_hinglish11k_sentiment_analysis', 'xx.classify.bert.sentiment.cased_tiny': 'bert_classifier_bert_tiny_bahasa_cased_sentiment', 'xx.classify.bert.sentiment.multilingual': 'bert_sequence_classifier_multilingual_sentiment', 'xx.classify.bert.sentiment.uncased_multilingual_base': 'bert_classifier_bert_base_multilingual_uncased_sentiment', 'xx.classify.bert.tweet.': 'bert_classifier_mbert_corona_tweets_belgium_curfew_support', 
'xx.classify.electra.tweet_sentiment.': 'electra_classifier_bertic_tweetsentiment', 'xx.classify.roberta.base': 'roberta_classifier_zabanshenas_base_mix', 'xx.classify.token_xlm_roberta.token_classifier_ner_40_lang': 'xlm_roberta_token_classifier_ner_40_lang', 'xx.classify.wiki_21': 'ld_wiki_tatoeba_cnn_21', 'xx.classify.wiki_21.bigru': 'ld_tatoeba_bigru_21', 'xx.classify.wiki_220': 'ld_wiki_tatoeba_cnn_220', 'xx.classify.wiki_231': 'ld_wiki_cnn_231', 'xx.classify.wiki_375': 'ld_wiki_tatoeba_cnn_375', 'xx.classify.wiki_43': 'ld_wiki_tatoeba_cnn_43', 'xx.classify.wiki_95': 'ld_wiki_tatoeba_cnn_95', 'xx.classify.wiki_99': 'ld_tatoeba_cnn_99', 'xx.classify.xlmr_roberta': 'xlmroberta_classifier_verdict', 'xx.classify.xlmr_roberta.glue.large.extra_support_ru_en': 'xlmroberta_classifier_large_en_ru_mnli', 'xx.classify.xlmr_roberta.xnli.base': 'xlmroberta_classifier_base_snli_mnli_anli_xnli', 'xx.cpf.marian.translate_to.en': 'opus_mt_cpf_en', 'xx.cpp.marian.translate_to.cpp': 'opus_mt_cpp_cpp', 'xx.cpp.marian.translate_to.en': 'opus_mt_cpp_en', 'xx.crs.marian.translate_to.de': 'opus_mt_de_crs', 'xx.crs.marian.translate_to.en': 'opus_mt_crs_en', 'xx.crs.marian.translate_to.es': 'opus_mt_es_crs', 'xx.crs.marian.translate_to.fi': 'opus_mt_fi_crs', 'xx.crs.marian.translate_to.fr': 'opus_mt_fr_crs', 'xx.crs.marian.translate_to.sv': 'opus_mt_sv_crs', 'xx.cs.marian.translate_to.de': 'opus_mt_de_cs', 'xx.cs.marian.translate_to.en': 'opus_mt_cs_en', 'xx.cs.marian.translate_to.eo': 'opus_mt_eo_cs', 'xx.cs.marian.translate_to.es': 'opus_mt_es_cs', 'xx.cs.marian.translate_to.fi': 'opus_mt_fi_cs', 'xx.cs.marian.translate_to.sv': 'opus_mt_sv_cs', 'xx.cs.marian.translate_to.uk': 'opus_mt_uk_cs', 'xx.cs.translate_to.sv': 'opus_mt_cs_sv', 'xx.csg.marian.translate_to.es': 'opus_mt_es_csg', 'xx.csn.marian.translate_to.es': 'opus_mt_es_csn', 'xx.cus.marian.translate_to.en': 'opus_mt_cus_en', 'xx.cy.marian.translate_to.en': 'opus_mt_cy_en', 'xx.da.marian.translate_to.de': 'opus_mt_de_da', 
'xx.da.marian.translate_to.en': 'opus_mt_da_en', 'xx.da.marian.translate_to.eo': 'opus_mt_eo_da', 'xx.da.marian.translate_to.es': 'opus_mt_es_da', 'xx.da.marian.translate_to.ja': 'opus_mt_ja_da', 'xx.da.marian.translate_to.no': 'opus_mt_no_da', 'xx.da.marian.translate_to.ru': 'opus_mt_ru_da', 'xx.de.marian.translate_to.af': 'opus_mt_af_de', 'xx.de.marian.translate_to.ar': 'opus_mt_ar_de', 'xx.de.marian.translate_to.ase': 'opus_mt_ase_de', 'xx.de.marian.translate_to.bcl': 'opus_mt_bcl_de', 'xx.de.marian.translate_to.bg': 'opus_mt_bg_de', 'xx.de.marian.translate_to.ca': 'opus_mt_ca_de', 'xx.de.marian.translate_to.crs': 'opus_mt_crs_de', 'xx.de.marian.translate_to.cs': 'opus_mt_cs_de', 'xx.de.marian.translate_to.da': 'opus_mt_da_de', 'xx.de.marian.translate_to.de': 'opus_mt_de_de', 'xx.de.marian.translate_to.ee': 'opus_mt_ee_de', 'xx.de.marian.translate_to.efi': 'opus_mt_efi_de', 'xx.de.marian.translate_to.en': 'opus_mt_de_en', 'xx.de.marian.translate_to.eo': 'opus_mt_eo_de', 'xx.de.marian.translate_to.es': 'opus_mt_es_de', 'xx.de.marian.translate_to.et': 'opus_mt_et_de', 'xx.de.marian.translate_to.eu': 'opus_mt_eu_de', 'xx.de.marian.translate_to.fi': 'opus_mt_fi_de', 'xx.de.marian.translate_to.fr': 'opus_mt_fr_de', 'xx.de.marian.translate_to.gaa': 'opus_mt_gaa_de', 'xx.de.marian.translate_to.guw': 'opus_mt_guw_de', 'xx.de.marian.translate_to.he': 'opus_mt_he_de', 'xx.de.marian.translate_to.hil': 'opus_mt_hil_de', 'xx.de.marian.translate_to.hu': 'opus_mt_hu_de', 'xx.de.marian.translate_to.ig': 'opus_mt_ig_de', 'xx.de.marian.translate_to.ilo': 'opus_mt_ilo_de', 'xx.de.marian.translate_to.is': 'opus_mt_is_de', 'xx.de.marian.translate_to.it': 'opus_mt_it_de', 'xx.de.marian.translate_to.ja': 'opus_mt_ja_de', 'xx.de.marian.translate_to.ko': 'opus_mt_ko_de', 'xx.de.marian.translate_to.ln': 'opus_mt_ln_de', 'xx.de.marian.translate_to.loz': 'opus_mt_loz_de', 'xx.de.marian.translate_to.lt': 'opus_mt_lt_de', 'xx.de.marian.translate_to.ms': 'opus_mt_ms_de', 
'xx.de.marian.translate_to.niu': 'opus_mt_niu_de', 'xx.de.marian.translate_to.no': 'opus_mt_no_de', 'xx.de.marian.translate_to.nso': 'opus_mt_nso_de', 'xx.de.marian.translate_to.ny': 'opus_mt_ny_de', 'xx.de.marian.translate_to.pag': 'opus_mt_pag_de', 'xx.de.marian.translate_to.pap': 'opus_mt_pap_de', 'xx.de.marian.translate_to.pl': 'opus_mt_pl_de', 'xx.de.marian.translate_to.rn': 'opus_mt_rn_de', 'xx.de.marian.translate_to.tl': 'opus_mt_tl_de', 'xx.de.marian.translate_to.uk': 'opus_mt_uk_de', 'xx.de.marian.translate_to.vi': 'opus_mt_vi_de', 'xx.de.marian.translate_to.zh': 'opus_mt_zh_de', 'xx.dra.marian.translate_to.en': 'opus_mt_dra_en', 'xx.ee.marian.translate_to.de': 'opus_mt_de_ee', 'xx.ee.marian.translate_to.en': 'opus_mt_ee_en', 'xx.ee.marian.translate_to.es': 'opus_mt_es_ee', 'xx.ee.marian.translate_to.fi': 'opus_mt_fi_ee', 'xx.ee.marian.translate_to.fr': 'opus_mt_fr_ee', 'xx.ee.marian.translate_to.sv': 'opus_mt_sv_ee', 'xx.efi.marian.translate_to.de': 'opus_mt_de_efi', 'xx.efi.marian.translate_to.en': 'opus_mt_efi_en', 'xx.efi.marian.translate_to.es': 'opus_mt_es_efi', 'xx.efi.marian.translate_to.fi': 'opus_mt_fi_efi', 'xx.efi.marian.translate_to.fr': 'opus_mt_fr_efi', 'xx.efi.marian.translate_to.sv': 'opus_mt_sv_efi', 'xx.el.marian.translate_to.ar': 'opus_mt_ar_el', 'xx.el.marian.translate_to.de': 'opus_mt_de_el', 'xx.el.marian.translate_to.eo': 'opus_mt_eo_el', 'xx.el.marian.translate_to.es': 'opus_mt_es_el', 'xx.el.marian.translate_to.fi': 'opus_mt_fi_el', 'xx.el.marian.translate_to.fr': 'opus_mt_fr_el', 'xx.el.marian.translate_to.sv': 'opus_mt_sv_el', 'xx.embed': 'glove_840B_300', 'xx.embed.albert.indic': 'albert_indic', 'xx.embed.bert': 'bert_multi_cased', 'xx.embed.bert.muril': 'bert_muril', 'xx.embed.bert_base_multilingual_cased': 'bert_base_multilingual_cased', 'xx.embed.bert_base_multilingual_uncased': 'bert_base_multilingual_uncased', 'xx.embed.bert_multi_cased': 'bert_multi_cased', 'xx.embed.distilbert': 'distilbert_base_multilingual_cased', 
'xx.embed.glove.6B_300': 'glove_6B_300', 'xx.embed.glove.840B_300': 'glove_840B_300', 'xx.embed.glove.glove_6B_100': 'glove_6B_100', 'xx.embed.mdeberta_v3_base': 'mdeberta_v3_base', 'xx.embed.xlm': 'xlm_roberta_base', 'xx.embed.xlm.base': 'xlm_roberta_base', 'xx.embed.xlm.twitter': 'twitter_xlm_roberta_base', 'xx.embed.xlm_roberta_large': 'xlm_roberta_large', 'xx.embed.xlm_roberta_xtreme_base': 'xlm_roberta_xtreme_base', 'xx.embed.xlmr_roberta.base': 'xlmroberta_embeddings_afriberta_base', 'xx.embed.xlmr_roberta.large': 'xlmroberta_embeddings_afriberta_large', 'xx.embed.xlmr_roberta.large.by_hfl': 'xlmroberta_embeddings_cino_large', 'xx.embed.xlmr_roberta.large_128d': 'xlmroberta_embeddings_roberta_large_eng_ara_128k', 'xx.embed.xlmr_roberta.mini_lm_mini': 'xlmroberta_embeddings_fairlex_fscs_minilm', 'xx.embed.xlmr_roberta.small': 'xlmroberta_embeddings_afriberta_small', 'xx.embed.xlmr_roberta.v2_base': 'xlmroberta_embeddings_cino_base_v2', 'xx.embed.xlmr_roberta.v2_large': 'xlmroberta_embeddings_cino_large_v2', 'xx.embed.xlmr_roberta.v2_small': 'xlmroberta_embeddings_cino_small_v2', 'xx.embed_sentence': 'sent_bert_multi_cased', 'xx.embed_sentence.bert': 'sent_bert_multi_cased', 'xx.embed_sentence.bert.cased': 'sent_bert_multi_cased', 'xx.embed_sentence.bert.muril': 'sent_bert_muril', 'xx.embed_sentence.bert_use_cmlm_multi_base': 'sent_bert_use_cmlm_multi_base', 'xx.embed_sentence.bert_use_cmlm_multi_base_br': 'sent_bert_use_cmlm_multi_base_br', 'xx.embed_sentence.labse': 'labse', 'xx.embed_sentence.xlm_roberta.base': 'sent_xlm_roberta_base', 'xx.en.marian.translate_to.aav': 'opus_mt_en_aav', 'xx.en.marian.translate_to.af': 'opus_mt_en_af', 'xx.en.marian.translate_to.afa': 'opus_mt_en_afa', 'xx.en.marian.translate_to.alv': 'opus_mt_en_alv', 'xx.en.marian.translate_to.ar': 'opus_mt_en_ar', 'xx.en.marian.translate_to.az': 'opus_mt_en_az', 'xx.en.marian.translate_to.bat': 'opus_mt_en_bat', 'xx.en.marian.translate_to.bcl': 'opus_mt_en_bcl', 
'xx.en.marian.translate_to.bem': 'opus_mt_en_bem', 'xx.en.marian.translate_to.ber': 'opus_mt_en_ber', 'xx.en.marian.translate_to.bg': 'opus_mt_en_bg', 'xx.en.marian.translate_to.bi': 'opus_mt_en_bi', 'xx.en.marian.translate_to.bnt': 'opus_mt_en_bnt', 'xx.en.marian.translate_to.bzs': 'opus_mt_en_bzs', 'xx.en.marian.translate_to.ca': 'opus_mt_en_ca', 'xx.en.marian.translate_to.ceb': 'opus_mt_en_ceb', 'xx.en.marian.translate_to.cel': 'opus_mt_en_cel', 'xx.en.marian.translate_to.chk': 'opus_mt_en_chk', 'xx.en.marian.translate_to.cpf': 'opus_mt_en_cpf', 'xx.en.marian.translate_to.cpp': 'opus_mt_en_cpp', 'xx.en.marian.translate_to.crs': 'opus_mt_en_crs', 'xx.en.marian.translate_to.cs': 'opus_mt_en_cs', 'xx.en.marian.translate_to.cus': 'opus_mt_en_cus', 'xx.en.marian.translate_to.cy': 'opus_mt_en_cy', 'xx.en.marian.translate_to.da': 'opus_mt_en_da', 'xx.en.marian.translate_to.de': 'opus_mt_en_de', 'xx.en.marian.translate_to.dra': 'opus_mt_en_dra', 'xx.en.marian.translate_to.ee': 'opus_mt_en_ee', 'xx.en.marian.translate_to.efi': 'opus_mt_en_efi', 'xx.en.marian.translate_to.el': 'opus_mt_en_el', 'xx.en.marian.translate_to.eo': 'opus_mt_en_eo', 'xx.en.marian.translate_to.es': 'opus_mt_en_es', 'xx.en.marian.translate_to.et': 'opus_mt_en_et', 'xx.en.marian.translate_to.eu': 'opus_mt_en_eu', 'xx.en.marian.translate_to.euq': 'opus_mt_en_euq', 'xx.en.marian.translate_to.fi': 'opus_mt_en_fi', 'xx.en.marian.translate_to.fiu': 'opus_mt_en_fiu', 'xx.en.marian.translate_to.fj': 'opus_mt_en_fj', 'xx.en.marian.translate_to.fr': 'opus_mt_en_fr', 'xx.en.marian.translate_to.ga': 'opus_mt_en_ga', 'xx.en.marian.translate_to.gaa': 'opus_mt_en_gaa', 'xx.en.marian.translate_to.gem': 'opus_mt_en_gem', 'xx.en.marian.translate_to.gil': 'opus_mt_en_gil', 'xx.en.marian.translate_to.gl': 'opus_mt_en_gl', 'xx.en.marian.translate_to.gmq': 'opus_mt_en_gmq', 'xx.en.marian.translate_to.gmw': 'opus_mt_en_gmw', 'xx.en.marian.translate_to.grk': 'opus_mt_en_grk', 'xx.en.marian.translate_to.guw': 
'opus_mt_en_guw', 'xx.en.marian.translate_to.gv': 'opus_mt_en_gv', 'xx.en.marian.translate_to.ha': 'opus_mt_en_ha', 'xx.en.marian.translate_to.he': 'opus_mt_en_he', 'xx.en.marian.translate_to.hi': 'opus_mt_en_hi', 'xx.en.marian.translate_to.hil': 'opus_mt_en_hil', 'xx.en.marian.translate_to.ho': 'opus_mt_en_ho', 'xx.en.marian.translate_to.ht': 'opus_mt_en_ht', 'xx.en.marian.translate_to.hu': 'opus_mt_en_hu', 'xx.en.marian.translate_to.hy': 'opus_mt_en_hy', 'xx.en.marian.translate_to.id': 'opus_mt_en_id', 'xx.en.marian.translate_to.ig': 'opus_mt_en_ig', 'xx.en.marian.translate_to.iir': 'opus_mt_en_iir', 'xx.en.marian.translate_to.ilo': 'opus_mt_en_ilo', 'xx.en.marian.translate_to.inc': 'opus_mt_en_inc', 'xx.en.marian.translate_to.ine': 'opus_mt_en_ine', 'xx.en.marian.translate_to.is': 'opus_mt_en_is', 'xx.en.marian.translate_to.iso': 'opus_mt_en_iso', 'xx.en.marian.translate_to.it': 'opus_mt_en_it', 'xx.en.marian.translate_to.itc': 'opus_mt_en_itc', 'xx.en.marian.translate_to.jap': 'opus_mt_en_jap', 'xx.en.marian.translate_to.kg': 'opus_mt_en_kg', 'xx.en.marian.translate_to.kj': 'opus_mt_en_kj', 'xx.en.marian.translate_to.kqn': 'opus_mt_en_kqn', 'xx.en.marian.translate_to.kwn': 'opus_mt_en_kwn', 'xx.en.marian.translate_to.kwy': 'opus_mt_en_kwy', 'xx.en.marian.translate_to.lg': 'opus_mt_en_lg', 'xx.en.marian.translate_to.ln': 'opus_mt_en_ln', 'xx.en.marian.translate_to.loz': 'opus_mt_en_loz', 'xx.en.marian.translate_to.lu': 'opus_mt_en_lu', 'xx.en.marian.translate_to.lua': 'opus_mt_en_lua', 'xx.en.marian.translate_to.lue': 'opus_mt_en_lue', 'xx.en.marian.translate_to.lun': 'opus_mt_en_lun', 'xx.en.marian.translate_to.luo': 'opus_mt_en_luo', 'xx.en.marian.translate_to.lus': 'opus_mt_en_lus', 'xx.en.marian.translate_to.map': 'opus_mt_en_map', 'xx.en.marian.translate_to.mfe': 'opus_mt_en_mfe', 'xx.en.marian.translate_to.mg': 'opus_mt_en_mg', 'xx.en.marian.translate_to.mh': 'opus_mt_en_mh', 'xx.en.marian.translate_to.mk': 'opus_mt_en_mk', 'xx.en.marian.translate_to.mkh': 
'opus_mt_en_mkh', 'xx.en.marian.translate_to.ml': 'opus_mt_en_ml', 'xx.en.marian.translate_to.mos': 'opus_mt_en_mos', 'xx.en.marian.translate_to.mr': 'opus_mt_en_mr', 'xx.en.marian.translate_to.mt': 'opus_mt_en_mt', 'xx.en.marian.translate_to.mul': 'opus_mt_en_mul', 'xx.en.marian.translate_to.ng': 'opus_mt_en_ng', 'xx.en.marian.translate_to.nic': 'opus_mt_en_nic', 'xx.en.marian.translate_to.niu': 'opus_mt_en_niu', 'xx.en.marian.translate_to.nl': 'opus_mt_en_nl', 'xx.en.marian.translate_to.nso': 'opus_mt_en_nso', 'xx.en.marian.translate_to.ny': 'opus_mt_en_ny', 'xx.en.marian.translate_to.nyk': 'opus_mt_en_nyk', 'xx.en.marian.translate_to.om': 'opus_mt_en_om', 'xx.en.marian.translate_to.pag': 'opus_mt_en_pag', 'xx.en.marian.translate_to.pap': 'opus_mt_en_pap', 'xx.en.marian.translate_to.phi': 'opus_mt_en_phi', 'xx.en.marian.translate_to.pis': 'opus_mt_en_pis', 'xx.en.marian.translate_to.pon': 'opus_mt_en_pon', 'xx.en.marian.translate_to.poz': 'opus_mt_en_poz', 'xx.en.marian.translate_to.pqe': 'opus_mt_en_pqe', 'xx.en.marian.translate_to.pqw': 'opus_mt_en_pqw', 'xx.en.marian.translate_to.rn': 'opus_mt_en_rn', 'xx.en.marian.translate_to.rnd': 'opus_mt_en_rnd', 'xx.en.marian.translate_to.ro': 'opus_mt_en_ro', 'xx.en.marian.translate_to.roa': 'opus_mt_en_roa', 'xx.en.marian.translate_to.ru': 'opus_mt_en_ru', 'xx.en.marian.translate_to.run': 'opus_mt_en_run', 'xx.en.marian.translate_to.rw': 'opus_mt_en_rw', 'xx.en.marian.translate_to.sal': 'opus_mt_en_sal', 'xx.en.marian.translate_to.sem': 'opus_mt_en_sem', 'xx.en.marian.translate_to.sg': 'opus_mt_en_sg', 'xx.en.marian.translate_to.sit': 'opus_mt_en_sit', 'xx.en.marian.translate_to.sk': 'opus_mt_en_sk', 'xx.en.marian.translate_to.sla': 'opus_mt_en_sla', 'xx.en.marian.translate_to.sm': 'opus_mt_en_sm', 'xx.en.marian.translate_to.sn': 'opus_mt_en_sn', 'xx.en.marian.translate_to.sq': 'opus_mt_en_sq', 'xx.en.marian.translate_to.ss': 'opus_mt_en_ss', 'xx.en.marian.translate_to.st': 'opus_mt_en_st', 
'xx.en.marian.translate_to.sv': 'opus_mt_en_sv', 'xx.en.marian.translate_to.sw': 'opus_mt_en_sw', 'xx.en.marian.translate_to.swc': 'opus_mt_en_swc', 'xx.en.marian.translate_to.tdt': 'opus_mt_en_tdt', 'xx.en.marian.translate_to.ti': 'opus_mt_en_ti', 'xx.en.marian.translate_to.tiv': 'opus_mt_en_tiv', 'xx.en.marian.translate_to.tl': 'opus_mt_en_tl', 'xx.en.marian.translate_to.tll': 'opus_mt_en_tll', 'xx.en.marian.translate_to.tn': 'opus_mt_en_tn', 'xx.en.marian.translate_to.to': 'opus_mt_en_to', 'xx.en.marian.translate_to.toi': 'opus_mt_en_toi', 'xx.en.marian.translate_to.tpi': 'opus_mt_en_tpi', 'xx.en.marian.translate_to.trk': 'opus_mt_en_trk', 'xx.en.marian.translate_to.ts': 'opus_mt_en_ts', 'xx.en.marian.translate_to.tut': 'opus_mt_en_tut', 'xx.en.marian.translate_to.tvl': 'opus_mt_en_tvl', 'xx.en.marian.translate_to.tw': 'opus_mt_en_tw', 'xx.en.marian.translate_to.ty': 'opus_mt_en_ty', 'xx.en.marian.translate_to.uk': 'opus_mt_en_uk', 'xx.en.marian.translate_to.umb': 'opus_mt_en_umb', 'xx.en.marian.translate_to.ur': 'opus_mt_en_ur', 'xx.en.marian.translate_to.urj': 'opus_mt_en_urj', 'xx.en.marian.translate_to.vi': 'opus_mt_en_vi', 'xx.en.marian.translate_to.xh': 'opus_mt_en_xh', 'xx.en.marian.translate_to.zh': 'opus_mt_en_zh', 'xx.en.marian.translate_to.zle': 'opus_mt_en_zle', 'xx.en.marian.translate_to.zls': 'opus_mt_en_zls', 'xx.en.marian.translate_to.zlw': 'opus_mt_en_zlw', 'xx.eo.marian.translate_to.af': 'opus_mt_af_eo', 'xx.eo.marian.translate_to.ar': 'opus_mt_ar_eo', 'xx.eo.marian.translate_to.bg': 'opus_mt_bg_eo', 'xx.eo.marian.translate_to.cs': 'opus_mt_cs_eo', 'xx.eo.marian.translate_to.da': 'opus_mt_da_eo', 'xx.eo.marian.translate_to.de': 'opus_mt_de_eo', 'xx.eo.marian.translate_to.el': 'opus_mt_el_eo', 'xx.eo.marian.translate_to.en': 'opus_mt_eo_en', 'xx.eo.marian.translate_to.es': 'opus_mt_es_eo', 'xx.eo.marian.translate_to.fi': 'opus_mt_fi_eo', 'xx.eo.marian.translate_to.fr': 'opus_mt_fr_eo', 'xx.eo.marian.translate_to.he': 'opus_mt_he_eo', 
'xx.eo.marian.translate_to.hu': 'opus_mt_hu_eo', 'xx.eo.marian.translate_to.is': 'opus_mt_is_eo', 'xx.eo.marian.translate_to.it': 'opus_mt_it_eo', 'xx.eo.marian.translate_to.lt': 'opus_mt_lt_eo', 'xx.eo.marian.translate_to.nl': 'opus_mt_nl_eo', 'xx.eo.marian.translate_to.pl': 'opus_mt_pl_eo', 'xx.eo.marian.translate_to.pt': 'opus_mt_pt_eo', 'xx.eo.marian.translate_to.ro': 'opus_mt_ro_eo', 'xx.eo.marian.translate_to.ru': 'opus_mt_ru_eo', 'xx.eo.marian.translate_to.sh': 'opus_mt_sh_eo', 'xx.eo.marian.translate_to.sv': 'opus_mt_sv_eo', 'xx.eo.marian.translate_to.tr': 'opus_mt_tr_eo', 'xx.eo.marian.translate_to.vi': 'opus_mt_vi_eo', 'xx.es.marian.translate_to.aed': 'opus_mt_aed_es', 'xx.es.marian.translate_to.af': 'opus_mt_af_es', 'xx.es.marian.translate_to.ar': 'opus_mt_ar_es', 'xx.es.marian.translate_to.ase': 'opus_mt_ase_es', 'xx.es.marian.translate_to.az': 'opus_mt_az_es', 'xx.es.marian.translate_to.bcl': 'opus_mt_bcl_es', 'xx.es.marian.translate_to.be': 'opus_mt_be_es', 'xx.es.marian.translate_to.bem': 'opus_mt_bem_es', 'xx.es.marian.translate_to.ber': 'opus_mt_ber_es', 'xx.es.marian.translate_to.bg': 'opus_mt_bg_es', 'xx.es.marian.translate_to.bi': 'opus_mt_bi_es', 'xx.es.marian.translate_to.bzs': 'opus_mt_bzs_es', 'xx.es.marian.translate_to.ca': 'opus_mt_ca_es', 'xx.es.marian.translate_to.ceb': 'opus_mt_ceb_es', 'xx.es.marian.translate_to.chk': 'opus_mt_chk_es', 'xx.es.marian.translate_to.crs': 'opus_mt_crs_es', 'xx.es.marian.translate_to.csg': 'opus_mt_csg_es', 'xx.es.marian.translate_to.csn': 'opus_mt_csn_es', 'xx.es.marian.translate_to.da': 'opus_mt_da_es', 'xx.es.marian.translate_to.de': 'opus_mt_de_es', 'xx.es.marian.translate_to.ee': 'opus_mt_ee_es', 'xx.es.marian.translate_to.en': 'opus_mt_es_en', 'xx.es.marian.translate_to.eo': 'opus_mt_eo_es', 'xx.es.marian.translate_to.es': 'opus_mt_es_es', 'xx.es.marian.translate_to.et': 'opus_mt_et_es', 'xx.es.marian.translate_to.eu': 'opus_mt_eu_es', 'xx.es.marian.translate_to.fi': 'opus_mt_fi_es', 
'xx.es.marian.translate_to.fr': 'opus_mt_fr_es', 'xx.es.marian.translate_to.gaa': 'opus_mt_gaa_es', 'xx.es.marian.translate_to.gil': 'opus_mt_gil_es', 'xx.es.marian.translate_to.gl': 'opus_mt_gl_es', 'xx.es.marian.translate_to.guw': 'opus_mt_guw_es', 'xx.es.marian.translate_to.ha': 'opus_mt_ha_es', 'xx.es.marian.translate_to.he': 'opus_mt_he_es', 'xx.es.marian.translate_to.hr': 'opus_mt_hr_es', 'xx.es.marian.translate_to.ht': 'opus_mt_ht_es', 'xx.es.marian.translate_to.id': 'opus_mt_id_es', 'xx.es.marian.translate_to.ig': 'opus_mt_ig_es', 'xx.es.marian.translate_to.ilo': 'opus_mt_ilo_es', 'xx.es.marian.translate_to.is': 'opus_mt_is_es', 'xx.es.marian.translate_to.iso': 'opus_mt_iso_es', 'xx.es.marian.translate_to.it': 'opus_mt_it_es', 'xx.es.marian.translate_to.ja': 'opus_mt_ja_es', 'xx.es.marian.translate_to.kg': 'opus_mt_kg_es', 'xx.es.marian.translate_to.ko': 'opus_mt_ko_es', 'xx.es.marian.translate_to.kqn': 'opus_mt_kqn_es', 'xx.es.marian.translate_to.lg': 'opus_mt_lg_es', 'xx.es.marian.translate_to.ln': 'opus_mt_ln_es', 'xx.es.marian.translate_to.loz': 'opus_mt_loz_es', 'xx.es.marian.translate_to.lt': 'opus_mt_lt_es', 'xx.es.marian.translate_to.lu': 'opus_mt_lu_es', 'xx.es.marian.translate_to.lua': 'opus_mt_lua_es', 'xx.es.marian.translate_to.lue': 'opus_mt_lue_es', 'xx.es.marian.translate_to.lus': 'opus_mt_lus_es', 'xx.es.marian.translate_to.lv': 'opus_mt_lv_es', 'xx.es.marian.translate_to.mfe': 'opus_mt_mfe_es', 'xx.es.marian.translate_to.mfs': 'opus_mt_mfs_es', 'xx.es.marian.translate_to.mg': 'opus_mt_mg_es', 'xx.es.marian.translate_to.mh': 'opus_mt_mh_es', 'xx.es.marian.translate_to.mk': 'opus_mt_mk_es', 'xx.es.marian.translate_to.mt': 'opus_mt_mt_es', 'xx.es.marian.translate_to.niu': 'opus_mt_niu_es', 'xx.es.marian.translate_to.nl': 'opus_mt_nl_es', 'xx.es.marian.translate_to.no': 'opus_mt_no_es', 'xx.es.marian.translate_to.nso': 'opus_mt_nso_es', 'xx.es.marian.translate_to.ny': 'opus_mt_ny_es', 'xx.es.marian.translate_to.pag': 'opus_mt_pag_es', 
'xx.es.marian.translate_to.pap': 'opus_mt_pap_es', 'xx.es.marian.translate_to.pis': 'opus_mt_pis_es', 'xx.es.marian.translate_to.pl': 'opus_mt_pl_es', 'xx.es.marian.translate_to.pon': 'opus_mt_pon_es', 'xx.es.marian.translate_to.prl': 'opus_mt_prl_es', 'xx.es.marian.translate_to.rn': 'opus_mt_rn_es', 'xx.es.marian.translate_to.ru': 'opus_mt_ru_es', 'xx.es.marian.translate_to.run': 'opus_mt_run_es', 'xx.es.marian.translate_to.rw': 'opus_mt_rw_es', 'xx.es.marian.translate_to.sg': 'opus_mt_sg_es', 'xx.es.marian.translate_to.sk': 'opus_mt_sk_es', 'xx.es.marian.translate_to.sl': 'opus_mt_sl_es', 'xx.es.marian.translate_to.sm': 'opus_mt_sm_es', 'xx.es.marian.translate_to.sn': 'opus_mt_sn_es', 'xx.es.marian.translate_to.sq': 'opus_mt_sq_es', 'xx.es.marian.translate_to.srn': 'opus_mt_srn_es', 'xx.es.marian.translate_to.ssp': 'opus_mt_ssp_es', 'xx.es.marian.translate_to.st': 'opus_mt_st_es', 'xx.es.marian.translate_to.sv': 'opus_mt_sv_es', 'xx.es.marian.translate_to.swc': 'opus_mt_swc_es', 'xx.es.marian.translate_to.tl': 'opus_mt_tl_es', 'xx.es.marian.translate_to.tll': 'opus_mt_tll_es', 'xx.es.marian.translate_to.tn': 'opus_mt_tn_es', 'xx.es.marian.translate_to.to': 'opus_mt_to_es', 'xx.es.marian.translate_to.toi': 'opus_mt_toi_es', 'xx.es.marian.translate_to.tr': 'opus_mt_tr_es', 'xx.es.marian.translate_to.ts': 'opus_mt_ts_es', 'xx.es.marian.translate_to.tum': 'opus_mt_tum_es', 'xx.es.marian.translate_to.tvl': 'opus_mt_tvl_es', 'xx.es.marian.translate_to.tw': 'opus_mt_tw_es', 'xx.es.marian.translate_to.ty': 'opus_mt_ty_es', 'xx.es.marian.translate_to.tzo': 'opus_mt_tzo_es', 'xx.es.marian.translate_to.uk': 'opus_mt_uk_es', 'xx.es.marian.translate_to.ve': 'opus_mt_ve_es', 'xx.es.marian.translate_to.vi': 'opus_mt_vi_es', 'xx.es.marian.translate_to.vsl': 'opus_mt_vsl_es', 'xx.es.marian.translate_to.war': 'opus_mt_war_es', 'xx.es.marian.translate_to.xh': 'opus_mt_xh_es', 'xx.es.marian.translate_to.yo': 'opus_mt_yo_es', 'xx.es.marian.translate_to.zai': 'opus_mt_zai_es', 
'xx.es.marian.translate_to.zh': 'opus_tatoeba_es_zh', 'xx.es.marian.translate_to.zne': 'opus_mt_zne_es', 'xx.et.marian.translate_to.de': 'opus_mt_de_et', 'xx.et.marian.translate_to.en': 'opus_mt_et_en', 'xx.et.marian.translate_to.es': 'opus_mt_es_et', 'xx.et.marian.translate_to.fi': 'opus_mt_fi_et', 'xx.et.marian.translate_to.ru': 'opus_mt_ru_et', 'xx.et.marian.translate_to.sv': 'opus_mt_sv_et', 'xx.eu.marian.translate_to.de': 'opus_mt_de_eu', 'xx.eu.marian.translate_to.en': 'opus_mt_eu_en', 'xx.eu.marian.translate_to.es': 'opus_mt_es_eu', 'xx.eu.marian.translate_to.ru': 'opus_mt_ru_eu', 'xx.euq.marian.translate_to.en': 'opus_mt_euq_en', 'xx.fi.marian.translate_to.af': 'opus_mt_af_fi', 'xx.fi.marian.translate_to.bcl': 'opus_mt_bcl_fi', 'xx.fi.marian.translate_to.bem': 'opus_mt_bem_fi', 'xx.fi.marian.translate_to.bg': 'opus_mt_bg_fi', 'xx.fi.marian.translate_to.bzs': 'opus_mt_bzs_fi', 'xx.fi.marian.translate_to.ceb': 'opus_mt_ceb_fi', 'xx.fi.marian.translate_to.crs': 'opus_mt_crs_fi', 'xx.fi.marian.translate_to.cs': 'opus_mt_cs_fi', 'xx.fi.marian.translate_to.da': 'opus_mt_da_fi', 'xx.fi.marian.translate_to.de': 'opus_mt_de_fi', 'xx.fi.marian.translate_to.ee': 'opus_mt_ee_fi', 'xx.fi.marian.translate_to.efi': 'opus_mt_efi_fi', 'xx.fi.marian.translate_to.el': 'opus_mt_el_fi', 'xx.fi.marian.translate_to.en': 'opus_mt_fi_en', 'xx.fi.marian.translate_to.eo': 'opus_mt_eo_fi', 'xx.fi.marian.translate_to.es': 'opus_mt_es_fi', 'xx.fi.marian.translate_to.et': 'opus_mt_et_fi', 'xx.fi.marian.translate_to.fi': 'opus_mt_fi_fi', 'xx.fi.marian.translate_to.fse': 'opus_mt_fse_fi', 'xx.fi.marian.translate_to.gaa': 'opus_mt_gaa_fi', 'xx.fi.marian.translate_to.gil': 'opus_mt_gil_fi', 'xx.fi.marian.translate_to.guw': 'opus_mt_guw_fi', 'xx.fi.marian.translate_to.ha': 'opus_mt_ha_fi', 'xx.fi.marian.translate_to.he': 'opus_mt_he_fi', 'xx.fi.marian.translate_to.hil': 'opus_mt_hil_fi', 'xx.fi.marian.translate_to.hr': 'opus_mt_hr_fi', 'xx.fi.marian.translate_to.ht': 'opus_mt_ht_fi', 
'xx.fi.marian.translate_to.hu': 'opus_mt_hu_fi', 'xx.fi.marian.translate_to.id': 'opus_mt_id_fi', 'xx.fi.marian.translate_to.ig': 'opus_mt_ig_fi', 'xx.fi.marian.translate_to.ilo': 'opus_mt_ilo_fi', 'xx.fi.marian.translate_to.is': 'opus_mt_is_fi', 'xx.fi.marian.translate_to.iso': 'opus_mt_iso_fi', 'xx.fi.marian.translate_to.ja': 'opus_mt_ja_fi', 'xx.fi.marian.translate_to.ko': 'opus_mt_ko_fi', 'xx.fi.marian.translate_to.lg': 'opus_mt_lg_fi', 'xx.fi.marian.translate_to.loz': 'opus_mt_loz_fi', 'xx.fi.marian.translate_to.lu': 'opus_mt_lu_fi', 'xx.fi.marian.translate_to.lua': 'opus_mt_lua_fi', 'xx.fi.marian.translate_to.lue': 'opus_mt_lue_fi', 'xx.fi.marian.translate_to.lus': 'opus_mt_lus_fi', 'xx.fi.marian.translate_to.lv': 'opus_mt_lv_fi', 'xx.fi.marian.translate_to.mh': 'opus_mt_mh_fi', 'xx.fi.marian.translate_to.mk': 'opus_mt_mk_fi', 'xx.fi.marian.translate_to.mt': 'opus_mt_mt_fi', 'xx.fi.marian.translate_to.niu': 'opus_mt_niu_fi', 'xx.fi.marian.translate_to.nl': 'opus_mt_nl_fi', 'xx.fi.marian.translate_to.no': 'opus_mt_no_fi', 'xx.fi.marian.translate_to.nso': 'opus_mt_nso_fi', 'xx.fi.marian.translate_to.pag': 'opus_mt_pag_fi', 'xx.fi.marian.translate_to.pap': 'opus_mt_pap_fi', 'xx.fi.marian.translate_to.pis': 'opus_mt_pis_fi', 'xx.fi.marian.translate_to.pon': 'opus_mt_pon_fi', 'xx.fi.marian.translate_to.ro': 'opus_mt_ro_fi', 'xx.fi.marian.translate_to.ru': 'opus_mt_ru_fi', 'xx.fi.marian.translate_to.sg': 'opus_mt_sg_fi', 'xx.fi.marian.translate_to.sk': 'opus_mt_sk_fi', 'xx.fi.marian.translate_to.sl': 'opus_mt_sl_fi', 'xx.fi.marian.translate_to.st': 'opus_mt_st_fi', 'xx.fi.marian.translate_to.sv': 'opus_mt_sv_fi', 'xx.fi.marian.translate_to.swc': 'opus_mt_swc_fi', 'xx.fi.marian.translate_to.tll': 'opus_mt_tll_fi', 'xx.fi.marian.translate_to.toi': 'opus_mt_toi_fi', 'xx.fi.marian.translate_to.ts': 'opus_mt_ts_fi', 'xx.fi.marian.translate_to.tvl': 'opus_mt_tvl_fi', 'xx.fi.marian.translate_to.tw': 'opus_mt_tw_fi', 'xx.fi.marian.translate_to.ty': 'opus_mt_ty_fi', 
'xx.fi.marian.translate_to.uk': 'opus_mt_uk_fi', 'xx.fi.marian.translate_to.war': 'opus_mt_war_fi', 'xx.fi.marian.translate_to.yo': 'opus_mt_yo_fi', 'xx.fi.marian.translate_to.zh': 'opus_mt_zh_fi', 'xx.fi.marian.translate_to.zne': 'opus_mt_zne_fi', 'xx.fiu.marian.translate_to.en': 'opus_mt_fiu_en', 'xx.fiu.marian.translate_to.fiu': 'opus_mt_fiu_fiu', 'xx.fj.marian.translate_to.de': 'opus_mt_de_fj', 'xx.fj.marian.translate_to.en': 'opus_mt_fj_en', 'xx.fj.marian.translate_to.es': 'opus_mt_es_fj', 'xx.fj.marian.translate_to.fi': 'opus_mt_fi_fj', 'xx.fj.marian.translate_to.fr': 'opus_mt_fr_fj', 'xx.fj.marian.translate_to.sv': 'opus_mt_sv_fj', 'xx.fr.marian.translate_to.af': 'opus_mt_af_fr', 'xx.fr.marian.translate_to.ar': 'opus_mt_ar_fr', 'xx.fr.marian.translate_to.ase': 'opus_mt_ase_fr', 'xx.fr.marian.translate_to.bcl': 'opus_mt_bcl_fr', 'xx.fr.marian.translate_to.bem': 'opus_mt_bem_fr', 'xx.fr.marian.translate_to.ber': 'opus_mt_ber_fr', 'xx.fr.marian.translate_to.bg': 'opus_mt_bg_fr', 'xx.fr.marian.translate_to.bi': 'opus_mt_bi_fr', 'xx.fr.marian.translate_to.bzs': 'opus_mt_bzs_fr', 'xx.fr.marian.translate_to.ca': 'opus_mt_ca_fr', 'xx.fr.marian.translate_to.ceb': 'opus_mt_ceb_fr', 'xx.fr.marian.translate_to.chk': 'opus_mt_chk_fr', 'xx.fr.marian.translate_to.crs': 'opus_mt_crs_fr', 'xx.fr.marian.translate_to.cs': 'opus_mt_cs_fr', 'xx.fr.marian.translate_to.da': 'opus_mt_da_fr', 'xx.fr.marian.translate_to.de': 'opus_mt_de_fr', 'xx.fr.marian.translate_to.ee': 'opus_mt_ee_fr', 'xx.fr.marian.translate_to.efi': 'opus_mt_efi_fr', 'xx.fr.marian.translate_to.el': 'opus_mt_el_fr', 'xx.fr.marian.translate_to.en': 'opus_mt_fr_en', 'xx.fr.marian.translate_to.eo': 'opus_mt_eo_fr', 'xx.fr.marian.translate_to.es': 'opus_mt_es_fr', 'xx.fr.marian.translate_to.et': 'opus_mt_et_fr', 'xx.fr.marian.translate_to.fi': 'opus_mt_fi_fr', 'xx.fr.marian.translate_to.fj': 'opus_mt_fj_fr', 'xx.fr.marian.translate_to.gaa': 'opus_mt_gaa_fr', 'xx.fr.marian.translate_to.gil': 'opus_mt_gil_fr', 
'xx.fr.marian.translate_to.guw': 'opus_mt_guw_fr', 'xx.fr.marian.translate_to.ha': 'opus_mt_ha_fr', 'xx.fr.marian.translate_to.hr': 'opus_mt_hr_fr', 'xx.fr.marian.translate_to.ht': 'opus_mt_ht_fr', 'xx.fr.marian.translate_to.hu': 'opus_mt_hu_fr', 'xx.fr.marian.translate_to.id': 'opus_mt_id_fr', 'xx.fr.marian.translate_to.ig': 'opus_mt_ig_fr', 'xx.fr.marian.translate_to.is': 'opus_mt_is_fr', 'xx.fr.marian.translate_to.iso': 'opus_mt_iso_fr', 'xx.fr.marian.translate_to.it': 'opus_mt_it_fr', 'xx.fr.marian.translate_to.ja': 'opus_mt_ja_fr', 'xx.fr.marian.translate_to.kg': 'opus_mt_kg_fr', 'xx.fr.marian.translate_to.ko': 'opus_mt_ko_fr', 'xx.fr.marian.translate_to.kqn': 'opus_mt_kqn_fr', 'xx.fr.marian.translate_to.kwy': 'opus_mt_kwy_fr', 'xx.fr.marian.translate_to.lg': 'opus_mt_lg_fr', 'xx.fr.marian.translate_to.ln': 'opus_mt_ln_fr', 'xx.fr.marian.translate_to.loz': 'opus_mt_loz_fr', 'xx.fr.marian.translate_to.lt': 'opus_mt_lt_fr', 'xx.fr.marian.translate_to.lu': 'opus_mt_lu_fr', 'xx.fr.marian.translate_to.lua': 'opus_mt_lua_fr', 'xx.fr.marian.translate_to.lue': 'opus_mt_lue_fr', 'xx.fr.marian.translate_to.lus': 'opus_mt_lus_fr', 'xx.fr.marian.translate_to.lv': 'opus_mt_lv_fr', 'xx.fr.marian.translate_to.mk': 'opus_mt_mk_fr', 'xx.fr.marian.translate_to.ms': 'opus_mt_ms_fr', 'xx.fr.marian.translate_to.mt': 'opus_mt_mt_fr', 'xx.fr.marian.translate_to.niu': 'opus_mt_niu_fr', 'xx.fr.marian.translate_to.nl': 'opus_mt_nl_fr', 'xx.fr.marian.translate_to.no': 'opus_mt_no_fr', 'xx.fr.marian.translate_to.nso': 'opus_mt_nso_fr', 'xx.fr.marian.translate_to.pap': 'opus_mt_pap_fr', 'xx.fr.marian.translate_to.pis': 'opus_mt_pis_fr', 'xx.fr.marian.translate_to.pl': 'opus_mt_pl_fr', 'xx.fr.marian.translate_to.pon': 'opus_mt_pon_fr', 'xx.fr.marian.translate_to.rn': 'opus_mt_rn_fr', 'xx.fr.marian.translate_to.rnd': 'opus_mt_rnd_fr', 'xx.fr.marian.translate_to.ro': 'opus_mt_ro_fr', 'xx.fr.marian.translate_to.ru': 'opus_mt_ru_fr', 'xx.fr.marian.translate_to.rw': 'opus_mt_rw_fr', 
'xx.fr.marian.translate_to.sg': 'opus_mt_sg_fr', 'xx.fr.marian.translate_to.sk': 'opus_mt_sk_fr', 'xx.fr.marian.translate_to.sl': 'opus_mt_sl_fr', 'xx.fr.marian.translate_to.sm': 'opus_mt_sm_fr', 'xx.fr.marian.translate_to.sn': 'opus_mt_sn_fr', 'xx.fr.marian.translate_to.srn': 'opus_mt_srn_fr', 'xx.fr.marian.translate_to.st': 'opus_mt_st_fr', 'xx.fr.marian.translate_to.sv': 'opus_mt_sv_fr', 'xx.fr.marian.translate_to.swc': 'opus_mt_swc_fr', 'xx.fr.marian.translate_to.th': 'opus_mt_th_fr', 'xx.fr.marian.translate_to.tiv': 'opus_mt_tiv_fr', 'xx.fr.marian.translate_to.tll': 'opus_mt_tll_fr', 'xx.fr.marian.translate_to.tn': 'opus_mt_tn_fr', 'xx.fr.marian.translate_to.to': 'opus_mt_to_fr', 'xx.fr.marian.translate_to.toi': 'opus_mt_toi_fr', 'xx.fr.marian.translate_to.tr': 'opus_mt_tr_fr', 'xx.fr.marian.translate_to.ts': 'opus_mt_ts_fr', 'xx.fr.marian.translate_to.tum': 'opus_mt_tum_fr', 'xx.fr.marian.translate_to.tvl': 'opus_mt_tvl_fr', 'xx.fr.marian.translate_to.tw': 'opus_mt_tw_fr', 'xx.fr.marian.translate_to.ty': 'opus_mt_ty_fr', 'xx.fr.marian.translate_to.uk': 'opus_mt_uk_fr', 'xx.fr.marian.translate_to.vi': 'opus_mt_vi_fr', 'xx.fr.marian.translate_to.war': 'opus_mt_war_fr', 'xx.fr.marian.translate_to.wls': 'opus_mt_wls_fr', 'xx.fr.marian.translate_to.xh': 'opus_mt_xh_fr', 'xx.fr.marian.translate_to.yap': 'opus_mt_yap_fr', 'xx.fr.marian.translate_to.yo': 'opus_mt_yo_fr', 'xx.fr.marian.translate_to.zne': 'opus_mt_zne_fr', 'xx.fse.marian.translate_to.fi': 'opus_mt_fi_fse', 'xx.ga.marian.translate_to.en': 'opus_mt_ga_en', 'xx.gaa.marian.translate_to.de': 'opus_mt_de_gaa', 'xx.gaa.marian.translate_to.en': 'opus_mt_gaa_en', 'xx.gaa.marian.translate_to.es': 'opus_mt_es_gaa', 'xx.gaa.marian.translate_to.fi': 'opus_mt_fi_gaa', 'xx.gaa.marian.translate_to.fr': 'opus_mt_fr_gaa', 'xx.gaa.marian.translate_to.sv': 'opus_mt_sv_gaa', 'xx.gem.marian.translate_to.en': 'opus_mt_gem_en', 'xx.gem.marian.translate_to.gem': 'opus_mt_gem_gem', 'xx.gil.marian.translate_to.de': 
'opus_mt_de_gil', 'xx.gil.marian.translate_to.en': 'opus_mt_gil_en', 'xx.gil.marian.translate_to.es': 'opus_mt_es_gil', 'xx.gil.marian.translate_to.fi': 'opus_mt_fi_gil', 'xx.gil.marian.translate_to.fr': 'opus_mt_fr_gil', 'xx.gil.marian.translate_to.sv': 'opus_mt_sv_gil', 'xx.gl.marian.translate_to.en': 'opus_mt_gl_en', 'xx.gl.marian.translate_to.es': 'opus_mt_es_gl', 'xx.gl.marian.translate_to.pt': 'opus_mt_pt_gl', 'xx.gmq.marian.translate_to.en': 'opus_mt_gmq_en', 'xx.gmq.marian.translate_to.gmq': 'opus_mt_gmq_gmq', 'xx.gmw.marian.translate_to.en': 'opus_mt_gmw_en', 'xx.gmw.marian.translate_to.gmw': 'opus_mt_gmw_gmw', 'xx.grk.marian.translate_to.en': 'opus_mt_grk_en', 'xx.guw.marian.translate_to.de': 'opus_mt_de_guw', 'xx.guw.marian.translate_to.en': 'opus_mt_guw_en', 'xx.guw.marian.translate_to.es': 'opus_mt_es_guw', 'xx.guw.marian.translate_to.fi': 'opus_mt_fi_guw', 'xx.guw.marian.translate_to.fr': 'opus_mt_fr_guw', 'xx.guw.marian.translate_to.sv': 'opus_mt_sv_guw', 'xx.gv.marian.translate_to.en': 'opus_mt_gv_en', 'xx.ha.marian.translate_to.de': 'opus_mt_de_ha', 'xx.ha.marian.translate_to.en': 'opus_mt_ha_en', 'xx.ha.marian.translate_to.es': 'opus_mt_es_ha', 'xx.ha.marian.translate_to.fi': 'opus_mt_fi_ha', 'xx.ha.marian.translate_to.fr': 'opus_mt_fr_ha', 'xx.ha.marian.translate_to.sv': 'opus_mt_sv_ha', 'xx.he.marian.translate_to.ar': 'opus_mt_ar_he', 'xx.he.marian.translate_to.de': 'opus_mt_de_he', 'xx.he.marian.translate_to.eo': 'opus_mt_eo_he', 'xx.he.marian.translate_to.es': 'opus_mt_es_he', 'xx.he.marian.translate_to.fi': 'opus_mt_fi_he', 'xx.he.marian.translate_to.fr': 'opus_mt_fr_he', 'xx.he.marian.translate_to.it': 'opus_tatoeba_he_it', 'xx.he.marian.translate_to.ja': 'opus_mt_ja_he', 'xx.he.marian.translate_to.ru': 'opus_mt_ru_he', 'xx.he.marian.translate_to.sv': 'opus_mt_sv_he', 'xx.he.marian.translate_to.uk': 'opus_mt_uk_he', 'xx.he.marian.translate_to.zh': 'opus_mt_zh_he', 'xx.he.translate_to.fr': 'opus_tatoeba_he_fr', 'xx.hi.marian.translate_to.en': 
'opus_mt_hi_en', 'xx.hil.marian.translate_to.de': 'opus_mt_de_hil', 'xx.hil.marian.translate_to.en': 'opus_mt_hil_en', 'xx.hil.marian.translate_to.es': 'opus_mt_es_hil', 'xx.hil.marian.translate_to.fi': 'opus_mt_fi_hil', 'xx.hil.marian.translate_to.fr': 'opus_mt_fr_hil', 'xx.hil.marian.translate_to.sv': 'opus_mt_sv_hil', 'xx.ho.marian.translate_to.de': 'opus_mt_de_ho', 'xx.ho.marian.translate_to.en': 'opus_mt_ho_en', 'xx.ho.marian.translate_to.es': 'opus_mt_es_ho', 'xx.ho.marian.translate_to.fi': 'opus_mt_fi_ho', 'xx.ho.marian.translate_to.fr': 'opus_mt_fr_ho', 'xx.ho.marian.translate_to.sv': 'opus_mt_sv_ho', 'xx.hr.marian.translate_to.de': 'opus_mt_de_hr', 'xx.hr.marian.translate_to.es': 'opus_mt_es_hr', 'xx.hr.marian.translate_to.fi': 'opus_mt_fi_hr', 'xx.hr.marian.translate_to.fr': 'opus_mt_fr_hr', 'xx.hr.marian.translate_to.sv': 'opus_mt_sv_hr', 'xx.ht.marian.translate_to.de': 'opus_mt_de_ht', 'xx.ht.marian.translate_to.en': 'opus_mt_ht_en', 'xx.ht.marian.translate_to.es': 'opus_mt_es_ht', 'xx.ht.marian.translate_to.fi': 'opus_mt_fi_ht', 'xx.ht.marian.translate_to.fr': 'opus_mt_fr_ht', 'xx.ht.marian.translate_to.sv': 'opus_mt_sv_ht', 'xx.hu.marian.translate_to.de': 'opus_mt_de_hu', 'xx.hu.marian.translate_to.en': 'opus_mt_hu_en', 'xx.hu.marian.translate_to.eo': 'opus_mt_eo_hu', 'xx.hu.marian.translate_to.fi': 'opus_mt_fi_hu', 'xx.hu.marian.translate_to.fr': 'opus_mt_fr_hu', 'xx.hu.marian.translate_to.ja': 'opus_mt_ja_hu', 'xx.hu.marian.translate_to.ko': 'opus_mt_ko_hu', 'xx.hu.marian.translate_to.sv': 'opus_mt_sv_hu', 'xx.hu.marian.translate_to.uk': 'opus_mt_uk_hu', 'xx.hy.marian.translate_to.en': 'opus_mt_hy_en', 'xx.hy.marian.translate_to.ru': 'opus_mt_ru_hy', 'xx.id.marian.translate_to.en': 'opus_mt_id_en', 'xx.id.marian.translate_to.es': 'opus_mt_es_id', 'xx.id.marian.translate_to.fi': 'opus_mt_fi_id', 'xx.id.marian.translate_to.fr': 'opus_mt_fr_id', 'xx.id.marian.translate_to.sv': 'opus_mt_sv_id', 'xx.ig.marian.translate_to.de': 'opus_mt_de_ig', 
'xx.ig.marian.translate_to.en': 'opus_mt_ig_en', 'xx.ig.marian.translate_to.es': 'opus_mt_es_ig', 'xx.ig.marian.translate_to.fi': 'opus_mt_fi_ig', 'xx.ig.marian.translate_to.fr': 'opus_mt_fr_ig', 'xx.ig.marian.translate_to.sv': 'opus_mt_sv_ig', 'xx.iir.marian.translate_to.en': 'opus_mt_iir_en', 'xx.iir.marian.translate_to.iir': 'opus_mt_iir_iir', 'xx.ilo.marian.translate_to.de': 'opus_mt_de_ilo', 'xx.ilo.marian.translate_to.en': 'opus_mt_ilo_en', 'xx.ilo.marian.translate_to.es': 'opus_mt_es_ilo', 'xx.ilo.marian.translate_to.fi': 'opus_mt_fi_ilo', 'xx.ilo.marian.translate_to.fr': 'opus_mt_fr_ilo', 'xx.ilo.marian.translate_to.sv': 'opus_mt_sv_ilo', 'xx.inc.marian.translate_to.en': 'opus_mt_inc_en', 'xx.inc.marian.translate_to.inc': 'opus_mt_inc_inc', 'xx.ine.marian.translate_to.en': 'opus_mt_ine_en', 'xx.ine.marian.translate_to.ine': 'opus_mt_ine_ine', 'xx.is.marian.translate_to.de': 'opus_mt_de_is', 'xx.is.marian.translate_to.en': 'opus_mt_is_en', 'xx.is.marian.translate_to.es': 'opus_mt_es_is', 'xx.is.marian.translate_to.fi': 'opus_mt_fi_is', 'xx.is.marian.translate_to.it': 'opus_mt_it_is', 'xx.is.marian.translate_to.sv': 'opus_mt_sv_is', 'xx.iso.marian.translate_to.de': 'opus_mt_de_iso', 'xx.iso.marian.translate_to.en': 'opus_mt_iso_en', 'xx.iso.marian.translate_to.es': 'opus_mt_es_iso', 'xx.iso.marian.translate_to.fi': 'opus_mt_fi_iso', 'xx.iso.marian.translate_to.fr': 'opus_mt_fr_iso', 'xx.iso.marian.translate_to.sv': 'opus_mt_sv_iso', 'xx.it.marian.translate_to.ar': 'opus_mt_ar_it', 'xx.it.marian.translate_to.bg': 'opus_mt_bg_it', 'xx.it.marian.translate_to.ca': 'opus_mt_ca_it', 'xx.it.marian.translate_to.de': 'opus_mt_de_it', 'xx.it.marian.translate_to.en': 'opus_mt_it_en', 'xx.it.marian.translate_to.eo': 'opus_mt_eo_it', 'xx.it.marian.translate_to.es': 'opus_mt_es_it', 'xx.it.marian.translate_to.fi': 'opus_mt_fi_it', 'xx.it.marian.translate_to.he': 'opus_mt_he_it', 'xx.it.marian.translate_to.is': 'opus_mt_is_it', 'xx.it.marian.translate_to.ja': 
'opus_mt_ja_it', 'xx.it.marian.translate_to.lt': 'opus_mt_lt_it', 'xx.it.marian.translate_to.ms': 'opus_mt_ms_it', 'xx.it.marian.translate_to.uk': 'opus_mt_uk_it', 'xx.it.marian.translate_to.vi': 'opus_mt_vi_it', 'xx.it.marian.translate_to.zh': 'opus_mt_zh_it', 'xx.it.translate_to.he': 'opus_tatoeba_it_he', 'xx.itc.marian.translate_to.en': 'opus_mt_itc_en', 'xx.itc.marian.translate_to.itc': 'opus_mt_itc_itc', 'xx.ja.marian.translate_to.en': 'opus_mt_ja_en', 'xx.jap.marian.translate_to.en': 'opus_mt_jap_en', 'xx.ka.marian.translate_to.en': 'opus_mt_ka_en', 'xx.kab.marian.translate_to.en': 'opus_mt_kab_en', 'xx.kg.marian.translate_to.de': 'opus_mt_de_kg', 'xx.kg.marian.translate_to.en': 'opus_mt_kg_en', 'xx.kg.marian.translate_to.es': 'opus_mt_es_kg', 'xx.kg.marian.translate_to.fi': 'opus_mt_fi_kg', 'xx.kg.marian.translate_to.fr': 'opus_mt_fr_kg', 'xx.kg.marian.translate_to.sv': 'opus_mt_sv_kg', 'xx.kj.marian.translate_to.en': 'opus_mt_kj_en', 'xx.kl.marian.translate_to.en': 'opus_mt_kl_en', 'xx.ko.marian.translate_to.en': 'opus_mt_ko_en', 'xx.kqn.marian.translate_to.en': 'opus_mt_kqn_en', 'xx.kqn.marian.translate_to.fi': 'opus_mt_fi_kqn', 'xx.kqn.marian.translate_to.fr': 'opus_mt_fr_kqn', 'xx.kqn.marian.translate_to.sv': 'opus_mt_sv_kqn', 'xx.kwn.marian.translate_to.en': 'opus_mt_kwn_en', 'xx.kwy.marian.translate_to.en': 'opus_mt_kwy_en', 'xx.kwy.marian.translate_to.fr': 'opus_mt_fr_kwy', 'xx.kwy.marian.translate_to.sv': 'opus_mt_sv_kwy', 'xx.lg.marian.translate_to.en': 'opus_mt_lg_en', 'xx.lg.marian.translate_to.fi': 'opus_mt_fi_lg', 'xx.lg.marian.translate_to.fr': 'opus_mt_fr_lg', 'xx.lg.marian.translate_to.sv': 'opus_mt_sv_lg', 'xx.ln.marian.translate_to.de': 'opus_mt_de_ln', 'xx.ln.marian.translate_to.en': 'opus_mt_ln_en', 'xx.ln.marian.translate_to.es': 'opus_mt_es_ln', 'xx.ln.marian.translate_to.fi': 'opus_mt_fi_ln', 'xx.ln.marian.translate_to.fr': 'opus_mt_fr_ln', 'xx.ln.marian.translate_to.sv': 'opus_mt_sv_ln', 'xx.loz.marian.translate_to.de': 
'opus_mt_de_loz', 'xx.loz.marian.translate_to.en': 'opus_mt_loz_en', 'xx.loz.marian.translate_to.es': 'opus_mt_es_loz', 'xx.loz.marian.translate_to.fr': 'opus_mt_fr_loz', 'xx.lt.marian.translate_to.de': 'opus_mt_de_lt', 'xx.lt.marian.translate_to.es': 'opus_mt_es_lt', 'xx.lt.marian.translate_to.it': 'opus_mt_it_lt', 'xx.lt.marian.translate_to.pl': 'opus_mt_pl_lt', 'xx.lt.marian.translate_to.ru': 'opus_mt_ru_lt', 'xx.lt.marian.translate_to.tr': 'opus_mt_tr_lt', 'xx.lu.marian.translate_to.en': 'opus_mt_lu_en', 'xx.lu.marian.translate_to.fi': 'opus_mt_fi_lu', 'xx.lu.marian.translate_to.fr': 'opus_mt_fr_lu', 'xx.lu.marian.translate_to.sv': 'opus_mt_sv_lu', 'xx.lua.marian.translate_to.de': 'opus_mt_de_lua', 'xx.lua.marian.translate_to.en': 'opus_mt_lua_en', 'xx.lua.marian.translate_to.es': 'opus_mt_es_lua', 'xx.lua.marian.translate_to.fi': 'opus_mt_fi_lua', 'xx.lua.marian.translate_to.fr': 'opus_mt_fr_lua', 'xx.lua.marian.translate_to.sv': 'opus_mt_sv_lua', 'xx.lue.marian.translate_to.en': 'opus_mt_lue_en', 'xx.lue.marian.translate_to.fi': 'opus_mt_fi_lue', 'xx.lue.marian.translate_to.fr': 'opus_mt_fr_lue', 'xx.lue.marian.translate_to.sv': 'opus_mt_sv_lue', 'xx.lun.marian.translate_to.en': 'opus_mt_lun_en', 'xx.luo.marian.translate_to.en': 'opus_mt_luo_en', 'xx.lus.marian.translate_to.en': 'opus_mt_lus_en', 'xx.lus.marian.translate_to.es': 'opus_mt_es_lus', 'xx.lus.marian.translate_to.fi': 'opus_mt_fi_lus', 'xx.lus.marian.translate_to.fr': 'opus_mt_fr_lus', 'xx.lus.marian.translate_to.sv': 'opus_mt_sv_lus', 'xx.lv.marian.translate_to.en': 'opus_mt_lv_en', 'xx.lv.marian.translate_to.fi': 'opus_mt_fi_lv', 'xx.lv.marian.translate_to.ru': 'opus_mt_ru_lv', 'xx.lv.marian.translate_to.sv': 'opus_mt_sv_lv', 'xx.marian': 'opus_mt_en_fr', 'xx.mfe.marian.translate_to.en': 'opus_mt_mfe_en', 'xx.mfe.marian.translate_to.fi': 'opus_mt_fi_mfe', 'xx.mfe.marian.translate_to.fr': 'opus_mt_fr_mfe', 'xx.mfe.marian.translate_to.sv': 'opus_mt_sv_mfe', 'xx.mfs.marian.translate_to.es': 
'opus_mt_es_mfs', 'xx.mg.marian.translate_to.en': 'opus_mt_mg_en', 'xx.mg.marian.translate_to.fi': 'opus_mt_fi_mg', 'xx.mh.marian.translate_to.en': 'opus_mt_mh_en', 'xx.mh.marian.translate_to.fi': 'opus_mt_fi_mh', 'xx.mh.marian.translate_to.fr': 'opus_mt_fr_mh', 'xx.mh.marian.translate_to.sv': 'opus_mt_sv_mh', 'xx.mk.marian.translate_to.en': 'opus_mt_mk_en', 'xx.mk.marian.translate_to.es': 'opus_mt_es_mk', 'xx.mk.marian.translate_to.fi': 'opus_mt_fi_mk', 'xx.mkh.marian.translate_to.en': 'opus_mt_mkh_en', 'xx.ml.marian.translate_to.en': 'opus_mt_ml_en', 'xx.mos.marian.translate_to.en': 'opus_mt_mos_en', 'xx.mos.marian.translate_to.fi': 'opus_mt_fi_mos', 'xx.mos.marian.translate_to.fr': 'opus_mt_fr_mos', 'xx.mos.marian.translate_to.sv': 'opus_mt_sv_mos', 'xx.mr.marian.translate_to.en': 'opus_mt_mr_en', 'xx.ms.marian.translate_to.de': 'opus_mt_de_ms', 'xx.ms.marian.translate_to.fr': 'opus_mt_fr_ms', 'xx.ms.marian.translate_to.it': 'opus_mt_it_ms', 'xx.ms.marian.translate_to.ja': 'opus_mt_ja_ms', 'xx.ms.marian.translate_to.ms': 'opus_mt_ms_ms', 'xx.ms.marian.translate_to.zh': 'opus_mt_zh_ms', 'xx.mt.marian.translate_to.de': 'opus_mt_de_mt', 'xx.mt.marian.translate_to.en': 'opus_mt_mt_en', 'xx.mt.marian.translate_to.es': 'opus_mt_es_mt', 'xx.mt.marian.translate_to.fi': 'opus_mt_fi_mt', 'xx.mt.marian.translate_to.fr': 'opus_mt_fr_mt', 'xx.mt.marian.translate_to.sv': 'opus_mt_sv_mt', 'xx.mul.marian.translate_to.en': 'opus_mt_mul_en', 'xx.ner.bert': 'bert_ner_indicner', 'xx.ner.bert.extra_support_it_es_vi_en': 'bert_ner_ner_en_vi_it_es_tinparadox', 'xx.ner.bert.wikiann.base': 'bert_ner_nbailab_base_ner_scandi', 'xx.ner.biobert.craft.cased_base_augmented_finetuned.extra_support_en': 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_craft_augmentedtransfer_en', 'xx.ner.biobert.craft.cased_base_augmented_finetuned.extra_support_en.by_StivenLancheros': 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_craft_augmented_en', 
'xx.ner.biobert.craft.cased_base_augmented_finetuned.extra_support_es': 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_craft_augmentedtransfer_es', 'xx.ner.biobert.craft.cased_base_augmented_finetuned.extra_support_es.by_StivenLancheros': 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_craft_augmented_es', 'xx.ner.biobert.craft.cased_base_finetuned': 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_craft_english', 'xx.ner.biobert.craft.cased_base_finetuned.extra_support_es': 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_concat_craft_es_stivenlancheros', 'xx.ner.biobert.craft.cased_base_finetuned.extra_support_es_en': 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_craft_es_en_stivenlancheros', 'xx.ner.distil_bert.cased_base_finetuned': 'distilbert_ner_base_multi_cased_finetuned_typo_detection', 'xx.ner.high_resourced_lang': 'xlm_roberta_large_token_classifier_hrl', 'xx.ner.masakhaner': 'xlm_roberta_large_token_classifier_masakhaner', 'xx.ner.masakhaner.distilbert': 'distilbert_base_token_classifier_masakhaner', 'xx.ner.masakhaner.xlm_roberta': 'xlm_roberta_large_token_classifier_masakhaner', 'xx.ner.pos.cased_base': 'bert_pos_bert_base_dutch_cased_upos_alpino_frisian', 'xx.ner.pos.xtreme.base': 'bert_pos_bert_base_ft_pos_xtreme', 'xx.ner.roberta': 'roberta_ner_sroberta_ner', 'xx.ner.roberta.base': 'roberta_ner_sroberta_base_ner', 'xx.ner.roberta.clinical_bio_medical_craft.base_augmented_finetuned.extra_support_es_en': 'roberta_ner_roberta_base_biomedical_clinical_es_finetuned_ner_craft_augmentedtransfer_en', 'xx.ner.roberta.clinical_bio_medical_craft.base_augmented_finetuned.extra_support_es_en.by_StivenLancheros': 'roberta_ner_roberta_base_biomedical_clinical_es_finetuned_ner_craft_augmented_en', 'xx.ner.roberta.clinical_bio_medical_craft.base_finetuned.extra_support_es': 'roberta_ner_roberta_base_biomedical_clinical_es_finetuned_ner_craft', 'xx.ner.roberta.l': 'roberta_ner_sroberta_l_ner', 'xx.ner.roberta.xl': 'roberta_ner_sroberta_xl_ner', 
'xx.ner.scandinavian': 'bert_token_classifier_scandi_ner', 'xx.ner.wikiner_glove_840B_300': 'ner_wikiner_glove_840B_300', 'xx.ner.wikiner_xlm_roberta_base': 'ner_wikiner_xlm_roberta_base', 'xx.ner.xlmr_roberta': 'xlmroberta_ner_extract_names', 'xx.ner.xlmr_roberta.base': 'xlmroberta_ner_base_masakhan', 'xx.ner.xlmr_roberta.base.by_davlan': 'xlmroberta_ner_base_sadilar', 'xx.ner.xlmr_roberta.base_finetuned': 'xlmroberta_ner_cj_mills_base_finetuned_panx_all', 'xx.ner.xlmr_roberta.base_finetuned.by_cj_mills': 'xlmroberta_ner_cj_mills_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned.by_danhsf': 'xlmroberta_ner_danhsf_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned.by_jdang': 'xlmroberta_ner_jdang_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned.by_k3nneth': 'xlmroberta_ner_k3nneth_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned.by_leizhang': 'xlmroberta_ner_leizhang_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned.by_moghis': 'xlmroberta_ner_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned.by_olpa': 'xlmroberta_ner_olpa_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned.by_osanseviero': 'xlmroberta_ner_osanseviero_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned.by_transformersbook': 'xlmroberta_ner_transformersbook_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned.by_zdepablo': 'xlmroberta_ner_zpablo_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned_panx.by_dkasti': 'xlmroberta_ner_dkasti_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned_panx.by_edwardjross': 'xlmroberta_ner_edwardjross_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned_panx.by_flood': 'xlmroberta_ner_flood_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned_panx.by_haesun': 'xlmroberta_ner_haesun_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned_panx.by_iis2009002': 'xlmroberta_ner_iis2009002_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned_panx.by_jgriffi': 
'xlmroberta_ner_jgriffi_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned_panx.by_lijingxin': 'xlmroberta_ner_lijingxin_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned_panx.by_neha2608': 'xlmroberta_ner_neha2608_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned_panx.by_robkayinto': 'xlmroberta_ner_robkayinto_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned_panx.by_skr3178': 'xlmroberta_ner_skr3178_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned_panx.by_v3rx2000': 'xlmroberta_ner_v3rx2000_base_finetuned_panx', 'xx.ner.xlmr_roberta.base_finetuned_panx_all.by_dkasti': 'xlmroberta_ner_dkasti_base_finetuned_panx_all', 'xx.ner.xlmr_roberta.base_finetuned_panx_all.by_edwardjross': 'xlmroberta_ner_edwardjross_base_finetuned_panx_all', 'xx.ner.xlmr_roberta.base_finetuned_panx_all.by_flood': 'xlmroberta_ner_flood_base_finetuned_panx_all', 'xx.ner.xlmr_roberta.base_finetuned_panx_all.by_haesun': 'xlmroberta_ner_haesun_base_finetuned_panx_all', 'xx.ner.xlmr_roberta.base_finetuned_panx_all.by_iis2009002': 'xlmroberta_ner_iis2009002_base_finetuned_panx_all', 'xx.ner.xlmr_roberta.base_finetuned_panx_all.by_jgriffi': 'xlmroberta_ner_jgriffi_base_finetuned_panx_all', 'xx.ner.xlmr_roberta.base_finetuned_panx_all.by_lijingxin': 'xlmroberta_ner_lijingxin_base_finetuned_panx_all', 'xx.ner.xlmr_roberta.base_finetuned_panx_all.by_neha2608': 'xlmroberta_ner_neha2608_base_finetuned_panx_all', 'xx.ner.xlmr_roberta.base_finetuned_panx_all.by_robkayinto': 'xlmroberta_ner_robkayinto_base_finetuned_panx_all', 'xx.ner.xlmr_roberta.base_finetuned_panx_all.by_skr3178': 'xlmroberta_ner_skr3178_base_finetuned_panx_all', 'xx.ner.xlmr_roberta.base_finetuned_panx_all.by_v3rx2000': 'xlmroberta_ner_v3rx2000_base_finetuned_panx_all', 'xx.ner.xlmr_roberta.conll.large_finetuned': 'xlmroberta_ner_large_finetuned_conll03_english', 'xx.ner.xlmr_roberta.lang': 'xlmroberta_ner_fullstop_punctuation_multilang_larg', 'xx.ner.xlmr_roberta.lang.by_jplu': 
'xlmroberta_ner_tf_r_40_lang', 'xx.ner.xlmr_roberta.lang.by_nbroad': 'xlmroberta_ner_jplu_r_40_lang', 'xx.ner.xlmr_roberta.multilingual': 'xlmroberta_ner_roberta_multilingual_medieval', 'xx.ner.xlmr_roberta.wikiann.base_finetuned': 'xlmroberta_ner_transformersbook_base_finetuned_panx_all', 'xx.ner.xlmr_roberta.wikineural.multilingual': 'xlmroberta_ner_skyr_wikineural_multilingual', 'xx.ner.xtreme_glove_840B_300': 'ner_xtreme_glove_840B_300', 'xx.ner.xtreme_xlm_roberta_xtreme_base': 'ner_xtreme_xlm_roberta_xtreme_base', 'xx.ng.marian.translate_to.en': 'opus_mt_ng_en', 'xx.nic.marian.translate_to.en': 'opus_mt_nic_en', 'xx.niu.marian.translate_to.de': 'opus_mt_de_niu', 'xx.niu.marian.translate_to.en': 'opus_mt_niu_en', 'xx.niu.marian.translate_to.es': 'opus_mt_es_niu', 'xx.niu.marian.translate_to.fi': 'opus_mt_fi_niu', 'xx.niu.marian.translate_to.fr': 'opus_mt_fr_niu', 'xx.niu.marian.translate_to.sv': 'opus_mt_sv_niu', 'xx.nl.marian.translate_to.af': 'opus_mt_af_nl', 'xx.nl.marian.translate_to.ca': 'opus_mt_ca_nl', 'xx.nl.marian.translate_to.de': 'opus_mt_de_nl', 'xx.nl.marian.translate_to.en': 'opus_mt_nl_en', 'xx.nl.marian.translate_to.eo': 'opus_mt_eo_nl', 'xx.nl.marian.translate_to.es': 'opus_mt_es_nl', 'xx.nl.marian.translate_to.fi': 'opus_mt_fi_nl', 'xx.nl.marian.translate_to.ja': 'opus_mt_ja_nl', 'xx.nl.marian.translate_to.no': 'opus_mt_no_nl', 'xx.nl.marian.translate_to.sv': 'opus_mt_sv_nl', 'xx.nl.marian.translate_to.uk': 'opus_mt_uk_nl', 'xx.nl.marian.translate_to.zh': 'opus_mt_zh_nl', 'xx.no.marian.translate_to.da': 'opus_mt_da_no', 'xx.no.marian.translate_to.de': 'opus_mt_de_no', 'xx.no.marian.translate_to.es': 'opus_mt_es_no', 'xx.no.marian.translate_to.fi': 'opus_mt_fi_no', 'xx.no.marian.translate_to.fr': 'opus_mt_fr_no', 'xx.no.marian.translate_to.nl': 'opus_mt_nl_no', 'xx.no.marian.translate_to.no': 'opus_mt_no_no', 'xx.no.marian.translate_to.pl': 'opus_mt_pl_no', 'xx.no.marian.translate_to.ru': 'opus_mt_ru_no', 'xx.no.marian.translate_to.sv': 
'opus_mt_sv_no', 'xx.no.marian.translate_to.uk': 'opus_mt_uk_no', 'xx.nso.marian.translate_to.de': 'opus_mt_de_nso', 'xx.nso.marian.translate_to.en': 'opus_mt_nso_en', 'xx.nso.marian.translate_to.es': 'opus_mt_es_nso', 'xx.nso.marian.translate_to.fi': 'opus_mt_fi_nso', 'xx.nso.marian.translate_to.fr': 'opus_mt_fr_nso', 'xx.nso.marian.translate_to.sv': 'opus_mt_sv_nso', 'xx.ny.marian.translate_to.de': 'opus_mt_de_ny', 'xx.ny.marian.translate_to.en': 'opus_mt_ny_en', 'xx.ny.marian.translate_to.es': 'opus_mt_es_ny', 'xx.ny.marian.translate_to.fi': 'opus_mt_fi_ny', 'xx.ny.marian.translate_to.fr': 'opus_mt_fr_ny', 'xx.ny.marian.translate_to.sv': 'opus_mt_sv_ny', 'xx.nyk.marian.translate_to.en': 'opus_mt_nyk_en', 'xx.om.marian.translate_to.en': 'opus_mt_om_en', 'xx.pa.marian.translate_to.en': 'opus_mt_pa_en', 'xx.pag.marian.translate_to.de': 'opus_mt_de_pag', 'xx.pag.marian.translate_to.en': 'opus_mt_pag_en', 'xx.pag.marian.translate_to.es': 'opus_mt_es_pag', 'xx.pag.marian.translate_to.fi': 'opus_mt_fi_pag', 'xx.pag.marian.translate_to.fr': 'opus_mt_fr_pag', 'xx.pag.marian.translate_to.sv': 'opus_mt_sv_pag', 'xx.pap.marian.translate_to.de': 'opus_mt_de_pap', 'xx.pap.marian.translate_to.en': 'opus_mt_pap_en', 'xx.pap.marian.translate_to.es': 'opus_mt_es_pap', 'xx.pap.marian.translate_to.fi': 'opus_mt_fi_pap', 'xx.pap.marian.translate_to.fr': 'opus_mt_fr_pap', 'xx.pap.marian.translate_to.sv': 'opus_mt_sv_pap', 'xx.phi.marian.translate_to.en': 'opus_mt_phi_en', 'xx.pis.marian.translate_to.de': 'opus_mt_de_pis', 'xx.pis.marian.translate_to.en': 'opus_mt_pis_en', 'xx.pis.marian.translate_to.es': 'opus_mt_es_pis', 'xx.pis.marian.translate_to.fi': 'opus_mt_fi_pis', 'xx.pis.marian.translate_to.fr': 'opus_mt_fr_pis', 'xx.pis.marian.translate_to.sv': 'opus_mt_sv_pis', 'xx.pl.marian.translate_to.ar': 'opus_mt_ar_pl', 'xx.pl.marian.translate_to.de': 'opus_mt_de_pl', 'xx.pl.marian.translate_to.en': 'opus_mt_pl_en', 'xx.pl.marian.translate_to.eo': 'opus_mt_eo_pl', 
'xx.pl.marian.translate_to.es': 'opus_mt_es_pl', 'xx.pl.marian.translate_to.fr': 'opus_mt_fr_pl', 'xx.pl.marian.translate_to.ja': 'opus_mt_ja_pl', 'xx.pl.marian.translate_to.lt': 'opus_mt_lt_pl', 'xx.pl.marian.translate_to.no': 'opus_mt_no_pl', 'xx.pl.marian.translate_to.uk': 'opus_mt_uk_pl', 'xx.pon.marian.translate_to.de': 'opus_mt_de_pon', 'xx.pon.marian.translate_to.en': 'opus_mt_pon_en', 'xx.pon.marian.translate_to.es': 'opus_mt_es_pon', 'xx.pon.marian.translate_to.fi': 'opus_mt_fi_pon', 'xx.pon.marian.translate_to.fr': 'opus_mt_fr_pon', 'xx.pon.marian.translate_to.sv': 'opus_mt_sv_pon', 'xx.pqe.marian.translate_to.en': 'opus_mt_pqe_en', 'xx.prl.marian.translate_to.es': 'opus_mt_es_prl', 'xx.pt.marian.translate_to.ca': 'opus_mt_ca_pt', 'xx.pt.marian.translate_to.eo': 'opus_mt_eo_pt', 'xx.pt.marian.translate_to.gl': 'opus_mt_gl_pt', 'xx.pt.marian.translate_to.ja': 'opus_mt_ja_pt', 'xx.pt.marian.translate_to.tl': 'opus_mt_tl_pt', 'xx.pt.marian.translate_to.uk': 'opus_mt_uk_pt', 'xx.rn.marian.translate_to.en': 'opus_mt_rn_en', 'xx.rn.marian.translate_to.es': 'opus_mt_es_rn', 'xx.rnd.marian.translate_to.en': 'opus_mt_rnd_en', 'xx.rnd.marian.translate_to.fr': 'opus_mt_fr_rnd', 'xx.rnd.marian.translate_to.sv': 'opus_mt_sv_rnd', 'xx.ro.marian.translate_to.eo': 'opus_mt_eo_ro', 'xx.ro.marian.translate_to.es': 'opus_mt_es_ro', 'xx.ro.marian.translate_to.fi': 'opus_mt_fi_ro', 'xx.ro.marian.translate_to.fr': 'opus_mt_fr_ro', 'xx.ro.marian.translate_to.sv': 'opus_mt_sv_ro', 'xx.roa.marian.translate_to.en': 'opus_mt_roa_en', 'xx.ru.marian.translate_to.af': 'opus_mt_af_ru', 'xx.ru.marian.translate_to.ar': 'opus_mt_ar_ru', 'xx.ru.marian.translate_to.bg': 'opus_mt_bg_ru', 'xx.ru.marian.translate_to.da': 'opus_mt_da_ru', 'xx.ru.marian.translate_to.en': 'opus_mt_ru_en', 'xx.ru.marian.translate_to.eo': 'opus_mt_eo_ru', 'xx.ru.marian.translate_to.es': 'opus_mt_es_ru', 'xx.ru.marian.translate_to.et': 'opus_mt_et_ru', 'xx.ru.marian.translate_to.eu': 'opus_mt_eu_ru', 
'xx.ru.marian.translate_to.fi': 'opus_mt_fi_ru', 'xx.ru.marian.translate_to.fr': 'opus_mt_fr_ru', 'xx.ru.marian.translate_to.he': 'opus_mt_he_ru', 'xx.ru.marian.translate_to.hy': 'opus_mt_hy_ru', 'xx.ru.marian.translate_to.ja': 'opus_mt_ja_ru', 'xx.ru.marian.translate_to.ka': 'opus_mt_ka_ru', 'xx.ru.marian.translate_to.ko': 'opus_mt_ko_ru', 'xx.ru.marian.translate_to.lt': 'opus_mt_lt_ru', 'xx.ru.marian.translate_to.lv': 'opus_mt_lv_ru', 'xx.ru.marian.translate_to.no': 'opus_mt_no_ru', 'xx.ru.marian.translate_to.rn': 'opus_mt_rn_ru', 'xx.ru.marian.translate_to.sl': 'opus_mt_sl_ru', 'xx.ru.marian.translate_to.sv': 'opus_mt_sv_ru', 'xx.ru.marian.translate_to.uk': 'opus_mt_uk_ru', 'xx.ru.marian.translate_to.vi': 'opus_mt_vi_ru', 'xx.run.marian.translate_to.en': 'opus_mt_run_en', 'xx.run.marian.translate_to.fi': 'opus_mt_fi_run', 'xx.run.marian.translate_to.fr': 'opus_mt_fr_run', 'xx.run.marian.translate_to.sv': 'opus_mt_sv_run', 'xx.rw.marian.translate_to.en': 'opus_mt_rw_en', 'xx.rw.marian.translate_to.es': 'opus_mt_es_rw', 'xx.rw.marian.translate_to.fi': 'opus_mt_fi_rw', 'xx.rw.marian.translate_to.fr': 'opus_mt_fr_rw', 'xx.rw.marian.translate_to.sv': 'opus_mt_sv_rw', 'xx.sal.marian.translate_to.en': 'opus_mt_sal_en', 'xx.sem.marian.translate_to.en': 'opus_mt_sem_en', 'xx.sem.marian.translate_to.sem': 'opus_mt_sem_sem', 'xx.sentence_detector': 'sentence_detector_dl', 'xx.sg.marian.translate_to.en': 'opus_mt_sg_en', 'xx.sg.marian.translate_to.es': 'opus_mt_es_sg', 'xx.sg.marian.translate_to.fi': 'opus_mt_fi_sg', 'xx.sg.marian.translate_to.fr': 'opus_mt_fr_sg', 'xx.sg.marian.translate_to.sv': 'opus_mt_sv_sg', 'xx.sh.marian.translate_to.eo': 'opus_mt_eo_sh', 'xx.sh.marian.translate_to.ja': 'opus_mt_ja_sh', 'xx.sh.marian.translate_to.uk': 'opus_mt_uk_sh', 'xx.sk.marian.translate_to.en': 'opus_mt_sk_en', 'xx.sk.marian.translate_to.fi': 'opus_mt_fi_sk', 'xx.sk.marian.translate_to.fr': 'opus_mt_fr_sk', 'xx.sk.marian.translate_to.sv': 'opus_mt_sv_sk', 
'xx.sl.marian.translate_to.es': 'opus_mt_es_sl', 'xx.sl.marian.translate_to.fi': 'opus_mt_fi_sl', 'xx.sl.marian.translate_to.fr': 'opus_mt_fr_sl', 'xx.sl.marian.translate_to.ru': 'opus_mt_ru_sl', 'xx.sl.marian.translate_to.sv': 'opus_mt_sv_sl', 'xx.sl.marian.translate_to.uk': 'opus_mt_uk_sl', 'xx.sla.marian.translate_to.en': 'opus_mt_sla_en', 'xx.sla.marian.translate_to.sla': 'opus_mt_sla_sla', 'xx.sm.marian.translate_to.en': 'opus_mt_sm_en', 'xx.sm.marian.translate_to.es': 'opus_mt_es_sm', 'xx.sm.marian.translate_to.fi': 'opus_mt_fi_sm', 'xx.sm.marian.translate_to.fr': 'opus_mt_fr_sm', 'xx.sm.marian.translate_to.sv': 'opus_mt_sv_sm', 'xx.sn.marian.translate_to.en': 'opus_mt_sn_en', 'xx.sn.marian.translate_to.es': 'opus_mt_es_sn', 'xx.sn.marian.translate_to.fi': 'opus_mt_fi_sn', 'xx.sn.marian.translate_to.fr': 'opus_mt_fr_sn', 'xx.sn.marian.translate_to.sv': 'opus_mt_sv_sn', 'xx.sq.marian.translate_to.en': 'opus_mt_sq_en', 'xx.sq.marian.translate_to.fi': 'opus_mt_fi_sq', 'xx.sq.marian.translate_to.sv': 'opus_mt_sv_sq', 'xx.srn.marian.translate_to.en': 'opus_mt_srn_en', 'xx.srn.marian.translate_to.es': 'opus_mt_es_srn', 'xx.srn.marian.translate_to.fi': 'opus_mt_fi_srn', 'xx.srn.marian.translate_to.fr': 'opus_mt_fr_srn', 'xx.srn.marian.translate_to.sv': 'opus_mt_sv_srn', 'xx.ss.marian.translate_to.en': 'opus_mt_ss_en', 'xx.st.marian.translate_to.en': 'opus_mt_st_en', 'xx.st.marian.translate_to.es': 'opus_mt_es_st', 'xx.st.marian.translate_to.fi': 'opus_mt_fi_st', 'xx.st.marian.translate_to.fr': 'opus_mt_fr_st', 'xx.st.marian.translate_to.sv': 'opus_mt_sv_st', 'xx.sv.marian.translate_to.af': 'opus_mt_af_sv', 'xx.sv.marian.translate_to.am': 'opus_mt_am_sv', 'xx.sv.marian.translate_to.ase': 'opus_mt_ase_sv', 'xx.sv.marian.translate_to.bcl': 'opus_mt_bcl_sv', 'xx.sv.marian.translate_to.bem': 'opus_mt_bem_sv', 'xx.sv.marian.translate_to.bg': 'opus_mt_bg_sv', 'xx.sv.marian.translate_to.bi': 'opus_mt_bi_sv', 'xx.sv.marian.translate_to.bzs': 'opus_mt_bzs_sv', 
'xx.sv.marian.translate_to.ceb': 'opus_mt_ceb_sv', 'xx.sv.marian.translate_to.chk': 'opus_mt_chk_sv', 'xx.sv.marian.translate_to.crs': 'opus_mt_crs_sv', 'xx.sv.marian.translate_to.ee': 'opus_mt_ee_sv', 'xx.sv.marian.translate_to.efi': 'opus_mt_efi_sv', 'xx.sv.marian.translate_to.el': 'opus_mt_el_sv', 'xx.sv.marian.translate_to.en': 'opus_mt_sv_en', 'xx.sv.marian.translate_to.eo': 'opus_mt_eo_sv', 'xx.sv.marian.translate_to.et': 'opus_mt_et_sv', 'xx.sv.marian.translate_to.fi': 'opus_mt_fi_sv', 'xx.sv.marian.translate_to.fr': 'opus_mt_fr_sv', 'xx.sv.marian.translate_to.gaa': 'opus_mt_gaa_sv', 'xx.sv.marian.translate_to.gil': 'opus_mt_gil_sv', 'xx.sv.marian.translate_to.guw': 'opus_mt_guw_sv', 'xx.sv.marian.translate_to.ha': 'opus_mt_ha_sv', 'xx.sv.marian.translate_to.he': 'opus_mt_he_sv', 'xx.sv.marian.translate_to.hr': 'opus_mt_hr_sv', 'xx.sv.marian.translate_to.ht': 'opus_mt_ht_sv', 'xx.sv.marian.translate_to.hu': 'opus_mt_hu_sv', 'xx.sv.marian.translate_to.id': 'opus_mt_id_sv', 'xx.sv.marian.translate_to.ig': 'opus_mt_ig_sv', 'xx.sv.marian.translate_to.ilo': 'opus_mt_ilo_sv', 'xx.sv.marian.translate_to.is': 'opus_mt_is_sv', 'xx.sv.marian.translate_to.iso': 'opus_mt_iso_sv', 'xx.sv.marian.translate_to.it': 'opus_mt_it_sv', 'xx.sv.marian.translate_to.ja': 'opus_mt_ja_sv', 'xx.sv.marian.translate_to.kg': 'opus_mt_kg_sv', 'xx.sv.marian.translate_to.ko': 'opus_mt_ko_sv', 'xx.sv.marian.translate_to.kqn': 'opus_mt_kqn_sv', 'xx.sv.marian.translate_to.kwy': 'opus_mt_kwy_sv', 'xx.sv.marian.translate_to.lg': 'opus_mt_lg_sv', 'xx.sv.marian.translate_to.loz': 'opus_mt_loz_sv', 'xx.sv.marian.translate_to.lt': 'opus_mt_lt_sv', 'xx.sv.marian.translate_to.lu': 'opus_mt_lu_sv', 'xx.sv.marian.translate_to.lua': 'opus_mt_lua_sv', 'xx.sv.marian.translate_to.lue': 'opus_mt_lue_sv', 'xx.sv.marian.translate_to.lus': 'opus_mt_lus_sv', 'xx.sv.marian.translate_to.lv': 'opus_mt_lv_sv', 'xx.sv.marian.translate_to.mt': 'opus_mt_mt_sv', 'xx.sv.marian.translate_to.niu': 'opus_mt_niu_sv', 
'xx.sv.marian.translate_to.nl': 'opus_mt_nl_sv', 'xx.sv.marian.translate_to.no': 'opus_mt_no_sv', 'xx.sv.marian.translate_to.nso': 'opus_mt_nso_sv', 'xx.sv.marian.translate_to.pag': 'opus_mt_pag_sv', 'xx.sv.marian.translate_to.pis': 'opus_mt_pis_sv', 'xx.sv.marian.translate_to.pl': 'opus_mt_pl_sv', 'xx.sv.marian.translate_to.pon': 'opus_mt_pon_sv', 'xx.sv.marian.translate_to.rnd': 'opus_mt_rnd_sv', 'xx.sv.marian.translate_to.ro': 'opus_mt_ro_sv', 'xx.sv.marian.translate_to.ru': 'opus_mt_ru_sv', 'xx.sv.marian.translate_to.run': 'opus_mt_run_sv', 'xx.sv.marian.translate_to.rw': 'opus_mt_rw_sv', 'xx.sv.marian.translate_to.sg': 'opus_mt_sg_sv', 'xx.sv.marian.translate_to.sk': 'opus_mt_sk_sv', 'xx.sv.marian.translate_to.sl': 'opus_mt_sl_sv', 'xx.sv.marian.translate_to.sn': 'opus_mt_sn_sv', 'xx.sv.marian.translate_to.sq': 'opus_mt_sq_sv', 'xx.sv.marian.translate_to.srn': 'opus_mt_srn_sv', 'xx.sv.marian.translate_to.st': 'opus_mt_st_sv', 'xx.sv.marian.translate_to.sv': 'opus_mt_sv_sv', 'xx.sv.marian.translate_to.swc': 'opus_mt_swc_sv', 'xx.sv.marian.translate_to.tiv': 'opus_mt_tiv_sv', 'xx.sv.marian.translate_to.tll': 'opus_mt_tll_sv', 'xx.sv.marian.translate_to.tn': 'opus_mt_tn_sv', 'xx.sv.marian.translate_to.to': 'opus_mt_to_sv', 'xx.sv.marian.translate_to.toi': 'opus_mt_toi_sv', 'xx.sv.marian.translate_to.tpi': 'opus_mt_tpi_sv', 'xx.sv.marian.translate_to.tr': 'opus_mt_tr_sv', 'xx.sv.marian.translate_to.ts': 'opus_mt_ts_sv', 'xx.sv.marian.translate_to.tum': 'opus_mt_tum_sv', 'xx.sv.marian.translate_to.tvl': 'opus_mt_tvl_sv', 'xx.sv.marian.translate_to.tw': 'opus_mt_tw_sv', 'xx.sv.marian.translate_to.ty': 'opus_mt_ty_sv', 'xx.sv.marian.translate_to.uk': 'opus_mt_uk_sv', 'xx.sv.marian.translate_to.war': 'opus_mt_war_sv', 'xx.sv.marian.translate_to.wls': 'opus_mt_wls_sv', 'xx.sv.marian.translate_to.xh': 'opus_mt_xh_sv', 'xx.sv.marian.translate_to.yap': 'opus_mt_yap_sv', 'xx.sv.marian.translate_to.yo': 'opus_mt_yo_sv', 'xx.sv.marian.translate_to.zh': 'opus_mt_zh_sv', 
'xx.sv.marian.translate_to.zne': 'opus_mt_zne_sv', 'xx.sw.marian.translate_to.fi': 'opus_mt_fi_sw', 'xx.swc.marian.translate_to.en': 'opus_mt_swc_en', 'xx.swc.marian.translate_to.es': 'opus_mt_es_swc', 'xx.swc.marian.translate_to.fi': 'opus_mt_fi_swc', 'xx.swc.marian.translate_to.fr': 'opus_mt_fr_swc', 'xx.swc.marian.translate_to.sv': 'opus_mt_sv_swc', 'xx.taw.marian.translate_to.en': 'opus_mt_taw_en', 'xx.th.marian.translate_to.en': 'opus_mt_th_en', 'xx.th.marian.translate_to.sv': 'opus_mt_sv_th', 'xx.ti.marian.translate_to.en': 'opus_mt_ti_en', 'xx.tiv.marian.translate_to.en': 'opus_mt_tiv_en', 'xx.tiv.marian.translate_to.fi': 'opus_mt_fi_tiv', 'xx.tiv.marian.translate_to.fr': 'opus_mt_fr_tiv', 'xx.tiv.marian.translate_to.sv': 'opus_mt_sv_tiv', 'xx.tl.marian.translate_to.de': 'opus_mt_de_tl', 'xx.tl.marian.translate_to.en': 'opus_mt_tl_en', 'xx.tl.marian.translate_to.es': 'opus_mt_es_tl', 'xx.tl.marian.translate_to.fr': 'opus_mt_fr_tl', 'xx.tl.marian.translate_to.pt': 'opus_mt_pt_tl', 'xx.tll.marian.translate_to.en': 'opus_mt_tll_en', 'xx.tll.marian.translate_to.es': 'opus_mt_es_tll', 'xx.tll.marian.translate_to.fi': 'opus_mt_fi_tll', 'xx.tll.marian.translate_to.fr': 'opus_mt_fr_tll', 'xx.tll.marian.translate_to.sv': 'opus_mt_sv_tll', 'xx.tn.marian.translate_to.en': 'opus_mt_tn_en', 'xx.tn.marian.translate_to.es': 'opus_mt_es_tn', 'xx.tn.marian.translate_to.fi': 'opus_mt_fi_tn', 'xx.tn.marian.translate_to.fr': 'opus_mt_fr_tn', 'xx.tn.marian.translate_to.sv': 'opus_mt_sv_tn', 'xx.to.marian.translate_to.en': 'opus_mt_to_en', 'xx.to.marian.translate_to.es': 'opus_mt_es_to', 'xx.to.marian.translate_to.fi': 'opus_mt_fi_to', 'xx.to.marian.translate_to.fr': 'opus_mt_fr_to', 'xx.to.marian.translate_to.sv': 'opus_mt_sv_to', 'xx.toi.marian.translate_to.en': 'opus_mt_toi_en', 'xx.toi.marian.translate_to.fi': 'opus_mt_fi_toi', 'xx.toi.marian.translate_to.sv': 'opus_mt_sv_toi', 'xx.tpi.marian.translate_to.en': 'opus_mt_tpi_en', 'xx.tpi.marian.translate_to.es': 
'opus_mt_es_tpi', 'xx.tpi.marian.translate_to.fi': 'opus_mt_fi_tpi', 'xx.tpi.marian.translate_to.fr': 'opus_mt_fr_tpi', 'xx.tpi.marian.translate_to.sv': 'opus_mt_sv_tpi', 'xx.tr.marian.translate_to.ar': 'opus_mt_ar_tr', 'xx.tr.marian.translate_to.az': 'opus_mt_az_tr', 'xx.tr.marian.translate_to.bg': 'opus_mt_bg_tr', 'xx.tr.marian.translate_to.en': 'opus_mt_tr_en', 'xx.tr.marian.translate_to.fi': 'opus_mt_fi_tr', 'xx.tr.marian.translate_to.ja': 'opus_mt_ja_tr', 'xx.tr.marian.translate_to.lt': 'opus_mt_lt_tr', 'xx.tr.marian.translate_to.uk': 'opus_mt_uk_tr', 'xx.trk.marian.translate_to.en': 'opus_mt_trk_en', 'xx.ts.marian.translate_to.en': 'opus_mt_ts_en', 'xx.ts.marian.translate_to.fi': 'opus_mt_fi_ts', 'xx.ts.marian.translate_to.fr': 'opus_mt_fr_ts', 'xx.ts.marian.translate_to.sv': 'opus_mt_sv_ts', 'xx.tum.marian.translate_to.en': 'opus_mt_tum_en', 'xx.tum.marian.translate_to.fr': 'opus_mt_fr_tum', 'xx.tum.marian.translate_to.sv': 'opus_mt_sv_tum', 'xx.tvl.marian.translate_to.en': 'opus_mt_tvl_en', 'xx.tvl.marian.translate_to.es': 'opus_mt_es_tvl', 'xx.tvl.marian.translate_to.fi': 'opus_mt_fi_tvl', 'xx.tvl.marian.translate_to.fr': 'opus_mt_fr_tvl', 'xx.tvl.marian.translate_to.sv': 'opus_mt_sv_tvl', 'xx.tw.marian.translate_to.es': 'opus_mt_es_tw', 'xx.tw.marian.translate_to.fi': 'opus_mt_fi_tw', 'xx.tw.marian.translate_to.fr': 'opus_mt_fr_tw', 'xx.tw.marian.translate_to.sv': 'opus_mt_sv_tw', 'xx.ty.marian.translate_to.es': 'opus_mt_es_ty', 'xx.ty.marian.translate_to.fi': 'opus_mt_fi_ty', 'xx.ty.marian.translate_to.fr': 'opus_mt_fr_ty', 'xx.ty.marian.translate_to.sv': 'opus_mt_sv_ty', 'xx.tzo.marian.translate_to.es': 'opus_mt_es_tzo', 'xx.uk.marian.translate_to.bg': 'opus_mt_bg_uk', 'xx.uk.marian.translate_to.ca': 'opus_mt_ca_uk', 'xx.uk.marian.translate_to.cs': 'opus_mt_cs_uk', 'xx.uk.marian.translate_to.de': 'opus_mt_de_uk', 'xx.uk.marian.translate_to.en': 'opus_mt_uk_en', 'xx.uk.marian.translate_to.es': 'opus_mt_es_uk', 'xx.uk.marian.translate_to.fi': 
'opus_mt_fi_uk', 'xx.uk.marian.translate_to.fr': 'opus_mt_fr_uk', 'xx.uk.marian.translate_to.he': 'opus_mt_he_uk', 'xx.uk.marian.translate_to.hu': 'opus_mt_hu_uk', 'xx.uk.marian.translate_to.it': 'opus_mt_it_uk', 'xx.uk.marian.translate_to.nl': 'opus_mt_nl_uk', 'xx.uk.marian.translate_to.no': 'opus_mt_no_uk', 'xx.uk.marian.translate_to.pl': 'opus_mt_pl_uk', 'xx.uk.marian.translate_to.pt': 'opus_mt_pt_uk', 'xx.uk.marian.translate_to.ru': 'opus_mt_ru_uk', 'xx.uk.marian.translate_to.sh': 'opus_mt_sh_uk', 'xx.uk.marian.translate_to.sl': 'opus_mt_sl_uk', 'xx.uk.marian.translate_to.sv': 'opus_mt_sv_uk', 'xx.uk.marian.translate_to.tr': 'opus_mt_tr_uk', 'xx.uk.marian.translate_to.zh': 'opus_mt_zh_uk', 'xx.umb.marian.translate_to.en': 'opus_mt_umb_en', 'xx.umb.marian.translate_to.sv': 'opus_mt_sv_umb', 'xx.ur.marian.translate_to.en': 'opus_mt_ur_en', 'xx.ur.marian.translate_to.hi': 'opus_mt_hi_ur', 'xx.urj.marian.translate_to.en': 'opus_mt_urj_en', 'xx.urj.marian.translate_to.urj': 'opus_mt_urj_urj', 'xx.use.multi': 'tfhub_use_multi', 'xx.use.multi_lg': 'tfhub_use_multi_lg', 'xx.ve.marian.translate_to.en': 'opus_mt_ve_en', 'xx.ve.marian.translate_to.es': 'opus_mt_es_ve', 'xx.ve.marian.translate_to.fi': 'opus_mt_fi_ve', 'xx.ve.marian.translate_to.fr': 'opus_mt_fr_ve', 'xx.ve.marian.translate_to.sv': 'opus_mt_sv_ve', 'xx.vi.marian.translate_to.de': 'opus_mt_de_vi', 'xx.vi.marian.translate_to.en': 'opus_mt_vi_en', 'xx.vi.marian.translate_to.es': 'opus_mt_es_vi', 'xx.vi.marian.translate_to.fr': 'opus_mt_fr_vi', 'xx.vi.marian.translate_to.it': 'opus_mt_it_vi', 'xx.vi.marian.translate_to.ja': 'opus_mt_ja_vi', 'xx.vi.marian.translate_to.ru': 'opus_mt_ru_vi', 'xx.vi.marian.translate_to.zh': 'opus_mt_zh_vi', 'xx.wa.marian.translate_to.en': 'opus_mt_wa_en', 'xx.wal.marian.translate_to.en': 'opus_mt_wal_en', 'xx.war.marian.translate_to.en': 'opus_mt_war_en', 'xx.war.marian.translate_to.es': 'opus_mt_es_war', 'xx.war.marian.translate_to.fi': 'opus_mt_fi_war', 
'xx.war.marian.translate_to.fr': 'opus_mt_fr_war', 'xx.war.marian.translate_to.sv': 'opus_mt_sv_war', 'xx.wls.marian.translate_to.en': 'opus_mt_wls_en', 'xx.wls.marian.translate_to.es': 'opus_mt_es_wls', 'xx.wls.marian.translate_to.fi': 'opus_mt_fi_wls', 'xx.wls.marian.translate_to.fr': 'opus_mt_fr_wls', 'xx.wls.marian.translate_to.sv': 'opus_mt_sv_wls', 'xx.xh.marian.translate_to.en': 'opus_mt_xh_en', 'xx.xh.marian.translate_to.es': 'opus_mt_es_xh', 'xx.xh.marian.translate_to.fi': 'opus_mt_fi_xh', 'xx.xh.marian.translate_to.fr': 'opus_mt_fr_xh', 'xx.xh.marian.translate_to.sv': 'opus_mt_sv_xh', 'xx.yap.marian.translate_to.en': 'opus_mt_yap_en', 'xx.yap.marian.translate_to.fi': 'opus_mt_fi_yap', 'xx.yap.marian.translate_to.fr': 'opus_mt_fr_yap', 'xx.yap.marian.translate_to.sv': 'opus_mt_sv_yap', 'xx.yo.marian.translate_to.en': 'opus_mt_yo_en', 'xx.yo.marian.translate_to.es': 'opus_mt_es_yo', 'xx.yo.marian.translate_to.fi': 'opus_mt_fi_yo', 'xx.yo.marian.translate_to.fr': 'opus_mt_fr_yo', 'xx.yo.marian.translate_to.sv': 'opus_mt_sv_yo', 'xx.yua.marian.translate_to.es': 'opus_mt_es_yua', 'xx.zai.marian.translate_to.es': 'opus_mt_es_zai', 'xx.zh.marian.translate_to.en': 'opus_mt_zh_en', 'xx.zle.marian.translate_to.en': 'opus_mt_zle_en', 'xx.zle.marian.translate_to.zle': 'opus_mt_zle_zle', 'xx.zls.marian.translate_to.en': 'opus_mt_zls_en', 'xx.zls.marian.translate_to.zls': 'opus_mt_zls_zls', 'xx.zlw.marian.translate_to.en': 'opus_mt_zlw_en', 'xx.zlw.marian.translate_to.zlw': 'opus_mt_zlw_zlw', 'xx.zne.marian.translate_to.fi': 'opus_mt_fi_zne', 'xx.zne.marian.translate_to.fr': 'opus_mt_fr_zne', 'xx.zne.marian.translate_to.sv': 'opus_mt_sv_zne'}, 'yi': {'yi.detect_sentence': 'sentence_detector_dl', 'yi.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'yo': { 'yo.embed.bert.cased_multilingual_base_finetuned': 'bert_embeddings_base_multilingual_cased_finetuned_yoruba', 'yo.embed.w2v_cc_300d': 'w2v_cc_300d', 'yo.embed.xlm_roberta': 'xlm_roberta_base_finetuned_yoruba', 
'yo.embed_sentence.xlm_roberta': 'sent_xlm_roberta_base_finetuned_yoruba', 'yo.lemma': 'lemma', 'yo.ner.xlmr_roberta.base_finetuned': 'xlmroberta_ner_xlm_roberta_base_finetuned_ner_yoruba', 'yo.ner.xlmr_roberta.base_finetuned_swahili.by_mbeukman': 'xlmroberta_ner_xlm_roberta_base_finetuned_swahili_finetuned_ner_yoruba', 'yo.ner.xlmr_roberta.base_finetuned_yoruba.by_mbeukman': 'xlmroberta_ner_xlm_roberta_base_finetuned_yoruba_finetuned_ner_yoruba', 'yo.pos': 'pos_ud_ytb', 'yo.stopwords': 'stopwords_yo'}, 'yue': { 'yue.speech2text.wav2vec_xlsr.v2_large': 'asr_wav2vec2_large_xlsr_cantonese_by_ctl', 'yue.speech2text.wav2vec_xlsr.v2_large_gpu': 'asr_wav2vec2_large_xlsr_cantonese_by_ctl_gpu'}, 'zea': {'zea.embed.w2v_cc_300d': 'w2v_cc_300d'}, 'zh': {'zh.answer_question.bert': 'bert_qa_question_answering_chinese', 'zh.answer_question.bert.base': 'bert_qa_chinese_pert_base_mrc', 'zh.answer_question.bert.base.by_hfl': 'bert_qa_chinese_pert_base_mrc', 'zh.answer_question.bert.base.by_jackh1995': 'bert_qa_roberta_base_chinese_extractive_qa_scratch', 'zh.answer_question.bert.base.by_liam168': 'bert_qa_qa_roberta_base_chinese_extractive', 'zh.answer_question.bert.base.by_uer': 'bert_qa_roberta_base_chinese_extractive_qa', 'zh.answer_question.bert.base_extractive': 'bert_qa_roberta_base_chinese_extractive', 'zh.answer_question.bert.by_jackh1995': 'bert_qa_bert_chinese_finetuned', 'zh.answer_question.bert.by_yechen': 'bert_qa_question_answering_chinese', 'zh.answer_question.bert.cased_multilingual_base': 'bert_qa_multilingual_base_cased_chines', 'zh.answer_question.bert.large': 'bert_qa_chinese_pert_large_open_domain_mrc', 'zh.answer_question.bert.large.by_hfl': 'bert_qa_chinese_pert_large_mrc', 'zh.answer_question.bert.large.by_luhua': 'bert_qa_chinese_pretrain_mrc_roberta_wwm_ext_large', 'zh.answer_question.bert.large.by_qalover': 'bert_qa_chinese_pert_large_open_domain_mrc', 'zh.answer_question.bert.multilingual_base_cased': 'bert_qa_multilingual_bert_base_cased_chinese', 
'zh.answer_question.bert_voidful': 'bert_qa_question_answering_zh_voidful', 'zh.answer_question.mac_bert.large': 'bert_qa_chinese_pretrain_mrc_macbert_large', 'zh.answer_question.squad.bert.base': 'bert_qa_bert_base_chinese_finetuned_squad_colab', 'zh.answer_question.xlm_roberta.base': 'xlm_roberta_qa_xlm_roberta_base_chinese', 'zh.classify.bert': 'bert_classifier_autonlp_abbb_622117836', 'zh.classify.bert.base_finetuned': 'bert_classifier_c2_roberta_base_finetuned_dianping_chinese', 'zh.classify.bert.base_finetuned_dianping_chinese.by_uer': 'bert_classifier_roberta_base_finetuned_dianping_chinese', 'zh.classify.bert.base_finetuned_ifeng_chinese.by_uer': 'bert_classifier_roberta_base_finetuned_ifeng_chinese', 'zh.classify.bert.base_finetuned_jd_binary_chinese.by_uer': 'bert_classifier_roberta_base_finetuned_jd_binary_chinese', 'zh.classify.bert.base_finetuned_jd_full_chinese.by_uer': 'bert_classifier_roberta_base_finetuned_jd_full_chinese', 'zh.classify.bert.by_lgodwangl': 'bert_classifier_sent_chineses', 'zh.classify.bert.cat333_624217911.by_kyleinincubated': 'bert_classifier_autonlp_cat333_624217911', 'zh.classify.bert.cat33_624317932.by_kyleinincubated': 'bert_classifier_autonlp_cat33_624317932', 'zh.classify.bert.cls.by_celtics1863': 'bert_classifier_env_cls_chinese', 'zh.classify.bert.finetuned': 'bert_classifier_finetuned_semantic_chinese', 'zh.classify.bert.lang_110m': 'bert_classifier_erlangshen_roberta_110m_nli', 'zh.classify.bert.lang_110m.by_idea_ccnl': 'bert_classifier_idea_ccnl_erlangshen_roberta_110m_similarity', 'zh.classify.bert.lang_110m.by_swtx': 'bert_classifier_swtx_erlangshen_roberta_110m_similarity', 'zh.classify.bert.lang_330m': 'bert_classifier_erlangshen_roberta_330m_nli', 'zh.classify.bert.lang_330m.by_idea_ccnl': 'bert_classifier_erlangshen_roberta_330m_similarity', 'zh.classify.bert.maysix.by_east': 'bert_classifier_autotrain_maysix_828926405', 'zh.classify.bert.news.base_finetuned': 
'bert_classifier_roberta_base_finetuned_chinanews_chinese', 'zh.classify.bert.rule.by_east': 'bert_classifier_autotrain_rule_793324440', 'zh.classify.bert.sentiment.': 'bert_classifier_chinese_sentiment', 'zh.classify.bert.sentiment.lang': 'bert_classifier_erlangshen_sentiment_finetune', 'zh.classify.bert.sentiment.lang_110m': 'bert_classifier_erlangshen_roberta_110m_sentiment', 'zh.classify.bert.sentiment.lang_330m': 'bert_classifier_erlangshen_roberta_330m_sentiment', 'zh.classify.bert.topic.by_celtics1863': 'bert_classifier_env_topic', 'zh.classify.distil_bert.uncased_base': 'distilbert_sequence_classifier_c4_zh_distilbert_base_uncased', 'zh.classify.genre.bert.by_herais': 'bert_classifier_pred_genre', 'zh.classify.timeperiod.bert.by_herais': 'bert_classifier_pred_timeperiod', 'zh.embed': 'bert_base_chinese', 'zh.embed.bert': 'bert_base_chinese', 'zh.embed.bert.10l_128d_128d': 'bert_embeddings_chinese_roberta_l_10_h_128', 'zh.embed.bert.10l_256d_256d': 'bert_embeddings_chinese_roberta_l_10_h_256', 'zh.embed.bert.10l_512d_512d': 'bert_embeddings_chinese_roberta_l_10_h_512', 'zh.embed.bert.10l_768d_768d': 'bert_embeddings_chinese_roberta_l_10_h_768', 'zh.embed.bert.12l_128d_128d': 'bert_embeddings_chinese_roberta_l_12_h_128', 'zh.embed.bert.12l_256d_256d': 'bert_embeddings_chinese_roberta_l_12_h_256', 'zh.embed.bert.12l_512d_512d': 'bert_embeddings_chinese_roberta_l_12_h_512', 'zh.embed.bert.12l_768d_768d': 'bert_embeddings_chinese_roberta_l_12_h_768', 'zh.embed.bert.2l_128d_128d': 'bert_embeddings_chinese_roberta_l_2_h_128', 'zh.embed.bert.2l_256d_256d': 'bert_embeddings_chinese_roberta_l_2_h_256', 'zh.embed.bert.2l_512d_512d': 'bert_embeddings_chinese_roberta_l_2_h_512', 'zh.embed.bert.2l_768d_768d': 'bert_embeddings_chinese_roberta_l_2_h_768', 'zh.embed.bert.4l_128d_128d': 'bert_embeddings_chinese_roberta_l_4_h_128', 'zh.embed.bert.4l_256d_256d': 'bert_embeddings_chinese_roberta_l_4_h_256', 'zh.embed.bert.4l_512d_512d': 
'bert_embeddings_chinese_roberta_l_4_h_512', 'zh.embed.bert.4l_768d_768d': 'bert_embeddings_chinese_roberta_l_4_h_768', 'zh.embed.bert.6l_128d_128d': 'bert_embeddings_chinese_roberta_l_6_h_128', 'zh.embed.bert.6l_256d_256d': 'bert_embeddings_chinese_roberta_l_6_h_256', 'zh.embed.bert.6l_512d_512d': 'bert_embeddings_chinese_roberta_l_6_h_512', 'zh.embed.bert.6l_768d_768d': 'bert_embeddings_chinese_roberta_l_6_h_768', 'zh.embed.bert.8l_128d_128d': 'bert_embeddings_chinese_roberta_l_8_h_128', 'zh.embed.bert.8l_256d_256d': 'bert_embeddings_chinese_roberta_l_8_h_256', 'zh.embed.bert.8l_512d_512d': 'bert_embeddings_chinese_roberta_l_8_h_512', 'zh.embed.bert.8l_768d_768d': 'bert_embeddings_chinese_roberta_l_8_h_768', 'zh.embed.bert.base': 'bert_embeddings_base_chinese', 'zh.embed.bert.base.by_model_attribution_challenge': 'bert_embeddings_model_attribution_challenge_base_chinese', 'zh.embed.bert.base.by_ptrsxu': 'bert_embeddings_ptrsxu_base_chinese', 'zh.embed.bert.by_ptrsxu': 'bert_embeddings_ptrsxu_chinese_wwm_ext', 'zh.embed.bert.by_qinluo': 'bert_embeddings_wo_chinese_plus', 'zh.embed.bert.cased_base': 'bert_embeddings_base_zh_cased', 'zh.embed.bert.chinese_wwm': 'bert_embeddings_chinese_wwm', 'zh.embed.bert.large': 'bert_embeddings_chinese_lert_large', 'zh.embed.bert.large.by_hfl': 'bert_embeddings_chinese_mac_large', 'zh.embed.bert.lert.base.by_hfl': 'bert_embeddings_chinese_lert_base', 'zh.embed.bert.mac.base.by_hfl': 'bert_embeddings_chinese_mac_base', 'zh.embed.bert.mini': 'bert_embeddings_minirbt_h256', 'zh.embed.bert.mini.by_hfl': 'bert_embeddings_minirbt_h288', 'zh.embed.bert.rbt4_h312.by_hfl': 'bert_embeddings_rbt4_h312', 'zh.embed.bert.small': 'bert_embeddings_chinese_lert_small', 'zh.embed.bert.wwm': 'chinese_bert_wwm', 'zh.embed.bert.wwm_ext.by_hfl': 'bert_embeddings_hfl_chinese_wwm_ext', 'zh.embed.bert_5lang_cased': 'bert_embeddings_bert_base_5lang_cased', 'zh.embed.bert_base_chinese_jinyong': 'bert_embeddings_bert_base_chinese_jinyong', 
'zh.embed.bert_base_zh_cased': 'bert_embeddings_bert_base_zh_cased', 'zh.embed.bert_large_chinese': 'bert_embeddings_bert_large_chinese', 'zh.embed.chinese_bert_wwm_ext': 'bert_embeddings_chinese_bert_wwm_ext', 'zh.embed.chinese_macbert_base': 'bert_embeddings_chinese_macbert_base', 'zh.embed.chinese_macbert_large': 'bert_embeddings_chinese_macbert_large', 'zh.embed.chinese_roberta_wwm_ext': 'bert_embeddings_chinese_roberta_wwm_ext', 'zh.embed.chinese_roberta_wwm_ext_large': 'bert_embeddings_chinese_roberta_wwm_ext_large', 'zh.embed.chinese_roberta_wwm_large_ext_fix_mlm': 'bert_embeddings_chinese_roberta_wwm_large_ext_fix_mlm', 'zh.embed.distilbert_base_cased': 'distilbert_embeddings_distilbert_base_zh_cased', 'zh.embed.env_bert_chinese': 'bert_embeddings_env_bert_chinese', 'zh.embed.jdt_fin_roberta_wwm': 'bert_embeddings_jdt_fin_roberta_wwm', 'zh.embed.jdt_fin_roberta_wwm_large': 'bert_embeddings_jdt_fin_roberta_wwm_large', 'zh.embed.macbert4csc_base_chinese': 'bert_embeddings_macbert4csc_base_chinese', 'zh.embed.mengzi_bert_base': 'bert_embeddings_mengzi_bert_base', 'zh.embed.mengzi_bert_base_fin': 'bert_embeddings_mengzi_bert_base_fin', 'zh.embed.mengzi_oscar_base': 'bert_embeddings_mengzi_oscar_base', 'zh.embed.mengzi_oscar_base_caption': 'bert_embeddings_mengzi_oscar_base_caption', 'zh.embed.mengzi_oscar_base_retrieval': 'bert_embeddings_mengzi_oscar_base_retrieval', 'zh.embed.rbt3': 'bert_embeddings_rbt3', 'zh.embed.rbt4': 'bert_embeddings_rbt4', 'zh.embed.rbt6': 'bert_embeddings_rbt6', 'zh.embed.rbtl3': 'bert_embeddings_rbtl3', 'zh.embed.roberta.wwm_ext.by_hfl': 'bert_embeddings_hfl_chinese_roberta_wwm_ext', 'zh.embed.roberta_base_wechsel_chinese': 'roberta_embeddings_roberta_base_wechsel_chinese', 'zh.embed.sikubert': 'bert_embeddings_sikubert', 'zh.embed.sikuroberta': 'bert_embeddings_sikuroberta', 'zh.embed.uer_large': 'bert_embeddings_uer_large', 'zh.embed.w2v_cc_300d': 'w2v_cc_300d', 'zh.embed.wobert_chinese_base': 'bert_embeddings_wobert_chinese_base', 
'zh.embed.wobert_chinese_plus': 'bert_embeddings_wobert_chinese_plus', 'zh.embed.wobert_chinese_plus_base': 'bert_embeddings_wobert_chinese_plus_base', 'zh.embed.xlmr_roberta.mini_lm_mini': 'xlmroberta_embeddings_fairlex_cail_minilm', 'zh.embed.xlnet': 'chinese_xlnet_base', 'zh.embed_sentence.bert': 'sbert_chinese_qmc_finance_v1', 'zh.embed_sentence.bert.distilled': 'sbert_chinese_qmc_finance_v1_distill', 'zh.lemma': 'lemma', 'zh.lemma.gsd': 'lemma_gsd', 'zh.lemma.gsdsimp': 'lemma_gsdsimp', 'zh.ner': 'ner_msra_bert_768d', 'zh.ner.bert': 'ner_msra_bert_768d', 'zh.ner.bert.base': 'bert_ner_bert_base_chinese_ner', 'zh.ner.bert.base.by_ckiplab': 'bert_token_classifier_base_chinese_ws', 'zh.ner.bert.base_finetuned': 'bert_ner_roberta_base_finetuned_cluener2020_chinese', 'zh.ner.bert.finetuned': 'bert_ner_bert_finetuned_ner_chinese', 'zh.ner.bert.han.base.by_ckiplab': 'bert_token_classifier_base_han_chinese_ws', 'zh.ner.bert.tiny': 'bert_ner_bert_tiny_chinese_ner', 'zh.ner.bert.tiny.by_ckiplab': 'bert_token_classifier_tiny_chinese_ws', 'zh.ner.bert_token': 'bert_token_classifier_chinese_ner', 'zh.ner.msra.bert_768D': 'ner_msra_bert_768d', 'zh.ner.pos': 'bert_pos_classical_chinese_punctuation_guwen_biaodian', 'zh.ner.pos.base': 'bert_pos_bert_base_han_chinese_pos', 'zh.ner.pos.tiny': 'bert_pos_bert_tiny_chinese_pos', 'zh.ner.pos.universal_dependencies.base': 'bert_pos_bert_ancient_chinese_base_upos', 'zh.ner.weibo.bert_768d': 'ner_weibo_bert_768d', 'zh.pos': 'pos_gsdsimp', 'zh.pos.bert_base_chinese_pos': 'bert_pos_bert_base_chinese_pos', 'zh.pos.chinese_bert_wwm_ext_upos': 'bert_pos_chinese_bert_wwm_ext_upos', 'zh.pos.chinese_roberta_base_upos': 'bert_pos_chinese_roberta_base_upos', 'zh.pos.chinese_roberta_large_upos': 'bert_pos_chinese_roberta_large_upos', 'zh.pos.gsd': 'pos_gsd', 'zh.pos.gsdsimp': 'pos_gsdsimp', 'zh.pos.ud_gsd': 'pos_ud_gsd', 'zh.pos.ud_gsd_trad': 'pos_ud_gsd_trad', 'zh.segment_words': 'wordseg_weibo', 'zh.segment_words.ctb9': 'wordseg_ctb9', 
# NOTE(review): the entries below are the final 'zh' items and the 'zu' section of a
# language -> {nickname: pretrained name} dict that opens earlier in the file.
'zh.segment_words.gsd': 'wordseg_gsd_ud_trad', 'zh.segment_words.large': 'wordseg_large', 'zh.segment_words.msra': 'wordseg_msra', 'zh.segment_words.pku': 'wordseg_pku', 'zh.segment_words.weibo': 'wordseg_weibo', 'zh.stopwords': 'stopwords_iso', 'zh.tokenize': 'wordseg_weibo'}, 'zu': {'zu.embed.roberta': 'roberta_embeddings_zuberta', 'zu.stopwords': 'stopwords_zu'}}
# Aliases for healthcare components. Intentionally empty here — presumably reserved
# for future alias definitions; confirm against the loader that consumes it.
healthcare_component_alias_references = {}
# Maps language code -> {reference nickname -> pretrained healthcare pipeline name}.
# Keys are the user-facing lookup nicknames (part of the public API — several contain
# historical typos such as 'en.explain_dco…' and 'en.oncology_therpay…'; do NOT
# "fix" them, existing callers depend on the exact spelling).
pretrained_healthcare_pipe_references = {'de': {'de.deid.clinical': 'clinical_deidentification', 'de.deid.ner_subentity.pipeline': 'ner_deid_subentity_pipeline', 'de.deid.pipeline': 'german_deid_pipeline_spark24', 'de.med_ner.deid_generic.pipeline': 'ner_deid_generic_pipeline'}, 'en': {'en.classify.bert_sequence.binary_rct_biobert.pipeline': 'bert_sequence_classifier_binary_rct_biobert_pipeline', 'en.classify.bert_token_ner_jsl.pipeline': 'bert_token_classifier_ner_jsl_pipeline', 'en.classify.rct_binary_biobert.pipeline': 'rct_binary_classifier_biobert_pipeline', 'en.classify.rct_binary_use.pipeline': 'rct_binary_classifier_use_pipeline', 'en.classify.token_bert.ade_pipeline': 'bert_token_classifier_ner_ade_pipeline', 'en.classify.token_bert.anatomy_pipeline': 'bert_token_classifier_ner_anatomy_pipeline', 'en.classify.token_bert.bacteria_ner.pipeline': 'bert_token_classifier_ner_bacteria_pipeline', 'en.classify.token_bert.biolp.pipeline': 'bert_token_classifier_ner_bionlp_pipeline', 'en.classify.token_bert.cellular_pipeline': 'bert_token_classifier_ner_cellular_pipeline', 'en.classify.token_bert.chemicals_pipeline': 'bert_token_classifier_ner_chemicals_pipeline', 'en.classify.token_bert.chemprot_pipeline': 'bert_token_classifier_ner_chemprot_pipeline', 'en.classify.token_bert.clinical_pipeline': 'bert_token_classifier_ner_clinical_pipeline', 'en.classify.token_bert.druge_developement.pipeline': 'bert_token_classifier_drug_development_trials_pipeline', 'en.classify.token_bert.jsl_slim.pipeline': 'bert_token_classifier_ner_jsl_slim_pipeline',
'en.classify.token_bert.ner_ade.pipeline': 'bert_token_classifier_ner_drugs_pipeline', 'en.classify.token_bert.ner_deid.pipeline': 'bert_token_classifier_ner_deid_pipeline', 'en.de_identify.clinical_pipeline': 'clinical_deidentification', 'en.de_identify.clinical_slim': 'clinical_deidentification_slim', 'en.deid.clinical_wip': 'clinical_deidentification_wip', 'en.deid.glove_augmented.pipeline': 'clinical_deidentification_glove_augmented', 'en.deid.glove_pipeline': 'clinical_deidentification_glove', 'en.deid.med_ner_large.pipeline': 'ner_deid_sd_large_pipeline', 'en.deid.ner_augmented.pipeline': 'ner_deid_augmented_pipeline', 'en.deid.ner_biobert.pipeline': 'ner_deid_biobert_pipeline', 'en.deid.ner_enriched_biobert.pipeline': 'ner_deid_enriched_biobert_pipeline', 'en.deid.sd.pipeline': 'ner_deid_sd_pipeline', 'en.deid.subentity_ner_augmented.pipeline': 'ner_deid_subentity_augmented_pipeline', 'en.deid.subentity_ner_augmented_i2b2.pipeline': 'ner_deid_subentity_augmented_i2b2_pipeline', 'en.explain_dco.clinical_medication.pipeline': 'explain_clinical_doc_medication', 'en.explain_doc.carp': 'explain_clinical_doc_carp', 'en.explain_doc.clinical_ade': 'explain_clinical_doc_ade', 'en.explain_doc.clinical_radiology.pipeline': 'explain_clinical_doc_radiology', 'en.explain_doc.era': 'explain_clinical_doc_era', 'en.icd10_icd9.mapping': 'icd10_icd9_mapping', 'en.icd10cm.umls.mapping': 'icd10cm_umls_mapping', 'en.icd10cm_resolver.pipeline': 'icd10cm_resolver_pipeline', 'en.map_entity.icd10cm_to_snomed.pipe': 'icd10cm_snomed_mapping', 'en.map_entity.icdo_to_snomed.pipe': 'icdo_snomed_mapping', 'en.map_entity.rxnorm_to_ndc.pipe': 'rxnorm_ndc_mapping', 'en.map_entity.snomed_to_icd10cm.pipe': 'snomed_icd10cm_mapping', 'en.map_entity.snomed_to_icdo.pipe': 'snomed_icdo_mapping', 'en.map_entity.umls_clinical_findings_resolver': 'umls_clinical_findings_resolver_pipeline', 'en.map_entity.umls_disease_syndrome_resolver': 'umls_disease_syndrome_resolver_pipeline',
'en.map_entity.umls_drug_resolver': 'umls_drug_resolver_pipeline', 'en.map_entity.umls_drug_substance_resolver': 'umls_drug_substance_resolver_pipeline', 'en.map_entity.umls_major_concepts_resolver': 'umls_major_concepts_resolver_pipeline', 'en.med_ner.ade_clinical.pipeline': 'ner_ade_clinical_pipeline', 'en.med_ner.anatom.pipeline': 'ner_anatomy_pipeline', 'en.med_ner.anatomy_biobert.pipeline': 'ner_anatomy_biobert_pipeline', 'en.med_ner.anatomy_coarse.pipeline': 'ner_anatomy_coarse_pipeline', 'en.med_ner.anatomy_coarse_biobert.pipeline': 'ner_anatomy_coarse_biobert_pipeline', 'en.med_ner.bacterial_species.pipeline': 'ner_bacterial_species_pipeline', 'en.med_ner.biobert_ade.pipeline': 'ner_ade_biobert_pipeline', 'en.med_ner.biobert_events.pipeline': 'ner_events_biobert_pipeline', 'en.med_ner.biobert_jsl_greedy.pipeline': 'ner_jsl_greedy_biobert_pipeline', 'en.med_ner.biomarker.pipeline': 'ner_biomarker_pipeline', 'en.med_ner.biomedical_bc2gm.pipeline': 'ner_biomedical_bc2gm_pipeline', 'en.med_ner.bionlp.pipeline': 'ner_bionlp_pipeline', 'en.med_ner.bionlp_biobert.pipeline': 'ner_bionlp_biobert_pipeline', 'en.med_ner.cancer_genetics.pipeline': 'ner_cancer_genetics_pipeline', 'en.med_ner.cellular.pipeline': 'ner_cellular_pipeline', 'en.med_ner.cellular_biobert.pipeline': 'ner_cellular_biobert_pipeline', 'en.med_ner.chemicals.pipeline': 'ner_chemicals_pipeline', 'en.med_ner.chemprot_biobert.pipeline': 'ner_chemprot_biobert_pipeline', 'en.med_ner.chemprot_clinical.pipeline': 'ner_chemprot_clinical_pipeline', 'en.med_ner.chexpert.pipeline': 'ner_chexpert_pipeline', 'en.med_ner.clinical-abbreviation.pipeline': 'ner_abbreviation_clinical_pipeline', 'en.med_ner.clinical.pipeline': 'ner_clinical_pipeline', 'en.med_ner.clinical_bert_ade.pipeline': 'ner_ade_clinicalbert_pipeline', 'en.med_ner.clinical_biobert.pipeline': 'ner_clinical_biobert_pipeline', 'en.med_ner.clinical_drugprot.pipeline': 'ner_drugprot_clinical_pipeline', 'en.med_ner.clinical_large.pipeline':
'ner_clinical_large_pipeline', 'en.med_ner.clinical_measurements.pipeline': 'ner_measurements_clinical_pipeline', 'en.med_ner.clinical_trials_abstracts.pipe': 'ner_clinical_trials_abstracts_pipeline', 'en.med_ner.deid_enriched.pipeline': 'ner_deid_enriched_pipeline', 'en.med_ner.deid_generic_augmented.pipeline': 'ner_deid_generic_augmented_pipeline', 'en.med_ner.deid_large.pipeline': 'ner_deid_large_pipeline', 'en.med_ner.deidentify.pipeline': 'ner_deidentify_dl_pipeline', 'en.med_ner.diseases.pipeline': 'ner_diseases_pipeline', 'en.med_ner.diseases_biobert.pipeline': 'ner_diseases_biobert_pipeline', 'en.med_ner.diseases_large.pipeline': 'ner_diseases_large_pipeline', 'en.med_ner.drugs.pipeline': 'ner_drugs_pipeline', 'en.med_ner.drugs_greedy.pipeline': 'ner_drugs_greedy_pipeline', 'en.med_ner.drugs_large.pipeline': 'ner_drugs_large_pipeline', 'en.med_ner.events_admission_clinical.pipeline': 'ner_events_admission_clinical_pipeline', 'en.med_ner.events_clinical.pipeline': 'ner_events_clinical_pipeline', 'en.med_ner.genetic_variants.pipeline': 'ner_genetic_variants_pipeline', 'en.med_ner.greedy_wip_biobert.pipeline': 'jsl_ner_wip_greedy_biobert_pipeline', 'en.med_ner.healthcare_ade.pipeline': 'ner_ade_healthcare_pipeline', 'en.med_ner.healthcare_events.pipeline': 'ner_events_healthcare_pipeline', 'en.med_ner.healthcare_pipeline': 'ner_healthcare_pipeline', 'en.med_ner.human_phenotype_clinical.pipeline': 'ner_human_phenotype_go_clinical_pipeline', 'en.med_ner.human_phenotype_gene_biobert.pipeline': 'ner_human_phenotype_gene_biobert_pipeline', 'en.med_ner.human_phnotype_gene_clinical.pipeline': 'ner_human_phenotype_gene_clinical_pipeline', 'en.med_ner.jsl.pipeline': 'ner_jsl_pipeline', 'en.med_ner.jsl_biobert.pipeline': 'ner_jsl_biobert_pipeline', 'en.med_ner.jsl_enriched.pipeline': 'ner_jsl_enriched_pipeline', 'en.med_ner.jsl_enriched_biobert.pipeline': 'ner_jsl_enriched_biobert_pipeline', 'en.med_ner.jsl_greedy.pipeline': 'ner_jsl_greedy_pipeline',
'en.med_ner.jsl_rd_wip_greedy.pipeline': 'jsl_rd_ner_wip_greedy_clinical_pipeline', 'en.med_ner.jsl_slim.pipeline': 'ner_jsl_slim_pipeline', 'en.med_ner.jsl_wip_clinical.pipeline': 'jsl_ner_wip_clinical_pipeline', 'en.med_ner.medmentions_coarse.pipeline': 'ner_medmentions_coarse_pipeline', 'en.med_ner.nihss_pipeline': 'ner_nihss_pipeline', 'en.med_ner.pathogen.pipeline': 'ner_pathogen_pipeline', 'en.med_ner.phenotype_go_biobert.pipeline': 'ner_human_phenotype_go_biobert_pipeline', 'en.med_ner.posology.healthcare_pipeline': 'ner_posology_healthcare_pipeline', 'en.med_ner.posology_biobert.pipeline': 'ner_posology_biobert_pipeline', 'en.med_ner.posology_biobert_large.pipeline': 'ner_posology_large_biobert_pipeline', 'en.med_ner.posology_experimental.pipeline': 'ner_posology_experimental_pipeline', 'en.med_ner.posology_greedy.pipeline': 'ner_posology_greedy_pipeline', 'en.med_ner.posology_pipeline': 'ner_posology_pipeline', 'en.med_ner.posology_small.pipeline': 'ner_posology_small_pipeline', 'en.med_ner.posoloy_large.pipeline': 'ner_posology_large_pipeline', 'en.med_ner.profiling_biobert': 'ner_profiling_biobert', 'en.med_ner.profiling_clinical': 'ner_profiling_clinical', 'en.med_ner.radiology.clinical_wip.pipeline': 'ner_radiology_wip_clinical_pipeline', 'en.med_ner.radiology.pipeline': 'ner_radiology_pipeline', 'en.med_ner.risk_factors.pipeline': 'ner_risk_factors_pipeline', 'en.med_ner.risk_factors_biobert.pipeline': 'ner_risk_factors_biobert_pipeline', 'en.med_ner.wip_greedy_biobert.pipeline': 'jsl_rd_ner_wip_greedy_biobert_pipeline', 'en.med_ner.wip_greedy_clinical.pipeline': 'jsl_ner_wip_greedy_clinical_pipeline', 'en.med_ner.wip_modifier_clinical.pipeline': 'jsl_ner_wip_modifier_clinical_pipeline', 'en.mesh.umls.mapping': 'mesh_umls_mapping', 'en.ner.medication': 'ner_medication_pipeline', 'en.ner.model_finder.pipeline': 'ner_model_finder', 'en.oncology_biomarker.pipeline': 'oncology_biomarker_pipeline', 'en.oncology_diagnosis.pipeline':
'oncology_diagnosis_pipeline', 'en.oncology_general.pipeline': 'oncology_general_pipeline', 'en.oncology_therpay.pipeline': 'oncology_therapy_pipeline', 'en.recognize_entities.posology': 'recognize_entities_posology', 'en.relation.bodypart_directions.pipeline': 're_bodypart_directions_pipeline', 'en.relation.bodypart_proceduretest.pipeline': 're_bodypart_proceduretest_pipeline', 'en.relation.date_clinical.pipeline': 're_date_clinical_pipeline', 'en.relation.date_test_result.pipeline': 're_test_result_date_pipeline', 'en.relation.human_gene_clinical.pipeline': 're_human_phenotype_gene_clinical_pipeline', 'en.relation.temporal_event_clinical.pipeline': 're_temporal_events_clinical_pipeline', 'en.relation.temproal_enriched.pipeline': 're_temporal_events_enriched_clinical_pipeline', 'en.relation.test_problem_finding.pipeline': 're_test_problem_finding_pipeline', 'en.resolve.cvx_pipeline': 'cvx_resolver_pipeline', 'en.resolve.icd9.pipeline': 'icd9_resolver_pipeline', 'en.resolve.medication': 'medication_resolver_pipeline', 'en.resolve.medication_transform.pipeline': 'medication_resolver_transform_pipeline', 'en.rxnorm.umls.mapping': 'rxnorm_umls_mapping', 'en.snomed.umls.mapping': 'snomed_umls_mapping', 'en.spell.clinical.pipeline': 'spellcheck_clinical_pipeline'}, 'es': {'es.deid.clinical': 'clinical_deidentification', 'es.deid.clinical_augmented': 'clinical_deidentification_augmented', 'es.deid.pipeline': 'deid_pipeline'}, 'fr': {'fr.deid_obfuscated': 'clinical_deidentification'}, 'it': {'it.deid.clinical': 'clinical_deidentification'}, 'pt': {'pt.deid.clinical': 'clinical_deidentification'}, 'ro': {'ro.deid.clinical': 'clinical_deidentification'}}
# Maps language code -> {reference nickname -> pretrained healthcare model name}
# (NER, assertion, classification, embedding models, etc.).
pretrained_healthcare_model_references = {'ca': {'ca.med_ner.living_species': 'ner_living_species'}, 'de': {'de.classify.bert_sequence.health_mentions_bert': 'bert_sequence_classifier_health_mentions_bert', 'de.classify.bert_sequence.health_mentions_gbert': 'bert_sequence_classifier_health_mentions_gbert',
'de.classify.bert_sequence.health_mentions_gbert_large': 'bert_sequence_classifier_health_mentions_gbert_large', 'de.classify.bert_sequence.health_mentions_medbert': 'bert_sequence_classifier_health_mentions_medbert', 'de.embed': 'w2v_cc_300d', 'de.embed.w2v': 'w2v_cc_300d', 'de.med_ner': 'ner_healthcare_slim', 'de.med_ner.deid_generic': 'ner_deid_generic', 'de.med_ner.deid_subentity': 'ner_deid_subentity', 'de.med_ner.healthcare': 'ner_healthcare', 'de.med_ner.legal': 'ner_legal', 'de.med_ner.traffic': 'ner_traffic', 'de.resolve.icd10gm': 'sbertresolve_icd10gm', 'de.resolve.snomed': 'sbertresolve_snomed'}, 'en': {'en.assert': 'assertion_dl', 'en.assert.biobert': 'assertion_dl_biobert', 'en.assert.biobert_l10210': 'assertion_dl_biobert_scope_L10R10', 'en.assert.healthcare': 'assertion_dl_healthcare', 'en.assert.jsl': 'assertion_jsl', 'en.assert.jsl_augmented': 'assertion_jsl_augmented', 'en.assert.jsl_large': 'assertion_jsl_large', 'en.assert.l10r10': 'assertion_dl_scope_L10R10', 'en.assert.large': 'assertion_dl_large', 'en.assert.oncology_demographic_binary_wip': 'assertion_oncology_demographic_binary_wip', 'en.assert.oncology_family_history': 'assertion_oncology_family_history_wip', 'en.assert.oncology_problem_wip': 'assertion_oncology_problem_wip', 'en.assert.oncology_response_to_treatment_wip': 'assertion_oncology_response_to_treatment_wip', 'en.assert.oncology_smoking_status': 'assertion_oncology_smoking_status_wip', 'en.assert.oncology_test_binary': 'assertion_oncology_test_binary_wip', 'en.assert.oncology_treatment_binary': 'assertion_oncology_treatment_binary_wip', 'en.assert.oncology_wip': 'assertion_oncology_wip', 'en.assert.radiology': 'assertion_dl_radiology', 'en.clasify.health_premise': 'bert_sequence_classifier_health_mandates_premise_tweet', 'en.classify .bert_sequence.depression': 'bert_sequence_classifier_depression', 'en.classify.ade.biobert': 'classifierdl_ade_biobert', 'en.classify.ade.clinical': 'classifierdl_ade_clinicalbert', 
'en.classify.ade.clinicalbert': 'classifierdl_ade_clinicalbert', 'en.classify.ade.conversational': 'classifierdl_ade_conversational_biobert', 'en.classify.ade.seq_biobert': 'bert_sequence_classifier_ade', 'en.classify.ade.seq_distilbert': 'distilbert_sequence_classifier_ade', 'en.classify.adverse_drug_events': 'bert_sequence_classifier_ade_augmented', 'en.classify.bert_sequence.binary_biobert': 'bert_sequence_classifier_binary_rct_biobert', 'en.classify.bert_sequence.covid_sentiment': 'bert_sequence_classifier_covid_sentiment', 'en.classify.bert_sequence.depression_binary': 'bert_sequence_classifier_depression_binary', 'en.classify.bert_sequence.depression_twitter': 'bert_sequence_classifier_depression_twitter', 'en.classify.bert_sequence.question_statement_clinical': 'bert_sequence_classifier_question_statement_clinical', 'en.classify.bert_sequence.sdoh.environment_status': 'bert_sequence_classifier_sdoh_environment_status', 'en.classify.bert_sequence.sdoh_community_absent_status': 'bert_sequence_classifier_sdoh_community_absent_status', 'en.classify.bert_sequence.sdoh_community_present_status': 'bert_sequence_classifier_sdoh_community_present_status', 'en.classify.bert_sequence.treatment_sentiment_tweets': 'bert_sequence_classifier_treatment_changes_sentiment_tweet', 'en.classify.bert_sequence_vaccine_sentiment': 'bert_sequence_classifier_vaccine_sentiment', 'en.classify.bert_token.binary_ade_tweet': 'bert_token_classifier_ade_tweet_binary', 'en.classify.bert_token.ner_ade_bert': 'bert_token_classifier_ner_ade_binary', 'en.classify.drug_reviews': 'bert_sequence_classifier_drug_reviews_webmd', 'en.classify.exact_age': 'bert_sequence_classifier_exact_age_reddit', 'en.classify.gender.biobert': 'classifierdl_gender_biobert', 'en.classify.gender.sbert': 'classifierdl_gender_sbert', 'en.classify.gender.seq_biobert': 'bert_sequence_classifier_gender_biobert', 'en.classify.generic.sdoh_alchol_binary_sbiobert_cased': 
'genericclassifier_sdoh_alcohol_usage_binary_sbiobert_cased_mli', 'en.classify.generic.sdoh_alchol_usage_sbiobert_cased': 'genericclassifier_sdoh_alcohol_usage_sbiobert_cased_mli', 'en.classify.generic.sdoh_ecnomics_sbiobert_cased': 'genericclassifier_sdoh_economics_binary_sbiobert_cased_mli', 'en.classify.generic.sdoh_substance_binary_sbiobert_cased': 'genericclassifier_sdoh_substance_usage_binary_sbiobert_cased_mli', 'en.classify.generic.sdoh_tobacco_sbiobert_cased': 'genericclassifier_sdoh_tobacco_usage_sbiobert_cased_mli', 'en.classify.health': 'classifierdl_health_mentions', 'en.classify.health_mentions': 'bert_sequence_classifier_health_mentions', 'en.classify.health_stance': 'bert_sequence_classifier_health_mandates_stance_tweet', 'en.classify.pico': 'classifierdl_pico_biobert', 'en.classify.pico.seq_biobert': 'bert_sequence_classifier_pico_biobert', 'en.classify.rct_binary_biobert': 'rct_binary_classifier_biobert', 'en.classify.rct_binary_use': 'rct_binary_classifier_use', 'en.classify.self_reported_age': 'bert_sequence_classifier_self_reported_age_tweet', 'en.classify.self_reported_partner_violence': 'bert_sequence_classifier_self_reported_partner_violence_tweet', 'en.classify.self_reported_stress': 'bert_sequence_classifier_self_reported_stress_tweet', 'en.classify.self_reported_vaccine_status': 'bert_sequence_classifier_self_reported_vaccine_status_tweet', 'en.classify.stress': 'bert_sequence_classifier_stress', 'en.classify.stressor': 'bert_sequence_classifier_stressor', 'en.classify.token_bert.anatem': 'bert_token_classifier_ner_anatem', 'en.classify.token_bert.bc2gm_gene': 'bert_token_classifier_ner_bc2gm_gene', 'en.classify.token_bert.bc4chemd_chemicals': 'bert_token_classifier_ner_bc4chemd_chemicals', 'en.classify.token_bert.bc5cdr_chemicals': 'bert_token_classifier_ner_bc5cdr_chemicals', 'en.classify.token_bert.bc5cdr_disease': 'bert_token_classifier_ner_bc5cdr_disease', 'en.classify.token_bert.bionlp': 'bert_token_classifier_ner_bionlp', 
'en.classify.token_bert.cellular': 'bert_token_classifier_ner_cellular', 'en.classify.token_bert.chemicals': 'bert_token_classifier_ner_chemicals', 'en.classify.token_bert.jnlpba_cellular': 'bert_token_classifier_ner_jnlpba_cellular', 'en.classify.token_bert.linnaeus_species': 'bert_token_classifier_ner_linnaeus_species', 'en.classify.token_bert.ncbi_disease': 'bert_token_classifier_ner_ncbi_disease', 'en.classify.token_bert.ner_ade': 'bert_token_classifier_ner_ade', 'en.classify.token_bert.ner_anatomy': 'bert_token_classifier_ner_anatomy', 'en.classify.token_bert.ner_bacteria': 'bert_token_classifier_ner_bacteria', 'en.classify.token_bert.ner_chemical': 'bert_token_classifier_ner_chemicals', 'en.classify.token_bert.ner_clinical': 'bert_token_classifier_ner_clinical', 'en.classify.token_bert.ner_deid': 'bert_token_classifier_ner_deid', 'en.classify.token_bert.ner_drugs': 'bert_token_classifier_ner_drugs', 'en.classify.token_bert.ner_jsl': 'bert_token_classifier_ner_jsl', 'en.classify.token_bert.ner_jsl_slim': 'bert_token_classifier_ner_jsl_slim', 'en.classify.token_bert.pathogen': 'bert_token_classifier_ner_pathogen', 'en.classify.token_bert.species': 'bert_token_classifier_ner_species', 'en.classify.token_bert.supplement': 'bert_token_classifier_ner_supplement', 'en.classify.treatment_sentiment': 'bert_sequence_classifier_treatement_changes_sentiment_tweet', 'en.classify.vaccine_sentiment': 'classifierdl_vaccine_sentiment', 'en.de_identify': 'deidentify_rb', 'en.de_identify.clinical': 'deidentify_enriched_clinical', 'en.de_identify.large': 'deidentify_large', 'en.de_identify.rb': 'deidentify_rb', 'en.de_identify.rb_no_regex': 'deidentify_rb_no_regex', 'en.de_identify.rules': 'deid_rules', 'en.detect_sentence.clinical': 'sentence_detector_dl_healthcare', 'en.embed.glove.biovec': 'embeddings_biovec', 'en.embed.glove.clinical': 'embeddings_clinical', 'en.embed.glove.healthcare': 'embeddings_healthcare', 'en.embed.glove.healthcare_100d': 'embeddings_healthcare_100d', 
'en.embed.glove.icdoem': 'embeddings_icdoem', 'en.embed.glove.icdoem_2ng': 'embeddings_icdoem_2ng', 'en.embed_sentence.bert.jsl_medium_umls_uncased': 'sbert_jsl_medium_umls_uncased', 'en.embed_sentence.bert.jsl_medium_uncased': 'sbert_jsl_medium_uncased', 'en.embed_sentence.bert.jsl_mini_umlsuncased': 'sbert_jsl_mini_umls_uncased', 'en.embed_sentence.bert.jsl_mini_uncased': 'sbert_jsl_mini_uncased', 'en.embed_sentence.bert.jsl_tiny_umls_uncased': 'sbert_jsl_tiny_umls_uncased', 'en.embed_sentence.bert.jsl_tiny_uncased': 'sbert_jsl_tiny_uncased', 'en.embed_sentence.bert_medium.rxnorm': 'jsl_sbert_medium_rxnorm', 'en.embed_sentence.bert_uncased.rxnorm': 'sbert_jsl_medium_rxnorm_uncased', 'en.embed_sentence.biobert.jsl_cased': 'sbiobert_jsl_cased', 'en.embed_sentence.biobert.jsl_umls_cased': 'sbiobert_jsl_umls_cased', 'en.embed_sentence.biobert.mli': 'sbiobert_base_cased_mli', 'en.embed_sentence.biobert.rxnorm': 'sbiobert_jsl_rxnorm_cased', 'en.embed_sentence.bluebert.mli': 'sbluebert_base_uncased_mli', 'en.extract_relation.nihss': 'redl_nihss_biobert', 'en.icd10cm_to_snomed': 'icd10cm_snomed_mapper', 'en.icd10cm_to_umls': 'icd10cm_umls_mapper', 'en.icdo_to_snomed': 'icdo_snomed_mapper', 'en.map_entity.abbreviation_augmented': 'abbreviation_mapper_augmented', 'en.map_entity.abbreviation_category': 'abbreviation_category_mapper', 'en.map_entity.abbreviation_to_definition': 'abbreviation_mapper', 'en.map_entity.cvx_code': 'cvx_code_mapper', 'en.map_entity.cvx_name': 'cvx_name_mapper', 'en.map_entity.drug_ade': 'drug_ade_mapper', 'en.map_entity.drug_brand_to_ndc': 'drug_brandname_ndc_mapper', 'en.map_entity.drug_category': 'drug_category_mapper', 'en.map_entity.drug_to_action_treatment': 'drug_action_treatment_mapper', 'en.map_entity.icd10_ic9': 'icd10_icd9_mapper', 'en.map_entity.icd10cm': 'icd10cm_mapper', 'en.map_entity.icd10cm_to_snomed': 'icd10cm_snomed_mapper', 'en.map_entity.icd10cm_to_umls': 'icd10cm_umls_mapper', 'en.map_entity.icd9': 'icd9_mapper', 
'en.map_entity.icd9_icd10': 'icd9_icd10_mapper', 'en.map_entity.icdo_to_snomed': 'icdo_snomed_mapper', 'en.map_entity.kegg_disease': 'kegg_disease_mapper', 'en.map_entity.kegg_drug': 'kegg_drug_mapper', 'en.map_entity.mesh_to_umls': 'mesh_umls_mapper', 'en.map_entity.rxnorm_normalized': 'rxnorm_normalized_mapper', 'en.map_entity.rxnorm_resolver': 'rxnorm_mapper', 'en.map_entity.rxnorm_to_action_treatment': 'rxnorm_action_treatment_mapper', 'en.map_entity.rxnorm_to_ndc': 'rxnorm_ndc_mapper', 'en.map_entity.rxnorm_to_umls': 'rxnorm_umls_mapper', 'en.map_entity.section_headers_normalized': 'normalized_section_header_mapper', 'en.map_entity.snomed_to_icd10cm': 'snomed_icd10cm_mapper', 'en.map_entity.snomed_to_icdo': 'snomed_icdo_mapper', 'en.map_entity.snomed_to_umls': 'snomed_umls_mapper', 'en.map_entity.umls_clinical_drugs_mapper': 'umls_clinical_drugs_mapper', 'en.map_entity.umls_clinical_findings_mapper': 'umls_clinical_findings_mapper', 'en.map_entity.umls_disease_syndrome_mapper': 'umls_disease_syndrome_mapper', 'en.map_entity.umls_drug_substance_mapper': 'umls_drug_substance_mapper', 'en.map_entity.umls_major_concepts_mapper': 'umls_major_concepts_mapper', 'en.med_ner': 'jsl_ner_wip_clinical', 'en.med_ner.abbreviation_clinical': 'ner_abbreviation_clinical', 'en.med_ner.ade.ade_healthcare': 'ner_ade_healthcare', 'en.med_ner.ade.clinical': 'ner_ade_clinical', 'en.med_ner.ade.clinical_bert': 'ner_ade_clinicalbert', 'en.med_ner.ade_biobert': 'ner_ade_biobert', 'en.med_ner.admission_events': 'ner_events_admission_clinical', 'en.med_ner.anatomy': 'ner_anatomy', 'en.med_ner.anatomy.biobert': 'ner_anatomy_biobert', 'en.med_ner.anatomy.coarse': 'ner_anatomy_coarse', 'en.med_ner.anatomy.coarse_biobert': 'ner_anatomy_coarse_biobert', 'en.med_ner.anatomy_general': 'ner_oncology_anatomy_general', 'en.med_ner.aspect_sentiment': 'ner_aspect_based_sentiment', 'en.med_ner.bacterial_species': 'ner_bacterial_species', 'en.med_ner.binary_ade': 'ner_ade_binary', 
'en.med_ner.biomarker': 'ner_biomarker', 'en.med_ner.biomedical_bc2gm': 'ner_biomedical_bc2gm', 'en.med_ner.bionlp': 'ner_bionlp', 'en.med_ner.bionlp.biobert': 'ner_bionlp_biobert', 'en.med_ner.cancer': 'ner_cancer_genetics', 'en.med_ner.cellular': 'ner_cellular', 'en.med_ner.cellular.biobert': 'ner_cellular_biobert', 'en.med_ner.chemd': 'ner_chemd_clinical', 'en.med_ner.chemicals': 'ner_chemicals', 'en.med_ner.chemprot': 'ner_chemprot_biobert', 'en.med_ner.chemprot.bert': 'bert_token_classifier_ner_chemprot', 'en.med_ner.chemprot.clinical': 'ner_chemprot_clinical', 'en.med_ner.chexpert': 'ner_chexpert', 'en.med_ner.clinical': 'ner_clinical', 'en.med_ner.clinical.biobert': 'ner_clinical_biobert', 'en.med_ner.clinical_case_eu': 'ner_eu_clinical_case', 'en.med_ner.clinical_trials': 'bert_sequence_classifier_rct_biobert', 'en.med_ner.clinical_trials_abstracts': 'bert_token_classifier_ner_clinical_trials_abstracts', 'en.med_ner.covid_trials': 'ner_covid_trials', 'en.med_ner.deid': 'ner_deidentify_dl', 'en.med_ner.deid.augmented': 'ner_deid_augmented', 'en.med_ner.deid.biobert': 'ner_deid_biobert', 'en.med_ner.deid.enriched': 'ner_deid_enriched', 'en.med_ner.deid.enriched_biobert': 'ner_deid_enriched_biobert', 'en.med_ner.deid.generic_augmented': 'ner_deid_generic_augmented', 'en.med_ner.deid.large': 'ner_deid_large', 'en.med_ner.deid.sd': 'ner_deid_sd', 'en.med_ner.deid.sd_large': 'ner_deid_sd_large', 'en.med_ner.deid.subentity_augmented': 'ner_deid_subentity_augmented', 'en.med_ner.deid.synthetic': 'ner_deid_synthetic', 'en.med_ner.deid_subentity_augmented_i2b2': 'ner_deid_subentity_augmented_i2b2', 'en.med_ner.diseases': 'ner_diseases', 'en.med_ner.diseases.biobert': 'ner_diseases_biobert', 'en.med_ner.diseases.large': 'ner_diseases_large', 'en.med_ner.drugprot_clinical': 'ner_drugprot_clinical', 'en.med_ner.drugs': 'ner_drugs', 'en.med_ner.drugs.large': 'ner_drugs_large', 'en.med_ner.drugsgreedy': 'ner_drugs_greedy', 'en.med_ner.events_biobert': 
'ner_events_biobert', 'en.med_ner.events_clinical': 'ner_events_clinical', 'en.med_ner.events_healthcre': 'ner_events_healthcare', 'en.med_ner.financial_contract': 'ner_financial_contract', 'en.med_ner.genetic_variants': 'ner_genetic_variants', 'en.med_ner.healthcare': 'ner_healthcare', 'en.med_ner.human_phenotype.gene_biobert': 'ner_human_phenotype_gene_biobert', 'en.med_ner.human_phenotype.gene_clinical': 'ner_human_phenotype_gene_clinical', 'en.med_ner.human_phenotype.go_biobert': 'ner_human_phenotype_go_biobert', 'en.med_ner.human_phenotype.go_clinical': 'ner_human_phenotype_go_clinical', 'en.med_ner.jsl': 'ner_jsl', 'en.med_ner.jsl.biobert': 'ner_jsl_biobert', 'en.med_ner.jsl.enriched': 'ner_jsl_enriched', 'en.med_ner.jsl.enriched_biobert': 'ner_jsl_enriched_biobert', 'en.med_ner.jsl.wip.clinical': 'jsl_ner_wip_clinical', 'en.med_ner.jsl.wip.clinical.greedy': 'jsl_ner_wip_greedy_clinical', 'en.med_ner.jsl.wip.clinical.modifier': 'jsl_ner_wip_modifier_clinical', 'en.med_ner.jsl.wip.clinical.rd': 'jsl_rd_ner_wip_greedy_clinical', 'en.med_ner.jsl_greedy_biobert': 'ner_jsl_greedy_biobert', 'en.med_ner.jsl_slim': 'ner_jsl_slim', 'en.med_ner.living_species': 'ner_living_species', 'en.med_ner.living_species.biobert': 'ner_living_species_biobert', 'en.med_ner.living_species.token_bert': 'bert_token_classifier_ner_living_species', 'en.med_ner.measurements': 'ner_measurements_clinical', 'en.med_ner.medmentions': 'ner_medmentions_coarse', 'en.med_ner.nero_clinical.nature': 'ner_nature_nero_clinical', 'en.med_ner.nihss': 'ner_nihss', 'en.med_ner.oncology': 'ner_oncology', 'en.med_ner.oncology_anatom_general_healthcare': 'ner_oncology_anatomy_general_healthcare', 'en.med_ner.oncology_anatomy_general': 'ner_oncology_anatomy_general_wip', 'en.med_ner.oncology_anatomy_granular': 'ner_oncology_anatomy_granular', 'en.med_ner.oncology_anatomy_granular_wip': 'ner_oncology_anatomy_granular_wip', 'en.med_ner.oncology_biomarker': 'ner_oncology_biomarker', 
'en.med_ner.oncology_biomarker_healthcare': 'ner_oncology_biomarker_healthcare', 'en.med_ner.oncology_biomarker_wip': 'ner_oncology_biomarker_wip', 'en.med_ner.oncology_demographics': 'ner_oncology_demographics', 'en.med_ner.oncology_demographics_wip': 'ner_oncology_demographics_wip', 'en.med_ner.oncology_diagnosis': 'ner_oncology_diagnosis', 'en.med_ner.oncology_diseases_wip': 'ner_oncology_diagnosis_wip', 'en.med_ner.oncology_posology': 'ner_oncology_posology', 'en.med_ner.oncology_posology_wip': 'ner_oncology_posology_wip', 'en.med_ner.oncology_response_to_treatment': 'ner_oncology_response_to_treatment', 'en.med_ner.oncology_response_to_treatment_wip': 'ner_oncology_response_to_treatment_wip', 'en.med_ner.oncology_test': 'ner_oncology_test', 'en.med_ner.oncology_test_wip': 'ner_oncology_test_wip', 'en.med_ner.oncology_therapy': 'ner_oncology_therapy', 'en.med_ner.oncology_therapy_wip': 'ner_oncology_therapy_wip', 'en.med_ner.oncology_tnm': 'ner_oncology_tnm', 'en.med_ner.oncology_tnm_wip': 'ner_oncology_tnm_wip', 'en.med_ner.oncology_unspecific_posology': 'ner_oncology_unspecific_posology', 'en.med_ner.oncology_unspecific_posology_healthcare': 'ner_oncology_unspecific_posology_healthcare', 'en.med_ner.oncology_unspecific_posology_wip': 'ner_oncology_unspecific_posology_wip', 'en.med_ner.oncology_wip': 'ner_oncology_wip', 'en.med_ner.pathogen': 'ner_pathogen', 'en.med_ner.posology': 'ner_posology', 'en.med_ner.posology.biobert': 'ner_posology_biobert', 'en.med_ner.posology.experimental': 'ner_posology_experimental', 'en.med_ner.posology.greedy': 'ner_posology_greedy', 'en.med_ner.posology.healthcare': 'ner_posology_healthcare', 'en.med_ner.posology.large': 'ner_posology_large', 'en.med_ner.posology.large_biobert': 'ner_posology_large_biobert', 'en.med_ner.posology.small': 'ner_posology_small', 'en.med_ner.radiology': 'ner_radiology', 'en.med_ner.radiology.wip_clinical': 'ner_radiology_wip_clinical', 'en.med_ner.radiology.wip_greedy_biobert': 
'jsl_rd_ner_wip_greedy_biobert', 'en.med_ner.risk_factors': 'ner_risk_factors', 'en.med_ner.risk_factors.biobert': 'ner_risk_factors_biobert', 'en.med_ner.sdoh_mentions': 'ner_sdoh_mentions', 'en.med_ner.sdoh_mentions_test': 'ner_sdoh_mentions_test', 'en.med_ner.sdoh_slim_wip': 'ner_sdoh_slim_wip', 'en.med_ner.supplement_clinical': 'ner_supplement_clinical', 'en.med_ner.tumour': 'nerdl_tumour_demo', 'en.mesh_to_umls': 'mesh_umls_mapper', 'en.ner.clinical_trials_abstracts': 'ner_clinical_trials_abstracts', 'en.ner.drug_development_trials': 'bert_token_classifier_drug_development_trials', 'en.norm_drugs': 'drug_normalizer', 'en.pos.clinical': 'pos_clinical', 'en.relation': 'redl_bodypart_direction_biobert', 'en.relation.ade': 'redl_ade_biobert', 'en.relation.ade_biobert': 're_ade_biobert', 'en.relation.ade_clinical': 're_ade_clinical', 'en.relation.adverse_drug_events.clinical': 're_ade_clinical', 'en.relation.adverse_drug_events.clinical.biobert': 'redl_ade_biobert', 'en.relation.adverse_drug_events.conversational': 're_ade_conversational', 'en.relation.bodypart.direction': 'redl_bodypart_direction_biobert', 'en.relation.bodypart.problem': 'redl_bodypart_problem_biobert', 'en.relation.bodypart.procedure': 'redl_bodypart_procedure_test_biobert', 'en.relation.chemprot': 'redl_chemprot_biobert', 'en.relation.clinical': 'redl_clinical_biobert', 'en.relation.date': 'redl_date_clinical_biobert', 'en.relation.drug_drug_interaction': 'redl_drug_drug_interaction_biobert', 'en.relation.drugprot': 'redl_drugprot_biobert', 'en.relation.drugprot.clinical': 're_drugprot_clinical', 'en.relation.humen_phenotype_gene': 'redl_human_phenotype_gene_biobert', 'en.relation.oncology.size_wip': 're_oncology_size_wip', 'en.relation.oncology.test_result': 're_oncology_test_result_wip', 'en.relation.oncology.test_result_biobert': 'redl_oncology_test_result_biobert_wip', 'en.relation.oncology_biobert_wip': 'redl_oncology_biobert_wip', 'en.relation.oncology_biomarker_result': 
're_oncology_biomarker_result_wip', 'en.relation.oncology_biomarker_result_biobert_wip': 'redl_oncology_biomarker_result_biobert_wip', 'en.relation.oncology_granular_biobert_wip': 'redl_oncology_granular_biobert_wip', 'en.relation.oncology_granular_wip': 're_oncology_granular_wip', 'en.relation.oncology_location_biobert_wip': 'redl_oncology_location_biobert_wip', 'en.relation.oncology_location_wip': 're_oncology_location_wip', 'en.relation.oncology_size_biobert': 'redl_oncology_size_biobert_wip', 'en.relation.oncology_temporal_biobert_wip': 'redl_oncology_temporal_biobert_wip', 'en.relation.oncology_temporal_wip': 're_oncology_temporal_wip', 'en.relation.oncology_wip': 're_oncology_wip', 'en.relation.temporal_events': 'redl_temporal_events_biobert', 'en.relation.temporal_events_clinical': 're_temporal_events_clinical', 'en.relation.test_result_date': 're_test_result_date', 'en.relation.zeroshot': 'zero_shot_re', 'en.relation.zeroshot_biobert': 're_zeroshot_biobert', 'en.resolve': 'sbiobertresolve_cpt', 'en.resolve.HPO': 'sbiobertresolve_HPO', 'en.resolve.atc': 'sbiobertresolve_atc', 'en.resolve.clinical_abbreviation_acronym': 'sbiobertresolve_clinical_abbreviation_acronym', 'en.resolve.clinical_snomed_procedures_measurements': 'sbiobertresolve_clinical_snomed_procedures_measurements', 'en.resolve.cpt': 'sbiobertresolve_cpt', 'en.resolve.cpt.augmented': 'sbiobertresolve_cpt_augmented', 'en.resolve.cpt.procedures_augmented': 'sbiobertresolve_cpt_procedures_augmented', 'en.resolve.cpt.procedures_measurements': 'sbiobertresolve_cpt_procedures_measurements_augmented', 'en.resolve.cvx': 'sbiobertresolve_cvx', 'en.resolve.hcc': 'sbiobertresolve_hcc_augmented', 'en.resolve.hcc.augmented': 'sbiobertresolve_hcc_augmented', 'en.resolve.hcpcs': 'sbiobertresolve_hcpcs', 'en.resolve.ic9': 'sbiobertresolve_icd9', 'en.resolve.icd10cm': 'sbiobertresolve_icd10cm', 'en.resolve.icd10cm.augmented': 'sbiobertresolve_icd10cm_augmented', 'en.resolve.icd10cm.augmented_billable': 
'sbiobertresolve_icd10cm_augmented_billable_hcc', 'en.resolve.icd10cm.slim_billable_hcc': 'sbiobertresolve_icd10cm_slim_billable_hcc', 'en.resolve.icd10cm.slim_billable_hcc_med': 'sbertresolve_icd10cm_slim_billable_hcc_med', 'en.resolve.icd10cm.slim_normalized': 'sbiobertresolve_icd10cm_slim_normalized', 'en.resolve.icd10cm_generalised': 'sbiobertresolve_icd10cm_generalised', 'en.resolve.icd10pcs': 'sbiobertresolve_icd10pcs', 'en.resolve.icd10pcs_augmented': 'sbiobertresolve_icd10pcs_augmented', 'en.resolve.icdo': 'sbiobertresolve_icdo', 'en.resolve.icdo.base': 'sbiobertresolve_icdo_base', 'en.resolve.icdo_augmented': 'sbiobertresolve_icdo_augmented', 'en.resolve.loinc': 'sbiobertresolve_loinc', 'en.resolve.loinc.augmented': 'sbiobertresolve_loinc_augmented', 'en.resolve.loinc.biobert': 'sbiobertresolve_loinc', 'en.resolve.loinc_cased': 'sbiobertresolve_loinc_cased', 'en.resolve.loinc_uncased': 'sbluebertresolve_loinc_uncased', 'en.resolve.mesh': 'sbiobertresolve_mesh', 'en.resolve.ndc': 'sbiobertresolve_ndc', 'en.resolve.ner.model_finder': 'sbertresolve_ner_model_finder', 'en.resolve.rxcui': 'sbiobertresolve_rxcui', 'en.resolve.rxnen.med_ner.deid_subentityorm_augmented': 'sbiobertresolve_rxnorm_augmented', 'en.resolve.rxnorm': 'sbiobertresolve_rxnorm', 'en.resolve.rxnorm.augmented': 'sbiobertresolve_jsl_rxnorm_augmented', 'en.resolve.rxnorm.augmented_cased': 'sbiobertresolve_rxnorm_augmented_cased', 'en.resolve.rxnorm.augmented_med': 'sbertresolve_jsl_rxnorm_augmented_med', 'en.resolve.rxnorm.augmented_re': 'sbiobertresolve_rxnorm_augmented_re', 'en.resolve.rxnorm.augmented_uncased': 'sbluebertresolve_rxnorm_augmented_uncased', 'en.resolve.rxnorm.disposition': 'sbertresolve_rxnorm_disposition', 'en.resolve.rxnorm.disposition.sbert': 'sbertresolve_rxnorm_disposition', 'en.resolve.rxnorm_action_treatment': 'sbiobertresolve_rxnorm_action_treatment', 'en.resolve.rxnorm_disposition': 'sbiobertresolve_rxnorm_disposition', 'en.resolve.rxnorm_disposition.sbert': 
'sbertresolve_rxnorm_disposition', 'en.resolve.rxnorm_ndc': 'sbiobertresolve_rxnorm_ndc', 'en.resolve.snomed': 'sbiobertresolve_snomed_auxConcepts', 'en.resolve.snomed.aux_concepts': 'sbiobertresolve_snomed_auxConcepts', 'en.resolve.snomed.aux_concepts_int': 'sbiobertresolve_snomed_auxConcepts_int', 'en.resolve.snomed.findings': 'sbiobertresolve_snomed_findings', 'en.resolve.snomed.findings_aux_concepts': 'sbiobertresolve_snomed_findings_aux_concepts', 'en.resolve.snomed.findings_int': 'sbiobertresolve_snomed_findings_int', 'en.resolve.snomed.procedures_measurements': 'sbiobertresolve_snomed_procedures_measurements', 'en.resolve.snomed_body_structure': 'sbiobertresolve_snomed_bodyStructure', 'en.resolve.snomed_body_structure_med': 'sbertresolve_snomed_bodyStructure_med', 'en.resolve.snomed_conditions': 'sbertresolve_snomed_conditions', 'en.resolve.snomed_drug': 'sbiobertresolve_snomed_drug', 'en.resolve.umls': 'sbiobertresolve_umls_major_concepts', 'en.resolve.umls.findings': 'sbiobertresolve_umls_findings', 'en.resolve.umls_clinical_drugs': 'sbiobertresolve_umls_clinical_drugs', 'en.resolve.umls_disease_syndrome': 'sbiobertresolve_umls_disease_syndrome', 'en.resolve.umls_drug_substance': 'sbiobertresolve_umls_drug_substance', 'en.rxnorm_to_ndc': 'rxnorm_ndc_mapper', 'en.rxnorm_to_umls': 'rxnorm_umls_mapper', 'en.snomed_to_icd10cm': 'snomed_icd10cm_mapper', 'en.snomed_to_icdo': 'snomed_icdo_mapper', 'en.snomed_to_umls': 'snomed_umls_mapper', 'en.spell.clinical': 'spellcheck_clinical', 'en.spell.drug_norvig': 'spellcheck_drug_norvig', 'en.t5.base_pubmedqa': 't5_base_pubmedqa', 'en.t5.mediqa': 't5_base_mediqa_mnli', 'en.zero_shot.ner_roberta': 'zero_shot_ner_roberta', 'es.embed.roberta_base_biomedical': 'roberta_base_biomedical', 'es.med_ner.roberta_ner_diag_proc': 'roberta_ner_diag_proc', 'es.resolve.snomed': 'robertaresolve_snomed' }, 'es': {'es.classify.bert_token.clinical_trials_abstract': 'bert_token_classifier_ner_clinical_trials_abstracts', 
'es.classify.bert_token.negation_uncertainty': 'bert_token_classifier_negation_uncertainty', 'es.classify.bert_token.ner_living_species': 'bert_token_classifier_ner_living_species', 'es.classify.bert_token.pharmacology': 'bert_token_classifier_pharmacology', 'es.classify.disease_mentions': 'bert_token_classifier_disease_mentions_tweet', 'es.classify.self_reported_symptoms': 'bert_sequence_classifier_self_reported_symptoms_tweet', 'es.embed.roberta_base_biomedical': 'roberta_base_biomedical', 'es.embed.scielo.150d': 'embeddings_scielo_150d', 'es.embed.scielo.300d': 'embeddings_scielo_300d', 'es.embed.scielo.50d': 'embeddings_scielo_50d', 'es.embed.scielo300d': 'embeddings_scielo_300d', 'es.embed.scielowiki.150d': 'embeddings_scielowiki_150d', 'es.embed.scielowiki.300d': 'embeddings_scielowiki_300d', 'es.embed.scielowiki.50d': 'embeddings_scielowiki_50d', 'es.embed.sciwiki.150d': 'embeddings_sciwiki_150d', 'es.embed.sciwiki.300d': 'embeddings_sciwiki_300d', 'es.embed.sciwiki.50d': 'embeddings_sciwiki_50d', 'es.embed.sciwiki_300d': 'embeddings_sciwiki_300d', 'es.med_ner': 'ner_diag_proc', 'es.med_ner.clinical_trial_abstracts': 'ner_clinical_trials_abstracts', 'es.med_ner.deid.generic': 'ner_deid_generic', 'es.med_ner.deid.generic.roberta': 'ner_deid_generic_roberta_augmented', 'es.med_ner.deid.generic_augmented': 'ner_deid_generic_augmented', 'es.med_ner.deid.generic_roberta': 'ner_deid_generic_roberta', 'es.med_ner.deid.subentity': 'ner_deid_subentity', 'es.med_ner.deid.subentity.roberta': 'ner_deid_subentity_roberta_augmented', 'es.med_ner.deid.subentity_augmented': 'ner_deid_subentity_augmented', 'es.med_ner.deid.subentity_roberta': 'ner_deid_subentity_roberta', 'es.med_ner.diag_proc': 'ner_diag_proc', 'es.med_ner.living_species': 'ner_living_species', 'es.med_ner.living_species.300': 'ner_living_species_300', 'es.med_ner.living_species.bert': 'ner_living_species_bert', 'es.med_ner.living_species.roberta': 'ner_living_species_roberta', 
'es.med_ner.negtaion_uncertainty': 'ner_negation_uncertainty', 'es.med_ner.neoplasm': 'ner_neoplasms', 'es.med_ner.pharmacology': 'ner_pharmacology', 'es.med_ner.roberta_ner_diag_proc': 'roberta_ner_diag_proc', 'es.med_ner.scielowiki': 'meddroprof_scielowiki', 'es.med_ner.tweet_disease_mention': 'disease_mentions_tweet', 'es.resolve.snomed': 'robertaresolve_snomed'}, 'fr': {'fr.med_ner.deid_generic': 'ner_deid_generic', 'fr.med_ner.deid_subentity': 'ner_deid_subentity', 'fr.med_ner.living_species': 'ner_living_species', 'fr.med_ner.living_species.bert': 'ner_living_species_bert'}, 'gl': {'gl.med_ner.living_species': 'ner_living_species'}, 'it': {'it.classify.bert_token.ner_living_species': 'bert_token_classifier_ner_living_species', 'it.med_ner.deid_generic': 'ner_deid_generic', 'it.med_ner.deid_subentity': 'ner_deid_subentity', 'it.med_ner.living_species': 'ner_living_species', 'it.med_ner.living_species.bert': 'ner_living_species_bert'}, 'pt': {'pt.med_ner.deid': 'ner_deid_generic', 'pt.med_ner.deid.generic': 'ner_deid_generic', 'pt.med_ner.deid.subentity': 'ner_deid_subentity', 'pt.med_ner.living_species': 'ner_living_species', 'pt.med_ner.living_species.bert': 'ner_living_species_bert', 'pt.med_ner.living_species.roberta': 'ner_living_species_roberta', 'pt.med_ner.living_species.token_bert': 'bert_token_classifier_ner_living_species'}, 'ro': {'ro.embed.clinical.bert.base_cased': 'ner_clinical_bert', 'ro.med_ner.clinical': 'ner_clinical', 'ro.med_ner.deid.subentity': 'ner_deid_subentity', 'ro.med_ner.deid.subentity.bert': 'ner_deid_subentity_bert', 'ro.med_ner.deid_generic': 'ner_deid_generic', 'ro.med_ner.deid_generic_bert': 'ner_deid_generic_bert', 'ro.med_ner.living_species.bert': 'ner_living_species_bert'}} ocr_model_references = { 'img2text': OCR_NODE_IDS.IMAGE2TEXT, 'png2text': OCR_NODE_IDS.IMAGE2TEXT, # Alias for img2text 'jpg2text': OCR_NODE_IDS.IMAGE2TEXT, # Alias for img2text 'pdf2text': OCR_NODE_IDS.PDF2TEXT, 'doc2text': OCR_NODE_IDS.DOC2TEXT, 
'pdf2table': OCR_NODE_IDS.PDF2TEXT_TABLE, 'doc2table': OCR_NODE_IDS.DOC2TEXT_TABLE, 'ppt2table': OCR_NODE_IDS.PPT2TEXT_TABLE, 'classify.image': OCR_NODE_IDS.VISUAL_DOCUMENT_CLASSIFIER, 'en.classify_image.tabacco': 'visual_document_classifier_tobacco3482' } ocr_model_references = { 'img2text': OCR_NODE_IDS.IMAGE2TEXT, 'png2text': OCR_NODE_IDS.IMAGE2TEXT, # Alias for img2text 'jpg2text': OCR_NODE_IDS.IMAGE2TEXT, # Alias for img2text 'pdf2text': OCR_NODE_IDS.PDF2TEXT, 'doc2text': OCR_NODE_IDS.DOC2TEXT, 'pdf2table': OCR_NODE_IDS.PDF2TEXT_TABLE, 'doc2table': OCR_NODE_IDS.DOC2TEXT_TABLE, 'ppt2table': OCR_NODE_IDS.PPT2TEXT_TABLE, 'classify.image': OCR_NODE_IDS.VISUAL_DOCUMENT_CLASSIFIER, 'en.classify_image.tabacco': 'visual_document_classifier_tobacco3482' } # map storage ref to nlu refner_jsl storage_ref_2_nlu_ref = {'ar': {'glove_300d': 'ar.embed'}, 'bn': {'bengali_cc_300d': 'bn.embed.glove', 'glove_840B_300': 'xx.embed.glove.840B_300'}, 'da': {'glove_100d': 'xx.embed.glove.glove_6B_100'}, 'de': {'clinical': 'de.embed_sentence.bert.base_cased', 'glove_6B_300': 'xx.embed.glove.6B_300', 'glove_840B_300': 'xx.embed.glove.840B_300', 'labse': 'xx.embed_sentence.labse', 'sent_bert_base_cased': 'de.embed_sentence.bert.base_cased', 'sent_bert_multi_cased': 'xx.embed_sentence', 'w2v_cc_300d': 'de.embed.w2v'}, 'en': { 'albert_base_uncased': 'en.embed_sentence.albert', 'albert_large_uncased': 'en.embed.albert.large_uncased', 'bert_large_cased': 'en.embed.bert.large_cased', 'biobert_pubmed_base_cased': 'en.embed.biobert', 'distilbert_base_cased': 'en.embed.distilbert.base', 'electra_base_uncased': 'en.embed.electra.base_uncased', 'electra_large_uncased': 'en.embed.electra.large_uncased', 'electra_small_uncased': 'en.embed.electra.small_uncased', 'elmo': 'en.embed.elmo', 'glove_100d': 'en.embed.glove.100d', 'glove_6B_300': 'xx.embed.glove.6B_300', 'glove_840B_300': 'xx.embed.glove.840B_300', 'longformer_large_4096': 'en.embed.longformer.large', 'roberta_base': 
'en.embed.roberta.base', 'roberta_large': 'en.embed.roberta.large', 'sent_biobert_pubmed_base_cased': 'en.embed_sentence.biobert.pubmed_base_cased', 'small_bert_L2_128': 'en.embed.bert.small_L2_128', 'small_bert_L4_256': 'en.embed.bert.small_L4_256', 'small_bert_L4_512': 'en.embed.bert.small_L4_512', 'small_bert_L8_512': 'en.embed.bert.small_L8_512', 'tfhub_use': 'en.embed_sentence.use', 'xlm_roberta_base': 'xx.embed.xlm', 'xlm_roberta_xtreme_base': 'xx.embed.xlm_roberta_xtreme_base', 'xlnet_base_cased': 'en.embed.xlnet_base_cased'}, 'es': {'embeddings_scielo300': 'es.embed.scielo300', 'es.ner': 'roberta_token_classifier_bne_capitel_ner', 'glove_100d': 'xx.embed.glove.glove_6B_100', 'glove_6B_300': 'xx.embed.glove.6B_300', 'glove_840B_300': 'xx.embed.glove.840B_300', 'w2v_cc_300d_es': 'es.embed.w2v_cc_300d'}, 'fa': {'glove_300d': 'fa.embed'}, 'fi': {'glove_100d': 'xx.embed.glove.glove_6B_100', 'glove_6B_300': 'xx.embed.glove.6B_300', 'glove_840B_300': 'xx.embed.glove.840B_300'}, 'fr': {'glove_6B_300': 'xx.embed.glove.6B_300', 'glove_840B_300': 'xx.embed.glove.840B_300', 'labse': 'xx.embed_sentence.labse'}, 'he': {'glove_300d': 'he.embed'}, 'it': {'glove_6B_300': 'xx.embed.glove.6B_300'}, 'ja': {'fasttext_300_ja': 'ja.embed.glove.cc_300d', 'glove_840B_300': 'xx.embed.glove.840B_300', 'xlm_roberta_base': 'xx.embed.xlm'}, 'nl': {'glove_6B_300': 'xx.embed.glove.6B_300', 'glove_840B_300': 'xx.embed.glove.840B_300'}, 'no': {'glove_100d': 'xx.embed.glove.glove_6B_100', 'glove_6B_300': 'xx.embed.glove.6B_300', 'glove_840B_300': 'xx.embed.glove.840B_300'}, 'pl': {'glove_100d': 'xx.embed.glove.glove_6B_100'}, 'pt': {'glove_100d': 'xx.embed.glove.glove_6B_100', 'glove_840B_300': 'xx.embed.glove.840B_300'}, 'ro': {'bert_base_cased_v1': 'ro.embed.bert.cased_base', 'w2v_cc_300d_ro': 'ro.embed.w2v_cc_300d'}, 'ru': {'glove_100d': 'xx.embed.glove.glove_6B_100', 'glove_840B_300': 'xx.embed.glove.840B_300'}, 'sv': {'glove_100d': 'xx.embed.glove.glove_6B_100', 'glove_6B_300': 
'xx.embed.glove.6B_300'}, 'th': {'labse': 'xx.embed_sentence.labse', 'tfhub_use_multi_lg': 'xx.use.multi'}, 'tr': {'bert_base_turkish_uncased': 'tr.embed.bert.uncased', 'bert_multi_cased': 'xx.embed.bert', 'glove_840B_300': 'xx.embed.glove.840B_300', 'labse': 'xx.embed_sentence.labse'}, 'ur': {'glove_300d': 'ur.embed', 'labse': 'xx.embed_sentence.labse'}, 'zh': {'bert_base_chinese': 'zh.embed', 'glove_6B_300': 'xx.embed.glove.6B_300'}} bad_storage_refs = [ 'BERT_SENTENCE_EMBEDDINGS_c7e5b6a772f5', 'RelationExtractionModel_ce79d77d1bf1', 'RelationExtractionModel_1fb1dfa024c7', 'RelationExtractionModel_6a65c9992836', 'BERT_SENTENCE_EMBEDDINGS_0bee53f1b2cc', 'BERT_SENTENCE_EMBEDDINGS_59c3cd1e17c4', 'RelationExtractionModel_3ab4750ad5b6', 'RelationExtractionModel_6b61602c8303', 'RelationExtractionModel_9c255241fec3', 'ROBERTA_EMBEDDINGS_39f3e48e5c3f', 'RelationExtractionModel_14b00157fc1a', 'RelationExtractionModel_53a12cc975cb', 'ROBERTA_EMBEDDINGS_39f3e48e5c3f', ] licensed_storage_ref_2_nlu_ref = {'ca': {'w2v_cc_300d_ca': 'ca.embed.w2v_cc_300d'}, 'de': {'clinical': 'de.embed_sentence.bert.base_cased', 'w2v_cc_300d': 'de.embed'}, 'en': {'BERT_SENTENCE_EMBEDDINGS_0bee53f1b2cc': 'en.embed_sentence.biobert.mli', 'BERT_SENTENCE_EMBEDDINGS_33904e9f754a': 'en.embed_sentence.bert_uncased.rxnorm', 'BERT_SENTENCE_EMBEDDINGS_59c3cd1e17c4': 'en.embed_sentence.bluebert.mli', 'BERT_SENTENCE_EMBEDDINGS_76a84231a63f': 'en.embed_sentence.biobert.rxnorm', 'BERT_SENTENCE_EMBEDDINGS_c7e5b6a772f5': 'en.embed_sentence.bert.jsl_medium_uncased', 'BERT_SENTENCE_EMBEDDINGS_f67c6fa21fe5': 'en.embed_sentence.biobert.jsl_cased', 'ROBERTA_EMBEDDINGS_39f3e48e5c3f': 'en.embed_sentence.biobert.clinical_base_cased', 'RelationExtractionModel_14b00157fc1a': 'en.embed.glove.clinical', 'RelationExtractionModel_189543bacd6e': 'en.embed.glove.clinical', 'RelationExtractionModel_1fb1dfa024c7': 'en.embed.glove.clinical', 'RelationExtractionModel_3ab4750ad5b6': 'en.embed.glove.clinical', 
'RelationExtractionModel_513eb6317779': 'en.embed.glove.clinical', 'RelationExtractionModel_53a12cc975cb': 'en.embed.glove.clinical', 'RelationExtractionModel_68ebe11369b6': 'en.embed.glove.clinical', 'RelationExtractionModel_6a65c9992836': 'en.embed.biobert', 'RelationExtractionModel_6b61602c8303': 'en.embed.glove.clinical', 'RelationExtractionModel_723addf4b6aa': 'en.embed.glove.clinical', 'RelationExtractionModel_7bb24c4f22d1': 'en.embed.glove.clinical', 'RelationExtractionModel_9c255241fec3': 'en.embed.glove.clinical', 'RelationExtractionModel_ab6e5ebef610': 'en.embed.glove.clinical', 'RelationExtractionModel_bc96a0f8b566': 'en.embed.glove.clinical', 'RelationExtractionModel_ce79d77d1bf1': 'en.embed.glove.clinical', 'RelationExtractionModel_d0af74510daa': 'en.embed.glove.clinical', 'bert_base_cased': 'en.embed.bert.base_cased', 'bert_embeddings_PHS_BERT': 'en.embed.bert_phs', 'biobert_clinical_base_cased': 'en.embed.biobert.clinical_base_cased', 'biobert_pubmed_base_cased': 'en.embed.biobert', 'clinical': 'en.embed.glove.clinical', 'embeddings_healthcare100': 'en.embed.glove.healthcare_100d', 'sent_biobert_clinical_base_cased': 'en.embed_sentence.biobert.clinical_base_cased', 'sent_bluebert_base_uncased_mednli': 'en.embed_sentence.bluebert.mli'}, 'es': {'ROBERTA_EMBEDDINGS_39f3e48e5c3f': 'es.embed.roberta_base_biomedical', 'bert_base_cased_es': 'es.embed.bert.base_cased', 'clinical': 'es.embed.roberta_base_biomedical', 'embeddings_scielo300': 'es.embed.scielo300', 'embeddings_scielowiki300': 'es.embed.scielowiki.300d', 'embeddings_wiki300': 'es.embed.sciwiki_300d'}, 'fr': {'WordEmbeddings_b266d1e5126a': 'fr.embed.w2v_cc_300d', 'bert_embeddings_bert_base_fr_cased_fr': 'fr.embed.bert_base_fr_cased'}, 'gl': {'w2v_cc_300d_gl': 'gl.embed.w2v_cc_300d'}, 'it': {'bert_embeddings_bert_base_italian_xxl_cased_it': 'it.embed.bert_base_italian_xxl_cased', 'w2v_cc_300d_it': 'it.embed.word2vec'}, 'pt': {'biobert_embeddings_biomedical_pt': 'pt.embed.gs_biomedical', 
'roberta_embeddings_BR_BERTo_pt': 'pt.embed.BR_BERTo', 'w2v_cc_300d_pt': 'pt.embed.w2v_cc_300d'}, 'ro': {'bert_base_cased_v1': 'ro.embed.bert.base_cased', 'w2v_cc_300d_ro': 'ro.embed.w2v_cc_300d'}} # Map NLU train references to JSL Anno IDs traianble_nlu_ref_to_jsl_anno_id = { 'train.deep_sentence_detector': NLP_NODE_IDS.TRAINABLE_SENTENCE_DETECTOR_DL, 'train.sentence_detector': NLP_NODE_IDS.TRAINABLE_SENTENCE_DETECTOR_DL, 'train.classifier_dl': NLP_NODE_IDS.TRAINABLE_CLASSIFIER_DL, 'train.classifier': NLP_NODE_IDS.TRAINABLE_CLASSIFIER_DL, 'train.ner': NLP_NODE_IDS.TRAINABLE_NER_DL, 'train.named_entity_recognizer_dl': NLP_NODE_IDS.TRAINABLE_NER_DL, 'train.sentiment_dl': NLP_NODE_IDS.TRAINABLE_SENTIMENT_DL, 'train.sentiment': NLP_NODE_IDS.TRAINABLE_SENTIMENT_DL, 'train.pos': NLP_NODE_IDS.TRAINABLE_POS, 'train.multi_classifier': NLP_NODE_IDS.TRAINABLE_MULTI_CLASSIFIER_DL, 'train.word_seg': NLP_NODE_IDS.TRAINABLE_WORD_SEGMENTER, 'train.word_segmenter': NLP_NODE_IDS.TRAINABLE_WORD_SEGMENTER, 'train.generic_classifier': NLP_HC_NODE_IDS.TRAINABLE_GENERIC_CLASSIFIER, 'train.resolve_sentence': NLP_HC_NODE_IDS.TRAINABLE_SENTENCE_ENTITY_RESOLVER, 'train.resolve': NLP_HC_NODE_IDS.TRAINABLE_SENTENCE_ENTITY_RESOLVER, 'train.assertion': NLP_HC_NODE_IDS.TRAINABLE_ASSERTION_DL, 'train.assert': NLP_HC_NODE_IDS.TRAINABLE_ASSERTION_DL, 'train.assertion_dl': NLP_HC_NODE_IDS.TRAINABLE_ASSERTION_DL, } # Map every nlp_ref to an Annotator class. Language Agnostic and includes HC+OS # For models with no pretrained weight, i.e. 
most OCR annotators, it maps AnnoId to Class nlp_ref_to_anno_class = {'abbreviation_category_mapper': 'ChunkMapperModel', 'abbreviation_mapper': 'ChunkMapperModel', 'abbreviation_mapper_augmented': 'ChunkMapperModel', 'albert_base_qa_squad2': 'AlbertForQuestionAnswering', 'albert_base_sequence_classifier_ag_news': 'AlbertForSequenceClassification', 'albert_base_sequence_classifier_imdb': 'AlbertForSequenceClassification', 'albert_base_token_classifier_conll03': 'AlbertForTokenClassification', 'albert_base_uncased': 'AlbertEmbeddings', 'albert_embeddings_ALR_BERT': 'AlbertEmbeddings', 'albert_embeddings_albert_base_arabic': 'AlbertEmbeddings', 'albert_embeddings_albert_base_bahasa_cased': 'AlbertEmbeddings', 'albert_embeddings_albert_base_japanese_v1': 'AlbertEmbeddings', 'albert_embeddings_albert_base_v1': 'AlbertEmbeddings', 'albert_embeddings_albert_fa_base_v2': 'AlbertEmbeddings', 'albert_embeddings_albert_fa_zwnj_base_v2': 'AlbertEmbeddings', 'albert_embeddings_albert_german_ner': 'AlbertEmbeddings', 'albert_embeddings_albert_large_arabic': 'AlbertEmbeddings', 'albert_embeddings_albert_large_bahasa_cased': 'AlbertEmbeddings', 'albert_embeddings_albert_tiny_bahasa_cased': 'AlbertEmbeddings', 'albert_embeddings_albert_xlarge_arabic': 'AlbertEmbeddings', 'albert_embeddings_albert_xlarge_v1': 'AlbertEmbeddings', 'albert_embeddings_albert_xxlarge_v1': 'AlbertEmbeddings', 'albert_embeddings_fralbert_base': 'AlbertEmbeddings', 'albert_embeddings_marathi_albert': 'AlbertEmbeddings', 'albert_embeddings_marathi_albert_v2': 'AlbertEmbeddings', 'albert_indic': 'AlbertEmbeddings', 'albert_large_token_classifier_conll03': 'AlbertForTokenClassification', 'albert_large_uncased': 'AlbertEmbeddings', 'albert_qa_BioM_xxlarge_SQuAD2': 'AlbertForQuestionAnswering', 'albert_qa_QA_1e': 'AlbertForQuestionAnswering', 'albert_qa_ai_club_inductions_21_nlp': 'AlbertForQuestionAnswering', 'albert_qa_base_v2_squad': 'AlbertForQuestionAnswering', 'albert_qa_cs224n_squad2.0_base_v2': 
'AlbertForQuestionAnswering', 'albert_qa_cs224n_squad2.0_large_v2': 'AlbertForQuestionAnswering', 'albert_qa_cs224n_squad2.0_xxlarge_v1': 'AlbertForQuestionAnswering', 'albert_qa_generic': 'AlbertForQuestionAnswering', 'albert_qa_nlpunibo': 'AlbertForQuestionAnswering', 'albert_qa_slp': 'AlbertForQuestionAnswering', 'albert_qa_squad_2.0': 'AlbertForQuestionAnswering', 'albert_qa_squad_slp': 'AlbertForQuestionAnswering', 'albert_qa_vumichien_base_v2_squad2': 'AlbertForQuestionAnswering', 'albert_qa_xlarge_finetuned': 'AlbertForQuestionAnswering', 'albert_qa_xlarge_finetuned_squad': 'AlbertForQuestionAnswering', 'albert_qa_xlarge_v2_squad_v2': 'AlbertForQuestionAnswering', 'albert_qa_xxlarge_finetuned_squad': 'AlbertForQuestionAnswering', 'albert_qa_xxlarge_tweetqa': 'AlbertForQuestionAnswering', 'albert_qa_xxlarge_v1_finetuned_squad2': 'AlbertForQuestionAnswering', 'albert_qa_xxlarge_v2_squad2': 'AlbertForQuestionAnswering', 'albert_qa_xxlarge_v2_squad2_covid_deepset': 'AlbertForQuestionAnswering', 'albert_qa_xxlargev1_squad2_512': 'AlbertForQuestionAnswering', 'albert_xlarge_token_classifier_conll03': 'AlbertForTokenClassification', 'albert_xlarge_uncased': 'AlbertEmbeddings', 'albert_xxlarge_uncased': 'AlbertEmbeddings', 'aner_cc_300d': 'NerDLModel', 'arabic_w2v_cc_300d': 'WordEmbeddingsModel', 'asr_20220507_122935': 'Wav2Vec2ForCTC', 'asr_ai_light_dance_singing2_wav2vec2_large_xlsr_53_5gram_v4_2': 'Wav2Vec2ForCTC', 'asr_ai_light_dance_singing2_wav2vec2_large_xlsr_53_5gram_v4_2_gpu': 'Wav2Vec2ForCTC', 'asr_alvenir_wav2vec2_base_nst_cv9': 'Wav2Vec2ForCTC', 'asr_asr_with_transformers_wav2vec2': 'Wav2Vec2ForCTC', 'asr_asr_with_transformers_wav2vec2_gpu': 'Wav2Vec2ForCTC', 'asr_autonlp_hindi_asr': 'Wav2Vec2ForCTC', 'asr_autonlp_hindi_asr_gpu': 'Wav2Vec2ForCTC', 'asr_av2vec2_xls_r_300m_wolof_lm': 'Wav2Vec2ForCTC', 'asr_av2vec2_xls_r_300m_wolof_lm_gpu': 'Wav2Vec2ForCTC', 'asr_bach_arb': 'Wav2Vec2ForCTC', 'asr_bach_arb_gpu': 'Wav2Vec2ForCTC', 'asr_baseline': 
'Wav2Vec2ForCTC', 'asr_baseline_gpu': 'Wav2Vec2ForCTC', 'asr_bp500_xlsr': 'Wav2Vec2ForCTC', 'asr_bp500_xlsr_gpu': 'Wav2Vec2ForCTC', 'asr_bp_commonvoice100_xlsr': 'Wav2Vec2ForCTC', 'asr_bp_commonvoice100_xlsr_gpu': 'Wav2Vec2ForCTC', 'asr_bp_commonvoice10_xlsr': 'Wav2Vec2ForCTC', 'asr_bp_commonvoice10_xlsr_gpu': 'Wav2Vec2ForCTC', 'asr_bp_lapsbm1_xlsr': 'Wav2Vec2ForCTC', 'asr_bp_lapsbm1_xlsr_gpu': 'Wav2Vec2ForCTC', 'asr_bp_sid10_xlsr': 'Wav2Vec2ForCTC', 'asr_bp_sid10_xlsr_gpu': 'Wav2Vec2ForCTC', 'asr_bp_tedx100_xlsr': 'Wav2Vec2ForCTC', 'asr_bp_tedx100_xlsr_gpu': 'Wav2Vec2ForCTC', 'asr_bp_voxforge1_xlsr': 'Wav2Vec2ForCTC', 'asr_bp_voxforge1_xlsr_gpu': 'Wav2Vec2ForCTC', 'asr_cdac_hindispeechrecognition': 'Wav2Vec2ForCTC', 'asr_cdac_hindispeechrecognition_gpu': 'Wav2Vec2ForCTC', 'asr_central_kurdish_xlsr': 'Wav2Vec2ForCTC', 'asr_central_kurdish_xlsr_gpu': 'Wav2Vec2ForCTC', 'asr_common_voice_lithuanian_fairseq': 'Wav2Vec2ForCTC', 'asr_common_voice_lithuanian_fairseq_gpu': 'Wav2Vec2ForCTC', 'asr_dansk_wav2vec21': 'Wav2Vec2ForCTC', 'asr_dansk_wav2vec21_gpu': 'Wav2Vec2ForCTC', 'asr_dansk_wav2vec2_stt': 'Wav2Vec2ForCTC', 'asr_dansk_wav2vec2_stt_gpu': 'Wav2Vec2ForCTC', 'asr_distil_wav2vec2': 'Wav2Vec2ForCTC', 'asr_distil_wav2vec2_gpu': 'Wav2Vec2ForCTC', 'asr_english_filipino_wav2vec2_l_xls_r_test_03': 'Wav2Vec2ForCTC', 'asr_english_filipino_wav2vec2_l_xls_r_test_04': 'Wav2Vec2ForCTC', 'asr_english_filipino_wav2vec2_l_xls_r_test_05': 'Wav2Vec2ForCTC', 'asr_english_filipino_wav2vec2_l_xls_r_test_05_gpu': 'Wav2Vec2ForCTC', 'asr_english_filipino_wav2vec2_l_xls_r_test_06': 'Wav2Vec2ForCTC', 'asr_english_filipino_wav2vec2_l_xls_r_test_07': 'Wav2Vec2ForCTC', 'asr_english_filipino_wav2vec2_l_xls_r_test_07_gpu': 'Wav2Vec2ForCTC', 'asr_english_filipino_wav2vec2_l_xls_r_test_09': 'Wav2Vec2ForCTC', 'asr_english_filipino_wav2vec2_l_xls_r_test_09_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_france_10_belgium_0_s271': 'Wav2Vec2ForCTC', 
'asr_exp_w2v2r_vp_100k_accent_france_10_belgium_0_s271_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_france_2_belgium_8_s709': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_france_2_belgium_8_s709_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_france_5_belgium_5_s607': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_france_5_belgium_5_s607_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_0_austria_10_s103': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_0_austria_10_s103_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_0_austria_10_s377': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_0_austria_10_s377_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_0_austria_10_s756': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_0_austria_10_s756_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_10_austria_0_s527': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_10_austria_0_s527_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_10_austria_0_s545': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_10_austria_0_s545_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_10_austria_0_s779': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_10_austria_0_s779_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_2_austria_8_s468': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_2_austria_8_s468_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_2_austria_8_s732': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_2_austria_8_s732_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_2_austria_8_s957': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_2_austria_8_s957_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_5_austria_5_s3': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_5_austria_5_s3_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_5_austria_5_s803': 
'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_5_austria_5_s803_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_5_austria_5_s95': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_5_austria_5_s95_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_8_austria_2_s445': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_accent_germany_8_austria_2_s445_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_0_female_10_s400': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_0_female_10_s400_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_0_female_10_s469': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_0_female_10_s469_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_0_female_10_s934': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_0_female_10_s934_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_10_female_0_s156': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_10_female_0_s156_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_10_female_0_s626': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_10_female_0_s626_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_10_female_0_s714': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_10_female_0_s714_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_2_female_8_s255': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_2_female_8_s255_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_2_female_8_s3': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_2_female_8_s3_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_5_female_5_s21': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_5_female_5_s21_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_5_female_5_s474': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_5_female_5_s474_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_5_female_5_s722': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_5_female_5_s722_gpu': 
'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_8_female_2_s428': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_8_female_2_s428_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_8_female_2_s500': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_8_female_2_s500_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_8_female_2_s911': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_vp_100k_gender_male_8_female_2_s911_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_0_austria_10_s350': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_0_austria_10_s350_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_0_austria_10_s381': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_0_austria_10_s381_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_0_austria_10_s673': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_0_austria_10_s673_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_10_austria_0_s295': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_10_austria_0_s728': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_10_austria_0_s728_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_10_austria_0_s886': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_10_austria_0_s886_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_2_austria_8_s368': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_2_austria_8_s458': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_2_austria_8_s458_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_2_austria_8_s543': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_2_austria_8_s543_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_5_austria_5_s412': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_5_austria_5_s412_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_5_austria_5_s534': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_5_austria_5_s534_gpu': 'Wav2Vec2ForCTC', 
'asr_exp_w2v2r_xls_r_accent_germany_8_austria_2_s42': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_accent_germany_8_austria_2_s42_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_gender_male_0_female_10_s922': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_gender_male_10_female_0_s204': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_gender_male_10_female_0_s204_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_gender_male_10_female_0_s287': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_gender_male_10_female_0_s287_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_gender_male_10_female_0_s559': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_gender_male_10_female_0_s559_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_gender_male_2_female_8_s295': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_gender_male_2_female_8_s295_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_gender_male_5_female_5_s896': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_gender_male_5_female_5_s896_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_gender_male_8_female_2_s755': 'Wav2Vec2ForCTC', 'asr_exp_w2v2r_xls_r_gender_male_8_female_2_s755_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_pretraining_s117_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_pretraining_s208_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_pretraining_s28_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_pretraining_s461_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_pretraining_s615_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_pretraining_s650_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_pretraining_s764_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_pretraining_s766_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_pretraining_s842_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_pretraining_s929_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s251': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s251_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s317': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s317_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s37': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s37_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s456': 
'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s456_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s459': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s459_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s460': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s460_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s466': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s466_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s578': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s578_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s646': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_r_wav2vec2_s646_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s149': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s149_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s358': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s358_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s407': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s407_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s408': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s408_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s449': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s449_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s509': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s509_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s615': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s615_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s627': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s627_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s645': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s645_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s660': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s660_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s688': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s688_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s69': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s69_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s764': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s764_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s772': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s772_gpu': 
'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s842': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s842_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s88': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s88_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s899': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s899_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s973': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_100k_s973_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s149': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s149_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s165': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s165_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s169': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s169_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s179': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s179_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s184': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s198': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s198_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s1_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s203': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s203_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s222': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s222_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s224': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s224_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s27': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s27_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s281': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s281_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s282': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s282_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s320': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s320_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s324': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s324_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s335': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s335_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s339': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s339_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s33_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s376': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s376_gpu': 'Wav2Vec2ForCTC', 
'asr_exp_w2v2t_vp_s411': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s411_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s419': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s419_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s438': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s438_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s496': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s496_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s510': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s510_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s533': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s533_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s555': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s555_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s557': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s557_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s579': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s579_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s596': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s596_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s607': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s607_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s689': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s689_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s703': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s703_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s738': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s738_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s749': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s749_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s791': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s791_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s821': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s821_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s875': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s875_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s877': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s877_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s878': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s878_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s946': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s962': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s962_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_vp_s965': 'Wav2Vec2ForCTC', 
'asr_exp_w2v2t_vp_s965_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s156': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s156_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s168': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s168_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s211': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s211_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s226': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s226_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s227': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s227_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s250': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s250_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s321': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s321_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s379': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s379_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s417': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s417_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s515': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s515_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s609': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s609_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s692': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s692_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s721': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s721_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s754': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s754_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s809': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s809_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s859': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s859_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s870': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s870_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s982': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_wav2vec2_s982_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xls_r_s143': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xls_r_s143_gpu': 'Wav2Vec2ForCTC', 
'asr_exp_w2v2t_xls_r_s226': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xls_r_s226_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xls_r_s250': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xls_r_s250_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xls_r_s417': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xls_r_s417_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xls_r_s438': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xls_r_s438_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xls_r_s44': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xls_r_s44_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s116': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s116_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s204': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s204_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s237': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s237_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s286': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s286_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s356': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s356_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s387': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s387_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s449': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s449_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s454': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s454_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s539': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s539_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s677': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s677_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s799': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s799_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s79_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s800': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s800_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s948': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s948_gpu': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s972': 'Wav2Vec2ForCTC', 'asr_exp_w2v2t_xlsr_53_s972_gpu': 'Wav2Vec2ForCTC', 'asr_filipino_wav2vec2_l_xls_r_300m_official': 'Wav2Vec2ForCTC', 
'asr_filipino_wav2vec2_l_xls_r_300m_official_gpu': 'Wav2Vec2ForCTC', 'asr_final_wav2vec2_urdu_asr_project': 'Wav2Vec2ForCTC', 'asr_fine_tune_wav2vec2_xls_r_300m_indonesia': 'Wav2Vec2ForCTC', 'asr_fine_tune_wav2vec2_xls_r_300m_indonesia_gpu': 'Wav2Vec2ForCTC', 'asr_fine_tune_xlsr_wav2vec2_speech2text_vietnamese': 'Wav2Vec2ForCTC', 'asr_fine_tune_xlsr_wav2vec2_speech2text_vietnamese_gpu': 'Wav2Vec2ForCTC', 'asr_fine_tuned_xlsr_english': 'Wav2Vec2ForCTC', 'asr_fine_tuned_xlsr_english_gpu': 'Wav2Vec2ForCTC', 'asr_fine_tunning_on_cv_dataset': 'Wav2Vec2ForCTC', 'asr_fine_tunning_on_cv_dataset_gpu': 'Wav2Vec2ForCTC', 'asr_finetuned_audio_transcriber': 'Wav2Vec2ForCTC', 'asr_fonxlsr': 'Wav2Vec2ForCTC', 'asr_fonxlsr_gpu': 'Wav2Vec2ForCTC', 'asr_greek_lsr_1': 'Wav2Vec2ForCTC', 'asr_hausa_4_wa2vec_data_aug_xls_r_300m': 'Wav2Vec2ForCTC', 'asr_hausa_4_wa2vec_data_aug_xls_r_300m_gpu': 'Wav2Vec2ForCTC', 'asr_hausa_xlsr': 'Wav2Vec2ForCTC', 'asr_hausa_xlsr_gpu': 'Wav2Vec2ForCTC', 'asr_hf_challenge_test': 'Wav2Vec2ForCTC', 'asr_hf_challenge_test_gpu': 'Wav2Vec2ForCTC', 'asr_hindi_base_wav2vec2': 'Wav2Vec2ForCTC', 'asr_hindi_base_wav2vec2_gpu': 'Wav2Vec2ForCTC', 'asr_hindi_clsril_100': 'Wav2Vec2ForCTC', 'asr_hindi_clsril_100_gpu': 'Wav2Vec2ForCTC', 'asr_hindi_large_wav2vec2': 'Wav2Vec2ForCTC', 'asr_hindi_large_wav2vec2_gpu': 'Wav2Vec2ForCTC', 'asr_hindi_model_with_lm_vakyansh': 'Wav2Vec2ForCTC', 'asr_hindi_model_with_lm_vakyansh_gpu': 'Wav2Vec2ForCTC', 'asr_iloko': 'Wav2Vec2ForCTC', 'asr_iloko_gpu': 'Wav2Vec2ForCTC', 'asr_indicwav2vec_hindi': 'Wav2Vec2ForCTC', 'asr_indicwav2vec_hindi_gpu': 'Wav2Vec2ForCTC', 'asr_indicwav2vec_v1_bengali': 'Wav2Vec2ForCTC', 'asr_indicwav2vec_v1_bengali_gpu': 'Wav2Vec2ForCTC', 'asr_iwslt_asr_wav2vec_large_4500h': 'Wav2Vec2ForCTC', 'asr_iwslt_asr_wav2vec_large_4500h_gpu': 'Wav2Vec2ForCTC', 'asr_kabyle_xlsr': 'Wav2Vec2ForCTC', 'asr_kabyle_xlsr_gpu': 'Wav2Vec2ForCTC', 'asr_liepa_lithuanian': 'Wav2Vec2ForCTC', 'asr_liepa_lithuanian_gpu': 'Wav2Vec2ForCTC', 
'asr_lm_swedish': 'Wav2Vec2ForCTC', 'asr_maialong_model': 'Wav2Vec2ForCTC', 'asr_maialong_model_gpu': 'Wav2Vec2ForCTC', 'asr_model_2': 'Wav2Vec2ForCTC', 'asr_model_2_gpu': 'Wav2Vec2ForCTC', 'asr_model_4': 'Wav2Vec2ForCTC', 'asr_model_4_gpu': 'Wav2Vec2ForCTC', 'asr_model_sid_voxforge_cetuc_1': 'Wav2Vec2ForCTC', 'asr_model_sid_voxforge_cetuc_1_gpu': 'Wav2Vec2ForCTC', 'asr_model_sid_voxforge_cetuc_2': 'Wav2Vec2ForCTC', 'asr_models_6': 'Wav2Vec2ForCTC', 'asr_models_6_gpu': 'Wav2Vec2ForCTC', 'asr_part1': 'Wav2Vec2ForCTC', 'asr_part1_gpu': 'Wav2Vec2ForCTC', 'asr_personal_speech_text_model': 'Wav2Vec2ForCTC', 'asr_personal_speech_text_model_gpu': 'Wav2Vec2ForCTC', 'asr_processor_with_lm': 'Wav2Vec2ForCTC', 'asr_processor_with_lm_gpu': 'Wav2Vec2ForCTC', 'asr_quran_speech_recognizer': 'Wav2Vec2ForCTC', 'asr_quran_speech_recognizer_gpu': 'Wav2Vec2ForCTC', 'asr_romanian_wav2vec2': 'Wav2Vec2ForCTC', 'asr_romanian_wav2vec2_gpu': 'Wav2Vec2ForCTC', 'asr_sanskrit': 'Wav2Vec2ForCTC', 'asr_sanskrit_gpu': 'Wav2Vec2ForCTC', 'asr_sp_proj': 'Wav2Vec2ForCTC', 'asr_sp_proj_gpu': 'Wav2Vec2ForCTC', 'asr_speech_sprint_test': 'Wav2Vec2ForCTC', 'asr_temp': 'Wav2Vec2ForCTC', 'asr_temp_gpu': 'Wav2Vec2ForCTC', 'asr_test_by_marma': 'Wav2Vec2ForCTC', 'asr_test_by_marma_gpu': 'Wav2Vec2ForCTC', 'asr_urdu_asr': 'Wav2Vec2ForCTC', 'asr_urdu_asr_gpu': 'Wav2Vec2ForCTC', 'asr_urdu_repo': 'Wav2Vec2ForCTC', 'asr_urdu_repo_gpu': 'Wav2Vec2ForCTC', 'asr_uzbek_stt': 'Wav2Vec2ForCTC', 'asr_uzbek_stt_gpu': 'Wav2Vec2ForCTC', 'asr_vakyansh_wav2vec2_hindi_him_4200': 'Wav2Vec2ForCTC', 'asr_vakyansh_wav2vec2_hindi_him_4200_gpu': 'Wav2Vec2ForCTC', 'asr_vakyansh_wav2vec2_indian_english_enm_700': 'Wav2Vec2ForCTC', 'asr_vakyansh_wav2vec2_indian_english_enm_700_gpu': 'Wav2Vec2ForCTC', 'asr_vakyansh_wav2vec2_maithili_maim_50': 'Wav2Vec2ForCTC', 'asr_vakyansh_wav2vec2_maithili_maim_50_gpu': 'Wav2Vec2ForCTC', 'asr_vakyansh_wav2vec2_sanskrit_sam_60': 'Wav2Vec2ForCTC', 'asr_vakyansh_wav2vec2_sanskrit_sam_60_gpu': 
'Wav2Vec2ForCTC', 'asr_vakyansh_wav2vec2_telugu_tem_100_gpu': 'Wav2Vec2ForCTC', 'asr_voxpopuli_base_2_5_gram_doc4lm': 'Wav2Vec2ForCTC', 'asr_voxpopuli_base_2_5_gram_doc4lm_gpu': 'Wav2Vec2ForCTC', 'asr_w2v2_ami': 'Wav2Vec2ForCTC', 'asr_w2v2_ami_gpu': 'Wav2Vec2ForCTC', 'asr_w2v2_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_25_1aug_2022': 'Wav2Vec2ForCTC', 'asr_wav2vec2_25_1aug_2022_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_10000': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_10000_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_100h_13k_steps': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_100h_13k_steps_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_100h_by_facebook': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_100h_by_facebook_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_100h_by_vuiseng9': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_100h_ngram': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_100h_ngram_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_100h_test': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_100h_test_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_100h_with_lm_by_patrickvonplaten': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_100h_with_lm_by_patrickvonplaten_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_100h_with_lm_by_saahith': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_100h_with_lm_by_saahith_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_100h_with_lm_turkish': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_100h_with_lm_turkish_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_10k_voxpopuli': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_10k_voxpopuli_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_20sec_timit_and_dementiabank': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_20sec_timit_and_dementiabank_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_960h_4_gram': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_960h_4_gram_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_960h_by_facebook': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_960h_by_facebook_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_checkpoint_10': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_checkpoint_14': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_checkpoint_6': 
'Wav2Vec2ForCTC', 'asr_wav2vec2_base_checkpoint_6_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_checkpoint_9': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_checkpoint_9_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_common_voice_second_colab': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_common_voice_second_colab_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_cynthia_tedlium_2500_v2': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_cynthia_tedlium_2500_v2_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_demo_colab_by_thyagosme': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_demo_colab_by_thyagosme_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_finetuned_spgi_speech_dev': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_finetuned_spgi_speech_dev_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_german': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_german_cv9': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_german_cv9_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_german_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_latin': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_latin_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_nptel_demo_colab_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_nst': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_nst_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_rj_try_5': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_rj_try_5_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_swbd_turn_eos_long_short_utt_removed_3percent': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_swbd_turn_eos_long_short_utt_removed_3percent_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_swbd_turn_eos_long_short_utt_removed_5percent': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_swbd_turn_eos_long_short_utt_removed_5percent_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_test': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_test_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_ali_hasan_colab_ex2': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_ali_hasan_colab_ex2_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_asr': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_asr_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_by_patrickvonplaten': 'Wav2Vec2ForCTC', 
'asr_wav2vec2_base_timit_demo_by_patrickvonplaten_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab0_by_cuzeverynameistaken': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab0_by_cuzeverynameistaken_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab0_by_hassnain': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab0_by_hassnain_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab0_by_sherry7144': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab0_by_sherry7144_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab0_by_tahazakir': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab0_by_tahazakir_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab10': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab10_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab11_by_sameearif88': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab1_by_cuzeverynameistaken': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab1_by_cuzeverynameistaken_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab1_by_sherry7144': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab1_by_sherry7144_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab1_by_tahazakir': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab1_by_tahazakir_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab240': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab2_by_ahmad573': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab2_by_ahmad573_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab2_by_hassnain': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab2_by_sameearif88': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab2_by_sherry7144': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab2_by_sherry7144_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab30': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab3_by_hassnain': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab3_by_hassnain_gpu': 
'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab3_by_sherry7144': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab4': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab40': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab40_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab4_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab50': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab50_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab51': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab51_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab52': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab53_by_hassnain': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab53_by_hassnain_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab57': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab647': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab66': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab6_by_hassnain': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab6_by_hassnain_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab70': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab7_by_hassnain': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab7_by_hassnain_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab7_by_sameearif88': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab9': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab90': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab92': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab971': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab9_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_1_by_doddle124578': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_2': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_3': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_32_epochs30': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_32_epochs30_gpu': 'Wav2Vec2ForCTC', 
'asr_wav2vec2_base_timit_demo_colab_32_epochs50_earlystop': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_32_epochs50_earlystop_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_ali221000262': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_anan0329': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_anan0329_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_doddle124578': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_doddle124578_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_gullenasatish': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_gullenasatish_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_hady': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_hady_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_izzy_lazerson': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_izzy_lazerson_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_jessiejohnson': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_jessiejohnson_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_mohai': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_mohai_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_nadaalnada': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_nadaalnada_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_ntp0102': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_ntp0102_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_radhakri119': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_radhakri119_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_samantharhay': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_samantharhay_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_sarahliu186': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_terri1102': 'Wav2Vec2ForCTC', 
'asr_wav2vec2_base_timit_demo_colab_by_testimonial': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_testimonial_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_tingtingyuli': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_by_tingtingyuli_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_test': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_colab_test_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_google_colab_by_adelgalu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_google_colab_by_adelgalu_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_google_colab_by_patrickvonplaten': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_google_colab_by_patrickvonplaten_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_test_jong': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_demo_test_jong_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_moaiz_exp1': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_moaiz_exp2': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_moaiz_explast': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_timit_moaiz_explast_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_vietnamese_160h': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_vietnamese_160h_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_vietnamese_250h': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_vietnamese_250h_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_vietnamese_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_voxpopuli_v2_finetuned': 'Wav2Vec2ForCTC', 'asr_wav2vec2_base_voxpopuli_v2_finetuned_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_bilal_2022': 'Wav2Vec2ForCTC', 'asr_wav2vec2_bilal_2022_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_bilal_20epoch': 'Wav2Vec2ForCTC', 'asr_wav2vec2_bilal_20epoch_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_burak_v2.1': 'Wav2Vec2ForCTC', 'asr_wav2vec2_burak_v2.1_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_cetuc_sid_voxforge_mls_0': 'Wav2Vec2ForCTC', 'asr_wav2vec2_cetuc_sid_voxforge_mls_0_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_cetuc_sid_voxforge_mls_1': 'Wav2Vec2ForCTC', 
'asr_wav2vec2_common_voice_ab_demo': 'Wav2Vec2ForCTC', 'asr_wav2vec2_common_voice_ab_demo_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_common_voice_accents_3': 'Wav2Vec2ForCTC', 'asr_wav2vec2_common_voice_accents_3_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_common_voice_accents_indian': 'Wav2Vec2ForCTC', 'asr_wav2vec2_common_voice_accents_indian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_common_voice_demo': 'Wav2Vec2ForCTC', 'asr_wav2vec2_common_voice_demo_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_common_voice_lithuanian': 'Wav2Vec2ForCTC', 'asr_wav2vec2_common_voice_lithuanian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_coral_300ep': 'Wav2Vec2ForCTC', 'asr_wav2vec2_coral_300ep_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_ctc_earnings22_baseline_5_gram': 'Wav2Vec2ForCTC', 'asr_wav2vec2_ctc_earnings22_baseline_5_gram_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_dutch_large_cgn': 'Wav2Vec2ForCTC', 'asr_wav2vec2_dutch_large_cgn_3hrs': 'Wav2Vec2ForCTC', 'asr_wav2vec2_dutch_large_cgn_3hrs_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_dutch_large_cgn_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_from_scratch_finetune_dummy': 'Wav2Vec2ForCTC', 'asr_wav2vec2_from_scratch_finetune_dummy_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_indian_english': 'Wav2Vec2ForCTC', 'asr_wav2vec2_indian_english_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_indonesian_javanese_sundanese': 'Wav2Vec2ForCTC', 'asr_wav2vec2_indonesian_javanese_sundanese_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_japanese_hiragana_vtuber': 'Wav2Vec2ForCTC', 'asr_wav2vec2_japanese_hiragana_vtuber_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_ksponspeech': 'Wav2Vec2ForCTC', 'asr_wav2vec2_ksponspeech_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_100k_voxpopuli_catala_by_ccoreilly': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_100k_voxpopuli_catala_by_ccoreilly_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_100k_voxpopuli_catala_by_softcatala': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_100k_voxpopuli_catala_by_softcatala_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_100k_voxpopuli_tts_dataset_portuguese': 
'Wav2Vec2ForCTC', 'asr_wav2vec2_large_100k_voxpopuli_tts_dataset_portuguese_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_10min_lv60_self': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_10min_lv60_self_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_960h': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_960h_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_960h_lv60': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_960h_lv60_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_960h_lv60_self': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_960h_lv60_self_4_gram': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_960h_lv60_self_4_gram_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_960h_lv60_self_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_960h_lv60_self_with_wikipedia_lm': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_960h_lv60_self_with_wikipedia_lm_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_a': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_a_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_cgn_3hrs': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_cgn_3hrs_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_english': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_english_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_in_lm': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_in_lm_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_lv60_timit_asr': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_lv60_timit_asr_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_robust_libri_960h': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_robust_libri_960h_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_robust_ls960': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_robust_ls960_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_robust_swbd_300h': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_robust_swbd_300h_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_tedlium': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_tedlium_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_uralic_voxpopuli_v2_finnish': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_uralic_voxpopuli_v2_finnish_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_voxpopuli_french': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_voxpopuli_french_gpu': 
'Wav2Vec2ForCTC', 'asr_wav2vec2_large_voxrex_swedish_4gram': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_voxrex_swedish_4gram_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_1b_bemba_fds': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_1b_bemba_fds_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_1b_common_voice7_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_bashkir_cv7_opt': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_bashkir_cv7_opt_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_bemba_fds': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_bemba_fds_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_by_reichenbach': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_by_reichenbach_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_cantonese': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_cantonese_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_cn_colab': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_cn_colab_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_colab_by_robertodtg': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_colab_by_robertodtg_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_colab_by_tonyalves': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_colab_by_tonyalves_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_cv7': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_cv7_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_cv8': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_cv8_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_d2': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_d2_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_d3': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_d3_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_english_colab': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_english_colab_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_georgian_large': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_georgian_large_gpu': 'Wav2Vec2ForCTC', 
'asr_wav2vec2_large_xls_r_300m_georgian_v0.6': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_guarani_colab': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_guarani_colab_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_guarani_small_by_jhonparra18': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_guarani_small_by_jhonparra18_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_hausa_v1.2': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_hausa_v1.2_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_hindi_by_ravirajoshi': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_hindi_by_ravirajoshi_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_hindi_home_colab_11': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_hindi_kaggle': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_hindi_kaggle_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_hyam_batch4': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_hyam_batch4_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_hyam_batch4_lr4': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_hyam_batch4_lr8': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_ie_cv_la_as': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_ie_cv_la_as_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_indonesian': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_indonesian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_j_kana_colab': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_j_kana_colab_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_j_phoneme_colab': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_j_phoneme_colab_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_spanish_small_by_tomascufaro': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_spanish_small_by_tomascufaro_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_tatar': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_tatar_gpu': 'Wav2Vec2ForCTC', 
'asr_wav2vec2_large_xls_r_300m_turkish_colab_4': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_9': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_chaitanya97': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_chaitanya97_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_emeson77': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_emeson77_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_izzy_lazerson': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_izzy_lazerson_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_krirk': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_lilitket': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_masapasa': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_nimrah': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_nimrah_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_patrickvonplaten': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_patrickvonplaten_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_satyamatury': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_satyamatury_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_vai6hav': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_turkish_colab_by_vai6hav_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_urdu_10': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_urdu_10_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_urdu_proj': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_urdu_proj_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_urdu_v2': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_wx1': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_300m_wx1_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xls_r_thai_test': 'Wav2Vec2ForCTC', 
'asr_wav2vec2_large_xlsr_53_arabic_by_logicbloke': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_arabic_by_logicbloke_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_arabic_egyptian_by_arbml': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_arabic_egyptian_by_arbml_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_catalan': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_czech': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_czech_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_demo_colab_by_project2you': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_demo_colab_by_project2you_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_dutch': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_dutch_by_facebook': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_dutch_by_facebook_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_dutch_by_jonatasgrosman': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_dutch_by_jonatasgrosman_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_dutch_by_nithinholla': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_dutch_by_nithinholla_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_dutch_by_wietsedv': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_dutch_by_wietsedv_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_dutch_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_english_by_jonatasgrosman': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_english_by_jonatasgrosman_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_finnish_by_aapot': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_finnish_by_aapot_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_finnish_by_jonatasgrosman': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_finnish_by_jonatasgrosman_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_finnish_by_tommi': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_finnish_by_tommi_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_finnish_by_vasilis': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_finnish_by_vasilis_gpu': 'Wav2Vec2ForCTC', 
'asr_wav2vec2_large_xlsr_53_french': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_french_by_facebook': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_french_by_facebook_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_french_by_ilyes': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_french_by_ilyes_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_french_by_jonatasgrosman': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_french_by_jonatasgrosman_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_french_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_german': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_german_by_facebook': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_german_by_facebook_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_german_by_jonatasgrosman': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_german_by_marcel': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_german_by_marcel_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_german_by_oliverguhr': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_german_by_oliverguhr_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_german_cv8': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_german_cv8_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_german_cv9': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_german_cv9_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_german_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_german_with_lm': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_gpt': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_gpt_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_greek_by_jonatasgrosman': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_greek_by_lighteternal': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_greek_by_lighteternal_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_greek_by_perelluis13': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_greek_by_perelluis13_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_greek_by_vasilis': 'Wav2Vec2ForCTC', 
'asr_wav2vec2_large_xlsr_53_hungarian_by_anton_l': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_hungarian_by_anton_l_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_hungarian_by_jonatasgrosman': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_hungarian_by_jonatasgrosman_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_irish': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_irish_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_kyrgyz': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_kyrgyz_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_latvian': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_latvian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_lithuanian_by_anton_l': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_lithuanian_by_anton_l_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_lithuanian_by_deividasm': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_lithuanian_by_deividasm_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_lithuanian_by_dundar': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_lithuanian_by_dundar_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_mongolian_by_anton_l': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_mongolian_by_anton_l_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_mongolian_by_tugstugi': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_mongolian_by_tugstugi_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_persian': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_persian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_polish_by_facebook': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_polish_by_facebook_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_romanian_by_anton_l': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_romanian_by_gmihaila': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_romanian_by_gmihaila_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_swiss_german': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_swiss_german_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_tatar_by_anton_l': 
'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_tatar_by_anton_l_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_tatar_by_crang': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_tatar_by_crang_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_vietnamese_by_anuragshas': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_vietnamese_by_anuragshas_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_vietnamese_by_not_tanh': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_vietnamese_by_not_tanh_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_W2V2_TATAR_SMALL': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_53_w2v2_tatar_small_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_bahasa_indonesia': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_bahasa_indonesia_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_bemba': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_bemba_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_cantonese_by_ctl': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_cantonese_by_ctl_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_coraa_portuguese_cv7': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_coraa_portuguese_cv7_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_coraa_portuguese_cv8': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_czech': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_dutch': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_dutch_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_egyptian': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_egyptian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_finnish': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_finnish_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_french': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_french_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_german_by_maxidl': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_german_by_maxidl_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_german_demo': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_greek_1': 'Wav2Vec2ForCTC', 
'asr_wav2vec2_large_xlsr_greek_1_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_greek_2': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_greek_2_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_hindi_demo_colab_by_rafiulrumy': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_hindi_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_hungarian': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_hungarian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_indonesian_artificial': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_indonesian_artificial_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_indonesian_by_cahya': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_indonesian_by_cahya_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_indonesian_by_galuh': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_indonesian_by_galuh_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_indonesian_by_indonesian_nlp': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_indonesian_by_indonesian_nlp_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_italian': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_italian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_japanese_hiragana': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_korean': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_korean_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_ksponspeech_1_20': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_ksponspeech_1_20_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_kyrgyz_by_adilism': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_kyrgyz_by_adilism_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_kyrgyz_by_iarfmoose': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_kyrgyz_by_iarfmoose_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_latvian': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_latvian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_lithuanian': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_lithuanian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_luganda': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_luganda_gpu': 'Wav2Vec2ForCTC', 
'asr_wav2vec2_large_xlsr_mongolian_by_manandey': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_mongolian_by_manandey_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_moroccan': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_moroccan_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_nahuatl': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_nahuatl_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_persian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_persian_v3_by_m3hrdadfi': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_persian_v3_by_m3hrdadfi_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_sermon': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_sermon_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_slovakian': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_slovakian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_sundanese': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_sundanese_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_swedish': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_swedish_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_turkish_demo': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_turkish_demo_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_upper_sorbian_mixed': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_upper_sorbian_mixed_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_vietnamese_by_cuongld': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_vietnamese_by_cuongld_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_vietnamese_by_nhut': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_vietnamese_by_nhut_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_vietnamese_by_ontocord': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_vietnamese_by_ontocord_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_welsh': 'Wav2Vec2ForCTC', 'asr_wav2vec2_large_xlsr_welsh_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_luganda_by_birgermoell': 'Wav2Vec2ForCTC', 'asr_wav2vec2_luganda_by_birgermoell_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_luganda_by_cahya': 'Wav2Vec2ForCTC', 'asr_wav2vec2_luganda_by_cahya_gpu': 'Wav2Vec2ForCTC', 
'asr_wav2vec2_luganda_by_indonesian_nlp': 'Wav2Vec2ForCTC', 'asr_wav2vec2_luganda_by_indonesian_nlp_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_med_custom_train_large': 'Wav2Vec2ForCTC', 'asr_wav2vec2_murad_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_murad_with_some_data': 'Wav2Vec2ForCTC', 'asr_wav2vec2_murad_with_some_data_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_russian': 'Wav2Vec2ForCTC', 'asr_wav2vec2_russian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_swedish_common_voice': 'Wav2Vec2ForCTC', 'asr_wav2vec2_tcrs_runtest': 'Wav2Vec2ForCTC', 'asr_wav2vec2_tcrs_runtest_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_test': 'Wav2Vec2ForCTC', 'asr_wav2vec2_test_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_thai_asr': 'Wav2Vec2ForCTC', 'asr_wav2vec2_vee_demo_colab': 'Wav2Vec2ForCTC', 'asr_wav2vec2_vee_demo_colab_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_1b_english': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_1b_english_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_1b_french_by_bhuang_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_1b_portuguese_coraa_3_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_250': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_250_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_ab_cv8': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_ab_cv8_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_am_cv8_v1': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_am_cv8_v1_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_bangla_command_generated_data_finetune': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_bangla_command_generated_data_finetune_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_cv8': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_cv8_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_english_by_aware_ai': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_english_by_aware_ai_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_final': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_final_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_ftspeech': 'Wav2Vec2ForCTC', 
'asr_wav2vec2_xls_r_300m_german_english': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_german_english_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_german_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_hindi_lm': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_hindi_lm_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_indonesian': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_indonesian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_italian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_italian_robust': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_italian_robust_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_kh': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_kh_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_mixed_by_malay_huggingface': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_mixed_by_malay_huggingface_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_turkish_tr_med': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_300m_turkish_tr_med_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_cv7_from_bp400h': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_cv7_from_bp400h_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_lm_300m': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_lm_300m_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_tf_left_right_shuru': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_tf_left_right_shuru_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_timit_tokenizer_base': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xls_r_timit_tokenizer_base_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr50k_english_phoneme': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr50k_english_phoneme_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr50k_korean_phoneme_aihub_40m': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr50k_korean_phoneme_aihub_40m_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_1b_finnish': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_1b_finnish_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_1b_finnish_lm_by_aapot': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_1b_finnish_lm_by_finnish_nlp': 'Wav2Vec2ForCTC', 
'asr_wav2vec2_xlsr_1b_finnish_lm_v2_by_aapot': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_1b_finnish_lm_v2_by_aapot_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_1b_finnish_lm_v2_by_finnish_nlp': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_1b_finnish_lm_v2_by_finnish_nlp_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_300m_finnish': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_300m_finnish_lm_by_aapot': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_300m_finnish_lm_by_finnish_nlp': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_53_common_voice_indonesian': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_53_common_voice_indonesian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_53_phon': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_53_phon_babel': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_53_phon_babel_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_53_phon_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_bengali_10500': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_korean_senior': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_korean_senior_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_lithuanian_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_multilingual_53': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_multilingual_53_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_multilingual_56': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_multilingual_56_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_tatar_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_train_aug_biglm_1b': 'Wav2Vec2ForCTC', 'asr_wav2vec2_xlsr_train_aug_biglm_1b_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec2large_xlsr_akan': 'Wav2Vec2ForCTC', 'asr_wav2vec_asr_swbd': 'Wav2Vec2ForCTC', 'asr_wav2vec_asr_swbd_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec_demo': 'Wav2Vec2ForCTC', 'asr_wav2vec_demo_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec_finetuned_on_cryptocurrency': 'Wav2Vec2ForCTC', 'asr_wav2vec_finetuned_on_cryptocurrency_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec_large_xlsr_korean': 'Wav2Vec2ForCTC', 'asr_wav2vec_large_xlsr_korean_gpu': 'Wav2Vec2ForCTC', 'asr_wav2vec_lite': 'Wav2Vec2ForCTC', 'asr_wav2vec_lite_gpu': 'Wav2Vec2ForCTC', 'asr_wave2vec2_large_xlsr_hindi': 
'Wav2Vec2ForCTC', 'asr_wave2vec2_large_xlsr_hindi_gpu': 'Wav2Vec2ForCTC', 'asr_wolof_asr': 'Wav2Vec2ForCTC', 'asr_wolof_asr_gpu': 'Wav2Vec2ForCTC', 'asr_wynehills_mimi_asr': 'Wav2Vec2ForCTC', 'asr_xls_r_300m_danish_nst_cv9': 'Wav2Vec2ForCTC', 'asr_xls_r_300m_nst_cv9': 'Wav2Vec2ForCTC', 'asr_xls_r_300m_nst_cv9_gpu': 'Wav2Vec2ForCTC', 'asr_xls_r_300m_prod': 'Wav2Vec2ForCTC', 'asr_xls_r_300m_prod_gpu': 'Wav2Vec2ForCTC', 'asr_xls_r_300m_yaswanth_hindi2': 'Wav2Vec2ForCTC', 'asr_xls_r_300m_yaswanth_hindi2_gpu': 'Wav2Vec2ForCTC', 'asr_xls_r_53_english': 'Wav2Vec2ForCTC', 'asr_xls_r_53_english_gpu': 'Wav2Vec2ForCTC', 'asr_xls_r_ab_spanish': 'Wav2Vec2ForCTC', 'asr_xls_r_ab_spanish_gpu': 'Wav2Vec2ForCTC', 'asr_xls_r_ab_test_by_baaastien': 'Wav2Vec2ForCTC', 'asr_xls_r_ab_test_by_cahya': 'Wav2Vec2ForCTC', 'asr_xls_r_ab_test_by_cahya_gpu': 'Wav2Vec2ForCTC', 'asr_xls_r_ab_test_by_fitods': 'Wav2Vec2ForCTC', 'asr_xls_r_ab_test_by_fitods_gpu': 'Wav2Vec2ForCTC', 'asr_xls_r_ab_test_by_hf_test': 'Wav2Vec2ForCTC', 'asr_xls_r_ab_test_by_hf_test_gpu': 'Wav2Vec2ForCTC', 'asr_xls_r_ab_test_by_muneson': 'Wav2Vec2ForCTC', 'asr_xls_r_ab_test_by_pablouribe': 'Wav2Vec2ForCTC', 'asr_xls_r_ab_test_by_pablouribe_gpu': 'Wav2Vec2ForCTC', 'asr_xls_r_ab_test_by_priyajay': 'Wav2Vec2ForCTC', 'asr_xls_r_ab_test_by_priyajay_gpu': 'Wav2Vec2ForCTC', 'asr_xls_r_demo_test': 'Wav2Vec2ForCTC', 'asr_xls_r_demo_test_gpu': 'Wav2Vec2ForCTC', 'asr_xls_r_eng': 'Wav2Vec2ForCTC', 'asr_xls_r_eng_gpu': 'Wav2Vec2ForCTC', 'asr_xls_r_test': 'Wav2Vec2ForCTC', 'asr_xls_r_test_gpu': 'Wav2Vec2ForCTC', 'asr_xls_test': 'Wav2Vec2ForCTC', 'asr_xlsr300m_7.0_lm': 'Wav2Vec2ForCTC', 'asr_xlsr300m_7.0_lm_gpu': 'Wav2Vec2ForCTC', 'asr_xlsr300m_8.0': 'Wav2Vec2ForCTC', 'asr_xlsr300m_8.0_gpu': 'Wav2Vec2ForCTC', 'asr_xlsr_53_bemba_5hrs': 'Wav2Vec2ForCTC', 'asr_xlsr_53_bemba_5hrs_gpu': 'Wav2Vec2ForCTC', 'asr_xlsr_53_wav2vec_gpu': 'Wav2Vec2ForCTC', 'asr_xlsr_53_wav2vec_greek': 'Wav2Vec2ForCTC', 'asr_xlsr_hungarian': 'Wav2Vec2ForCTC', 
'asr_xlsr_hungarian_gpu': 'Wav2Vec2ForCTC', 'asr_xlsr_indonesia': 'Wav2Vec2ForCTC', 'asr_xlsr_indonesia_gpu': 'Wav2Vec2ForCTC', 'asr_xlsr_kurmanji_kurdish': 'Wav2Vec2ForCTC', 'asr_xlsr_kurmanji_kurdish_gpu': 'Wav2Vec2ForCTC', 'asr_xlsr_lm': 'Wav2Vec2ForCTC', 'asr_xlsr_lm_gpu': 'Wav2Vec2ForCTC', 'asr_xlsr_punctuation': 'Wav2Vec2ForCTC', 'asr_xlsr_punctuation_gpu': 'Wav2Vec2ForCTC', 'asr_xlsr_training_gpu': 'Wav2Vec2ForCTC', 'asr_xlsr_wav2vec2_base_commonvoice_demo_colab_4': 'Wav2Vec2ForCTC', 'asr_xlsr_wav2vec2_base_commonvoice_demo_colab_4_gpu': 'Wav2Vec2ForCTC', 'asr_xlsr_wav2vec2_base_commonvoice_demo_colab_6': 'Wav2Vec2ForCTC', 'asr_xlsr_wav2vec2_final': 'Wav2Vec2ForCTC', 'asr_xlsr_wav2vec2_final_gpu': 'Wav2Vec2ForCTC', 'asr_xlsr_wav2vec_english': 'Wav2Vec2ForCTC', 'asr_xlsr_wav2vec_english_gpu': 'Wav2Vec2ForCTC', 'assertion_dl': 'AssertionDLModel', 'assertion_dl_biobert': 'AssertionDLModel', 'assertion_dl_biobert_scope_L10R10': 'AssertionDLModel', 'assertion_dl_healthcare': 'AssertionDLModel', 'assertion_dl_large': 'AssertionDLModel', 'assertion_dl_radiology': 'AssertionDLModel', 'assertion_dl_scope_L10R10': 'AssertionDLModel', 'assertion_jsl': 'AssertionDLModel', 'assertion_jsl_augmented': 'AssertionDLModel', 'assertion_jsl_large': 'AssertionDLModel', 'assertion_oncology_demographic_binary_wip': 'AssertionDLModel', 'assertion_oncology_family_history_wip': 'AssertionDLModel', 'assertion_oncology_problem_wip': 'AssertionDLModel', 'assertion_oncology_response_to_treatment_wip': 'AssertionDLModel', 'assertion_oncology_smoking_status_wip': 'AssertionDLModel', 'assertion_oncology_test_binary_wip': 'AssertionDLModel', 'assertion_oncology_treatment_binary_wip': 'AssertionDLModel', 'assertion_oncology_wip': 'AssertionDLModel', 'bengali_cc_300d': 'WordEmbeddingsModel', 'bengaliner_cc_300d': 'NerDLModel', 'bert_base_cased': 'BertEmbeddings', 'bert_base_cased_qa_squad2': 'BertForQuestionAnswering', 'bert_base_chinese': 'BertEmbeddings', 'bert_base_dutch_cased': 
'BertEmbeddings', 'bert_base_finnish_cased': 'BertSentenceEmbeddings', 'bert_base_finnish_uncased': 'BertSentenceEmbeddings', 'bert_base_german_cased': 'BertEmbeddings', 'bert_base_german_uncased': 'BertEmbeddings', 'bert_base_italian_cased': 'BertEmbeddings', 'bert_base_italian_uncased': 'BertEmbeddings', 'bert_base_japanese': 'BertEmbeddings', 'bert_base_multilingual_cased': 'BertEmbeddings', 'bert_base_multilingual_uncased': 'BertEmbeddings', 'bert_base_sequence_classifier_ag_news': 'BertForSequenceClassification', 'bert_base_sequence_classifier_dbpedia_14': 'BertForSequenceClassification', 'bert_base_sequence_classifier_imdb': 'BertForSequenceClassification', 'bert_base_token_classifier_conll03': 'BertForTokenClassification', 'bert_base_token_classifier_few_nerd': 'BertForTokenClassification', 'bert_base_token_classifier_ontonote': 'BertForTokenClassification', 'bert_base_turkish_cased': 'BertEmbeddings', 'bert_base_turkish_uncased': 'BertEmbeddings', 'bert_base_uncased': 'BertEmbeddings', 'bert_base_uncased_contracts': 'BertEmbeddings', 'bert_base_uncased_legal': 'BertEmbeddings', 'bert_biolink_base': 'BertEmbeddings', 'bert_biolink_large': 'BertEmbeddings', 'bert_biomed_pubmed_uncased': 'BertEmbeddings', 'bert_base_cased_zero_shot_classifier_xnli': 'BertForZeroShotClassification', 'distilbert_base_zero_shot_classifier_uncased_mnli': 'DistilBertForZeroShotClassification', 'distilbert_base_zero_shot_classifier_turkish_cased_multinli': 'DistilBertForZeroShotClassification', 'distilbert_base_zero_shot_classifier_turkish_cased_allnli': 'DistilBertForZeroShotClassification', 'distilbert_base_zero_shot_classifier_turkish_cased_snli': 'DistilBertForZeroShotClassification', 'roberta_base_zero_shot_classifier_nli': 'RoBertaForZeroShotClassification', 'bert_classifier_2ch_text_classification': 'BertForSequenceClassification', 'bert_classifier_amazon_review_sentiment_analysis': 'BertForSequenceClassification', 'bert_classifier_amitkayal_finetuned_sem_eval_english': 
'BertForSequenceClassification', 'bert_classifier_analytical': 'BertForSequenceClassification', 'bert_classifier_ara_multiclass_news': 'BertForSequenceClassification', 'bert_classifier_arabic_marbert_dialect_identification_city': 'BertForSequenceClassification', 'bert_classifier_arabic_marbert_news_article_classification': 'BertForSequenceClassification', 'bert_classifier_arabic_marbert_poetry_classification': 'BertForSequenceClassification', 'bert_classifier_arabic_marbert_sentiment': 'BertForSequenceClassification', 'bert_classifier_arabic_ner_ace': 'BertForSequenceClassification', 'bert_classifier_arabic_poem_meter_3': 'BertForSequenceClassification', 'bert_classifier_arabic_poem_meter_classification': 'BertForSequenceClassification', 'bert_classifier_arabic_relation_extraction': 'BertForSequenceClassification', 'bert_classifier_auditor_sentiment_finetuned': 'BertForSequenceClassification', 'bert_classifier_autonlp_abbb_622117836': 'BertForSequenceClassification', 'bert_classifier_autonlp_antisemitism_2_21194454': 'BertForSequenceClassification', 'bert_classifier_autonlp_auto_nlp_lyrics_classification_19333717': 'BertForSequenceClassification', 'bert_classifier_autonlp_bank_transaction_classification_5521155': 'BertForSequenceClassification', 'bert_classifier_autonlp_bbc_news_classification_37229289': 'BertForSequenceClassification', 'bert_classifier_autonlp_bp_29016523': 'BertForSequenceClassification', 'bert_classifier_autonlp_cai_out_of_scope_649919116': 'BertForSequenceClassification', 'bert_classifier_autonlp_cat333_624217911': 'BertForSequenceClassification', 'bert_classifier_autonlp_cat33_624317932': 'BertForSequenceClassification', 'bert_classifier_autonlp_classification_9522090': 'BertForSequenceClassification', 'bert_classifier_autonlp_classification_with_all_labellers_9532137': 'BertForSequenceClassification', 'bert_classifier_autonlp_cola_gram_208681': 'BertForSequenceClassification', 'bert_classifier_autonlp_college_classification_164469': 
'BertForSequenceClassification', 'bert_classifier_autonlp_creator_classifications_4021083': 'BertForSequenceClassification', 'bert_classifier_autonlp_cyberlandr_ai_4_614417500': 'BertForSequenceClassification', 'bert_classifier_autonlp_cyberlandr_ai_4_614417501': 'BertForSequenceClassification', 'bert_classifier_autonlp_doctor_de_24595545': 'BertForSequenceClassification', 'bert_classifier_autonlp_doctor_de_24595546': 'BertForSequenceClassification', 'bert_classifier_autonlp_email_classification_657119381': 'BertForSequenceClassification', 'bert_classifier_autonlp_emotion_14722565': 'BertForSequenceClassification', 'bert_classifier_autonlp_emotion_clf': 'BertForSequenceClassification', 'bert_classifier_autonlp_entity_selection_5771228': 'BertForSequenceClassification', 'bert_classifier_autonlp_eo_590516680': 'BertForSequenceClassification', 'bert_classifier_autonlp_gibb_detect_515314387': 'BertForSequenceClassification', 'bert_classifier_autonlp_imdb_classification_596216804': 'BertForSequenceClassification', 'bert_classifier_autonlp_imdb_eval_71421': 'BertForSequenceClassification', 'bert_classifier_autonlp_imdb_sentiment_analysis_623817873': 'BertForSequenceClassification', 'bert_classifier_autonlp_imdb_sentiment_analysis_english_470512388': 'BertForSequenceClassification', 'bert_classifier_autonlp_imdb_test_21134442': 'BertForSequenceClassification', 'bert_classifier_autonlp_japanese_sentiment_59362': 'BertForSequenceClassification', 'bert_classifier_autonlp_japanese_sentiment_59363': 'BertForSequenceClassification', 'bert_classifier_autonlp_kaggledays_625717986': 'BertForSequenceClassification', 'bert_classifier_autonlp_mrcooper_text_classification_529614927': 'BertForSequenceClassification', 'bert_classifier_autonlp_nlpisfun_251844': 'BertForSequenceClassification', 'bert_classifier_autonlp_old_data_trained_10022181': 'BertForSequenceClassification', 'bert_classifier_autonlp_sentiment_detection_1781580': 'BertForSequenceClassification', 
'bert_classifier_autonlp_song_lyrics_18753417': 'BertForSequenceClassification', 'bert_classifier_autonlp_spanish_songs_202661': 'BertForSequenceClassification', 'bert_classifier_autonlp_spinner_check_16492731': 'BertForSequenceClassification', 'bert_classifier_autonlp_test3_2101779': 'BertForSequenceClassification', 'bert_classifier_autonlp_test3_2101782': 'BertForSequenceClassification', 'bert_classifier_autonlp_test_530014983': 'BertForSequenceClassification', 'bert_classifier_autonlp_text_classification_catalonia_independence_autonlp_633018323': 'BertForSequenceClassification', 'bert_classifier_autonlp_traffic_nlp_451311592': 'BertForSequenceClassification', 'bert_classifier_autonlp_triage_35248482': 'BertForSequenceClassification', 'bert_classifier_autonlp_user_review_classification_536415182': 'BertForSequenceClassification', 'bert_classifier_autonlp_wrime_joy_only_117396': 'BertForSequenceClassification', 'bert_classifier_autotrain_apm2_1212245840': 'BertForSequenceClassification', 'bert_classifier_autotrain_apmv2multiclass_1216046004': 'BertForSequenceClassification', 'bert_classifier_autotrain_arabic_poetry_by_subject_920730227': 'BertForSequenceClassification', 'bert_classifier_autotrain_arabic_poetry_by_subject_920730230': 'BertForSequenceClassification', 'bert_classifier_autotrain_argument_feedback_1154042511': 'BertForSequenceClassification', 'bert_classifier_autotrain_base_tweeteval_1281248996': 'BertForSequenceClassification', 'bert_classifier_autotrain_base_tweeteval_1281248997': 'BertForSequenceClassification', 'bert_classifier_autotrain_base_tweeteval_1281248998': 'BertForSequenceClassification', 'bert_classifier_autotrain_base_tweeteval_1281248999': 'BertForSequenceClassification', 'bert_classifier_autotrain_base_tweeteval_1281249000': 'BertForSequenceClassification', 'bert_classifier_autotrain_chat_bot_responses_949231426': 'BertForSequenceClassification', 'bert_classifier_autotrain_chemprot_re_838426740': 'BertForSequenceClassification', 
'bert_classifier_autotrain_dontknowwhatimdoing_980432459': 'BertForSequenceClassification', 'bert_classifier_autotrain_financial_sentiment_765323474': 'BertForSequenceClassification', 'bert_classifier_autotrain_formality_1026434913': 'BertForSequenceClassification', 'bert_classifier_autotrain_gluefinetunedmodel_1013533786': 'BertForSequenceClassification', 'bert_classifier_autotrain_gluemodels_1010733562': 'BertForSequenceClassification', 'bert_classifier_autotrain_iine_classification10_737422470': 'BertForSequenceClassification', 'bert_classifier_autotrain_imdbtestmodel_9215210': 'BertForSequenceClassification', 'bert_classifier_autotrain_j_multi_classification_1181044057': 'BertForSequenceClassification', 'bert_classifier_autotrain_keywordextraction_882328335': 'BertForSequenceClassification', 'bert_classifier_autotrain_livedoor_news_722922024': 'BertForSequenceClassification', 'bert_classifier_autotrain_livedoor_news_732022289': 'BertForSequenceClassification', 'bert_classifier_autotrain_lucifer_multi_auto_831626529': 'BertForSequenceClassification', 'bert_classifier_autotrain_maysix_828926405': 'BertForSequenceClassification', 'bert_classifier_autotrain_not_interested_2_1213045881': 'BertForSequenceClassification', 'bert_classifier_autotrain_rule_793324440': 'BertForSequenceClassification', 'bert_classifier_autotrain_sentanaly_1016134101': 'BertForSequenceClassification', 'bert_classifier_autotrain_sentiment_4_812425472': 'BertForSequenceClassification', 'bert_classifier_autotrain_test_frank_896929583': 'BertForSequenceClassification', 'bert_classifier_autotrain_trec_fine_739422530': 'BertForSequenceClassification', 'bert_classifier_autotrain_wikipedia_sst_2_1034235509': 'BertForSequenceClassification', 'bert_classifier_bacnet_klassifizierung_gewerke': 'BertForSequenceClassification', 'bert_classifier_bacnet_klassifizierung_heizungstechnik': 'BertForSequenceClassification', 'bert_classifier_bacnet_klassifizierung_kaeltettechnik': 
'BertForSequenceClassification', 'bert_classifier_bacnet_klassifizierung_raumlufttechnik': 'BertForSequenceClassification', 'bert_classifier_bacnet_klassifizierung_sanitaertechnik': 'BertForSequenceClassification', 'bert_classifier_base_arabic_camel_mix_poetry_finetuned_qawaf': 'BertForSequenceClassification', 'bert_classifier_base_cased_clickbait_news': 'BertForSequenceClassification', 'bert_classifier_base_cased_cv_studio_name_medium': 'BertForSequenceClassification', 'bert_classifier_base_cased_finetuned_sst2': 'BertForSequenceClassification', 'bert_classifier_base_cased_news_category': 'BertForSequenceClassification', 'bert_classifier_base_cased_tamil_mix_sentiment': 'BertForSequenceClassification', 'bert_classifier_base_dutch_cased_finetuned_sentiment': 'BertForSequenceClassification', 'bert_classifier_base_finance_sentiment_noisy_search': 'BertForSequenceClassification', 'bert_classifier_base_for_multilabel_sentence_classification': 'BertForSequenceClassification', 'bert_classifier_base_german_cased_hatespeech_germeval18coarse': 'BertForSequenceClassification', 'bert_classifier_base_german_cased_sentiment_germeval17': 'BertForSequenceClassification', 'bert_classifier_base_gpt2detector_random': 'BertForSequenceClassification', 'bert_classifier_base_gpt2detector_topk40': 'BertForSequenceClassification', 'bert_classifier_base_gpt2detector_topp92': 'BertForSequenceClassification', 'bert_classifier_base_gpt2detector_topp96': 'BertForSequenceClassification', 'bert_classifier_base_styleclassification_subjective_neutral': 'BertForSequenceClassification', 'bert_classifier_base_swedish_cased_sentiment': 'BertForSequenceClassification', 'bert_classifier_base_uncased_amazon_polarity': 'BertForSequenceClassification', 'bert_classifier_base_uncased_debiased_nli': 'BertForSequenceClassification', 'bert_classifier_base_uncased_english_multilable_classification': 'BertForSequenceClassification', 'bert_classifier_base_uncased_finetuned': 'BertForSequenceClassification', 
'bert_classifier_base_uncased_finetuned_clinc': 'BertForSequenceClassification', 'bert_classifier_base_uncased_finetuned_glue_cola': 'BertForSequenceClassification', 'bert_classifier_base_uncased_finetuned_plutchik_emotion': 'BertForSequenceClassification', 'bert_classifier_base_uncased_offenseval2019_unbalanced': 'BertForSequenceClassification', 'bert_classifier_base_uncased_qnli': 'BertForSequenceClassification', 'bert_classifier_base_uncased_slue_goldtrascription_e3_lr1e_4': 'BertForSequenceClassification', 'bert_classifier_base_uncased_toxicity_a': 'BertForSequenceClassification', 'bert_classifier_based_uncased_sst2_e1': 'BertForSequenceClassification', 'bert_classifier_batterybert_cased_abstract': 'BertForSequenceClassification', 'bert_classifier_batterybert_uncased_abstract': 'BertForSequenceClassification', 'bert_classifier_batteryonlybert_cased_abstract': 'BertForSequenceClassification', 'bert_classifier_batteryonlybert_uncased_abstract': 'BertForSequenceClassification', 'bert_classifier_batteryscibert_cased_abstract': 'BertForSequenceClassification', 'bert_classifier_batteryscibert_uncased_abstract': 'BertForSequenceClassification', 'bert_classifier_bcms_ic_frenk_hate': 'BertForSequenceClassification', 'bert_classifier_beep_kc_base_bias': 'BertForSequenceClassification', 'bert_classifier_benchmark_finetuned': 'BertForSequenceClassification', 'bert_classifier_bert_banking77': 'BertForSequenceClassification', 'bert_classifier_bert_base_arabic_camelbert_ca_poetry': 'BertForSequenceClassification', 'bert_classifier_bert_base_arabic_camelbert_ca_sentiment': 'BertForSequenceClassification', 'bert_classifier_bert_base_arabic_camelbert_da_poetry': 'BertForSequenceClassification', 'bert_classifier_bert_base_arabic_camelbert_da_sentiment': 'BertForSequenceClassification', 'bert_classifier_bert_base_arabic_camelbert_mix_did_madar_corpus26': 'BertForSequenceClassification', 'bert_classifier_bert_base_arabic_camelbert_mix_did_madar_corpus6': 
'BertForSequenceClassification', 'bert_classifier_bert_base_arabic_camelbert_mix_did_nadi': 'BertForSequenceClassification', 'bert_classifier_bert_base_arabic_camelbert_mix_poetry': 'BertForSequenceClassification', 'bert_classifier_bert_base_arabic_camelbert_mix_sentiment': 'BertForSequenceClassification', 'bert_classifier_bert_base_arabic_camelbert_msa_did_madar_twitter5': 'BertForSequenceClassification', 'bert_classifier_bert_base_arabic_camelbert_msa_did_nadi': 'BertForSequenceClassification', 'bert_classifier_bert_base_arabic_camelbert_msa_poetry': 'BertForSequenceClassification', 'bert_classifier_bert_base_arabic_camelbert_msa_sentiment': 'BertForSequenceClassification', 'bert_classifier_bert_base_cased_abstract': 'BertForSequenceClassification', 'bert_classifier_bert_base_cased_finetuned_cola': 'BertForSequenceClassification', 'bert_classifier_bert_base_cased_finetuned_mnli': 'BertForSequenceClassification', 'bert_classifier_bert_base_cased_finetuned_mrpc': 'BertForSequenceClassification', 'bert_classifier_bert_base_cased_finetuned_qnli': 'BertForSequenceClassification', 'bert_classifier_bert_base_cased_finetuned_qqp': 'BertForSequenceClassification', 'bert_classifier_bert_base_cased_finetuned_rte': 'BertForSequenceClassification', 'bert_classifier_bert_base_cased_finetuned_sst2': 'BertForSequenceClassification', 'bert_classifier_bert_base_cased_finetuned_wnli': 'BertForSequenceClassification', 'bert_classifier_bert_base_cased_trec_coarse': 'BertForSequenceClassification', 'bert_classifier_bert_base_dutch_cased_hebban_reviews': 'BertForSequenceClassification', 'bert_classifier_bert_base_en_es_codemix_cased': 'BertForSequenceClassification', 'bert_classifier_bert_base_en_hi_codemix_cased': 'BertForSequenceClassification', 'bert_classifier_bert_base_german_cased_german_hotel_classification': 'BertForSequenceClassification', 'bert_classifier_bert_base_german_cased_german_hotel_sentiment': 'BertForSequenceClassification', 
'bert_classifier_bert_base_german_cased_gnad10': 'BertForSequenceClassification', 'bert_classifier_bert_base_indonesian_1.5g_sentiment_analysis_smsa': 'BertForSequenceClassification', 'bert_classifier_bert_base_italian_cased_sentiment': 'BertForSequenceClassification', 'bert_classifier_bert_base_japanese_v2_wrime_fine_tune': 'BertForSequenceClassification', 'bert_classifier_bert_base_multilingual_cased_hebban_reviews': 'BertForSequenceClassification', 'bert_classifier_bert_base_multilingual_cased_nsmc': 'BertForSequenceClassification', 'bert_classifier_bert_base_multilingual_uncased_sentiment': 'BertForSequenceClassification', 'bert_classifier_bert_base_turkish_bullying': 'BertForSequenceClassification', 'bert_classifier_bert_base_turkish_sentiment_cased': 'BertForSequenceClassification', 'bert_classifier_bert_base_uncased_abstract': 'BertForSequenceClassification', 'bert_classifier_bert_base_uncased_ag_news': 'BertForSequenceClassification', 'bert_classifier_bert_base_uncased_finetuned_surveyclassification': 'BertForSequenceClassification', 'bert_classifier_bert_base_uncased_german_multilable_classification': 'BertForSequenceClassification', 'bert_classifier_bert_base_uncased_hatexplain': 'BertForSequenceClassification', 'bert_classifier_bert_base_uncased_hatexplain_rationale_two': 'BertForSequenceClassification', 'bert_classifier_bert_based_uncased_hatespeech_movies': 'BertForSequenceClassification', 'bert_classifier_bert_fa_base_uncased_clf_digimag': 'BertForSequenceClassification', 'bert_classifier_bert_fa_base_uncased_clf_persiannews': 'BertForSequenceClassification', 'bert_classifier_bert_fa_base_uncased_farstail': 'BertForSequenceClassification', 'bert_classifier_bert_fa_base_uncased_haddad_wikinli': 'BertForSequenceClassification', 'bert_classifier_bert_fa_base_uncased_sentiment_deepsentipers_binary': 'BertForSequenceClassification', 'bert_classifier_bert_fa_base_uncased_sentiment_deepsentipers_multi': 'BertForSequenceClassification', 
'bert_classifier_bert_fa_base_uncased_sentiment_digikala': 'BertForSequenceClassification', 'bert_classifier_bert_fa_base_uncased_sentiment_snappfood': 'BertForSequenceClassification', 'bert_classifier_bert_fa_base_uncased_wikinli': 'BertForSequenceClassification', 'bert_classifier_bert_finetuned_mrpc': 'BertForSequenceClassification', 'bert_classifier_bert_finetuning_cn': 'BertForSequenceClassification', 'bert_classifier_bert_imdb_1hidden': 'BertForSequenceClassification', 'bert_classifier_bert_large_cased_finetuned_cola': 'BertForSequenceClassification', 'bert_classifier_bert_large_cased_finetuned_mrpc': 'BertForSequenceClassification', 'bert_classifier_bert_large_cased_finetuned_rte': 'BertForSequenceClassification', 'bert_classifier_bert_large_cased_finetuned_wnli': 'BertForSequenceClassification', 'bert_classifier_bert_large_hatexplain': 'BertForSequenceClassification', 'bert_classifier_bert_large_mnli': 'BertForSequenceClassification', 'bert_classifier_bert_large_sst2': 'BertForSequenceClassification', 'bert_classifier_bert_multiclass_textclassification': 'BertForSequenceClassification', 'bert_classifier_bert_sentiment_analysis': 'BertForSequenceClassification', 'bert_classifier_bert_swahili_news_classification': 'BertForSequenceClassification', 'bert_classifier_bert_textclassification': 'BertForSequenceClassification', 'bert_classifier_bert_tiny_bahasa_cased_sentiment': 'BertForSequenceClassification', 'bert_classifier_bert_tweet_eval_emotion': 'BertForSequenceClassification', 'bert_classifier_berticelli': 'BertForSequenceClassification', 'bert_classifier_beto_4d': 'BertForSequenceClassification', 'bert_classifier_beto_emotion_analysis': 'BertForSequenceClassification', 'bert_classifier_beto_headlines_sentiment_analysis': 'BertForSequenceClassification', 'bert_classifier_beto_sentiment_analysis': 'BertForSequenceClassification', 'bert_classifier_bhadresh_savani_base_uncased_emotion': 'BertForSequenceClassification', 'bert_classifier_bi_classification': 
'BertForSequenceClassification', 'bert_classifier_binary_classification_arabic': 'BertForSequenceClassification', 'bert_classifier_bio_pubmed200krct': 'BertForSequenceClassification', 'bert_classifier_biobert_v1.1_pub_section': 'BertForSequenceClassification', 'bert_classifier_bioformer_cased_v1.0_mnli': 'BertForSequenceClassification', 'bert_classifier_bioformer_cased_v1.0_qnli': 'BertForSequenceClassification', 'bert_classifier_biomednlp_pubmedbert_base_uncased_abstract_fulltext_pub_section': 'BertForSequenceClassification', 'bert_classifier_bluebert_pubmed_uncased_l_12_h_768_a_12_pub_section': 'BertForSequenceClassification', 'bert_classifier_bounti': 'BertForSequenceClassification', 'bert_classifier_c2_roberta_base_finetuned_dianping_chinese': 'BertForSequenceClassification', 'bert_classifier_cbert': 'BertForSequenceClassification', 'bert_classifier_chinese_sentiment': 'BertForSequenceClassification', 'bert_classifier_cl_1': 'BertForSequenceClassification', 'bert_classifier_claim_hugging': 'BertForSequenceClassification', 'bert_classifier_clinical_assertion_negation': 'BertForSequenceClassification', 'bert_classifier_clog_assessment_model': 'BertForSequenceClassification', 'bert_classifier_comments_text_classification_model': 'BertForSequenceClassification', 'bert_classifier_contextualized_hate_speech': 'BertForSequenceClassification', 'bert_classifier_core_clinical_mortality_prediction': 'BertForSequenceClassification', 'bert_classifier_coronabert': 'BertForSequenceClassification', 'bert_classifier_counterargument_hugging': 'BertForSequenceClassification', 'bert_classifier_covid_misinfo': 'BertForSequenceClassification', 'bert_classifier_curiosity_bio': 'BertForSequenceClassification', 'bert_classifier_da_emotion_binary': 'BertForSequenceClassification', 'bert_classifier_da_emotion_classification': 'BertForSequenceClassification', 'bert_classifier_da_hatespeech_classification': 'BertForSequenceClassification', 'bert_classifier_da_hatespeech_detection': 
'BertForSequenceClassification', 'bert_classifier_da_hyggebert': 'BertForSequenceClassification', 'bert_classifier_da_tone_sentiment_polarity': 'BertForSequenceClassification', 'bert_classifier_da_tone_subjective_objective': 'BertForSequenceClassification', 'bert_classifier_danish_emotion_classification': 'BertForSequenceClassification', 'bert_classifier_decision_style_bio': 'BertForSequenceClassification', 'bert_classifier_dehate_mono_indonesian': 'BertForSequenceClassification', 'bert_classifier_dehate_mono_spanish_finetuned_sentiments_reviews_politicos': 'BertForSequenceClassification', 'bert_classifier_dehatebert_mono_arabic': 'BertForSequenceClassification', 'bert_classifier_dehatebert_mono_english': 'BertForSequenceClassification', 'bert_classifier_dehatebert_mono_french': 'BertForSequenceClassification', 'bert_classifier_dehatebert_mono_german': 'BertForSequenceClassification', 'bert_classifier_dehatebert_mono_italian': 'BertForSequenceClassification', 'bert_classifier_dehatebert_mono_polish': 'BertForSequenceClassification', 'bert_classifier_dehatebert_mono_portugese': 'BertForSequenceClassification', 'bert_classifier_dehatebert_mono_spanish': 'BertForSequenceClassification', 'bert_classifier_demo': 'BertForSequenceClassification', 'bert_classifier_demotest': 'BertForSequenceClassification', 'bert_classifier_descriptive': 'BertForSequenceClassification', 'bert_classifier_distil_base_uncased_finetuned_emotion': 'BertForSequenceClassification', 'bert_classifier_drug_sentiment_analysis': 'BertForSequenceClassification', 'bert_classifier_dum': 'BertForSequenceClassification', 'bert_classifier_dutch_news_clf_finetuned': 'BertForSequenceClassification', 'bert_classifier_dvs_f': 'BertForSequenceClassification', 'bert_classifier_emo_nojoylove': 'BertForSequenceClassification', 'bert_classifier_emojify_mvp': 'BertForSequenceClassification', 'bert_classifier_english_news_classification_headlines': 'BertForSequenceClassification', 
'bert_classifier_english_tweetsentiment': 'BertForSequenceClassification', 'bert_classifier_english_yelp_sentiment': 'BertForSequenceClassification', 'bert_classifier_env_cls_chinese': 'BertForSequenceClassification', 'bert_classifier_env_topic': 'BertForSequenceClassification', 'bert_classifier_erlangshen_roberta_110m_nli': 'BertForSequenceClassification', 'bert_classifier_erlangshen_roberta_110m_sentiment': 'BertForSequenceClassification', 'bert_classifier_erlangshen_roberta_330m_nli': 'BertForSequenceClassification', 'bert_classifier_erlangshen_roberta_330m_sentiment': 'BertForSequenceClassification', 'bert_classifier_erlangshen_roberta_330m_similarity': 'BertForSequenceClassification', 'bert_classifier_erlangshen_sentiment_finetune': 'BertForSequenceClassification', 'bert_classifier_esg': 'BertForSequenceClassification', 'bert_classifier_evidence_types': 'BertForSequenceClassification', 'bert_classifier_extra_bio': 'BertForSequenceClassification', 'bert_classifier_extreme_go_emotion': 'BertForSequenceClassification', 'bert_classifier_fabriceyhc_base_uncased_imdb': 'BertForSequenceClassification', 'bert_classifier_financialbert_sentiment_analysis': 'BertForSequenceClassification', 'bert_classifier_finbert': 'BertForSequenceClassification', 'bert_classifier_finbert_esg': 'BertForSequenceClassification', 'bert_classifier_finbert_finnsentiment': 'BertForSequenceClassification', 'bert_classifier_finbert_fls': 'BertForSequenceClassification', 'bert_classifier_finbert_tone': 'BertForSequenceClassification', 'bert_classifier_fine_tuned_cola': 'BertForSequenceClassification', 'bert_classifier_fine_tuned_cola1': 'BertForSequenceClassification', 'bert_classifier_fine_tuned_cola2': 'BertForSequenceClassification', 'bert_classifier_fine_tuning_text_classification_model_habana_gaudi': 'BertForSequenceClassification', 'bert_classifier_finetuned_emotion': 'BertForSequenceClassification', 'bert_classifier_finetuned_location': 'BertForSequenceClassification', 
'bert_classifier_finetuned_resumes_sections': 'BertForSequenceClassification', 'bert_classifier_finetuned_semantic_chinese': 'BertForSequenceClassification', 'bert_classifier_gbert_base_finetuned_cefr': 'BertForSequenceClassification', 'bert_classifier_gbert_base_germandpr_reranking': 'BertForSequenceClassification', 'bert_classifier_gbert_large_jobad_classification_34': 'BertForSequenceClassification', 'bert_classifier_german_news_sentiment': 'BertForSequenceClassification', 'bert_classifier_german_sentiment': 'BertForSequenceClassification', 'bert_classifier_german_sentiment_twitter': 'BertForSequenceClassification', 'bert_classifier_glue_mrpc': 'BertForSequenceClassification', 'bert_classifier_guns_relevant': 'BertForSequenceClassification', 'bert_classifier_gurkan08_turkish_text_classification': 'BertForSequenceClassification', 'bert_classifier_hashtag_to_hashtag': 'BertForSequenceClassification', 'bert_classifier_hashtag_to_hashtag_20': 'BertForSequenceClassification', 'bert_classifier_hate_speech_detector': 'BertForSequenceClassification', 'bert_classifier_hatescore_korean_hate_speech': 'BertForSequenceClassification', 'bert_classifier_hateval_re': 'BertForSequenceClassification', 'bert_classifier_he_sentiment_analysis': 'BertForSequenceClassification', 'bert_classifier_hinglish11k_sentiment_analysis': 'BertForSequenceClassification', 'bert_classifier_hinglish_class': 'BertForSequenceClassification', 'bert_classifier_ibrahim2030_tiny_sst2_distilled': 'BertForSequenceClassification', 'bert_classifier_idea_ccnl_erlangshen_roberta_110m_similarity': 'BertForSequenceClassification', 'bert_classifier_ilana_tiny_sst2_distilled': 'BertForSequenceClassification', 'bert_classifier_industry_classification_api': 'BertForSequenceClassification', 'bert_classifier_intel_base_uncased_mrpc': 'BertForSequenceClassification', 'bert_classifier_intent_classification_base_cased': 'BertForSequenceClassification', 'bert_classifier_interpress_turkish_news_classification': 
'BertForSequenceClassification', 'bert_classifier_it_iptc': 'BertForSequenceClassification', 'bert_classifier_italian_news_classification_headlines': 'BertForSequenceClassification', 'bert_classifier_jd_resume_model': 'BertForSequenceClassification', 'bert_classifier_jeremiahz_base_uncased_mrpc': 'BertForSequenceClassification', 'bert_classifier_joniponi_finetuned_sem_eval_english': 'BertForSequenceClassification', 'bert_classifier_klue_tc_base_multilingual_cased': 'BertForSequenceClassification', 'bert_classifier_kor_3i4k_base_cased': 'BertForSequenceClassification', 'bert_classifier_kor_unsmile': 'BertForSequenceClassification', 'bert_classifier_krm_sa3': 'BertForSequenceClassification', 'bert_classifier_large_cased_whole_word_masking_sst2': 'BertForSequenceClassification', 'bert_classifier_large_gpt2detector_random': 'BertForSequenceClassification', 'bert_classifier_large_gpt2detector_topk40': 'BertForSequenceClassification', 'bert_classifier_larskjeldgaard_senda': 'BertForSequenceClassification', 'bert_classifier_letr_sol_profanity_filter': 'BertForSequenceClassification', 'bert_classifier_lex_textclassification_turkish_uncased': 'BertForSequenceClassification', 'bert_classifier_lupinlevorace_tiny_sst2_distilled': 'BertForSequenceClassification', 'bert_classifier_m_corona_tweets_belgium_topics': 'BertForSequenceClassification', 'bert_classifier_manglish_offensive_language_identification': 'BertForSequenceClassification', 'bert_classifier_mbert_corona_tweets_belgium_curfew_support': 'BertForSequenceClassification', 'bert_classifier_medical_article_by_icd_11_chapter': 'BertForSequenceClassification', 'bert_classifier_mental_health_trainer': 'BertForSequenceClassification', 'bert_classifier_mini_sst2_distilled': 'BertForSequenceClassification', 'bert_classifier_minilm_finetuned_emotion': 'BertForSequenceClassification', 'bert_classifier_minilm_finetuned_emotion_nm': 'BertForSequenceClassification', 'bert_classifier_minilm_l12_h384_uncased_mrpc': 
'BertForSequenceClassification', 'bert_classifier_minilm_l12_h384_uncased_sst2_all_train': 'BertForSequenceClassification', 'bert_classifier_minilm_l6_mnli': 'BertForSequenceClassification', 'bert_classifier_minilm_l6_mnli_binary': 'BertForSequenceClassification', 'bert_classifier_minilm_l6_mnli_fever_docnli_ling_2c': 'BertForSequenceClassification', 'bert_classifier_minlm_finetuned_emotionnew1': 'BertForSequenceClassification', 'bert_classifier_minlm_finetuned_emotionnew2': 'BertForSequenceClassification', 'bert_classifier_mnli_base_cased_3': 'BertForSequenceClassification', 'bert_classifier_mnli_large_cased': 'BertForSequenceClassification', 'bert_classifier_model1_test': 'BertForSequenceClassification', 'bert_classifier_multi2convai_corona': 'BertForSequenceClassification', 'bert_classifier_multi2convai_logistics': 'BertForSequenceClassification', 'bert_classifier_multi2convai_quality': 'BertForSequenceClassification', 'bert_classifier_multi2convai_quality_en_mbert': 'BertForSequenceClassification', 'bert_classifier_multi2convai_quality_fr_mbert': 'BertForSequenceClassification', 'bert_classifier_multi2convai_quality_it_mbert': 'BertForSequenceClassification', 'bert_classifier_multi2convai_qualy': 'BertForSequenceClassification', 'bert_classifier_multi_label_classification_of_pubmed_articles': 'BertForSequenceClassification', 'bert_classifier_multilabel_inpatient_comments_14labels': 'BertForSequenceClassification', 'bert_classifier_nateraw_base_uncased_emotion': 'BertForSequenceClassification', 'bert_classifier_nateraw_base_uncased_imdb': 'BertForSequenceClassification', 'bert_classifier_navid_test': 'BertForSequenceClassification', 'bert_classifier_nb_base_target_group': 'BertForSequenceClassification', 'bert_classifier_nb_base_user_needs': 'BertForSequenceClassification', 'bert_classifier_nb_large_user_needs': 'BertForSequenceClassification', 'bert_classifier_ni_model_8_19': 'BertForSequenceClassification', 'bert_classifier_non_cl': 
'BertForSequenceClassification', 'bert_classifier_non_contextualized_hate_speech': 'BertForSequenceClassification', 'bert_classifier_obgv_gder': 'BertForSequenceClassification', 'bert_classifier_off_detection_turkish': 'BertForSequenceClassification', 'bert_classifier_ogbv_gder_hi_mlkorra': 'BertForSequenceClassification', 'bert_classifier_paraphrase': 'BertForSequenceClassification', 'bert_classifier_pars_fa_sentiment_twitter': 'BertForSequenceClassification', 'bert_classifier_pathology_meningioma': 'BertForSequenceClassification', 'bert_classifier_philschmid_tiny_sst2_distilled': 'BertForSequenceClassification', 'bert_classifier_pin_senda': 'BertForSequenceClassification', 'bert_classifier_platzi_base_mrpc_glue_omar_espejel': 'BertForSequenceClassification', 'bert_classifier_poem_qafiyah_detection': 'BertForSequenceClassification', 'bert_classifier_pred_genre': 'BertForSequenceClassification', 'bert_classifier_pred_timeperiod': 'BertForSequenceClassification', 'bert_classifier_pro_cell_expert': 'BertForSequenceClassification', 'bert_classifier_prot_bfd_localization': 'BertForSequenceClassification', 'bert_classifier_prot_bfd_membrane': 'BertForSequenceClassification', 'bert_classifier_pubmed_pubmed200krct': 'BertForSequenceClassification', 'bert_classifier_qs': 'BertForSequenceClassification', 'bert_classifier_question_detection': 'BertForSequenceClassification', 'bert_classifier_reasoning_hugging': 'BertForSequenceClassification', 'bert_classifier_reddit_tc': 'BertForSequenceClassification', 'bert_classifier_regardv3': 'BertForSequenceClassification', 'bert_classifier_republic': 'BertForSequenceClassification', 'bert_classifier_response_quality_base': 'BertForSequenceClassification', 'bert_classifier_response_quality_tiny': 'BertForSequenceClassification', 'bert_classifier_response_toxicity_base': 'BertForSequenceClassification', 'bert_classifier_riad_finetuned_mrpc': 'BertForSequenceClassification', 'bert_classifier_roberta_base_finetuned_chinanews_chinese': 
'BertForSequenceClassification', 'bert_classifier_roberta_base_finetuned_dianping_chinese': 'BertForSequenceClassification', 'bert_classifier_roberta_base_finetuned_ifeng_chinese': 'BertForSequenceClassification', 'bert_classifier_roberta_base_finetuned_jd_binary_chinese': 'BertForSequenceClassification', 'bert_classifier_roberta_base_finetuned_jd_full_chinese': 'BertForSequenceClassification', 'bert_classifier_robertabase_ana4': 'BertForSequenceClassification', 'bert_classifier_rotten_tomatoes_finetuned': 'BertForSequenceClassification', 'bert_classifier_ru_base_srl': 'BertForSequenceClassification', 'bert_classifier_rubert_base_cased_dp_paraphrase_detection': 'BertForSequenceClassification', 'bert_classifier_rubert_base_cased_sentiment': 'BertForSequenceClassification', 'bert_classifier_rubert_base_cased_sentiment_med': 'BertForSequenceClassification', 'bert_classifier_rubert_base_cased_sentiment_new': 'BertForSequenceClassification', 'bert_classifier_rubert_base_cased_sentiment_rurewiews': 'BertForSequenceClassification', 'bert_classifier_rubert_base_cased_sentiment_rusentiment': 'BertForSequenceClassification', 'bert_classifier_rubert_base_corruption_detector': 'BertForSequenceClassification', 'bert_classifier_rubert_tiny2_cedr_emotion_detection': 'BertForSequenceClassification', 'bert_classifier_rubert_tiny2_russian_emotion_detection': 'BertForSequenceClassification', 'bert_classifier_rubert_tiny_sentiment_balanced': 'BertForSequenceClassification', 'bert_classifier_rubert_tiny_toxicity': 'BertForSequenceClassification', 'bert_classifier_rubertconv_toxic_clf': 'BertForSequenceClassification', 'bert_classifier_rumor': 'BertForSequenceClassification', 'bert_classifier_russian_toxicity': 'BertForSequenceClassification', 'bert_classifier_sa_sub1': 'BertForSequenceClassification', 'bert_classifier_sa_sub2': 'BertForSequenceClassification', 'bert_classifier_sa_sub3': 'BertForSequenceClassification', 'bert_classifier_sa_sub4': 'BertForSequenceClassification', 
'bert_classifier_sa_sub5': 'BertForSequenceClassification', 'bert_classifier_savasy_turkish_text_classification': 'BertForSequenceClassification', 'bert_classifier_sb': 'BertForSequenceClassification', 'bert_classifier_sci_uncased_topics': 'BertForSequenceClassification', 'bert_classifier_scibert_scivocab_cased_pub_section': 'BertForSequenceClassification', 'bert_classifier_scibert_scivocab_uncased_pub_section': 'BertForSequenceClassification', 'bert_classifier_scientific_challenges_and_directions': 'BertForSequenceClassification', 'bert_classifier_sead_l_6_h_256_a_8_mrpc': 'BertForSequenceClassification', 'bert_classifier_sead_l_6_h_256_a_8_qnli': 'BertForSequenceClassification', 'bert_classifier_sead_l_6_h_256_a_8_qqp': 'BertForSequenceClassification', 'bert_classifier_sead_l_6_h_256_a_8_rte': 'BertForSequenceClassification', 'bert_classifier_sead_l_6_h_256_a_8_sst2': 'BertForSequenceClassification', 'bert_classifier_sead_l_6_h_256_a_8_wnli': 'BertForSequenceClassification', 'bert_classifier_sead_l_6_h_384_a_12_mrpc': 'BertForSequenceClassification', 'bert_classifier_sead_l_6_h_384_a_12_qnli': 'BertForSequenceClassification', 'bert_classifier_sead_l_6_h_384_a_12_qqp': 'BertForSequenceClassification', 'bert_classifier_sead_l_6_h_384_a_12_rte': 'BertForSequenceClassification', 'bert_classifier_sead_l_6_h_384_a_12_sst2': 'BertForSequenceClassification', 'bert_classifier_sead_l_6_h_384_a_12_wnli': 'BertForSequenceClassification', 'bert_classifier_sec_finetuned_finance_classification': 'BertForSequenceClassification', 'bert_classifier_semantic_relations': 'BertForSequenceClassification', 'bert_classifier_sent_chineses': 'BertForSequenceClassification', 'bert_classifier_sent_sci_irrelevance': 'BertForSequenceClassification', 'bert_classifier_sentence': 'BertForSequenceClassification', 'bert_classifier_sentiment_analysis': 'BertForSequenceClassification', 'bert_classifier_sentiment_tweets': 'BertForSequenceClassification', 'bert_classifier_sgugger_fine_tuned_cola': 
'BertForSequenceClassification', 'bert_classifier_sgugger_finetuned_mrpc': 'BertForSequenceClassification', 'bert_classifier_shahma_finetuned_mrpc': 'BertForSequenceClassification', 'bert_classifier_small_finetuned_glue_rte': 'BertForSequenceClassification', 'bert_classifier_snli_base_cased': 'BertForSequenceClassification', 'bert_classifier_snli_base_uncased': 'BertForSequenceClassification', 'bert_classifier_snli_large_cased': 'BertForSequenceClassification', 'bert_classifier_spanish_news_classification_headlines': 'BertForSequenceClassification', 'bert_classifier_spanish_news_classification_headlines_untrained': 'BertForSequenceClassification', 'bert_classifier_sponsorblock_v2': 'BertForSequenceClassification', 'bert_classifier_sundanese_base_emotion': 'BertForSequenceClassification', 'bert_classifier_swtx_erlangshen_roberta_110m_similarity': 'BertForSequenceClassification', 'bert_classifier_taipeiqa_v1': 'BertForSequenceClassification', 'bert_classifier_tanglish_offensive_language_identification': 'BertForSequenceClassification', 'bert_classifier_test_dynamic_pipeline': 'BertForSequenceClassification', 'bert_classifier_test_hub_pr_1': 'BertForSequenceClassification', 'bert_classifier_test_model': 'BertForSequenceClassification', 'bert_classifier_testing3_multilavel': 'BertForSequenceClassification', 'bert_classifier_testing4_multilabel': 'BertForSequenceClassification', 'bert_classifier_text_cls': 'BertForSequenceClassification', 'bert_classifier_tiny_aug_sst2_distilled': 'BertForSequenceClassification', 'bert_classifier_tiny_best': 'BertForSequenceClassification', 'bert_classifier_tiny_finetuned_glue_rte': 'BertForSequenceClassification', 'bert_classifier_tiny_master': 'BertForSequenceClassification', 'bert_classifier_tiny_mnli_distilled': 'BertForSequenceClassification', 'bert_classifier_tiny_qqp_distilled': 'BertForSequenceClassification', 'bert_classifier_tiny_russian_toxic': 'BertForSequenceClassification', 
'bert_classifier_tiny_sst2_1_mobile_2_3_gold_labels_distillation': 'BertForSequenceClassification', 'bert_classifier_tiny_sst2_1_mobile_2_distillation': 'BertForSequenceClassification', 'bert_classifier_tiny_sst2_1_mobile_2_only_distillation': 'BertForSequenceClassification', 'bert_classifier_tiny_sst2_1_mobile_and_multi_teacher_distillation': 'BertForSequenceClassification', 'bert_classifier_tiny_sst2_1_mobile_only_distillation': 'BertForSequenceClassification', 'bert_classifier_tiny_sst2_distilled_l4_h_512': 'BertForSequenceClassification', 'bert_classifier_tiny_sst2_distilled_l6_h128': 'BertForSequenceClassification', 'bert_classifier_tiny_sst2_distilled_model': 'BertForSequenceClassification', 'bert_classifier_tiny_sst2_mobile_distillation': 'BertForSequenceClassification', 'bert_classifier_titlewave_base_uncased': 'BertForSequenceClassification', 'bert_classifier_topic': 'BertForSequenceClassification', 'bert_classifier_topic_classification_04': 'BertForSequenceClassification', 'bert_classifier_topic_v5': 'BertForSequenceClassification', 'bert_classifier_toxic': 'BertForSequenceClassification', 'bert_classifier_toxicity': 'BertForSequenceClassification', 'bert_classifier_trash_mail_cls_2022': 'BertForSequenceClassification', 'bert_classifier_turkish_product_comment_sentiment_classification': 'BertForSequenceClassification', 'bert_classifier_turkish_sentiment_analysis': 'BertForSequenceClassification', 'bert_classifier_twitter_sentiment': 'BertForSequenceClassification', 'bert_classifier_uzbek_news_category': 'BertForSequenceClassification', 'bert_classifier_vaccinchatsentenceclassifierdutch_frombertje2_dadialog02': 'BertForSequenceClassification', 'bert_classifier_vaccinchatsentenceclassifierdutch_frombertje2_dadialogqonly': 'BertForSequenceClassification', 'bert_classifier_vaccinchatsentenceclassifierdutch_frombertje2_dadialogqonly09': 'BertForSequenceClassification', 'bert_classifier_vaccinchatsentenceclassifierdutch_frombertjedial': 
'BertForSequenceClassification', 'bert_classifier_vaccine_topic': 'BertForSequenceClassification', 'bert_classifier_vanhoan_fine_tuned_cola': 'BertForSequenceClassification', 'bert_classifier_vijaygoriya_test_trainer': 'BertForSequenceClassification', 'bert_classifier_wiselinjayajos_finetuned_mrpc': 'BertForSequenceClassification', 'bert_classifier_xtremedistil_emotion': 'BertForSequenceClassification', 'bert_classifier_xtremedistil_l12_h384_uncased_pub_section': 'BertForSequenceClassification', 'bert_classifier_xtremedistil_l6_h256_uncased_go_emotion': 'BertForSequenceClassification', 'bert_classifier_xtremedistil_l6_h384_emotion': 'BertForSequenceClassification', 'bert_classifier_xtremedistil_l6_h384_go_emotion': 'BertForSequenceClassification', 'bert_embeddings_ARBERT': 'BertEmbeddings', 'bert_embeddings_AraBertMo_base_V1': 'BertEmbeddings', 'bert_embeddings_Ara_DialectBERT': 'BertEmbeddings', 'bert_embeddings_COVID_SciBERT': 'BertEmbeddings', 'bert_embeddings_DarijaBERT': 'BertEmbeddings', 'bert_embeddings_DiLBERT': 'BertEmbeddings', 'bert_embeddings_FinancialBERT': 'BertEmbeddings', 'bert_embeddings_German_MedBERT': 'BertEmbeddings', 'bert_embeddings_KR_FinBert': 'BertEmbeddings', 'bert_embeddings_MARBERT': 'BertEmbeddings', 'bert_embeddings_MARBERTv2': 'BertEmbeddings', 'bert_embeddings_SecBERT': 'BertEmbeddings', 'bert_embeddings_agriculture_bert_uncased': 'BertEmbeddings', 'bert_embeddings_alberti_bert_base_multilingual_cased': 'BertEmbeddings', 'bert_embeddings_arabert_c19': 'BertEmbeddings', 'bert_embeddings_arbert': 'BertEmbeddings', 'bert_embeddings_bangla_base': 'BertEmbeddings', 'bert_embeddings_bangla_bert': 'BertEmbeddings', 'bert_embeddings_bangla_bert_base': 'BertEmbeddings', 'bert_embeddings_base_ar_cased': 'BertEmbeddings', 'bert_embeddings_base_arabert': 'BertEmbeddings', 'bert_embeddings_base_arabertv01': 'BertEmbeddings', 'bert_embeddings_base_arabertv02': 'BertEmbeddings', 'bert_embeddings_base_arabertv2': 'BertEmbeddings', 
'bert_embeddings_base_arabic': 'BertEmbeddings', 'bert_embeddings_base_arabic_camel_mix': 'BertEmbeddings', 'bert_embeddings_base_arabic_camel_msa': 'BertEmbeddings', 'bert_embeddings_base_arabic_camel_msa_eighth': 'BertEmbeddings', 'bert_embeddings_base_arabic_camel_msa_half': 'BertEmbeddings', 'bert_embeddings_base_arabic_camel_msa_quarter': 'BertEmbeddings', 'bert_embeddings_base_arabic_camel_msa_sixteenth': 'BertEmbeddings', 'bert_embeddings_base_bg_cased': 'BertEmbeddings', 'bert_embeddings_base_cased': 'BertEmbeddings', 'bert_embeddings_base_chinese': 'BertEmbeddings', 'bert_embeddings_base_da_cased': 'BertEmbeddings', 'bert_embeddings_base_de_cased': 'BertEmbeddings', 'bert_embeddings_base_dutch_cased': 'BertEmbeddings', 'bert_embeddings_base_dutch_cased_frisian': 'BertEmbeddings', 'bert_embeddings_base_el_cased': 'BertEmbeddings', 'bert_embeddings_base_en_cased': 'BertEmbeddings', 'bert_embeddings_base_es_cased': 'BertEmbeddings', 'bert_embeddings_base_finnish_cased_v1': 'BertEmbeddings', 'bert_embeddings_base_finnish_uncased_v1': 'BertEmbeddings', 'bert_embeddings_base_fr_cased': 'BertEmbeddings', 'bert_embeddings_base_german_cased': 'BertEmbeddings', 'bert_embeddings_base_german_uncased': 'BertEmbeddings', 'bert_embeddings_base_hi_cased': 'BertEmbeddings', 'bert_embeddings_base_indonesian_1.5g': 'BertEmbeddings', 'bert_embeddings_base_indonesian_522m': 'BertEmbeddings', 'bert_embeddings_base_it_cased': 'BertEmbeddings', 'bert_embeddings_base_italian_cased': 'BertEmbeddings', 'bert_embeddings_base_italian_uncased': 'BertEmbeddings', 'bert_embeddings_base_italian_xxl_cased': 'BertEmbeddings', 'bert_embeddings_base_italian_xxl_uncased': 'BertEmbeddings', 'bert_embeddings_base_ja_cased': 'BertEmbeddings', 'bert_embeddings_base_japanese': 'BertEmbeddings', 'bert_embeddings_base_japanese_char': 'BertEmbeddings', 'bert_embeddings_base_japanese_char_v2': 'BertEmbeddings', 'bert_embeddings_base_japanese_char_whole_word_masking': 'BertEmbeddings', 
'bert_embeddings_base_japanese_v2': 'BertEmbeddings', 'bert_embeddings_base_japanese_whole_word_masking': 'BertEmbeddings', 'bert_embeddings_base_lt_cased': 'BertEmbeddings', 'bert_embeddings_base_multilingual_cased_finetuned_hausa': 'BertEmbeddings', 'bert_embeddings_base_multilingual_cased_finetuned_swahili': 'BertEmbeddings', 'bert_embeddings_base_multilingual_cased_finetuned_yoruba': 'BertEmbeddings', 'bert_embeddings_base_nl_cased': 'BertEmbeddings', 'bert_embeddings_base_no_cased': 'BertEmbeddings', 'bert_embeddings_base_pl_cased': 'BertEmbeddings', 'bert_embeddings_base_portuguese_cased': 'BertEmbeddings', 'bert_embeddings_base_pt_cased': 'BertEmbeddings', 'bert_embeddings_base_ro_cased': 'BertEmbeddings', 'bert_embeddings_base_ru_cased': 'BertEmbeddings', 'bert_embeddings_base_spanish_wwm_cased': 'BertEmbeddings', 'bert_embeddings_base_spanish_wwm_uncased': 'BertEmbeddings', 'bert_embeddings_base_sw_cased': 'BertEmbeddings', 'bert_embeddings_base_th_cased': 'BertEmbeddings', 'bert_embeddings_base_tr_cased': 'BertEmbeddings', 'bert_embeddings_base_uk_cased': 'BertEmbeddings', 'bert_embeddings_base_uncased': 'BertEmbeddings', 'bert_embeddings_base_ur_cased': 'BertEmbeddings', 'bert_embeddings_base_vi_cased': 'BertEmbeddings', 'bert_embeddings_base_zh_cased': 'BertEmbeddings', 'bert_embeddings_bert_base': 'BertEmbeddings', 'bert_embeddings_bert_base_5lang_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_arabert': 'BertEmbeddings', 'bert_embeddings_bert_base_arabertv01': 'BertEmbeddings', 'bert_embeddings_bert_base_arabertv02': 'BertEmbeddings', 'bert_embeddings_bert_base_arabertv02_twitter': 'BertEmbeddings', 'bert_embeddings_bert_base_arabertv2': 'BertEmbeddings', 'bert_embeddings_bert_base_arabic': 'BertEmbeddings', 'bert_embeddings_bert_base_arabic_camelbert_mix': 'BertEmbeddings', 'bert_embeddings_bert_base_arabic_camelbert_msa': 'BertEmbeddings', 'bert_embeddings_bert_base_arabic_camelbert_msa_eighth': 'BertEmbeddings', 
'bert_embeddings_bert_base_arabic_camelbert_msa_half': 'BertEmbeddings', 'bert_embeddings_bert_base_arabic_camelbert_msa_quarter': 'BertEmbeddings', 'bert_embeddings_bert_base_arabic_camelbert_msa_sixteenth': 'BertEmbeddings', 'bert_embeddings_bert_base_cased_pt_lenerbr': 'BertEmbeddings', 'bert_embeddings_bert_base_chinese_jinyong': 'BertEmbeddings', 'bert_embeddings_bert_base_de_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_en_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_es_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_fr_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_german_cased_oldvocab': 'BertEmbeddings', 'bert_embeddings_bert_base_german_dbmdz_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_german_dbmdz_uncased': 'BertEmbeddings', 'bert_embeddings_bert_base_german_uncased': 'BertEmbeddings', 'bert_embeddings_bert_base_gl_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_hi_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_historical_german_rw_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_it_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_italian_xxl_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_italian_xxl_uncased': 'BertEmbeddings', 'bert_embeddings_bert_base_ja_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_japanese_basic_char_v2': 'BertEmbeddings', 'bert_embeddings_bert_base_japanese_char': 'BertEmbeddings', 'bert_embeddings_bert_base_japanese_char_extended': 'BertEmbeddings', 'bert_embeddings_bert_base_japanese_char_v2': 'BertEmbeddings', 'bert_embeddings_bert_base_japanese_char_whole_word_masking': 'BertEmbeddings', 'bert_embeddings_bert_base_japanese_v2': 'BertEmbeddings', 'bert_embeddings_bert_base_japanese_whole_word_masking': 'BertEmbeddings', 'bert_embeddings_bert_base_portuguese_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_portuguese_cased_finetuned_peticoes': 'BertEmbeddings', 'bert_embeddings_bert_base_portuguese_cased_finetuned_tcu_acordaos': 'BertEmbeddings', 
'bert_embeddings_bert_base_pt_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_qarib': 'BertEmbeddings', 'bert_embeddings_bert_base_qarib60_1790k': 'BertEmbeddings', 'bert_embeddings_bert_base_qarib60_860k': 'BertEmbeddings', 'bert_embeddings_bert_base_ru_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_tr_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_uncased_dstc9': 'BertEmbeddings', 'bert_embeddings_bert_base_uncased_mnli_sparse_70_unstructured_no_classifier': 'BertEmbeddings', 'bert_embeddings_bert_base_uncased_sparse_70_unstructured': 'BertEmbeddings', 'bert_embeddings_bert_base_ur_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_v1_sports': 'BertEmbeddings', 'bert_embeddings_bert_base_vi_cased': 'BertEmbeddings', 'bert_embeddings_bert_base_zh_cased': 'BertEmbeddings', 'bert_embeddings_bert_for_patents': 'BertEmbeddings', 'bert_embeddings_bert_kor_base': 'BertEmbeddings', 'bert_embeddings_bert_large_arabertv02': 'BertEmbeddings', 'bert_embeddings_bert_large_arabertv02_twitter': 'BertEmbeddings', 'bert_embeddings_bert_large_arabertv2': 'BertEmbeddings', 'bert_embeddings_bert_large_arabic': 'BertEmbeddings', 'bert_embeddings_bert_large_cased_pt_lenerbr': 'BertEmbeddings', 'bert_embeddings_bert_large_cased_whole_word_masking': 'BertEmbeddings', 'bert_embeddings_bert_large_chinese': 'BertEmbeddings', 'bert_embeddings_bert_large_japanese': 'BertEmbeddings', 'bert_embeddings_bert_large_japanese_char': 'BertEmbeddings', 'bert_embeddings_bert_large_japanese_char_extended': 'BertEmbeddings', 'bert_embeddings_bert_large_portuguese_cased': 'BertEmbeddings', 'bert_embeddings_bert_large_uncased_whole_word_masking': 'BertEmbeddings', 'bert_embeddings_bert_medium_arabic': 'BertEmbeddings', 'bert_embeddings_bert_mini_arabic': 'BertEmbeddings', 'bert_embeddings_bert_political_election2020_twitter_mlm': 'BertEmbeddings', 'bert_embeddings_bert_small_finetuned_legal_contracts10train10val': 'BertEmbeddings', 
'bert_embeddings_bert_small_finetuned_legal_contracts_larger20_5_1': 'BertEmbeddings', 'bert_embeddings_bert_small_finetuned_legal_contracts_larger4010': 'BertEmbeddings', 'bert_embeddings_bert_small_finetuned_legal_definitions': 'BertEmbeddings', 'bert_embeddings_bert_small_finetuned_legal_definitions_longer': 'BertEmbeddings', 'bert_embeddings_bert_small_gl_cased': 'BertEmbeddings', 'bert_embeddings_bert_small_japanese': 'BertEmbeddings', 'bert_embeddings_bert_small_japanese_fin': 'BertEmbeddings', 'bert_embeddings_bert_tiny_finetuned_legal_definitions': 'BertEmbeddings', 'bert_embeddings_beto_gn_base_cased': 'BertEmbeddings', 'bert_embeddings_biobertpt_all': 'BertEmbeddings', 'bert_embeddings_biobertpt_bio': 'BertEmbeddings', 'bert_embeddings_biobertpt_clin': 'BertEmbeddings', 'bert_embeddings_bioclinicalbert_finetuned_covid_papers': 'BertEmbeddings', 'bert_embeddings_bioformer_cased_v1.0': 'BertEmbeddings', 'bert_embeddings_chefberto_italian_cased': 'BertEmbeddings', 'bert_embeddings_chemical_bert_uncased': 'BertEmbeddings', 'bert_embeddings_childes_bert': 'BertEmbeddings', 'bert_embeddings_chinese_bert_wwm_ext': 'BertEmbeddings', 'bert_embeddings_chinese_lert_base': 'BertEmbeddings', 'bert_embeddings_chinese_lert_large': 'BertEmbeddings', 'bert_embeddings_chinese_lert_small': 'BertEmbeddings', 'bert_embeddings_chinese_mac_base': 'BertEmbeddings', 'bert_embeddings_chinese_mac_large': 'BertEmbeddings', 'bert_embeddings_chinese_macbert_base': 'BertEmbeddings', 'bert_embeddings_chinese_macbert_large': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_10_h_128': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_10_h_256': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_10_h_512': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_10_h_768': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_12_h_128': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_12_h_256': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_12_h_512': 'BertEmbeddings', 
'bert_embeddings_chinese_roberta_l_12_h_768': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_2_h_128': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_2_h_256': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_2_h_512': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_2_h_768': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_4_h_128': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_4_h_256': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_4_h_512': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_4_h_768': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_6_h_128': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_6_h_256': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_6_h_512': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_6_h_768': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_8_h_128': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_8_h_256': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_8_h_512': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_l_8_h_768': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_wwm_ext': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_wwm_ext_large': 'BertEmbeddings', 'bert_embeddings_chinese_roberta_wwm_large_ext_fix_mlm': 'BertEmbeddings', 'bert_embeddings_chinese_wwm': 'BertEmbeddings', 'bert_embeddings_cicero_similis': 'BertEmbeddings', 'bert_embeddings_clinical_pubmed_bert_base_128': 'BertEmbeddings', 'bert_embeddings_clinical_pubmed_bert_base_512': 'BertEmbeddings', 'bert_embeddings_covid_scibert': 'BertEmbeddings', 'bert_embeddings_crosloengual_bert': 'BertEmbeddings', 'bert_embeddings_custom_legalbert': 'BertEmbeddings', 'bert_embeddings_danbert_small_cased': 'BertEmbeddings', 'bert_embeddings_dbert': 'BertEmbeddings', 'bert_embeddings_dbmdz_base_german_cased': 'BertEmbeddings', 'bert_embeddings_deberta_base_uncased': 'BertEmbeddings', 'bert_embeddings_dilbert': 'BertEmbeddings', 'bert_embeddings_dpr_spanish_passage_encoder_allqa_base': 
'BertEmbeddings', 'bert_embeddings_dpr_spanish_passage_encoder_squades_base': 'BertEmbeddings', 'bert_embeddings_dpr_spanish_question_encoder_allqa_base': 'BertEmbeddings', 'bert_embeddings_dpr_spanish_question_encoder_squades_base': 'BertEmbeddings', 'bert_embeddings_dziribert': 'BertEmbeddings', 'bert_embeddings_env_bert_chinese': 'BertEmbeddings', 'bert_embeddings_fa_base_uncased': 'BertEmbeddings', 'bert_embeddings_fa_zwnj_base': 'BertEmbeddings', 'bert_embeddings_false_positives_scancode_base_uncased_l8_1': 'BertEmbeddings', 'bert_embeddings_false_positives_scancode_bert_base_uncased_L8_1': 'BertEmbeddings', 'bert_embeddings_fernet_c5': 'BertEmbeddings', 'bert_embeddings_fernet_cc': 'BertEmbeddings', 'bert_embeddings_fin_pretrain_yiyanghkust': 'BertEmbeddings', 'bert_embeddings_finbert_pretrain_yiyanghkust': 'BertEmbeddings', 'bert_embeddings_finest_bert': 'BertEmbeddings', 'bert_embeddings_for_patents': 'BertEmbeddings', 'bert_embeddings_g_base': 'BertEmbeddings', 'bert_embeddings_g_large': 'BertEmbeddings', 'bert_embeddings_gbert_base': 'BertEmbeddings', 'bert_embeddings_gbert_large': 'BertEmbeddings', 'bert_embeddings_german_financial_statements_bert': 'BertEmbeddings', 'bert_embeddings_german_medbert': 'BertEmbeddings', 'bert_embeddings_greeksocial_base_greek_uncased_v1': 'BertEmbeddings', 'bert_embeddings_hateBERT': 'BertEmbeddings', 'bert_embeddings_hfl_chinese_roberta_wwm_ext': 'BertEmbeddings', 'bert_embeddings_hfl_chinese_wwm_ext': 'BertEmbeddings', 'bert_embeddings_hseBert_it_cased': 'BertEmbeddings', 'bert_embeddings_incaselawbert': 'BertEmbeddings', 'bert_embeddings_indic_transformers': 'BertEmbeddings', 'bert_embeddings_indic_transformers_bn_bert': 'BertEmbeddings', 'bert_embeddings_indic_transformers_hi_bert': 'BertEmbeddings', 'bert_embeddings_indic_transformers_te_bert': 'BertEmbeddings', 'bert_embeddings_inlegalbert': 'BertEmbeddings', 'bert_embeddings_javanese_bert_small': 'BertEmbeddings', 'bert_embeddings_javanese_bert_small_imdb': 
'BertEmbeddings', 'bert_embeddings_javanese_small': 'BertEmbeddings', 'bert_embeddings_javanese_small_imdb': 'BertEmbeddings', 'bert_embeddings_jdt_fin_roberta_wwm': 'BertEmbeddings', 'bert_embeddings_jdt_fin_roberta_wwm_large': 'BertEmbeddings', 'bert_embeddings_jobbert_base_cased': 'BertEmbeddings', 'bert_embeddings_kb_base_swedish_cased': 'BertEmbeddings', 'bert_embeddings_kb_distilled_cased': 'BertEmbeddings', 'bert_embeddings_kblab_base_swedish_cased': 'BertEmbeddings', 'bert_embeddings_kor_base': 'BertEmbeddings', 'bert_embeddings_large_arabertv02': 'BertEmbeddings', 'bert_embeddings_large_arabertv2': 'BertEmbeddings', 'bert_embeddings_large_arabic': 'BertEmbeddings', 'bert_embeddings_large_cased': 'BertEmbeddings', 'bert_embeddings_large_cased_whole_word_masking': 'BertEmbeddings', 'bert_embeddings_large_japanese': 'BertEmbeddings', 'bert_embeddings_large_japanese_char': 'BertEmbeddings', 'bert_embeddings_large_uncased': 'BertEmbeddings', 'bert_embeddings_large_uncased_whole_word_masking': 'BertEmbeddings', 'bert_embeddings_legal_bert_base_uncased': 'BertEmbeddings', 'bert_embeddings_legal_bert_base_uncased_finetuned_ledgarscotus7': 'BertEmbeddings', 'bert_embeddings_legal_bert_base_uncased_finetuned_rramicus': 'BertEmbeddings', 'bert_embeddings_legal_bert_small_uncased': 'BertEmbeddings', 'bert_embeddings_legal_hebert': 'BertEmbeddings', 'bert_embeddings_legal_hebert_ft': 'BertEmbeddings', 'bert_embeddings_legalbert': 'BertEmbeddings', 'bert_embeddings_legalbert_large_1.7m_1': 'BertEmbeddings', 'bert_embeddings_legalbert_large_1.7m_2': 'BertEmbeddings', 'bert_embeddings_lic_class_scancode_base_cased_l32_1': 'BertEmbeddings', 'bert_embeddings_lic_class_scancode_bert_base_cased_L32_1': 'BertEmbeddings', 'bert_embeddings_macbert4csc_base_chinese': 'BertEmbeddings', 'bert_embeddings_marathi_bert': 'BertEmbeddings', 'bert_embeddings_marbert': 'BertEmbeddings', 'bert_embeddings_marbertv2': 'BertEmbeddings', 'bert_embeddings_mbert_ar_c19': 'BertEmbeddings', 
'bert_embeddings_medium_arabic': 'BertEmbeddings', 'bert_embeddings_medium_luxembourgish': 'BertEmbeddings', 'bert_embeddings_melayubert': 'BertEmbeddings', 'bert_embeddings_mengzi_bert_base': 'BertEmbeddings', 'bert_embeddings_mengzi_bert_base_fin': 'BertEmbeddings', 'bert_embeddings_mengzi_oscar_base': 'BertEmbeddings', 'bert_embeddings_mengzi_oscar_base_caption': 'BertEmbeddings', 'bert_embeddings_mengzi_oscar_base_retrieval': 'BertEmbeddings', 'bert_embeddings_mini_arabic': 'BertEmbeddings', 'bert_embeddings_minirbt_h256': 'BertEmbeddings', 'bert_embeddings_minirbt_h288': 'BertEmbeddings', 'bert_embeddings_model_attribution_challenge_base_cased': 'BertEmbeddings', 'bert_embeddings_model_attribution_challenge_base_chinese': 'BertEmbeddings', 'bert_embeddings_model_attribution_challenge_base_uncased': 'BertEmbeddings', 'bert_embeddings_multi_dialect_bert_base_arabic': 'BertEmbeddings', 'bert_embeddings_muril_adapted_local': 'BertEmbeddings', 'bert_embeddings_netbert': 'BertEmbeddings', 'bert_embeddings_norbert': 'BertEmbeddings', 'bert_embeddings_norbert2': 'BertEmbeddings', 'bert_embeddings_onlplab_aleph_base': 'BertEmbeddings', 'bert_embeddings_phs_bert': 'BertEmbeddings', 'bert_embeddings_psych_search': 'BertEmbeddings', 'bert_embeddings_ptrsxu_base_chinese': 'BertEmbeddings', 'bert_embeddings_ptrsxu_chinese_wwm_ext': 'BertEmbeddings', 'bert_embeddings_rbt3': 'BertEmbeddings', 'bert_embeddings_rbt4': 'BertEmbeddings', 'bert_embeddings_rbt4_h312': 'BertEmbeddings', 'bert_embeddings_rbt6': 'BertEmbeddings', 'bert_embeddings_rbtl3': 'BertEmbeddings', 'bert_embeddings_scibert_scivocab_finetuned_cord19': 'BertEmbeddings', 'bert_embeddings_sec_bert_base': 'BertEmbeddings', 'bert_embeddings_sec_bert_num': 'BertEmbeddings', 'bert_embeddings_sec_bert_sh': 'BertEmbeddings', 'bert_embeddings_sikubert': 'BertEmbeddings', 'bert_embeddings_sikuroberta': 'BertEmbeddings', 'bert_embeddings_telugu_bertu': 'BertEmbeddings', 'bert_embeddings_uer_large': 'BertEmbeddings', 
'bert_embeddings_v_2021_base': 'BertEmbeddings', 'bert_embeddings_v_2021_large': 'BertEmbeddings', 'bert_embeddings_wineberto_italian_cased': 'BertEmbeddings', 'bert_embeddings_wo_chinese_plus': 'BertEmbeddings', 'bert_embeddings_wobert_chinese_base': 'BertEmbeddings', 'bert_embeddings_wobert_chinese_plus': 'BertEmbeddings', 'bert_embeddings_wobert_chinese_plus_base': 'BertEmbeddings', 'bert_finnish_cased': 'BertEmbeddings', 'bert_finnish_uncased': 'BertEmbeddings', 'bert_hi_en_ner': 'BertForTokenClassification', 'bert_large_cased': 'BertEmbeddings', 'bert_large_sequence_classifier_imdb': 'BertForSequenceClassification', 'bert_large_token_classifier_conll03': 'BertForTokenClassification', 'bert_large_token_classifier_ontonote': 'BertForTokenClassification', 'bert_large_uncased': 'BertEmbeddings', 'bert_multi_cased': 'BertEmbeddings', 'bert_multilingual_sequence_classifier_allocine': 'BertForSequenceClassification', 'bert_muril': 'BertEmbeddings', 'bert_ner_ANER': 'BertForTokenClassification', 'bert_ner_aalogan_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_adamlin_recipe_tag_model': 'BertForTokenClassification', 'bert_ner_aditya22_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_ag_based_ner': 'BertForTokenClassification', 'bert_ner_agro_ner': 'BertForTokenClassification', 'bert_ner_airi_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_ajgp_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_alekseykorshuk_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_alexander_learn_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_alexander_learn_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_alexanderpeter_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_alwaysgetbetter_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_amasi_wikineural_multilingual_ner': 'BertForTokenClassification', 'bert_ner_amir36_bert_finetuned_ner': 'BertForTokenClassification', 
'bert_ner_amrita03_wikineural_multilingual_ner': 'BertForTokenClassification', 'bert_ner_anarise1_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_aneela_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_anery_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_anglicisms_spanish_mbert': 'BertForTokenClassification', 'bert_ner_animalthemuppet_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_arabert_ner': 'BertForTokenClassification', 'bert_ner_arabic_ner': 'BertForTokenClassification', 'bert_ner_archeobertje_ner': 'BertForTokenClassification', 'bert_ner_artemis13fowl_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_artemis13fowl_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_ashwathgojo234_wikineural_multilingual_ner': 'BertForTokenClassification', 'bert_ner_autonlp_prodigy_10_3362554': 'BertForTokenClassification', 'bert_ner_autonlp_tele_new_5k_557515810': 'BertForTokenClassification', 'bert_ner_autonlp_tele_red_data_model_585716433': 'BertForTokenClassification', 'bert_ner_autotrain_acronym_identification_7324788': 'BertForTokenClassification', 'bert_ner_autotrain_defector_ner_multi_847927015': 'BertForTokenClassification', 'bert_ner_autotrain_lucifer_job_title_comb_858027260': 'BertForTokenClassification', 'bert_ner_autotrain_lucifer_name_894029080': 'BertForTokenClassification', 'bert_ner_autotrain_oms_ner_bi_1044135953': 'BertForTokenClassification', 'bert_ner_awilli_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_balamurugan1603_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_base_bert_tuned_on_tac2017_as_ner': 'BertForTokenClassification', 'bert_ner_baseline_bertv3': 'BertForTokenClassification', 'bert_ner_batya66_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_bc2gm_gene_imbalancedpubmedbert': 'BertForTokenClassification', 'bert_ner_bc2gm_gene_imbalancedscibert_scivocab_cased': 'BertForTokenClassification', 
'bert_ner_bc2gm_gene_modified_pubmedbert': 'BertForTokenClassification', 'bert_ner_bc2gm_gene_modified_scibert_scivocab_cased': 'BertForTokenClassification', 'bert_ner_bc4_chem_pubmedbert': 'BertForTokenClassification', 'bert_ner_bc4_modified_biobert_v1.1': 'BertForTokenClassification', 'bert_ner_bc4_modified_biomednlp_pubmedbert_base_uncased_abstract': 'BertForTokenClassification', 'bert_ner_bc4_modified_bluebert_pubmed_uncased_l_12_h_768_a_12': 'BertForTokenClassification', 'bert_ner_bc4_modified_pubmedbert_small': 'BertForTokenClassification', 'bert_ner_bc4_modified_scibert_scivocab_uncased': 'BertForTokenClassification', 'bert_ner_bc4_original_biobert_v1.1': 'BertForTokenClassification', 'bert_ner_bc4_original_biomednlp_pubmedbert_base_uncased_abstract': 'BertForTokenClassification', 'bert_ner_bc4_original_bluebert_pubmed_uncased_l_12_h_768_a_12': 'BertForTokenClassification', 'bert_ner_bc4_original_pubmedbert': 'BertForTokenClassification', 'bert_ner_bc4_original_pubmedbert_small': 'BertForTokenClassification', 'bert_ner_bc4_original_scibert_scivocab_uncased': 'BertForTokenClassification', 'bert_ner_bc4chemd_chem_modified_biobert_384': 'BertForTokenClassification', 'bert_ner_bc4chemd_chem_modified_biobert_512': 'BertForTokenClassification', 'bert_ner_bc4chemd_chem_modified_bluebert_384': 'BertForTokenClassification', 'bert_ner_bc4chemd_chem_modified_bluebert_512': 'BertForTokenClassification', 'bert_ner_bc4chemd_chem_modified_pubmedbert_384': 'BertForTokenClassification', 'bert_ner_bc4chemd_chem_modified_scibert_384': 'BertForTokenClassification', 'bert_ner_bc4chemd_chem_modified_scibert_512': 'BertForTokenClassification', 'bert_ner_bc4chemd_chem_original_biobert_512': 'BertForTokenClassification', 'bert_ner_bc4chemd_chem_original_bluebert_384': 'BertForTokenClassification', 'bert_ner_bc4chemd_chem_original_bluebert_512': 'BertForTokenClassification', 'bert_ner_bc4chemd_chem_original_pubmedbert_384': 'BertForTokenClassification', 
'bert_ner_bc4chemd_chem_original_pubmedbert_512': 'BertForTokenClassification', 'bert_ner_bc4chemd_chem_original_scibert_384': 'BertForTokenClassification', 'bert_ner_bc4chemd_chem_original_scibert_512': 'BertForTokenClassification', 'bert_ner_bc4chemd_imbalanced_biobert_base_casesd_v1.1': 'BertForTokenClassification', 'bert_ner_bc4chemd_imbalancedpubmedbert': 'BertForTokenClassification', 'bert_ner_bc4chemd_imbalancedscibert_scivocab_cased': 'BertForTokenClassification', 'bert_ner_bc4chemd_modified_pubmed_clinical': 'BertForTokenClassification', 'bert_ner_bc4chemd_modified_pubmedbert': 'BertForTokenClassification', 'bert_ner_bc4chemd_modified_scibert_scivocab_cased': 'BertForTokenClassification', 'bert_ner_bc4chemd_original_biobert_384': 'BertForTokenClassification', 'bert_ner_bc5cd_chem_modified_pubmedbert_512': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem2_imbalanced_biomednlp_pubmedbert_base_uncased_abstract': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem2_modified_biomednlp_pubmedbert_base_uncased_abstract': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_modified_biobert_384': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_modified_biobert_512': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_modified_biobert_large_cased': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_modified_biobert_v1.1_latest': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_modified_bluebert_512': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_modified_bluebert_pubmed_uncased_l_12_h_768_a_12_latest': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_modified_pubmed_abstract_3': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_modified_pubmed_abstract_latest': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_modified_pubmed_full_3': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_modified_pubmedbert_384': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_modified_scibert_384': 'BertForTokenClassification', 
'bert_ner_bc5cdr_chem_modified_scibert_512': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_modified_scibert_scivocab_uncased_latest': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_original_biobert_384': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_original_biobert_512': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_original_bluebert_384': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_original_bluebert_512': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_original_pubmedbert_512': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_original_scibert_384': 'BertForTokenClassification', 'bert_ner_bc5cdr_chem_original_scibert_512': 'BertForTokenClassification', 'bert_ner_bc5cdr_chemical_disease_balanced_biobert_base_cased_v1.2': 'BertForTokenClassification', 'bert_ner_bc5cdr_chemical_disease_balanced_biomednlp_pubmedbert_base_uncased_abstract_fulltext': 'BertForTokenClassification', 'bert_ner_bc5cdr_chemical_disease_balanced_pubmedbert': 'BertForTokenClassification', 'bert_ner_bc5cdr_chemical_disease_balanced_sapbert_from_pubmedbert_fulltext': 'BertForTokenClassification', 'bert_ner_bc5cdr_chemical_disease_balanced_scibert_scivocab_cased': 'BertForTokenClassification', 'bert_ner_bc5cdr_chemical_imbalanced_biobert': 'BertForTokenClassification', 'bert_ner_bc5cdr_chemical_imbalanced_biobert_v1.1_latest': 'BertForTokenClassification', 'bert_ner_bc5cdr_chemical_imbalanced_bluebert_pubmed_uncased_l_12_h_768_a_12_latest': 'BertForTokenClassification', 'bert_ner_bc5cdr_chemical_imbalanced_pubmedbert': 'BertForTokenClassification', 'bert_ner_bc5cdr_chemical_imbalanced_pubmedbert_base_uncased_abstract_latest': 'BertForTokenClassification', 'bert_ner_bc5cdr_chemical_imbalanced_scibert_scivocab_cased': 'BertForTokenClassification', 'bert_ner_bc5cdr_chemical_imbalanced_scibert_scivocab_uncased_latest': 'BertForTokenClassification', 'bert_ner_bc5cdr_chemical_modified_pubmedbert': 'BertForTokenClassification', 
'bert_ner_bc5cdr_chemical_modified_scibert_scivocab_cased': 'BertForTokenClassification', 'bert_ner_bc5cdr_disease_balancedpubmedbert': 'BertForTokenClassification', 'bert_ner_bc5cdr_disease_imbalanced_biobert_v1.1': 'BertForTokenClassification', 'bert_ner_bc5cdr_disease_imbalanced_biomednlp_pubmedbert_base_uncased_abstract': 'BertForTokenClassification', 'bert_ner_bc5cdr_disease_imbalanced_bluebert_pubmed_uncased_l_12_h_768_a_12_latest': 'BertForTokenClassification', 'bert_ner_bc5cdr_disease_imbalanced_scibert_scivocab_uncased': 'BertForTokenClassification', 'bert_ner_bc5cdr_disease_modified_biobert_v1.1': 'BertForTokenClassification', 'bert_ner_bc5cdr_disease_modified_biomednlp_pubmedbert_base_uncased_abstract': 'BertForTokenClassification', 'bert_ner_bc5cdr_disease_modified_bluebert_pubmed_uncased_l_12_h_768_a_12_latest': 'BertForTokenClassification', 'bert_ner_bc5cdr_disease_modified_pubmedbert': 'BertForTokenClassification', 'bert_ner_bc5cdr_disease_modified_scibert_scivocab_cased': 'BertForTokenClassification', 'bert_ner_bc5cdr_disease_modified_scibert_scivocab_uncased': 'BertForTokenClassification', 'bert_ner_bc5cdr_imbalanced_biobert_base_cased_v1.2': 'BertForTokenClassification', 'bert_ner_bc5cdr_imbalanced_pubmedbert': 'BertForTokenClassification', 'bert_ner_bc5cdr_imbalanced_sapbert_from_pubmedbert_fulltext': 'BertForTokenClassification', 'bert_ner_bc5cdr_imbalanced_scibert_scivocab_cased': 'BertForTokenClassification', 'bert_ner_bchem4_modified_biobert_v1': 'BertForTokenClassification', 'bert_ner_bert_base_arabic_camelbert_ca_ner': 'BertForTokenClassification', 'bert_ner_bert_base_arabic_camelbert_da_ner': 'BertForTokenClassification', 'bert_ner_bert_base_arabic_camelbert_mix_ner': 'BertForTokenClassification', 'bert_ner_bert_base_arabic_camelbert_msa_ner': 'BertForTokenClassification', 'bert_ner_bert_base_cased_chunking': 'BertForTokenClassification', 'bert_ner_bert_base_cased_sem': 'BertForTokenClassification', 'bert_ner_bert_base_chinese_ner': 
'BertForTokenClassification', 'bert_ner_bert_base_dutch_cased_finetuned_conll2002_ner': 'BertForTokenClassification', 'bert_ner_bert_base_dutch_cased_finetuned_sonar_ner': 'BertForTokenClassification', 'bert_ner_bert_base_dutch_cased_finetuned_udlassy_ner': 'BertForTokenClassification', 'bert_ner_bert_base_ft_ner_xtreme_id_sultannn': 'BertForTokenClassification', 'bert_ner_bert_base_german_cased_20000_ner': 'BertForTokenClassification', 'bert_ner_bert_base_german_cased_20000_ner_uncased': 'BertForTokenClassification', 'bert_ner_bert_base_german_cased_fine_tuned_ner': 'BertForTokenClassification', 'bert_ner_bert_base_german_cased_own_data_ner': 'BertForTokenClassification', 'bert_ner_bert_base_hu_cased_ner': 'BertForTokenClassification', 'bert_ner_bert_base_hungarian_cased_ner': 'BertForTokenClassification', 'bert_ner_bert_base_indonesian_ner': 'BertForTokenClassification', 'bert_ner_bert_base_irish_cased_v1_finetuned_ner': 'BertForTokenClassification', 'bert_ner_bert_base_multilingual_cased_finetuned_conll2002_ner': 'BertForTokenClassification', 'bert_ner_bert_base_multilingual_cased_finetuned_sonar_ner': 'BertForTokenClassification', 'bert_ner_bert_base_multilingual_cased_finetuned_udlassy_ner': 'BertForTokenClassification', 'bert_ner_bert_base_multilingual_cased_ner_hrl': 'BertForTokenClassification', 'bert_ner_bert_base_multilingual_cased_sem_english': 'BertForTokenClassification', 'bert_ner_bert_base_ner': 'BertForTokenClassification', 'bert_ner_bert_base_ner_finetuned_ner_isu': 'BertForTokenClassification', 'bert_ner_bert_base_ner_uncased': 'BertForTokenClassification', 'bert_ner_bert_base_parsbert_armanner_uncased': 'BertForTokenClassification', 'bert_ner_bert_base_parsbert_ner_uncased': 'BertForTokenClassification', 'bert_ner_bert_base_parsbert_peymaner_uncased': 'BertForTokenClassification', 'bert_ner_bert_base_pt_archive': 'BertForTokenClassification', 'bert_ner_bert_base_spanish_wwm_uncased_finetuned_clinical': 'BertForTokenClassification', 
'bert_ner_bert_base_spanish_wwm_uncased_finetuned_ner_medical': 'BertForTokenClassification', 'bert_ner_bert_base_swedish_cased_neriob': 'BertForTokenClassification', 'bert_ner_bert_base_tcm_0.5': 'BertForTokenClassification', 'bert_ner_bert_base_tcm_0.6': 'BertForTokenClassification', 'bert_ner_bert_base_tcm_0.7': 'BertForTokenClassification', 'bert_ner_bert_base_tcm_0.8': 'BertForTokenClassification', 'bert_ner_bert_base_tcm_no_objeto_0.8': 'BertForTokenClassification', 'bert_ner_bert_base_tcm_teste': 'BertForTokenClassification', 'bert_ner_bert_base_turkish_cased_ner': 'BertForTokenClassification', 'bert_ner_bert_base_turkish_cased_ner_tf': 'BertForTokenClassification', 'bert_ner_bert_base_turkish_ner_cased': 'BertForTokenClassification', 'bert_ner_bert_base_turkish_ner_cased_pretrained': 'BertForTokenClassification', 'bert_ner_bert_base_tweetner_2020': 'BertForTokenClassification', 'bert_ner_bert_base_uncased_clinical_ner': 'BertForTokenClassification', 'bert_ner_bert_base_uncased_kin': 'BertForTokenClassification', 'bert_ner_bert_base_uncased_pcm': 'BertForTokenClassification', 'bert_ner_bert_base_uncased_swa': 'BertForTokenClassification', 'bert_ner_bert_based_ner': 'BertForTokenClassification', 'bert_ner_bert_de_ner': 'BertForTokenClassification', 'bert_ner_bert_degree_major_ner_1000': 'BertForTokenClassification', 'bert_ner_bert_dnrti': 'BertForTokenClassification', 'bert_ner_bert_ehsan_ner_accelerate': 'BertForTokenClassification', 'bert_ner_bert_fa_base_uncased_ner_arman': 'BertForTokenClassification', 'bert_ner_bert_fa_base_uncased_ner_peyma': 'BertForTokenClassification', 'bert_ner_bert_fa_zwnj_base_ner': 'BertForTokenClassification', 'bert_ner_bert_fine_tuned_medical_insurance_ner': 'BertForTokenClassification', 'bert_ner_bert_finetuned': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ades_model_1': 'BertForTokenClassification', 'bert_ner_bert_finetuned_comp2': 'BertForTokenClassification', 'bert_ner_bert_finetuned_filler_2': 
'BertForTokenClassification', 'bert_ner_bert_finetuned_mutation_recognition_0': 'BertForTokenClassification', 'bert_ner_bert_finetuned_mutation_recognition_1': 'BertForTokenClassification', 'bert_ner_bert_finetuned_mutation_recognition_2': 'BertForTokenClassification', 'bert_ner_bert_finetuned_mutation_recognition_3': 'BertForTokenClassification', 'bert_ner_bert_finetuned_mutation_recognition_4': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner1': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner2': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner3': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner_chinese': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner_custom': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner_sourcerecognition': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner_swedish_small_set_health_and_standart': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner_swedish_test': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner_swedish_test_large_set': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner_swedish_test_numb_2': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner_ubb_conll': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner_ubb_conll_endava_only_misc': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner_ubb_endava': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner_ubb_endava_1': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner_ubb_endava_2': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner_ubb_endava_conll': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner_ubb_endava_only_misc': 'BertForTokenClassification', 'bert_ner_bert_finetuned_ner_uncased': 'BertForTokenClassification', 'bert_ner_bert_finetuned_protagonist': 'BertForTokenClassification', 'bert_ner_bert_keyword_extractor': 'BertForTokenClassification', 
'bert_ner_bert_large_cased_finetuned_ner': 'BertForTokenClassification', 'bert_ner_bert_large_ner': 'BertForTokenClassification', 'bert_ner_bert_large_tweetner_2020': 'BertForTokenClassification', 'bert_ner_bert_large_uncased_finetuned_ner': 'BertForTokenClassification', 'bert_ner_bert_large_uncased_med_ner': 'BertForTokenClassification', 'bert_ner_bert_mention_de_vera_pro': 'BertForTokenClassification', 'bert_ner_bert_mention_en_vera_pro': 'BertForTokenClassification', 'bert_ner_bert_mention_fr_vera_pro': 'BertForTokenClassification', 'bert_ner_bert_mt4ts': 'BertForTokenClassification', 'bert_ner_bert_ner_cased_conll2002_nld': 'BertForTokenClassification', 'bert_ner_bert_ner_cased_sonar1_nld': 'BertForTokenClassification', 'bert_ner_bert_ner_i2b2': 'BertForTokenClassification', 'bert_ner_bert_small_finetuned_typo_detection': 'BertForTokenClassification', 'bert_ner_bert_spanish_cased_finetuned_ner': 'BertForTokenClassification', 'bert_ner_bert_split_title_org': 'BertForTokenClassification', 'bert_ner_bert_srb_ner': 'BertForTokenClassification', 'bert_ner_bert_srb_ner_setimes': 'BertForTokenClassification', 'bert_ner_bert_task5finetuned': 'BertForTokenClassification', 'bert_ner_bert_tiny_chinese_ner': 'BertForTokenClassification', 'bert_ner_bert_title_org': 'BertForTokenClassification', 'bert_ner_bert_uncased_keyword_discriminator': 'BertForTokenClassification', 'bert_ner_bert_uncased_keyword_extractor': 'BertForTokenClassification', 'bert_ner_bertimbau_base_lener_br_luciano': 'BertForTokenClassification', 'bert_ner_bertimbau_large_lener_br_luciano': 'BertForTokenClassification', 'bert_ner_berturk_128k_keyword_discriminator': 'BertForTokenClassification', 'bert_ner_berturk_keyword_extractor': 'BertForTokenClassification', 'bert_ner_berturk_uncased_keyword_discriminator': 'BertForTokenClassification', 'bert_ner_berturk_uncased_keyword_extractor': 'BertForTokenClassification', 'bert_ner_bgc_accession': 'BertForTokenClassification', 'bert_ner_bigbio_mtl': 
'BertForTokenClassification', 'bert_ner_binay1999_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_concat_craft_es_stivenlancheros': 'BertForTokenClassification', 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_craft': 'BertForTokenClassification', 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_craft_augmented_en': 'BertForTokenClassification', 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_craft_augmented_es': 'BertForTokenClassification', 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_craft_augmentedtransfer_en': 'BertForTokenClassification', 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_craft_augmentedtransfer_es': 'BertForTokenClassification', 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_craft_english': 'BertForTokenClassification', 'bert_ner_biobert_base_cased_v1.2_finetuned_ner_craft_es_en_stivenlancheros': 'BertForTokenClassification', 'bert_ner_biobert_chemical_ner': 'BertForTokenClassification', 'bert_ner_biobert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_biobert_finetuned_ner_k': 'BertForTokenClassification', 'bert_ner_biobert_finetuned_ner_k2': 'BertForTokenClassification', 'bert_ner_biobert_genetic_ner': 'BertForTokenClassification', 'bert_ner_biobert_ncbi_disease_ner': 'BertForTokenClassification', 'bert_ner_biobert_ncbi_disease_ner_tuned_on_tac2017': 'BertForTokenClassification', 'bert_ner_biobert_ner_bc2gm_corpus': 'BertForTokenClassification', 'bert_ner_biobert_ner_ncbi_disease': 'BertForTokenClassification', 'bert_ner_biobert_on_adr_as_ner': 'BertForTokenClassification', 'bert_ner_biobert_v1.1_pubmed_finetuned_ner': 'BertForTokenClassification', 'bert_ner_biobert_v1.1_pubmed_finetuned_ner_finetuned_ner': 'BertForTokenClassification', 'bert_ner_bioformer_cased_v1.0_bc2gm': 'BertForTokenClassification', 'bert_ner_bioformer_cased_v1.0_ncbi_disease': 'BertForTokenClassification', 'bert_ner_biomuppet': 'BertForTokenClassification', 'bert_ner_bionlp13_modified_biobert_512': 
'BertForTokenClassification', 'bert_ner_bionlp13_modified_bluebert_512': 'BertForTokenClassification', 'bert_ner_bionlp13_modified_pubmedbert_384': 'BertForTokenClassification', 'bert_ner_bionlp13_modified_scibert_384': 'BertForTokenClassification', 'bert_ner_bionlp13_modified_scibert_512': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_chem_original_biobert_512': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_chem_original_bluebert_384': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_chem_original_bluebert_512': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_chem_original_scibert_384': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_chem_original_scibert_512': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_imbalanced_biobert': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_imbalanced_scibert_scivocab_cased': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_imbalancedpubmedbert': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_modified_biobert_384': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_modified_biobert_large': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_modified_bioformers': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_modified_bioformers_2': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_modified_pubmedabstract_latest': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_modified_pubmedbert': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_modified_pubmedbert_384': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_modified_pubmedbert_512': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_modified_pubmedbert_abstract_3': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_modified_pubmedbert_full_3': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_modified_scibert': 'BertForTokenClassification', 'bert_ner_bionlp13cg_chem_original_biobert_384': 'BertForTokenClassification', 
'bert_ner_bionlp13cg_chem_original_pubmedbert_512': 'BertForTokenClassification', 'bert_ner_bionlp13cg_modified_biobert_v1.1_latest': 'BertForTokenClassification', 'bert_ner_bionlp13cg_modified_bluebert_pubmed_uncased_l_12_h_768_a_12_latest': 'BertForTokenClassification', 'bert_ner_bionlp13cg_modified_pubmedabstract_latest': 'BertForTokenClassification', 'bert_ner_bionlp13cg_modified_scibert_uncased_latest': 'BertForTokenClassification', 'bert_ner_bionlp13cg_original_biobert_v1.1_latest': 'BertForTokenClassification', 'bert_ner_bionlp13cg_original_bluebert_pubmed_uncased_l_12_h_768_a_12_latest': 'BertForTokenClassification', 'bert_ner_bionlp13cg_original_pubmedbert_abstract_latest': 'BertForTokenClassification', 'bert_ner_bionlp13cg_original_scibert_latest': 'BertForTokenClassification', 'bert_ner_biored_cd_modified_pubmedbert_512': 'BertForTokenClassification', 'bert_ner_biored_cd_original_pubmedbert_512': 'BertForTokenClassification', 'bert_ner_biored_chem_modified_pubmedbert_128_10': 'BertForTokenClassification', 'bert_ner_biored_chem_modified_pubmedbert_128_20': 'BertForTokenClassification', 'bert_ner_biored_chem_modified_pubmedbert_128_32': 'BertForTokenClassification', 'bert_ner_biored_chem_modified_pubmedbert_128_5': 'BertForTokenClassification', 'bert_ner_biored_chem_modified_pubmedbert_256_13': 'BertForTokenClassification', 'bert_ner_biored_chem_modified_pubmedbert_256_40': 'BertForTokenClassification', 'bert_ner_biored_chem_modified_pubmedbert_256_5': 'BertForTokenClassification', 'bert_ner_biored_chem_modified_pubmedbert_320_8': 'BertForTokenClassification', 'bert_ner_biored_chem_modified_pubmedbert_320_8_10': 'BertForTokenClassification', 'bert_ner_biored_chem_modified_pubmedbert_384_5': 'BertForTokenClassification', 'bert_ner_biored_chem_modified_pubmedbert_384_8': 'BertForTokenClassification', 'bert_ner_biored_chem_modified_pubmedbert_384_8_10': 'BertForTokenClassification', 'bert_ner_biored_chem_modified_pubmedbert_512': 'BertForTokenClassification', 
'bert_ner_biored_chem_modified_pubmedbert_512_5': 'BertForTokenClassification', 'bert_ner_biored_chem_modified_pubmedbert_512_5_30': 'BertForTokenClassification', 'bert_ner_biored_chem_original_pubmedbert_128_10': 'BertForTokenClassification', 'bert_ner_biored_chem_original_pubmedbert_128_20': 'BertForTokenClassification', 'bert_ner_biored_chem_original_pubmedbert_128_32': 'BertForTokenClassification', 'bert_ner_biored_chem_original_pubmedbert_128_5': 'BertForTokenClassification', 'bert_ner_biored_chem_original_pubmedbert_256_13': 'BertForTokenClassification', 'bert_ner_biored_chem_original_pubmedbert_256_40': 'BertForTokenClassification', 'bert_ner_biored_chem_original_pubmedbert_256_5': 'BertForTokenClassification', 'bert_ner_biored_chem_original_pubmedbert_320_8': 'BertForTokenClassification', 'bert_ner_biored_chem_original_pubmedbert_384_5': 'BertForTokenClassification', 'bert_ner_biored_chem_original_pubmedbert_384_8': 'BertForTokenClassification', 'bert_ner_biored_chem_original_pubmedbert_512': 'BertForTokenClassification', 'bert_ner_biored_chem_original_pubmedbert_512_5': 'BertForTokenClassification', 'bert_ner_biored_chem_original_pubmedbert_512_5_30': 'BertForTokenClassification', 'bert_ner_biored_dis_modified_pubmedbert_128_32': 'BertForTokenClassification', 'bert_ner_biored_dis_modified_pubmedbert_256_13': 'BertForTokenClassification', 'bert_ner_biored_dis_modified_pubmedbert_256_5': 'BertForTokenClassification', 'bert_ner_biored_dis_modified_pubmedbert_320_8': 'BertForTokenClassification', 'bert_ner_biored_dis_modified_pubmedbert_320_8_10': 'BertForTokenClassification', 'bert_ner_biored_dis_modified_pubmedbert_384_5': 'BertForTokenClassification', 'bert_ner_biored_dis_modified_pubmedbert_384_8': 'BertForTokenClassification', 'bert_ner_biored_dis_modified_pubmedbert_384_8_10': 'BertForTokenClassification', 'bert_ner_biored_dis_modified_pubmedbert_512': 'BertForTokenClassification', 'bert_ner_biored_dis_modified_pubmedbert_512_5': 
'BertForTokenClassification', 'bert_ner_biored_dis_original_pubmedbert_128_32': 'BertForTokenClassification', 'bert_ner_biored_dis_original_pubmedbert_256_13': 'BertForTokenClassification', 'bert_ner_biored_dis_original_pubmedbert_256_5': 'BertForTokenClassification', 'bert_ner_biored_dis_original_pubmedbert_320_8': 'BertForTokenClassification', 'bert_ner_biored_dis_original_pubmedbert_384_5': 'BertForTokenClassification', 'bert_ner_biored_dis_original_pubmedbert_384_8': 'BertForTokenClassification', 'bert_ner_biored_dis_original_pubmedbert_512': 'BertForTokenClassification', 'bert_ner_biored_dis_original_pubmedbert_512_5': 'BertForTokenClassification', 'bert_ner_body_site': 'BertForTokenClassification', 'bert_ner_brjezierski_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_buehlpa_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_bunsen_base_best': 'BertForTokenClassification', 'bert_ner_buntan_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_butchland_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_canlinzhang_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_carblacac_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_cause_effect_detection': 'BertForTokenClassification', 'bert_ner_ce_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_chandrasutrisnotjhong_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_chanifrusydi_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_chanifrusydi_indobert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_ck_ner_disease': 'BertForTokenClassification', 'bert_ner_ck_ner_subgroup': 'BertForTokenClassification', 'bert_ner_codeswitch_hineng_lid_lince': 'BertForTokenClassification', 'bert_ner_codeswitch_hineng_ner_lince': 'BertForTokenClassification', 'bert_ner_codeswitch_nepeng_lid_lince': 'BertForTokenClassification', 'bert_ner_codeswitch_spaeng_lid_lince': 'BertForTokenClassification', 'bert_ner_codeswitch_spaeng_ner_lince': 
'BertForTokenClassification', 'bert_ner_conll12v2': 'BertForTokenClassification', 'bert_ner_core_term_ner_v1': 'BertForTokenClassification', 'bert_ner_craft_chem_imbalanced_scibert': 'BertForTokenClassification', 'bert_ner_craft_chem_imbalancedpubmedbert': 'BertForTokenClassification', 'bert_ner_craft_chem_modified_biobert_large_cased': 'BertForTokenClassification', 'bert_ner_craft_chem_modified_biobert_v1.1': 'BertForTokenClassification', 'bert_ner_craft_chem_modified_bioformers': 'BertForTokenClassification', 'bert_ner_craft_chem_modified_biomednlp_pubmedbert_base_uncased_abstract': 'BertForTokenClassification', 'bert_ner_craft_chem_modified_bluebert_pubmed_uncased_l_12_h_768_a_12': 'BertForTokenClassification', 'bert_ner_craft_chem_modified_pubmedbert': 'BertForTokenClassification', 'bert_ner_craft_chem_modified_scibert': 'BertForTokenClassification', 'bert_ner_craft_chem_modified_scibert_scivocab_uncased': 'BertForTokenClassification', 'bert_ner_craft_chem_original_biobert_v1.1': 'BertForTokenClassification', 'bert_ner_craft_chem_original_biomednlp_pubmedbert_base_uncased_abstract': 'BertForTokenClassification', 'bert_ner_craft_chem_original_bluebert_pubmed_uncased_l_12_h_768_a_12': 'BertForTokenClassification', 'bert_ner_craft_chem_original_scibert_scivocab_uncased': 'BertForTokenClassification', 'bert_ner_craft_modified_biobert_384': 'BertForTokenClassification', 'bert_ner_craft_modified_biobert_512': 'BertForTokenClassification', 'bert_ner_craft_modified_bluebert_384': 'BertForTokenClassification', 'bert_ner_craft_modified_bluebert_512': 'BertForTokenClassification', 'bert_ner_craft_modified_pubmedbert_384': 'BertForTokenClassification', 'bert_ner_craft_modified_pubmedbert_512': 'BertForTokenClassification', 'bert_ner_craft_modified_scibert_384': 'BertForTokenClassification', 'bert_ner_craft_modified_scibert_512': 'BertForTokenClassification', 'bert_ner_craft_original_biobert_384': 'BertForTokenClassification', 'bert_ner_craft_original_biobert_512': 
'BertForTokenClassification', 'bert_ner_craft_original_bluebert_384': 'BertForTokenClassification', 'bert_ner_craft_original_bluebert_512': 'BertForTokenClassification', 'bert_ner_craft_original_pubmedbert_384': 'BertForTokenClassification', 'bert_ner_craft_original_pubmedbert_512': 'BertForTokenClassification', 'bert_ner_craft_original_scibert_384': 'BertForTokenClassification', 'bert_ner_craft_original_scibert_512': 'BertForTokenClassification', 'bert_ner_cwan6830_bert_finetuned_ard': 'BertForTokenClassification', 'bert_ner_cwan6830_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_da_bert_ner': 'BertForTokenClassification', 'bert_ner_dani_91_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_danish_bert_botxo_ner_dane': 'BertForTokenClassification', 'bert_ner_datauma_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_davemse_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_dbert_ner': 'BertForTokenClassification', 'bert_ner_dbmdz_bert_large_cased_finetuned_conll03_english': 'BertForTokenClassification', 'bert_ner_deformer': 'BertForTokenClassification', 'bert_ner_deid_bert_i2b2': 'BertForTokenClassification', 'bert_ner_deval_bert_base_ner_finetuned_ner': 'BertForTokenClassification', 'bert_ner_dheerajdhanvee_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_distilbert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_distilbert_jur': 'BertForTokenClassification', 'bert_ner_dizex_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_docusco_bert': 'BertForTokenClassification', 'bert_ner_dpuccine_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_dsghrg_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_dshvadskiy_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_dshvadskiy_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_eduardopds_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_ehelpbertpt': 'BertForTokenClassification', 
'bert_ner_emmanuel_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_envoy': 'BertForTokenClassification', 'bert_ner_epiextract4gard': 'BertForTokenClassification', 'bert_ner_epiextract4gard_v1': 'BertForTokenClassification', 'bert_ner_epiextract4gard_v2': 'BertForTokenClassification', 'bert_ner_estbert_ner': 'BertForTokenClassification', 'bert_ner_estbert_ner_v2': 'BertForTokenClassification', 'bert_ner_eus_es_nymiz': 'BertForTokenClassification', 'bert_ner_evanz37_bert_finetuned_ard': 'BertForTokenClassification', 'bert_ner_fancyerii_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_far50brbert_base': 'BertForTokenClassification', 'bert_ner_far75brbert_base': 'BertForTokenClassification', 'bert_ner_farbrbert_base': 'BertForTokenClassification', 'bert_ner_final_lab': 'BertForTokenClassification', 'bert_ner_foo': 'BertForTokenClassification', 'bert_ner_gbert_base_germaner': 'BertForTokenClassification', 'bert_ner_gbert_large_germaner': 'BertForTokenClassification', 'bert_ner_german_intensifiers_tagging': 'BertForTokenClassification', 'bert_ner_german_press_bert': 'BertForTokenClassification', 'bert_ner_ghost1_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_gk07_wikineural_multilingual_ner': 'BertForTokenClassification', 'bert_ner_gro_ner_2': 'BertForTokenClassification', 'bert_ner_hebert_ner': 'BertForTokenClassification', 'bert_ner_hilmluo_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_hiner_original_muril_base_cased': 'BertForTokenClassification', 'bert_ner_hing_bert_lid': 'BertForTokenClassification', 'bert_ner_hossay_biobert_base_cased_v1.2_finetuned_ner': 'BertForTokenClassification', 'bert_ner_host': 'BertForTokenClassification', 'bert_ner_hsattar_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_hugging_face_biobert_mlma': 'BertForTokenClassification', 'bert_ner_hugging_face_biobert_mlmav2': 'BertForTokenClassification', 'bert_ner_hugging_face_biobert_mlmav3': 
'BertForTokenClassification', 'bert_ner_huggingface_course_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_huggingface_course_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_icelandic_ner_bert': 'BertForTokenClassification', 'bert_ner_idrisi_lmr_hd_tb': 'BertForTokenClassification', 'bert_ner_idrisi_lmr_hd_tb_partition': 'BertForTokenClassification', 'bert_ner_idrisi_lmr_hd_tl': 'BertForTokenClassification', 'bert_ner_idrisi_lmr_hd_tl_partition': 'BertForTokenClassification', 'bert_ner_importsmart_bert_to_distilbert_ner': 'BertForTokenClassification', 'bert_ner_imvladikon_bert_large_cased_finetuned_conll03_english': 'BertForTokenClassification', 'bert_ner_indicner': 'BertForTokenClassification', 'bert_ner_jameswrbrookes_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_jatinshah_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_jdang_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_jiaxin97_bert_finetuned_ner_adr': 'BertForTokenClassification', 'bert_ner_jimmywu_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_jjglilleberg_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_jo0hnd0e_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_joantirant_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_joantirant_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_jplago_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_jrubin01_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_juancopi81_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_kakkidaisuki_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_kalex_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_kamalkraj_bert_base_cased_ner_conll2003': 'BertForTokenClassification', 'bert_ner_kaushalkhator_bert_to_distilbert_ner': 'BertForTokenClassification', 'bert_ner_kb_bert_base_swedish_cased_ner': 'BertForTokenClassification', 
'bert_ner_kblab_bert_base_swedish_cased_ner': 'BertForTokenClassification', 'bert_ner_kevinform_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_keyword_tag_model': 'BertForTokenClassification', 'bert_ner_keyword_tag_model_10000_9_16_more_ingredient': 'BertForTokenClassification', 'bert_ner_keyword_tag_model_2000': 'BertForTokenClassification', 'bert_ner_keyword_tag_model_2000_9_16': 'BertForTokenClassification', 'bert_ner_keyword_tag_model_2000_9_16_more_ingredient': 'BertForTokenClassification', 'bert_ner_keyword_tag_model_3000_v2': 'BertForTokenClassification', 'bert_ner_keyword_tag_model_4000': 'BertForTokenClassification', 'bert_ner_keyword_tag_model_4000_9_16_more_ingredient': 'BertForTokenClassification', 'bert_ner_keyword_tag_model_6000': 'BertForTokenClassification', 'bert_ner_keyword_tag_model_6000_9_16_more_ingredient': 'BertForTokenClassification', 'bert_ner_keyword_tag_model_6000_v2': 'BertForTokenClassification', 'bert_ner_keyword_tag_model_8000_9_16_more_ingredient': 'BertForTokenClassification', 'bert_ner_keyword_tag_model_9000_v2': 'BertForTokenClassification', 'bert_ner_khan27_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_krimo11_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_ksaluja_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_kurama_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_kurianbenoy_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_kushaljoseph_bert_to_distilbert_ner': 'BertForTokenClassification', 'bert_ner_lab9_1': 'BertForTokenClassification', 'bert_ner_lab9_2': 'BertForTokenClassification', 'bert_ner_labse_ner_nerel': 'BertForTokenClassification', 'bert_ner_laure996_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_leander_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_legalbert_beneficiary_single': 'BertForTokenClassification', 'bert_ner_legalbert_clause_combined': 'BertForTokenClassification', 
'bert_ner_lewtun_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_lideming7757_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_literary_german_bert': 'BertForTokenClassification', 'bert_ner_liyingz_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_lordli_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_ludoviciarraga_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_m_bert_ner': 'BertForTokenClassification', 'bert_ner_marathi_ner': 'BertForTokenClassification', 'bert_ner_mascariddu8_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_mascariddu8_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_mateocolina_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_mattchurgin_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_mbateman_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_mbateman_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_mbert_base_albanian_cased_ner': 'BertForTokenClassification', 'bert_ner_mbert_base_biomedical_ner': 'BertForTokenClassification', 'bert_ner_mbert_base_cased_ner_conll': 'BertForTokenClassification', 'bert_ner_mbert_base_uncased_kin': 'BertForTokenClassification', 'bert_ner_mbert_base_uncased_ner_kin': 'BertForTokenClassification', 'bert_ner_mbert_base_uncased_ner_pcm': 'BertForTokenClassification', 'bert_ner_mbert_base_uncased_ner_swa': 'BertForTokenClassification', 'bert_ner_mbert_base_uncased_pcm': 'BertForTokenClassification', 'bert_ner_mbert_base_uncased_swa': 'BertForTokenClassification', 'bert_ner_mcdzwil_bert_base_ner_finetuned_ner': 'BertForTokenClassification', 'bert_ner_mdroth_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_mdroth_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_meddocan_beto_ner': 'BertForTokenClassification', 'bert_ner_media1129_recipe_tag_model': 'BertForTokenClassification', 'bert_ner_michojan_bert_finetuned_ner': 
'BertForTokenClassification', 'bert_ner_mldev_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_mlma': 'BertForTokenClassification', 'bert_ner_mlma_lab8': 'BertForTokenClassification', 'bert_ner_model_co_imb': 'BertForTokenClassification', 'bert_ner_model_col_mod': 'BertForTokenClassification', 'bert_ner_model_imb': 'BertForTokenClassification', 'bert_ner_model_imb_1': 'BertForTokenClassification', 'bert_ner_model_imb_2': 'BertForTokenClassification', 'bert_ner_model_org': 'BertForTokenClassification', 'bert_ner_model_org_1': 'BertForTokenClassification', 'bert_ner_model_org_2': 'BertForTokenClassification', 'bert_ner_modified_bluebert_biored_chem_512_5_30': 'BertForTokenClassification', 'bert_ner_mohitsingh_wikineural_multilingual_ner': 'BertForTokenClassification', 'bert_ner_mordred501_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_named_entity_recognition_nerkor_hubert_hungarian': 'BertForTokenClassification', 'bert_ner_nb_bert_base_ner': 'BertForTokenClassification', 'bert_ner_nbailab_base_ner_scandi': 'BertForTokenClassification', 'bert_ner_ncduy_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_ner_2006': 'BertForTokenClassification', 'bert_ner_ner_bert_base_cased_pt_lenerbr': 'BertForTokenClassification', 'bert_ner_ner_camelbert': 'BertForTokenClassification', 'bert_ner_ner_conll2003': 'BertForTokenClassification', 'bert_ner_ner_conll2003_v2': 'BertForTokenClassification', 'bert_ner_ner_conll2003_v3': 'BertForTokenClassification', 'bert_ner_ner_conll2003_v4': 'BertForTokenClassification', 'bert_ner_ner_dummy_model': 'BertForTokenClassification', 'bert_ner_ner_en_vi_it_es_tinparadox': 'BertForTokenClassification', 'bert_ner_ner_for_female_names': 'BertForTokenClassification', 'bert_ner_ner_hu_model_2021': 'BertForTokenClassification', 'bert_ner_ner_nerd': 'BertForTokenClassification', 'bert_ner_ner_nerd_fine': 'BertForTokenClassification', 'bert_ner_ner_news_portuguese': 'BertForTokenClassification', 
'bert_ner_ner_rubert_per_loc_org': 'BertForTokenClassification', 'bert_ner_ner_test': 'BertForTokenClassification', 'bert_ner_neulvo_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_neulvo_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_new_test_model': 'BertForTokenClassification', 'bert_ner_new_test_model2': 'BertForTokenClassification', 'bert_ner_nicholasdino_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_nielsr_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_nilavo_bert_finetuned': 'BertForTokenClassification', 'bert_ner_nlp_cic_wfu_clinical_cases_ner_mbert_cased_fine_tuned': 'BertForTokenClassification', 'bert_ner_nlp_cic_wfu_clinical_cases_ner_paragraph_tokenized_mbert_cased_fine_tuned': 'BertForTokenClassification', 'bert_ner_nlp_cic_wfu_clinical_cases_ner_sents_tokenized_mbert_cased_fine_tuned': 'BertForTokenClassification', 'bert_ner_nominalization_candidate_classifier': 'BertForTokenClassification', 'bert_ner_nonzerophilip_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_offlangdetectionturkish': 'BertForTokenClassification', 'bert_ner_original_biobert_bc2gm': 'BertForTokenClassification', 'bert_ner_original_biobert_bc5cdr_chemical': 'BertForTokenClassification', 'bert_ner_original_biobert_bc5cdr_disease': 'BertForTokenClassification', 'bert_ner_original_biobert_linnaeus': 'BertForTokenClassification', 'bert_ner_original_biobert_ncbi': 'BertForTokenClassification', 'bert_ner_original_bluebert_bc2gm': 'BertForTokenClassification', 'bert_ner_original_bluebert_bc4chemd': 'BertForTokenClassification', 'bert_ner_original_bluebert_bc5cdr_chemical': 'BertForTokenClassification', 'bert_ner_original_bluebert_bc5cdr_disease': 'BertForTokenClassification', 'bert_ner_original_bluebert_biored_chem': 'BertForTokenClassification', 'bert_ner_original_bluebert_biored_chem_512_5_30': 'BertForTokenClassification', 'bert_ner_original_bluebert_linnaeus': 'BertForTokenClassification', 
'bert_ner_original_pubmedbert_bc2gm': 'BertForTokenClassification', 'bert_ner_original_pubmedbert_bc4chemd': 'BertForTokenClassification', 'bert_ner_original_pubmedbert_bc5cdr_chemical': 'BertForTokenClassification', 'bert_ner_original_pubmedbert_bc5cdr_disease': 'BertForTokenClassification', 'bert_ner_original_pubmedbert_linnaeus': 'BertForTokenClassification', 'bert_ner_original_pubmedbert_ncbi': 'BertForTokenClassification', 'bert_ner_original_scibert_bc2gm': 'BertForTokenClassification', 'bert_ner_original_scibert_bc4chemd': 'BertForTokenClassification', 'bert_ner_original_scibert_bc4chemd_o': 'BertForTokenClassification', 'bert_ner_original_scibert_bc5cdr_chemical': 'BertForTokenClassification', 'bert_ner_original_scibert_bc5cdr_chemical_t': 'BertForTokenClassification', 'bert_ner_original_scibert_bc5cdr_chemical_t1': 'BertForTokenClassification', 'bert_ner_original_scibert_bc5cdr_chemical_t2': 'BertForTokenClassification', 'bert_ner_original_scibert_bc5cdr_disease': 'BertForTokenClassification', 'bert_ner_original_scibert_linnaeus': 'BertForTokenClassification', 'bert_ner_orignal_scibert_ncbi': 'BertForTokenClassification', 'bert_ner_orignial_bluebert_ncbi': 'BertForTokenClassification', 'bert_ner_peterhsu_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_peterhsu_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_peyma_ner_bert_base': 'BertForTokenClassification', 'bert_ner_phijve_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_prot_bert_bfd_ss3': 'BertForTokenClassification', 'bert_ner_ravindra001_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_raymelius_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_rdchambers_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_roberta_base_finetuned_cluener2020_chinese': 'BertForTokenClassification', 'bert_ner_romainlhardy_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_rubert_base_srl_seqlabeling': 'BertForTokenClassification', 
'bert_ner_rubert_ner_toxicity': 'BertForTokenClassification', 'bert_ner_rubert_tiny2_sentence_compression': 'BertForTokenClassification', 'bert_ner_rubertconv_toxic_editor': 'BertForTokenClassification', 'bert_ner_russellc_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_russellc_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_sagerpascal_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_salvatore_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_satellite_instrument_bert_ner': 'BertForTokenClassification', 'bert_ner_satwiksstp_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_scibert_scivocab_cased_ner_jnlpba': 'BertForTokenClassification', 'bert_ner_scibert_scivocab_cased_sdu21_ai': 'BertForTokenClassification', 'bert_ner_scibert_scivocab_uncased_ft_sdu21_ai': 'BertForTokenClassification', 'bert_ner_scibert_scivocab_uncased_ft_tv_sdu21_ai': 'BertForTokenClassification', 'bert_ner_scibert_scivocab_uncased_sdu21_ai': 'BertForTokenClassification', 'bert_ner_scibert_scivocab_uncased_tv_sdu21_ai': 'BertForTokenClassification', 'bert_ner_sebastians_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_sgrannemann_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_shaopeng_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_shiva12_wikineural_multilingual_ner': 'BertForTokenClassification', 'bert_ner_shivanand_wikineural_multilingual_ner': 'BertForTokenClassification', 'bert_ner_shwetabh_wikineural_multilingual_ner': 'BertForTokenClassification', 'bert_ner_siegelou_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_silpa_wikineural_multilingual_ner': 'BertForTokenClassification', 'bert_ner_silviacamplani_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_simple_transformer': 'BertForTokenClassification', 'bert_ner_small': 'BertForTokenClassification', 'bert_ner_small2': 'BertForTokenClassification', 'bert_ner_spacebert_cr': 'BertForTokenClassification', 
'bert_ner_spacescibert_cr': 'BertForTokenClassification', 'bert_ner_spanbert_large_cased_finetuned_ade_corpus_v2': 'BertForTokenClassification', 'bert_ner_spanish_cased_finedtuned': 'BertForTokenClassification', 'bert_ner_spasis_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_spasis_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_ssavla2_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_ssmnspantagger': 'BertForTokenClassification', 'bert_ner_stefan_jo_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_suonbo_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_swedish_ner': 'BertForTokenClassification', 'bert_ner_swedish_sentiment_fear_targets': 'BertForTokenClassification', 'bert_ner_swedish_sentiment_violence_targets': 'BertForTokenClassification', 'bert_ner_syedyusufali_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_sysformbatches2acs': 'BertForTokenClassification', 'bert_ner_t_202_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_tac_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_temporal_tagger_bert_tokenclassifier': 'BertForTokenClassification', 'bert_ner_test_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_testingmodel': 'BertForTokenClassification', 'bert_ner_tf_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_tg_relation_model': 'BertForTokenClassification', 'bert_ner_tinparadox_job_search': 'BertForTokenClassification', 'bert_ner_tiny_bert_for_token_classification': 'BertForTokenClassification', 'bert_ner_tiny_dbmdz_bert_large_cased_finetuned_conll03_english': 'BertForTokenClassification', 'bert_ner_tiny_distilbert_base_cased': 'BertForTokenClassification', 'bert_ner_tinybert_fincorp': 'BertForTokenClassification', 'bert_ner_tinybert_spanish_uncased_finetuned_ner': 'BertForTokenClassification', 'bert_ner_tolgahanturker_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_turkish_ner': 'BertForTokenClassification', 
'bert_ner_tushar_rishav_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_twiner_bert_base': 'BertForTokenClassification', 'bert_ner_twiner_bert_base_mtl': 'BertForTokenClassification', 'bert_ner_umlsbert_ner': 'BertForTokenClassification', 'bert_ner_vanmas_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_vdsouza1_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_vikasaeta_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_vikasmani_wikineural_multilingual_ner': 'BertForTokenClassification', 'bert_ner_vikings03_wikineural_multilingual_ner': 'BertForTokenClassification', 'bert_ner_vinspatel4_wikineural_multilingual_ner': 'BertForTokenClassification', 'bert_ner_wakaformer': 'BertForTokenClassification', 'bert_ner_wende_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_wende_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_wikineural_multilingual_ner': 'BertForTokenClassification', 'bert_ner_winson_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_wlt_biobert_ncbi': 'BertForTokenClassification', 'bert_ner_wlt_bluebert_linnaeus': 'BertForTokenClassification', 'bert_ner_wlt_bluebert_ncbi': 'BertForTokenClassification', 'bert_ner_wlt_pubmedbert_bc2gm': 'BertForTokenClassification', 'bert_ner_wlt_pubmedbert_linnaeus': 'BertForTokenClassification', 'bert_ner_wlt_scibert_bc2gm': 'BertForTokenClassification', 'bert_ner_wlt_scibert_linnaeus': 'BertForTokenClassification', 'bert_ner_xenergy_indobert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_xesaad_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_xkang_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_xkang_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_xzt_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_yannis95_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_yfu2307_bert_finetuned_ner': 'BertForTokenClassification', 
'bert_ner_ysharma_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_ytsai25_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_ytsai25_bert_finetuned_ner_adr': 'BertForTokenClassification', 'bert_ner_yv_bert_finetuned_ner': 'BertForTokenClassification', 'bert_ner_yv_bert_finetuned_ner_accelerate': 'BertForTokenClassification', 'bert_ner_zainab18_wikineural_multilingual_ner': 'BertForTokenClassification', 'bert_portuguese_base_cased': 'BertEmbeddings', 'bert_portuguese_large_cased': 'BertEmbeddings', 'bert_pos_13.05.2022.ssccvspantagger': 'BertForTokenClassification', 'bert_pos_4l_weight_decay': 'BertForTokenClassification', 'bert_pos_amhariccacopostag': 'BertForTokenClassification', 'bert_pos_amharicwicpostag': 'BertForTokenClassification', 'bert_pos_amharicwicpostag10tags': 'BertForTokenClassification', 'bert_pos_autonlp_pos_tag_bosque': 'BertForTokenClassification', 'bert_pos_bert_ancient_chinese_base_upos': 'BertForTokenClassification', 'bert_pos_bert_base_arabic_camelbert_ca_pos_egy': 'BertForTokenClassification', 'bert_pos_bert_base_arabic_camelbert_ca_pos_glf': 'BertForTokenClassification', 'bert_pos_bert_base_arabic_camelbert_ca_pos_msa': 'BertForTokenClassification', 'bert_pos_bert_base_arabic_camelbert_da_pos_egy': 'BertForTokenClassification', 'bert_pos_bert_base_arabic_camelbert_da_pos_glf': 'BertForTokenClassification', 'bert_pos_bert_base_arabic_camelbert_da_pos_msa': 'BertForTokenClassification', 'bert_pos_bert_base_arabic_camelbert_mix_pos_egy': 'BertForTokenClassification', 'bert_pos_bert_base_arabic_camelbert_mix_pos_glf': 'BertForTokenClassification', 'bert_pos_bert_base_arabic_camelbert_mix_pos_msa': 'BertForTokenClassification', 'bert_pos_bert_base_arabic_camelbert_msa_pos_egy': 'BertForTokenClassification', 'bert_pos_bert_base_arabic_camelbert_msa_pos_glf': 'BertForTokenClassification', 'bert_pos_bert_base_arabic_camelbert_msa_pos_msa': 'BertForTokenClassification', 'bert_pos_bert_base_cased_ccg': 
'BertForTokenClassification', 'bert_pos_bert_base_cased_pos': 'BertForTokenClassification', 'bert_pos_bert_base_chinese_pos': 'BertForTokenClassification', 'bert_pos_bert_base_dutch_cased_finetuned_lassysmall_pos': 'BertForTokenClassification', 'bert_pos_bert_base_dutch_cased_finetuned_udlassy_pos': 'BertForTokenClassification', 'bert_pos_bert_base_dutch_cased_upos_alpino': 'BertForTokenClassification', 'bert_pos_bert_base_dutch_cased_upos_alpino_frisian': 'BertForTokenClassification', 'bert_pos_bert_base_dutch_cased_upos_alpino_gronings': 'BertForTokenClassification', 'bert_pos_bert_base_ft_pos_xtreme': 'BertForTokenClassification', 'bert_pos_bert_base_german_upos': 'BertForTokenClassification', 'bert_pos_bert_base_han_chinese_pos': 'BertForTokenClassification', 'bert_pos_bert_base_japanese_luw_upos': 'BertForTokenClassification', 'bert_pos_bert_base_japanese_unidic_luw_upos': 'BertForTokenClassification', 'bert_pos_bert_base_japanese_upos': 'BertForTokenClassification', 'bert_pos_bert_base_multilingual_cased_chunking_english': 'BertForTokenClassification', 'bert_pos_bert_base_multilingual_cased_pos_english': 'BertForTokenClassification', 'bert_pos_bert_base_russian_upos': 'BertForTokenClassification', 'bert_pos_bert_base_slavic_cyrillic_upos': 'BertForTokenClassification', 'bert_pos_bert_base_swedish_cased_pos': 'BertForTokenClassification', 'bert_pos_bert_base_thai_upos': 'BertForTokenClassification', 'bert_pos_bert_english_uncased_finetuned_chunk': 'BertForTokenClassification', 'bert_pos_bert_english_uncased_finetuned_pos': 'BertForTokenClassification', 'bert_pos_bert_finetuned_chunking': 'BertForTokenClassification', 'bert_pos_bert_finetuned_conll2003_pos': 'BertForTokenClassification', 'bert_pos_bert_finetuned_pos': 'BertForTokenClassification', 'bert_pos_bert_italian_cased_finetuned_pos': 'BertForTokenClassification', 'bert_pos_bert_large_german_upos': 'BertForTokenClassification', 'bert_pos_bert_large_japanese_luw_upos': 'BertForTokenClassification', 
'bert_pos_bert_large_japanese_unidic_luw_upos': 'BertForTokenClassification', 'bert_pos_bert_large_japanese_upos': 'BertForTokenClassification', 'bert_pos_bert_large_slavic_cyrillic_upos': 'BertForTokenClassification', 'bert_pos_bert_pos_cased_deepfrog_nld': 'BertForTokenClassification', 'bert_pos_bert_punct_restoration_da_alvenir': 'BertForTokenClassification', 'bert_pos_bert_punct_restoration_de_alvenir': 'BertForTokenClassification', 'bert_pos_bert_punct_restoration_en_alvenir': 'BertForTokenClassification', 'bert_pos_bert_spanish_cased_finetuned_pos': 'BertForTokenClassification', 'bert_pos_bert_spanish_cased_finetuned_pos_16_tags': 'BertForTokenClassification', 'bert_pos_bert_spanish_cased_finetuned_pos_syntax': 'BertForTokenClassification', 'bert_pos_bert_tiny_chinese_pos': 'BertForTokenClassification', 'bert_pos_bertimbau_finetuned_pos_accelerate': 'BertForTokenClassification', 'bert_pos_bertimbau_finetuned_pos_accelerate2': 'BertForTokenClassification', 'bert_pos_bertimbau_finetuned_pos_accelerate3': 'BertForTokenClassification', 'bert_pos_bertimbau_finetuned_pos_accelerate_5': 'BertForTokenClassification', 'bert_pos_bertimbau_finetuned_pos_accelerate_6': 'BertForTokenClassification', 'bert_pos_bertimbau_finetuned_pos_accelerate_7': 'BertForTokenClassification', 'bert_pos_ccvspantagger': 'BertForTokenClassification', 'bert_pos_chinese_bert_wwm_ext_upos': 'BertForTokenClassification', 'bert_pos_chinese_roberta_base_upos': 'BertForTokenClassification', 'bert_pos_chinese_roberta_large_upos': 'BertForTokenClassification', 'bert_pos_classical_chinese_punctuation_guwen_biaodian': 'BertForTokenClassification', 'bert_pos_clnspantagger': 'BertForTokenClassification', 'bert_pos_cmn1spantagger': 'BertForTokenClassification', 'bert_pos_cmv1spantagger': 'BertForTokenClassification', 'bert_pos_codeswitch_hineng_pos_lince': 'BertForTokenClassification', 'bert_pos_codeswitch_spaeng_pos_lince': 'BertForTokenClassification', 'bert_pos_estbert_morph_128': 
'BertForTokenClassification', 'bert_pos_estbert_upos_128': 'BertForTokenClassification', 'bert_pos_estbert_xpos_128': 'BertForTokenClassification', 'bert_pos_french_postag_model': 'BertForTokenClassification', 'bert_pos_mbert_grammatical_error_tagger': 'BertForTokenClassification', 'bert_pos_parsbert_finetuned_pos': 'BertForTokenClassification', 'bert_pos_signtagger': 'BertForTokenClassification', 'bert_pos_spanish_cased_finetuned_pos_16_tags': 'BertForTokenClassification', 'bert_pos_ssccvspantagger': 'BertForTokenClassification', 'bert_pos_tetra_tag_en_kitaev': 'BertForTokenClassification', 'bert_pos_tiny_bb_wd': 'BertForTokenClassification', 'bert_pos_tiny_focal_alpah': 'BertForTokenClassification', 'bert_pos_tiny_focal_alpah75': 'BertForTokenClassification', 'bert_pos_tiny_focal_ckpt': 'BertForTokenClassification', 'bert_pos_tiny_focal_v2_label': 'BertForTokenClassification', 'bert_pos_tiny_focal_v3': 'BertForTokenClassification', 'bert_pos_tiny_kt_punctuator': 'BertForTokenClassification', 'bert_pos_tiny_ktoto_punctuator': 'BertForTokenClassification', 'bert_pos_tiny_lr_kk_kktoto': 'BertForTokenClassification', 'bert_pos_tiny_no_focal_v2': 'BertForTokenClassification', 'bert_pos_tiny_toto_punctuator': 'BertForTokenClassification', 'bert_pos_tr_kg_pos_conllu_bert': 'BertForTokenClassification', 'bert_pos_ty_punctuator': 'BertForTokenClassification', 'bert_pos_wwdd_tiny': 'BertForTokenClassification', 'bert_pubmed': 'BertEmbeddings', 'bert_pubmed_squad2': 'BertEmbeddings', 'bert_qa_3lang': 'BertForQuestionAnswering', 'bert_qa_Alexander_Learn_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_Bertv1_fine': 'BertForQuestionAnswering', 'bert_qa_COVID_BERTa': 'BertForQuestionAnswering', 'bert_qa_COVID_BERTb': 'BertForQuestionAnswering', 'bert_qa_COVID_BERTc': 'BertForQuestionAnswering', 'bert_qa_FardinSaboori_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_GBERTQnA': 'BertForQuestionAnswering', 'bert_qa_Graphcore_bert_large_uncased_squad': 
'BertForQuestionAnswering', 'bert_qa_Harsit_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_HomayounSadri_bert_base_uncased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_Indobert_QA': 'BertForQuestionAnswering', 'bert_qa_KevinChoi_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_KevinChoi_bert_finetuned_squad_accelerate': 'BertForQuestionAnswering', 'bert_qa_Klue_CommonSense_model': 'BertForQuestionAnswering', 'bert_qa_Laikokwei_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_MTL_bert_base_uncased_ww_squad': 'BertForQuestionAnswering', 'bert_qa_ManuERT_for_xqua': 'BertForQuestionAnswering', 'bert_qa_MiniLM_L12_H384_uncased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_Multi_ling_BERT': 'BertForQuestionAnswering', 'bert_qa_Neulvo_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_Part_1_mBERT_Model_E1': 'BertForQuestionAnswering', 'bert_qa_Part_1_mBERT_Model_E2': 'BertForQuestionAnswering', 'bert_qa_Part_2_BERT_Multilingual_Dutch_Model_E1': 'BertForQuestionAnswering', 'bert_qa_Part_2_mBERT_Model_E2': 'BertForQuestionAnswering', 'bert_qa_Paul_Vinh_bert_base_multilingual_cased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_PruebaBert': 'BertForQuestionAnswering', 'bert_qa_SciBERT_SQuAD_QuAC': 'BertForQuestionAnswering', 'bert_qa_Seongkyu_bert_base_cased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_Shushant_BiomedNLP_PubMedBERT_base_uncased_abstract_fulltext_ContaminationQAmodel_PubmedBERT': 'BertForQuestionAnswering', 'bert_qa_Sotireas_BiomedNLP_PubMedBERT_base_uncased_abstract_fulltext_ContaminationQAmodel_PubmedBERT': 'BertForQuestionAnswering', 'bert_qa_Spanbert_emotion_extraction': 'BertForQuestionAnswering', 'bert_qa_SreyanG_NVIDIA_bert_base_cased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_SreyanG_NVIDIA_bert_base_uncased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_SupriyaArun_bert_base_uncased_finetuned_squad': 'BertForQuestionAnswering', 
'bert_qa_Tianle_bert_base_uncased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_Trial_3_Results': 'BertForQuestionAnswering', 'bert_qa_ahujaniharika95_minilm_uncased_squad2_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_ainize_klue_bert_base_mrc': 'BertForQuestionAnswering', 'bert_qa_aiyshwariya_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_akmal2500_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_andresestevez_bert_base_cased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_andresestevez_bert_finetuned_squad_accelerate': 'BertForQuestionAnswering', 'bert_qa_ankitkupadhyay_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_araSpeedest': 'BertForQuestionAnswering', 'bert_qa_arabert_finetuned_arcd': 'BertForQuestionAnswering', 'bert_qa_arabert_v2': 'BertForQuestionAnswering', 'bert_qa_arap_qa_bert': 'BertForQuestionAnswering', 'bert_qa_arap_qa_bert_large_v2': 'BertForQuestionAnswering', 'bert_qa_arap_qa_bert_v2': 'BertForQuestionAnswering', 'bert_qa_augmented': 'BertForQuestionAnswering', 'bert_qa_augmented_Squad_Translated': 'BertForQuestionAnswering', 'bert_qa_baru98_base_cased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_base_1024_full_trivia': 'BertForQuestionAnswering', 'bert_qa_base_cased_finetuned_log_parser_winlogbeat_nowhitespac': 'BertForQuestionAnswering', 'bert_qa_base_cased_finetuned_log_parser_winlogbeat_nowhitespace_larg': 'BertForQuestionAnswering', 'bert_qa_base_cased_finetuned_squad_r3f': 'BertForQuestionAnswering', 'bert_qa_base_cased_finetuned_squad_v2': 'BertForQuestionAnswering', 'bert_qa_base_cased_iuchatbot_ontologydts_berttokenizer_12april2022': 'BertForQuestionAnswering', 'bert_qa_base_indonesian_tydiqa': 'BertForQuestionAnswering', 'bert_qa_base_japanese_whole_word_masking_tes': 'BertForQuestionAnswering', 'bert_qa_base_japanese_wikipedia_ud_head': 'BertForQuestionAnswering', 'bert_qa_base_multi_mlqa_dev': 'BertForQuestionAnswering', 'bert_qa_base_multi_uncased': 
'BertForQuestionAnswering', 'bert_qa_base_multilingual_cased_finetuned_viquad': 'BertForQuestionAnswering', 'bert_qa_base_parsbert_uncased_finetuned_perqa': 'BertForQuestionAnswering', 'bert_qa_base_parsbert_uncased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_base_spanish_wwm_uncased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_base_swedish_cased_squad_experimental': 'BertForQuestionAnswering', 'bert_qa_base_turkish_128k_cased_finetuned_lr_2e_05_epochs_3': 'BertForQuestionAnswering', 'bert_qa_base_turkish_128k_cased_tquad2_finetuned_lr_2e_05_epochs_1': 'BertForQuestionAnswering', 'bert_qa_base_turkish_128k_cased_tquad2_finetuned_lr_2e_05_epochs_3': 'BertForQuestionAnswering', 'bert_qa_base_uncased_attribute_correction': 'BertForQuestionAnswering', 'bert_qa_base_uncased_attribute_correction_mlm': 'BertForQuestionAnswering', 'bert_qa_base_uncased_attribute_correction_mlm_titles': 'BertForQuestionAnswering', 'bert_qa_base_uncased_contracts_finetuned_on_squadv2': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_1024_finetuned_squad_seed_0': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_1024_finetuned_squad_seed_10': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_1024_finetuned_squad_seed_2': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_1024_finetuned_squad_seed_4': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_1024_finetuned_squad_seed_6': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_1024_finetuned_squad_seed_8': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_128_finetuned_squad_seed_10': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_128_finetuned_squad_seed_2': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_128_finetuned_squad_seed_4': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_128_finetuned_squad_seed_42': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_128_finetuned_squad_seed_6': 
'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_128_finetuned_squad_seed_8': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_16_finetuned_squad_seed_0': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_16_finetuned_squad_seed_10': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_16_finetuned_squad_seed_2': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_16_finetuned_squad_seed_4': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_16_finetuned_squad_seed_6': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_16_finetuned_squad_seed_8': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_256_finetuned_squad_seed_10': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_256_finetuned_squad_seed_2': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_256_finetuned_squad_seed_4': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_256_finetuned_squad_seed_6': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_256_finetuned_squad_seed_8': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_32_finetuned_squad_seed_10': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_32_finetuned_squad_seed_2': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_32_finetuned_squad_seed_4': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_32_finetuned_squad_seed_6': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_32_finetuned_squad_seed_8': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_512_finetuned_squad_seed_10': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_512_finetuned_squad_seed_2': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_512_finetuned_squad_seed_4': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_512_finetuned_squad_seed_6': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_512_finetuned_squad_seed_8': 
'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_64_finetuned_squad_seed_10': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_64_finetuned_squad_seed_2': 'BertForQuestionAnswering', 'bert_qa_base_uncased_few_shot_k_64_finetuned_squad_seed_4': 'BertForQuestionAnswering', 'bert_qa_base_uncased_pretrain_finetuned_coqa_fal': 'BertForQuestionAnswering', 'bert_qa_base_uncased_pretrain_finetuned_coqa_falttened': 'BertForQuestionAnswering', 'bert_qa_base_uncased_squad2.0': 'BertForQuestionAnswering', 'bert_qa_base_uncased_squad_v1.0_finetuned': 'BertForQuestionAnswering', 'bert_qa_base_uncased_squad_v2.0_finetuned': 'BertForQuestionAnswering', 'bert_qa_base_uncased_ssp': 'BertForQuestionAnswering', 'bert_qa_batterybert_cased_squad_v1': 'BertForQuestionAnswering', 'bert_qa_batterybert_uncased_squad_v1': 'BertForQuestionAnswering', 'bert_qa_batterydata_bert_base_uncased_squad_v1': 'BertForQuestionAnswering', 'bert_qa_batteryonlybert_cased_squad_v1': 'BertForQuestionAnswering', 'bert_qa_batteryonlybert_uncased_squad_v1': 'BertForQuestionAnswering', 'bert_qa_batteryscibert_cased_squad_v1': 'BertForQuestionAnswering', 'bert_qa_batteryscibert_uncased_squad_v1': 'BertForQuestionAnswering', 'bert_qa_bdickson_bert_base_uncased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_bert': 'BertForQuestionAnswering', 'bert_qa_bert_FT_new_newsqa': 'BertForQuestionAnswering', 'bert_qa_bert_FT_newsqa': 'BertForQuestionAnswering', 'bert_qa_bert_all': 'BertForQuestionAnswering', 'bert_qa_bert_all_squad_all_translated': 'BertForQuestionAnswering', 'bert_qa_bert_all_squad_ben_tel_context': 'BertForQuestionAnswering', 'bert_qa_bert_all_squad_que_translated': 'BertForQuestionAnswering', 'bert_qa_bert_all_translated': 'BertForQuestionAnswering', 'bert_qa_bert_base_1024_full_trivia_copied_embeddings': 'BertForQuestionAnswering', 'bert_qa_bert_base_2048_full_trivia_copied_embeddings': 'BertForQuestionAnswering', 'bert_qa_bert_base_4096_full_trivia_copied_embeddings': 
'BertForQuestionAnswering', 'bert_qa_bert_base_512_full_trivia': 'BertForQuestionAnswering', 'bert_qa_bert_base_cased_IUChatbot_ontologyDts': 'BertForQuestionAnswering', 'bert_qa_bert_base_cased_chaii': 'BertForQuestionAnswering', 'bert_qa_bert_base_cased_finetuned_squad_test': 'BertForQuestionAnswering', 'bert_qa_bert_base_cased_squad_v1': 'BertForQuestionAnswering', 'bert_qa_bert_base_cased_squad_v1.1_portuguese': 'BertForQuestionAnswering', 'bert_qa_bert_base_chinese_finetuned_squad_colab': 'BertForQuestionAnswering', 'bert_qa_bert_base_fa_qa': 'BertForQuestionAnswering', 'bert_qa_bert_base_faquad': 'BertForQuestionAnswering', 'bert_qa_bert_base_finetuned_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_base_italian_uncased_squad_it_antoniocappiello': 'BertForQuestionAnswering', 'bert_qa_bert_base_multilingual_cased_finetune_qa': 'BertForQuestionAnswering', 'bert_qa_bert_base_multilingual_cased_finetuned_chaii': 'BertForQuestionAnswering', 'bert_qa_bert_base_multilingual_cased_finetuned_dutch_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_base_multilingual_cased_finetuned_klue': 'BertForQuestionAnswering', 'bert_qa_bert_base_multilingual_cased_finetuned_polish_squad1': 'BertForQuestionAnswering', 'bert_qa_bert_base_multilingual_cased_finetuned_polish_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_base_multilingual_cased_finetuned_squadv1': 'BertForQuestionAnswering', 'bert_qa_bert_base_multilingual_cased_korquad': 'BertForQuestionAnswering', 'bert_qa_bert_base_multilingual_cased_korquad_v1': 'BertForQuestionAnswering', 'bert_qa_bert_base_multilingual_uncased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_bert_base_multilingual_xquad': 'BertForQuestionAnswering', 'bert_qa_bert_base_portuguese_cased_finetuned_squad_v1_pt_mrm8488': 'BertForQuestionAnswering', 'bert_qa_bert_base_sinhala_qa': 'BertForQuestionAnswering', 'bert_qa_bert_base_spanish_wwm_cased_finetuned_qa_mlqa': 'BertForQuestionAnswering', 
'bert_qa_bert_base_spanish_wwm_cased_finetuned_qa_sqac': 'BertForQuestionAnswering', 'bert_qa_bert_base_spanish_wwm_cased_finetuned_qa_tar': 'BertForQuestionAnswering', 'bert_qa_bert_base_spanish_wwm_cased_finetuned_spa_squad2_es_finetuned_sqac': 'BertForQuestionAnswering', 'bert_qa_bert_base_spanish_wwm_cased_finetuned_spa_squad2_es_mrm8488': 'BertForQuestionAnswering', 'bert_qa_bert_base_spanish_wwm_cased_finetuned_sqac': 'BertForQuestionAnswering', 'bert_qa_bert_base_spanish_wwm_cased_finetuned_sqac_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_bert_base_spanish_wwm_cased_finetuned_sqac_finetuned_squad2_es_MMG': 'BertForQuestionAnswering', 'bert_qa_bert_base_spanish_wwm_cased_finetuned_squad2_es_MMG': 'BertForQuestionAnswering', 'bert_qa_bert_base_spanish_wwm_cased_finetuned_squad2_es_finetuned_sqac': 'BertForQuestionAnswering', 'bert_qa_bert_base_spanish_wwm_uncased_finetuned_qa_mlqa': 'BertForQuestionAnswering', 'bert_qa_bert_base_spanish_wwm_uncased_finetuned_qa_sqac': 'BertForQuestionAnswering', 'bert_qa_bert_base_spanish_wwm_uncased_finetuned_qa_tar': 'BertForQuestionAnswering', 'bert_qa_bert_base_squadv1': 'BertForQuestionAnswering', 'bert_qa_bert_base_swedish_cased_squad_experimental': 'BertForQuestionAnswering', 'bert_qa_bert_base_swedish_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_base_turkish_cased_finetuned_lr_2e_05_epochs_3': 'BertForQuestionAnswering', 'bert_qa_bert_base_turkish_squad': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_coqa': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_few_shot_k_1024_finetuned_squad_seed_42': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_few_shot_k_128_finetuned_squad_seed_0': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_few_shot_k_16_finetuned_squad_seed_42': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_few_shot_k_256_finetuned_squad_seed_0': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_few_shot_k_32_finetuned_squad_seed_0': 
'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_few_shot_k_512_finetuned_squad_seed_0': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_few_shot_k_64_finetuned_squad_seed_0': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_finetuned_docvqa': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_finetuned_duorc_bert': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_finetuned_infovqa': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_finetuned_newsqa': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_finetuned_squad_frozen_v2': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_finetuned_squad_v1': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_finetuned_squad_v2': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_finetuned_vi_infovqa': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_fiqa_flm_sq_flit': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_qa_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_squad1.1_block_sparse_0.07_v1': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_squad1.1_block_sparse_0.13_v1': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_squad1.1_block_sparse_0.20_v1': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_squad1.1_block_sparse_0.32_v1': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_squad1.1_pruned_x3.2_v2': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_squad2_covid_qa_deepset': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_squad_L3': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_squad_L6': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_squad_v1_sparse0.25': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_squadv1.1_sparse_80_1x4_block_pruneofa': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_squadv1_x1.16_f88.1_d8_unstruct_v1': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_squadv1_x1.84_f88.7_d36_hybrid_filled_v1': 'BertForQuestionAnswering', 
'bert_qa_bert_base_uncased_squadv1_x1.96_f88.3_d27_hybrid_filled_opt_v1': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_squadv1_x2.01_f89.2_d30_hybrid_rewind_opt_v1': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_squadv1_x2.32_f86.6_d15_hybrid_v1': 'BertForQuestionAnswering', 'bert_qa_bert_base_uncased_squadv1_x2.44_f87.7_d26_hybrid_filled_v1': 'BertForQuestionAnswering', 'bert_qa_bert_chinese_finetuned': 'BertForQuestionAnswering', 'bert_qa_bert_fa_QA_v1': 'BertForQuestionAnswering', 'bert_qa_bert_fa_qa_v1': 'BertForQuestionAnswering', 'bert_qa_bert_finetuned_jackh1995': 'BertForQuestionAnswering', 'bert_qa_bert_finetuned_lr2_e5_b16_ep2': 'BertForQuestionAnswering', 'bert_qa_bert_finetuned_qa': 'BertForQuestionAnswering', 'bert_qa_bert_finetuned_squad1': 'BertForQuestionAnswering', 'bert_qa_bert_finetuned_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_finetuned_squad_accelerate_10epoch_transformerfrozen': 'BertForQuestionAnswering', 'bert_qa_bert_finetuned_squad_pytorch': 'BertForQuestionAnswering', 'bert_qa_bert_italian_finedtuned_squadv1_it_alfa': 'BertForQuestionAnswering', 'bert_qa_bert_l_squadv1.1_sl256': 'BertForQuestionAnswering', 'bert_qa_bert_l_squadv1.1_sl384': 'BertForQuestionAnswering', 'bert_qa_bert_large_cased_squad_v1.1_portuguese': 'BertForQuestionAnswering', 'bert_qa_bert_large_cased_whole_word_masking_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_bert_large_faquad': 'BertForQuestionAnswering', 'bert_qa_bert_large_finetuned': 'BertForQuestionAnswering', 'bert_qa_bert_large_finetuned_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_large_question_answering_finetuned_legal': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_finetuned_docvqa': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_squad2_covid_qa_deepset': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_squadv1.1_sparse_80_1x4_block_pruneofa': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_squadv1.1_sparse_90_unstructured': 
'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_squadv2': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_whole_word_masking_chaii': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_whole_word_masking_finetuned_chaii': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_whole_word_masking_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_whole_word_masking_finetuned_squad_finetuned_islamic_squad': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_whole_word_masking_finetuned_squadv2': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_whole_word_masking_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_whole_word_masking_squad2_with_ner_Pistherea_conll2003_with_neg_with_repeat': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_whole_word_masking_squad2_with_ner_Pwhatisthe_conll2003_with_neg_with_repeat': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_whole_word_masking_squad2_with_ner_conll2003_with_neg_with_repeat': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_whole_word_masking_squad2_with_ner_mit_movie_with_neg_with_repeat': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_whole_word_masking_squad2_with_ner_mit_restaurant_with_neg_with_repeat': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_wwm_squadv2_x2.15_f83.2_d25_hybrid_v1': 'BertForQuestionAnswering', 'bert_qa_bert_large_uncased_wwm_squadv2_x2.63_f82.6_d16_hybrid_v1': 'BertForQuestionAnswering', 'bert_qa_bert_medium_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_bert_medium_finetuned_squadv2': 'BertForQuestionAnswering', 'bert_qa_bert_medium_pretrained_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_bert_medium_squad2_distilled': 'BertForQuestionAnswering', 'bert_qa_bert_medium_wrslb_finetuned_squadv1': 'BertForQuestionAnswering', 'bert_qa_bert_mini_5_finetuned_squadv2': 'BertForQuestionAnswering', 'bert_qa_bert_mini_finetuned_squadv2': 'BertForQuestionAnswering', 
'bert_qa_bert_mini_wrslb_finetuned_squadv1': 'BertForQuestionAnswering', 'bert_qa_bert_multi_cased_finedtuned_xquad_chaii': 'BertForQuestionAnswering', 'bert_qa_bert_multi_cased_finedtuned_xquad_tydiqa_goldp': 'BertForQuestionAnswering', 'bert_qa_bert_multi_cased_finetuned_chaii': 'BertForQuestionAnswering', 'bert_qa_bert_multi_cased_finetuned_xquadv1': 'BertForQuestionAnswering', 'bert_qa_bert_multi_cased_finetuned_xquadv1_finetuned_squad_colab': 'BertForQuestionAnswering', 'bert_qa_bert_multi_cased_squad_sv_marbogusz': 'BertForQuestionAnswering', 'bert_qa_bert_multi_english_german_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_multi_uncased_finetuned_chaii': 'BertForQuestionAnswering', 'bert_qa_bert_multi_uncased_finetuned_xquadv1': 'BertForQuestionAnswering', 'bert_qa_bert_qa_vi_nvkha': 'BertForQuestionAnswering', 'bert_qa_bert_qasper': 'BertForQuestionAnswering', 'bert_qa_bert_reader_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_set_date_1_lr_2e_5_bs_32_ep_4': 'BertForQuestionAnswering', 'bert_qa_bert_small_2_finetuned_squadv2': 'BertForQuestionAnswering', 'bert_qa_bert_small_cord19_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_small_cord19qa': 'BertForQuestionAnswering', 'bert_qa_bert_small_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_bert_small_finetuned_squadv2': 'BertForQuestionAnswering', 'bert_qa_bert_small_pretrained_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_bert_small_wrslb_finetuned_squadv1': 'BertForQuestionAnswering', 'bert_qa_bert_tiny_2_finetuned_squadv2': 'BertForQuestionAnswering', 'bert_qa_bert_tiny_3_finetuned_squadv2': 'BertForQuestionAnswering', 'bert_qa_bert_tiny_4_finetuned_squadv2': 'BertForQuestionAnswering', 'bert_qa_bert_tiny_5_finetuned_squadv2': 'BertForQuestionAnswering', 'bert_qa_bert_tiny_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_bert_tiny_finetuned_squadv2': 'BertForQuestionAnswering', 'bert_qa_bert_turkish_question_answering': 'BertForQuestionAnswering', 
'bert_qa_bert_uncased_L_10_H_512_A_8_cord19_200616_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_10_H_512_A_8_cord19_200616_squad2_covid_qna': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_10_H_512_A_8_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_10_H_512_A_8_squad2_covid_qna': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_2_H_512_A_8_cord19_200616_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_2_H_512_A_8_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_2_H_512_A_8_squad2_covid_qna': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_4_H_256_A_4_cord19_200616_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_4_H_256_A_4_cord19_200616_squad2_covid_qna': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_4_H_256_A_4_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_4_H_256_A_4_squad2_covid_qna': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_4_H_512_A_8_cord19_200616_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_4_H_512_A_8_cord19_200616_squad2_covid_qna': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_4_H_512_A_8_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_4_H_512_A_8_squad2_covid_qna': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_4_H_768_A_12_cord19_200616_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_4_H_768_A_12_cord19_200616_squad2_covid_qna': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_4_H_768_A_12_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_4_H_768_A_12_squad2_covid_qna': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_6_H_128_A_2_squad2': 'BertForQuestionAnswering', 'bert_qa_bert_uncased_L_6_H_128_A_2_squad2_covid_qna': 'BertForQuestionAnswering', 'bert_qa_bertimbau_squad1.1': 'BertForQuestionAnswering', 'bert_qa_bertlargeabsa': 'BertForQuestionAnswering', 'bert_qa_bertserini_base_cmrc': 'BertForQuestionAnswering', 'bert_qa_bertserini_bert_base_squad': 
'BertForQuestionAnswering', 'bert_qa_bertserini_bert_large_squad': 'BertForQuestionAnswering', 'bert_qa_bespin_global_klue_bert_base_mrc': 'BertForQuestionAnswering', 'bert_qa_beto_base_spanish_sqac': 'BertForQuestionAnswering', 'bert_qa_bioBERTpt_squad_v1.1_portuguese': 'BertForQuestionAnswering', 'bert_qa_biobert_base_cased_v1.1_squad': 'BertForQuestionAnswering', 'bert_qa_biobert_base_cased_v1.1_squad_finetuned_biobert': 'BertForQuestionAnswering', 'bert_qa_biobert_base_cased_v1.1_squad_finetuned_covbiobert': 'BertForQuestionAnswering', 'bert_qa_biobert_base_cased_v1.1_squad_finetuned_covdrobert': 'BertForQuestionAnswering', 'bert_qa_biobert_bioasq': 'BertForQuestionAnswering', 'bert_qa_biobert_large_cased_v1.1_squad': 'BertForQuestionAnswering', 'bert_qa_biobert_squad2_cased': 'BertForQuestionAnswering', 'bert_qa_biobert_squad2_cased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_biobert_v1.1_biomedicalquestionanswering': 'BertForQuestionAnswering', 'bert_qa_biobert_v1.1_pubmed_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_biobert_v1.1_pubmed_squad_v2': 'BertForQuestionAnswering', 'bert_qa_bioformer_cased_v1.0_squad1': 'BertForQuestionAnswering', 'bert_qa_biomedical_slot_filling_reader_base': 'BertForQuestionAnswering', 'bert_qa_biomedical_slot_filling_reader_large': 'BertForQuestionAnswering', 'bert_qa_braquad_bert_qna': 'BertForQuestionAnswering', 'bert_qa_callmenicky_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_causal_qa': 'BertForQuestionAnswering', 'bert_qa_chanifrusydi_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_chemical_bert_uncased_squad2': 'BertForQuestionAnswering', 'bert_qa_cheryltsw_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_chinese_pert_base_mrc': 'BertForQuestionAnswering', 'bert_qa_chinese_pert_large_mrc': 'BertForQuestionAnswering', 'bert_qa_chinese_pert_large_open_domain_mrc': 'BertForQuestionAnswering', 'bert_qa_chinese_pretrain_mrc_macbert_large': 'BertForQuestionAnswering', 
'bert_qa_chinese_pretrain_mrc_roberta_wwm_ext_large': 'BertForQuestionAnswering', 'bert_qa_cjjie_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_clementgyj_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_consert_techqa': 'BertForQuestionAnswering', 'bert_qa_covid_squad': 'BertForQuestionAnswering', 'bert_qa_covidbert_squad': 'BertForQuestionAnswering', 'bert_qa_csarron_bert_base_uncased_squad_v1': 'BertForQuestionAnswering', 'bert_qa_danish_bert_botxo_qa_squad': 'BertForQuestionAnswering', 'bert_qa_datauma_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_debug_squad': 'BertForQuestionAnswering', 'bert_qa_deep_pavlov_full': 'BertForQuestionAnswering', 'bert_qa_deep_pavlov_full_2': 'BertForQuestionAnswering', 'bert_qa_deepset_bert_base_uncased_squad2': 'BertForQuestionAnswering', 'bert_qa_demo': 'BertForQuestionAnswering', 'bert_qa_distilbert_base_uncased_finetuned_custom': 'BertForQuestionAnswering', 'bert_qa_distilbert_tr_q_a': 'BertForQuestionAnswering', 'bert_qa_distill_bert_base_spanish_wwm_cased_finetuned_spa_squad2_es_mrm8488': 'BertForQuestionAnswering', 'bert_qa_eauction_section_parsing_from_pretrained': 'BertForQuestionAnswering', 'bert_qa_emanuals_squad2.0': 'BertForQuestionAnswering', 'bert_qa_ericw0530_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_fewrel_zero_shot': 'BertForQuestionAnswering', 'bert_qa_fine_tuned_squad_aip': 'BertForQuestionAnswering', 'bert_qa_fine_tuned_tweetqa_aip': 'BertForQuestionAnswering', 'bert_qa_finetune_bert_base_v1': 'BertForQuestionAnswering', 'bert_qa_finetune_bert_base_v2': 'BertForQuestionAnswering', 'bert_qa_finetune_bert_base_v3': 'BertForQuestionAnswering', 'bert_qa_finetune_scibert_v2': 'BertForQuestionAnswering', 'bert_qa_finetuned_squad_50k': 'BertForQuestionAnswering', 'bert_qa_finetuned_squad_transformerfrozen_testtoken': 'BertForQuestionAnswering', 'bert_qa_firmanindolanguagemodel': 'BertForQuestionAnswering', 'bert_qa_fpdm_bert_FT_new_newsqa': 'BertForQuestionAnswering', 
'bert_qa_fpdm_bert_FT_newsqa': 'BertForQuestionAnswering', 'bert_qa_fpdm_hier_bert_FT_new_newsqa': 'BertForQuestionAnswering', 'bert_qa_fpdm_hier_bert_FT_newsqa': 'BertForQuestionAnswering', 'bert_qa_fpdm_triplet_bert_FT_new_newsqa': 'BertForQuestionAnswering', 'bert_qa_fpdm_triplet_bert_FT_newsqa': 'BertForQuestionAnswering', 'bert_qa_hebert_finetuned_hebrew_squad': 'BertForQuestionAnswering', 'bert_qa_hendrixcosta': 'BertForQuestionAnswering', 'bert_qa_howey_bert_large_uncased_squad': 'BertForQuestionAnswering', 'bert_qa_huBert_fine_tuned_hungarian_squadv1': 'BertForQuestionAnswering', 'bert_qa_huggingface_course_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_huggingface_course_bert_finetuned_squad_accelerate': 'BertForQuestionAnswering', 'bert_qa_i_manual_m': 'BertForQuestionAnswering', 'bert_qa_internetoftim_bert_large_uncased_squad': 'BertForQuestionAnswering', 'bert_qa_irenelizihui_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_ixambert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_ixambert_finetuned_squad_eu_MarcBrun': 'BertForQuestionAnswering', 'bert_qa_ixambert_finetuned_squad_eu_en_MarcBrun': 'BertForQuestionAnswering', 'bert_qa_ixambert_finetuned_squad_eu_en_marcbrun': 'BertForQuestionAnswering', 'bert_qa_ixambert_finetuned_squad_eu_marcbrun': 'BertForQuestionAnswering', 'bert_qa_jatinshah_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_jimypbr_bert_base_uncased_squad': 'BertForQuestionAnswering', 'bert_qa_juliusco_distilbert_base_uncased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_kamilali_distilbert_base_uncased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_kaporter_bert_base_uncased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_kcbert_base_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_kd_squad1.1': 'BertForQuestionAnswering', 'bert_qa_kflash_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_kflash_finetuned_squad_accelera': 'BertForQuestionAnswering', 
'bert_qa_khanh_base_multilingual_cased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_klue_bert_base_aihub_mrc': 'BertForQuestionAnswering', 'bert_qa_kobert_finetuned_klue_v2': 'BertForQuestionAnswering', 'bert_qa_kobert_finetuned_squad_kor_v1': 'BertForQuestionAnswering', 'bert_qa_komrc_train': 'BertForQuestionAnswering', 'bert_qa_large_japanese_wikipedia_ud_head': 'BertForQuestionAnswering', 'bert_qa_large_uncased_finetuned_infovqa': 'BertForQuestionAnswering', 'bert_qa_large_uncased_finetuned_vi_infovqa': 'BertForQuestionAnswering', 'bert_qa_large_uncased_ssp': 'BertForQuestionAnswering', 'bert_qa_legal': 'BertForQuestionAnswering', 'bert_qa_leofelix_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_lewtun_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_linkbert_base_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_linkbert_large_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_logo_qna_model': 'BertForQuestionAnswering', 'bert_qa_loodos_bert_base_uncased_QA_fine_tuned': 'BertForQuestionAnswering', 'bert_qa_mBERT_all_ty_SQen_SQ20_1': 'BertForQuestionAnswering', 'bert_qa_macsquad': 'BertForQuestionAnswering', 'bert_qa_mbert_bengali_tydiqa_qa': 'BertForQuestionAnswering', 'bert_qa_mbert_finetuned_mlqa_ar_hi_dev': 'BertForQuestionAnswering', 'bert_qa_mbert_finetuned_mlqa_de_hi_dev': 'BertForQuestionAnswering', 'bert_qa_mbert_finetuned_mlqa_dev': 'BertForQuestionAnswering', 'bert_qa_mbert_finetuned_mlqa_en_hi_dev': 'BertForQuestionAnswering', 'bert_qa_mbert_finetuned_mlqa_en_zh_hi_dev': 'BertForQuestionAnswering', 'bert_qa_mbert_finetuned_mlqa_es_hi_dev': 'BertForQuestionAnswering', 'bert_qa_mbert_finetuned_mlqa_vi_hi_dev': 'BertForQuestionAnswering', 'bert_qa_mbert_finetuned_mlqa_zh_hi_dev': 'BertForQuestionAnswering', 'bert_qa_mini_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_mini_finetuned_squadv2': 'BertForQuestionAnswering', 'bert_qa_minilm_l12_h384_uncased_squad': 'BertForQuestionAnswering', 
'bert_qa_minilm_uncased_squad2': 'BertForQuestionAnswering', 'bert_qa_mkkc58_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_model_output': 'BertForQuestionAnswering', 'bert_qa_modelontquad': 'BertForQuestionAnswering', 'bert_qa_modelonwhol': 'BertForQuestionAnswering', 'bert_qa_mqa_baseline': 'BertForQuestionAnswering', 'bert_qa_mqa_cls': 'BertForQuestionAnswering', 'bert_qa_mqa_sim': 'BertForQuestionAnswering', 'bert_qa_mqa_unsupsim': 'BertForQuestionAnswering', 'bert_qa_mrbalazs5_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_mrp_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_multi_uncased_trained_squadv2': 'BertForQuestionAnswering', 'bert_qa_multilingual_base_cased_chines': 'BertForQuestionAnswering', 'bert_qa_multilingual_bert_base_cased_arabic': 'BertForQuestionAnswering', 'bert_qa_multilingual_bert_base_cased_chinese': 'BertForQuestionAnswering', 'bert_qa_multilingual_bert_base_cased_english': 'BertForQuestionAnswering', 'bert_qa_multilingual_bert_base_cased_german': 'BertForQuestionAnswering', 'bert_qa_multilingual_bert_base_cased_hindi': 'BertForQuestionAnswering', 'bert_qa_multilingual_bert_base_cased_spanish': 'BertForQuestionAnswering', 'bert_qa_multilingual_bert_base_cased_vietnamese': 'BertForQuestionAnswering', 'bert_qa_muril_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_muril_finetuned_squadv1': 'BertForQuestionAnswering', 'bert_qa_muril_large_cased_hita_qa': 'BertForQuestionAnswering', 'bert_qa_muril_large_squad2': 'BertForQuestionAnswering', 'bert_qa_my_new_model': 'BertForQuestionAnswering', 'bert_qa_mymild_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_nausheen_finetuned_squad_accelera': 'BertForQuestionAnswering', 'bert_qa_neg_komrc_train': 'BertForQuestionAnswering', 'bert_qa_negfir_distilbert_base_uncased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_neuralmagic_bert_squad_12layer_0sparse': 'BertForQuestionAnswering', 'bert_qa_news_pretrain_bert_FT_new_newsqa': 
'BertForQuestionAnswering', 'bert_qa_news_pretrain_bert_FT_newsqa': 'BertForQuestionAnswering', 'bert_qa_newsqa_bert_el_Danastos': 'BertForQuestionAnswering', 'bert_qa_newsqa_bert_el_danastos': 'BertForQuestionAnswering', 'bert_qa_newsqa_el_4': 'BertForQuestionAnswering', 'bert_qa_nickmuchi_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_no_need_to_name_this': 'BertForQuestionAnswering', 'bert_qa_nolog_SciBert_v2': 'BertForQuestionAnswering', 'bert_qa_nq_bert_el_Danastos': 'BertForQuestionAnswering', 'bert_qa_nq_bert_el_danastos': 'BertForQuestionAnswering', 'bert_qa_nq_el_4': 'BertForQuestionAnswering', 'bert_qa_nq_squad_el_3': 'BertForQuestionAnswering', 'bert_qa_nq_squad_el_4': 'BertForQuestionAnswering', 'bert_qa_ofirzaf_bert_large_uncased_squad': 'BertForQuestionAnswering', 'bert_qa_output_files': 'BertForQuestionAnswering', 'bert_qa_paranoidandroid_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_parsbert_finetuned_persianqa': 'BertForQuestionAnswering', 'bert_qa_part_2_bert_multilingual_dutch_model_e1': 'BertForQuestionAnswering', 'bert_qa_part_2_mbert_model_e1': 'BertForQuestionAnswering', 'bert_qa_paul_vinh_bert_base_multilingual_cased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_peterhsu_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_peterhsu_bert_finetuned_squad_accelerate': 'BertForQuestionAnswering', 'bert_qa_pquad': 'BertForQuestionAnswering', 'bert_qa_pquad_2': 'BertForQuestionAnswering', 'bert_qa_prunebert_base_uncased_6_finepruned_w_distil_squad': 'BertForQuestionAnswering', 'bert_qa_pubmed_bert_squadv2': 'BertForQuestionAnswering', 'bert_qa_qa_roberta_base_chinese_extractive': 'BertForQuestionAnswering', 'bert_qa_qacombination_bert_el_Danastos': 'BertForQuestionAnswering', 'bert_qa_qacombination_el_4': 'BertForQuestionAnswering', 'bert_qa_qacombined_el_3': 'BertForQuestionAnswering', 'bert_qa_qacombined_el_4': 'BertForQuestionAnswering', 'bert_qa_qaconv_bert_large_uncased_whole_word_masking_squad2': 
'BertForQuestionAnswering', 'bert_qa_qgrantq_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_question_answering_cased_squadv2': 'BertForQuestionAnswering', 'bert_qa_question_answering_chinese': 'BertForQuestionAnswering', 'bert_qa_question_answering_uncased_squadv2': 'BertForQuestionAnswering', 'bert_qa_question_answering_zh_voidful': 'BertForQuestionAnswering', 'bert_qa_questionansweing': 'BertForQuestionAnswering', 'bert_qa_renukswamy_minilm_uncased_squad2_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_results': 'BertForQuestionAnswering', 'bert_qa_roberta_base_chinese_extractive': 'BertForQuestionAnswering', 'bert_qa_roberta_base_chinese_extractive_qa': 'BertForQuestionAnswering', 'bert_qa_roberta_base_chinese_extractive_qa_scratch': 'BertForQuestionAnswering', 'bert_qa_rule_based_hier_quadruplet_0.1_epochs_1_shard_1_squad2.0': 'BertForQuestionAnswering', 'bert_qa_rule_based_hier_quadruplet_epochs_1_shard_1_squad2.0': 'BertForQuestionAnswering', 'bert_qa_rule_based_hier_triplet_0.1_epochs_1_shard_1_squad2.0': 'BertForQuestionAnswering', 'bert_qa_rule_based_hier_triplet_epochs_1_shard_1_kldiv_squad2.0': 'BertForQuestionAnswering', 'bert_qa_rule_based_hier_triplet_epochs_1_shard_1_squad2.0': 'BertForQuestionAnswering', 'bert_qa_rule_based_only_classfn_epochs_1_shard_1_squad2.0': 'BertForQuestionAnswering', 'bert_qa_rule_based_quadruplet_epochs_1_shard_1_squad2.0': 'BertForQuestionAnswering', 'bert_qa_rule_based_triplet_epochs_1_shard_1_squad2.0': 'BertForQuestionAnswering', 'bert_qa_rule_softmatching': 'BertForQuestionAnswering', 'bert_qa_ruselkomp_sbert_large_nlu_ru_finetuned_squad_full': 'BertForQuestionAnswering', 'bert_qa_sagemaker_BioclinicalBERT_ADR': 'BertForQuestionAnswering', 'bert_qa_salti_bert_base_multilingual_cased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_sapbert_from_pubmedbert_squad2': 'BertForQuestionAnswering', 'bert_qa_sber_full_tes': 'BertForQuestionAnswering', 'bert_qa_sbert_large_nlu_ru_finetuned_squad': 
'BertForQuestionAnswering', 'bert_qa_scibert_coqa': 'BertForQuestionAnswering', 'bert_qa_scibert_nli_squad': 'BertForQuestionAnswering', 'bert_qa_scibert_scivocab_uncased_squad': 'BertForQuestionAnswering', 'bert_qa_scibert_scivocab_uncased_squad_v2': 'BertForQuestionAnswering', 'bert_qa_sd1': 'BertForQuestionAnswering', 'bert_qa_sd1_small': 'BertForQuestionAnswering', 'bert_qa_sd2': 'BertForQuestionAnswering', 'bert_qa_sd2_lr_5e_5_bs_32_e_3': 'BertForQuestionAnswering', 'bert_qa_sd2_small': 'BertForQuestionAnswering', 'bert_qa_sd3': 'BertForQuestionAnswering', 'bert_qa_sd3_small': 'BertForQuestionAnswering', 'bert_qa_sebastians_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_sebastians_finetuned_squad_accelera': 'BertForQuestionAnswering', 'bert_qa_set_date_1_impartit_4': 'BertForQuestionAnswering', 'bert_qa_set_date_1_lr_2e_5_bs_32_ep_3': 'BertForQuestionAnswering', 'bert_qa_set_date_1_lr_3e_5_bs_32_ep_3': 'BertForQuestionAnswering', 'bert_qa_set_date_2_lr_2e_5_bs_32_ep_3': 'BertForQuestionAnswering', 'bert_qa_set_date_2_lr_2e_5_bs_32_ep_4': 'BertForQuestionAnswering', 'bert_qa_set_date_2_lr_3e_5_bs_32_ep_3': 'BertForQuestionAnswering', 'bert_qa_set_date_3_lr_2e_5_bs_32_ep_3': 'BertForQuestionAnswering', 'bert_qa_set_date_3_lr_2e_5_bs_32_ep_4': 'BertForQuestionAnswering', 'bert_qa_set_date_3_lr_3e_5_bs_32_ep_3': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_1024_finetuned_squad_seed_0': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_1024_finetuned_squad_seed_10': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_1024_finetuned_squad_seed_2': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_1024_finetuned_squad_seed_4': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_1024_finetuned_squad_seed_42': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_1024_finetuned_squad_seed_6': 'BertForQuestionAnswering', 
'bert_qa_spanbert_base_cased_few_shot_k_1024_finetuned_squad_seed_8': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_128_finetuned_squad_seed_0': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_128_finetuned_squad_seed_10': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_128_finetuned_squad_seed_4': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_128_finetuned_squad_seed_42': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_128_finetuned_squad_seed_6': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_128_finetuned_squad_seed_8': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_16_finetuned_squad_seed_0': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_16_finetuned_squad_seed_10': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_16_finetuned_squad_seed_2': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_16_finetuned_squad_seed_4': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_16_finetuned_squad_seed_42': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_16_finetuned_squad_seed_6': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_16_finetuned_squad_seed_8': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_256_finetuned_squad_seed_0': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_256_finetuned_squad_seed_10': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_256_finetuned_squad_seed_2': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_256_finetuned_squad_seed_4': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_256_finetuned_squad_seed_6': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_256_finetuned_squad_seed_8': 'BertForQuestionAnswering', 
'bert_qa_spanbert_base_cased_few_shot_k_32_finetuned_squad_seed_0': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_32_finetuned_squad_seed_10': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_32_finetuned_squad_seed_2': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_32_finetuned_squad_seed_4': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_32_finetuned_squad_seed_6': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_32_finetuned_squad_seed_8': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_512_finetuned_squad_seed_0': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_512_finetuned_squad_seed_10': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_512_finetuned_squad_seed_2': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_512_finetuned_squad_seed_4': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_512_finetuned_squad_seed_6': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_512_finetuned_squad_seed_8': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_64_finetuned_squad_seed_0': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_64_finetuned_squad_seed_10': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_64_finetuned_squad_seed_2': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_64_finetuned_squad_seed_4': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_64_finetuned_squad_seed_6': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_cased_few_shot_k_64_finetuned_squad_seed_8': 'BertForQuestionAnswering', 'bert_qa_spanbert_base_finetuned_squad_r3f': 'BertForQuestionAnswering', 'bert_qa_spanbert_finetuned_squadv1': 'BertForQuestionAnswering', 'bert_qa_spanbert_finetuned_squadv2': 'BertForQuestionAnswering', 'bert_qa_spanbert_large_recruit_qa': 
'BertForQuestionAnswering', 'bert_qa_spanbert_recruit_qa': 'BertForQuestionAnswering', 'bert_qa_spasis_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_spasis_finetuned_squad_accelera': 'BertForQuestionAnswering', 'bert_qa_specter_model_squad2.0': 'BertForQuestionAnswering', 'bert_qa_squad1.1': 'BertForQuestionAnswering', 'bert_qa_squad1.1_1': 'BertForQuestionAnswering', 'bert_qa_squad2.0': 'BertForQuestionAnswering', 'bert_qa_squad_baseline': 'BertForQuestionAnswering', 'bert_qa_squad_bert_el_Danastos': 'BertForQuestionAnswering', 'bert_qa_squad_bert_el_danastos': 'BertForQuestionAnswering', 'bert_qa_squad_el_4': 'BertForQuestionAnswering', 'bert_qa_squad_en_bert_base': 'BertForQuestionAnswering', 'bert_qa_squad_mbert_en_de_es_model': 'BertForQuestionAnswering', 'bert_qa_squad_mbert_en_de_es_vi_zh_model': 'BertForQuestionAnswering', 'bert_qa_squad_mbert_model': 'BertForQuestionAnswering', 'bert_qa_squad_mbert_model_2': 'BertForQuestionAnswering', 'bert_qa_squad_ms_bert_base': 'BertForQuestionAnswering', 'bert_qa_squad_xxl_cased_hub1': 'BertForQuestionAnswering', 'bert_qa_srmukundb_bert_base_uncased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_susghosh_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_telugu_bertu_tydiqa': 'BertForQuestionAnswering', 'bert_qa_testpersianqa': 'BertForQuestionAnswering', 'bert_qa_tests_finetuned_squad_test_bert': 'BertForQuestionAnswering', 'bert_qa_tests_finetuned_squad_test_bert_2': 'BertForQuestionAnswering', 'bert_qa_tf_bert_base_cased_squad2': 'BertForQuestionAnswering', 'bert_qa_tf_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_thai_bert_multi_cased_finetuned_xquadv1_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_timur1984_sbert_large_nlu_ru_finetuned_squad_full': 'BertForQuestionAnswering', 'bert_qa_tiny_finetuned_squadv2': 'BertForQuestionAnswering', 'bert_qa_tiny_wrslb_finetuned_squadv1': 'BertForQuestionAnswering', 'bert_qa_tinybert_6l_768d_squad2': 'BertForQuestionAnswering', 
'bert_qa_tinybert_6l_768d_squad2_large_teach': 'BertForQuestionAnswering', 'bert_qa_tinybert_6l_768d_squad2_large_teacher_dummy': 'BertForQuestionAnswering', 'bert_qa_tinybert_6l_768d_squad2_large_teacher_finetuned': 'BertForQuestionAnswering', 'bert_qa_tinybert_6l_768d_squad2_large_teacher_finetuned_step1': 'BertForQuestionAnswering', 'bert_qa_tinybert_general_4l_312d_squad': 'BertForQuestionAnswering', 'bert_qa_triviaqa_bert_el_Danastos': 'BertForQuestionAnswering', 'bert_qa_triviaqa_bert_el_danastos': 'BertForQuestionAnswering', 'bert_qa_triviaqa_el_4': 'BertForQuestionAnswering', 'bert_qa_twmkn9_bert_base_uncased_squad2': 'BertForQuestionAnswering', 'bert_qa_uncased_finetuned_squad_indonesian': 'BertForQuestionAnswering', 'bert_qa_uncased_l_2_h_128_a_2_cord19_200616_squad2': 'BertForQuestionAnswering', 'bert_qa_uncased_l_2_h_128_a_2_cord19_200616_squad2_covid_qna': 'BertForQuestionAnswering', 'bert_qa_uncased_l_2_h_128_a_2_squad2': 'BertForQuestionAnswering', 'bert_qa_uncased_l_2_h_128_a_2_squad2_covid_qna': 'BertForQuestionAnswering', 'bert_qa_uncased_l_2_h_512_a_8_cord19_200616_squad2_covid_qna': 'BertForQuestionAnswering', 'bert_qa_uncased_l_6_h_128_a_2_cord19_200616_squad2': 'BertForQuestionAnswering', 'bert_qa_uncased_l_6_h_128_a_2_cord19_200616_squad2_covid_qna': 'BertForQuestionAnswering', 'bert_qa_unqover_bert_base_uncased_newsqa': 'BertForQuestionAnswering', 'bert_qa_unqover_bert_base_uncased_squad': 'BertForQuestionAnswering', 'bert_qa_unqover_large_uncased_newsqa': 'BertForQuestionAnswering', 'bert_qa_unsup_consert_base_squad2.0': 'BertForQuestionAnswering', 'bert_qa_victoraavila_bert_base_uncased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_victorlee071200_base_cased_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_vuiseng9_bert_base_uncased_squad': 'BertForQuestionAnswering', 'bert_qa_wiselinjayajos_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_wounkai_finetuned_squad': 'BertForQuestionAnswering', 
'bert_qa_xdistil_l12_h384_squad2': 'BertForQuestionAnswering', 'bert_qa_xdzadi00_based_v4': 'BertForQuestionAnswering', 'bert_qa_xquad_th_mbert_base': 'BertForQuestionAnswering', 'bert_qa_xtremedistil_l6_h256_uncased_TQUAD_finetuned_lr_2e_05_epochs_9': 'BertForQuestionAnswering', 'bert_qa_xtremedistil_l6_h256_uncased_finetuned_lr_2e_05_epochs_3': 'BertForQuestionAnswering', 'bert_qa_xtremedistil_l6_h256_uncased_finetuned_lr_2e_05_epochs_6': 'BertForQuestionAnswering', 'bert_qa_xtremedistil_l6_h256_uncased_tquad_finetuned_lr_2e_05_epochs_3': 'BertForQuestionAnswering', 'bert_qa_xtremedistil_l6_h256_uncased_tquad_finetuned_lr_2e_05_epochs_6': 'BertForQuestionAnswering', 'bert_qa_ydshieh_bert_base_cased_squad2': 'BertForQuestionAnswering', 'bert_qa_yossra_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_youngjae_bert_finetuned_squad': 'BertForQuestionAnswering', 'bert_qa_youngjae_bert_finetuned_squad_accelerate': 'BertForQuestionAnswering', 'bert_qa_zero_shot': 'BertForQuestionAnswering', 'bert_sentence_embeddings_financial': 'BertEmbeddings', 'bert_sequence_classifier_ade': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_age_news': 'BertForSequenceClassification', 'bert_sequence_classifier_antisemitism': 'BertForSequenceClassification', 'bert_sequence_classifier_base_german_dbmdz_cased_finetuned_pawsx': 'BertForSequenceClassification', 'bert_sequence_classifier_beto_emotion_analysis': 'BertForSequenceClassification', 'bert_sequence_classifier_beto_sentiment_analysis': 'BertForSequenceClassification', 'bert_sequence_classifier_binary_rct_biobert': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_covid_sentiment': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_dehatebert_mono': 'BertForSequenceClassification', 'bert_sequence_classifier_depression': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_depression_binary': 'MedicalBertForSequenceClassification', 
'bert_sequence_classifier_depression_twitter': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_electricidad_base_finetuned_sst2': 'BertForSequenceClassification', 'bert_sequence_classifier_emotion': 'BertForSequenceClassification', 'bert_sequence_classifier_finbert': 'BertForSequenceClassification', 'bert_sequence_classifier_finbert_tone': 'BertForSequenceClassification', 'bert_sequence_classifier_gender_biobert': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_hatexplain': 'BertForSequenceClassification', 'bert_sequence_classifier_health_mentions_bert': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_health_mentions_gbert': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_health_mentions_gbert_large': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_health_mentions_medbert': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_japanese_sentiment': 'BertForSequenceClassification', 'bert_sequence_classifier_mini_finetuned_age_news_classification': 'BertForSequenceClassification', 'bert_sequence_classifier_multilingual_sentiment': 'BertForSequenceClassification', 'bert_sequence_classifier_news_sentiment': 'BertForSequenceClassification', 'bert_sequence_classifier_pico_biobert': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_question_statement': 'BertForSequenceClassification', 'bert_sequence_classifier_question_statement_clinical': 'BertForSequenceClassification', 'bert_sequence_classifier_rct_biobert': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_rubert_sentiment': 'BertForSequenceClassification', 'bert_sequence_classifier_sdoh_community_absent_status': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_sdoh_community_present_status': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_sdoh_environment_status': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_sentiment': 
'BertForSequenceClassification', 'bert_sequence_classifier_sms_spam': 'BertForSequenceClassification', 'bert_sequence_classifier_song_lyrics': 'BertForSequenceClassification', 'bert_sequence_classifier_spanish_tinybert_betito_finetuned_mnli': 'BertForSequenceClassification', 'bert_sequence_classifier_spanish_tinybert_betito_finetuned_xnli': 'BertForSequenceClassification', 'bert_sequence_classifier_stress': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_tiny_finetuned_fake_news_detection': 'BertForSequenceClassification', 'bert_sequence_classifier_tiny_finetuned_sms_spam_detection': 'BertForSequenceClassification', 'bert_sequence_classifier_tiny_finetuned_yahoo_answers_topics': 'BertForSequenceClassification', 'bert_sequence_classifier_toxicity': 'BertForSequenceClassification', 'bert_sequence_classifier_treatment_changes_sentiment_tweet': 'MedicalBertForSequenceClassification', 'bert_sequence_classifier_trec_coarse': 'BertForSequenceClassification', 'bert_sequence_classifier_turkish_sentiment': 'BertForSequenceClassification', 'bert_token_classifier_ade_tweet_binary': 'MedicalBertForTokenClassifier', 'bert_token_classifier_aleph_finetuned_metaphor_detection': 'BertForTokenClassification', 'bert_token_classifier_ara_ner': 'BertForTokenClassification', 'bert_token_classifier_autonlp_ingredient_pseudo_label_training_ner_29576765': 'BertForTokenClassification', 'bert_token_classifier_autonlp_ingredient_sentiment_analysis_19126711': 'BertForTokenClassification', 'bert_token_classifier_autotrain_final_784824206': 'BertForTokenClassification', 'bert_token_classifier_autotrain_final_784824213': 'BertForTokenClassification', 'bert_token_classifier_autotrain_gro_ner': 'BertForTokenClassification', 'bert_token_classifier_autotrain_lucy_alicorp_1356152290': 'BertForTokenClassification', 'bert_token_classifier_autotrain_medicaltokenclassification_1279048948': 'BertForTokenClassification', 'bert_token_classifier_autotrain_ner_favsbot': 
'BertForTokenClassification', 'bert_token_classifier_autotrain_tk_1181244086': 'BertForTokenClassification', 'bert_token_classifier_base_chinese_ws': 'BertForTokenClassification', 'bert_token_classifier_base_german_finetuned_ler': 'BertForTokenClassification', 'bert_token_classifier_base_han_chinese_ws': 'BertForTokenClassification', 'bert_token_classifier_base_multilingual_cased_finetuned_albanian_ner': 'BertForTokenClassification', 'bert_token_classifier_base_multilingual_cased_finetuned_norsk_ner': 'BertForTokenClassification', 'bert_token_classifier_base_swedish_lowermix_reallysimple_ner': 'BertForTokenClassification', 'bert_token_classifier_base_uncased_city_country_ner': 'BertForTokenClassification', 'bert_token_classifier_berturk_cased_keyword_discriminator': 'BertForTokenClassification', 'bert_token_classifier_berturk_cased_ner': 'BertForTokenClassification', 'bert_token_classifier_berturk_sunlp_ner_turkish': 'BertForTokenClassification', 'bert_token_classifier_biobertpt_clin_tempclinbr': 'BertForTokenClassification', 'bert_token_classifier_cased_keyword_discriminator': 'BertForTokenClassification', 'bert_token_classifier_chinese_ner': 'BertForTokenClassification', 'bert_token_classifier_clinicalnerpt_chemical': 'BertForTokenClassification', 'bert_token_classifier_clinicalnerpt_diagnostic': 'BertForTokenClassification', 'bert_token_classifier_clinicalnerpt_disease': 'BertForTokenClassification', 'bert_token_classifier_clinicalnerpt_disorder': 'BertForTokenClassification', 'bert_token_classifier_clinicalnerpt_finding': 'BertForTokenClassification', 'bert_token_classifier_drug_development_trials': 'BertForTokenClassification', 'bert_token_classifier_dutch_udlassy_ner': 'BertForTokenClassification', 'bert_token_classifier_fernet_cc_sk_ner': 'BertForTokenClassification', 'bert_token_classifier_german_ler': 'BertForTokenClassification', 'bert_token_classifier_hi_en_ner': 'BertForTokenClassification', 'bert_token_classifier_large_ner': 
'BertForTokenClassification', 'bert_token_classifier_lewip_informal_tagger': 'BertForTokenClassification', 'bert_token_classifier_loodos_sunlp_ner_turkish': 'BertForTokenClassification', 'bert_token_classifier_ncbi_bc5cdr_disease': 'BertForTokenClassification', 'bert_token_classifier_negation_uncertainty': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner4designtutor': 'BertForTokenClassification', 'bert_token_classifier_ner_ade': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner_ade_binary': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner_anatomy': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner_bacteria': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner_bionlp': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner_btc': 'BertForTokenClassification', 'bert_token_classifier_ner_cellular': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner_chemicals': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner_chemprot': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner_clinical': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner_clinical_trials_abstracts': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner_deid': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner_drugs': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner_fine_tuned_beto': 'BertForTokenClassification', 'bert_token_classifier_ner_japanese': 'BertForTokenClassification', 'bert_token_classifier_ner_jsl': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner_jsl_slim': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner_legal': 'BertForTokenClassification', 'bert_token_classifier_ner_living_species': 'MedicalBertForTokenClassifier', 'bert_token_classifier_ner_supplement': 'BertForTokenClassification', 'bert_token_classifier_ner_ud_gsd': 'BertForTokenClassification', 'bert_token_classifier_parsbert_armanner': 'BertForTokenClassification', 
'bert_token_classifier_parsbert_ner': 'BertForTokenClassification', 'bert_token_classifier_parsbert_peymaner': 'BertForTokenClassification', 'bert_token_classifier_pharmacology': 'MedicalBertForTokenClassifier', 'bert_token_classifier_restore_punctuation_ptbr': 'BertForTokenClassification', 'bert_token_classifier_scandi_ner': 'BertForTokenClassification', 'bert_token_classifier_spanish_ner': 'BertForTokenClassification', 'bert_token_classifier_swedish_ner': 'BertForTokenClassification', 'bert_token_classifier_tiny_chinese_ws': 'BertForTokenClassification', 'bert_token_classifier_turkish_ner': 'BertForTokenClassification', 'bert_wiki_books': 'BertEmbeddings', 'bert_wiki_books_mnli': 'BertEmbeddings', 'bert_wiki_books_qnli': 'BertEmbeddings', 'bert_wiki_books_qqp': 'BertEmbeddings', 'bert_wiki_books_squad2': 'BertEmbeddings', 'bert_wiki_books_sst2': 'BertEmbeddings', 'beto_sentiment': 'BertForSequenceClassification', 'binary2image': 'BinaryToImage', 'biobert_clinical_base_cased': 'BertEmbeddings', 'biobert_discharge_base_cased': 'BertEmbeddings', 'biobert_embeddings_all': 'BertEmbeddings', 'biobert_embeddings_biomedical': 'BertEmbeddings', 'biobert_embeddings_clinical': 'BertEmbeddings', 'biobert_pmc_base_cased': 'BertEmbeddings', 'biobert_pubmed_base_cased': 'BertEmbeddings', 'biobert_pubmed_base_cased_v1.2': 'BertEmbeddings', 'biobert_pubmed_large_cased': 'BertEmbeddings', 'biobert_pubmed_pmc_base_cased': 'BertEmbeddings', 'camembert_base': 'CamemBertEmbeddings', 'camembert_base_ccnet': 'CamemBertEmbeddings', 'camembert_base_ccnet_4gb': 'CamemBertEmbeddings', 'camembert_base_oscar_4gb': 'CamemBertEmbeddings', 'camembert_base_sequence_classifier_allocine': 'CamemBertForSequenceClassification', 'camembert_base_token_classifier_wikiner': 'CamemBertForTokenClassification', 'camembert_base_wikipedia_4gb': 'CamemBertEmbeddings', 'camembert_classifier_base_wikipedia_4gb_finetuned_job_ner': 'CamemBertForTokenClassification', 'camembert_classifier_berties': 
'CamemBertForTokenClassification', 'camembert_classifier_das22_41_pretrained_finetuned_ref': 'CamemBertForTokenClassification', 'camembert_classifier_das22_42_finetuned_ref': 'CamemBertForTokenClassification', 'camembert_classifier_das22_43_pretrained_finetuned_pero': 'CamemBertForTokenClassification', 'camembert_classifier_das22_44_finetuned_pero': 'CamemBertForTokenClassification', 'camembert_classifier_est_roberta_hist_ner': 'CamemBertForTokenClassification', 'camembert_classifier_magbert_ner': 'CamemBertForTokenClassification', 'camembert_classifier_ner': 'CamemBertForTokenClassification', 'camembert_classifier_ner_with_dates': 'CamemBertForTokenClassification', 'camembert_classifier_poet': 'CamemBertForTokenClassification', 'camembert_classifier_pos_french': 'CamemBertForTokenClassification', 'camembert_classifier_squadfr_fquad_piaf_answer_extraction': 'CamemBertForTokenClassification', 'camembert_classifier_test_tcp_ca_cassandra_themis': 'CamemBertForTokenClassification', 'camembert_embeddings_adam1224_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_adeimousa_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_aliasdasd_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_ankitkupadhyay_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_arbertmo': 'CamemBertEmbeddings', 'camembert_embeddings_bertweetfr_base': 'CamemBertEmbeddings', 'camembert_embeddings_camembert_aux_amandes': 'CamemBertEmbeddings', 'camembert_embeddings_camembert_mlm': 'CamemBertEmbeddings', 'camembert_embeddings_codingjacob_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_cylee_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_das22_10_camembert_pretrained': 'CamemBertEmbeddings', 'camembert_embeddings_dataikunlp_camembert_base': 'CamemBertEmbeddings', 'camembert_embeddings_devtrent_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_dianeshan_generic_model': 'CamemBertEmbeddings', 
'camembert_embeddings_distilcamembert_base': 'CamemBertEmbeddings', 'camembert_embeddings_doyyingface_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_dummy': 'CamemBertEmbeddings', 'camembert_embeddings_edge2992_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_eduardopds_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_elliotsmith_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_elusive_magnolia_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_ericchchiu_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_est_roberta': 'CamemBertEmbeddings', 'camembert_embeddings_fjluque_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_fjluque_generic_model2': 'CamemBertEmbeddings', 'camembert_embeddings_generic2': 'CamemBertEmbeddings', 'camembert_embeddings_generic_model_r91m': 'CamemBertEmbeddings', 'camembert_embeddings_generic_model_test': 'CamemBertEmbeddings', 'camembert_embeddings_h4d35_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_hackertec_generic': 'CamemBertEmbeddings', 'camembert_embeddings_hasanmurad_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_hasanmuradbuet_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_henrywang_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_jcai1_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_joe8zhang_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_jonathansum_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_juliencarbonnell_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_katrin_kc_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_katster_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_kaushikacharya_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_leisa_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_lewtun_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_lijingxin_generic_model': 
'CamemBertEmbeddings', 'camembert_embeddings_lijingxin_generic_model_2': 'CamemBertEmbeddings', 'camembert_embeddings_linyi_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_mbateman_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_mohammadrea76_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_myx4567_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_new_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_osanseviero_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_peterhsu_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_pgperrone_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_safik_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_sebu_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_seyfullah_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_sloberta': 'CamemBertEmbeddings', 'camembert_embeddings_sonny_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_summfinfr': 'CamemBertEmbeddings', 'camembert_embeddings_tf_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_tnagata_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_tpanza_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_umberto_commoncrawl_cased_v1': 'CamemBertEmbeddings', 'camembert_embeddings_umberto_wikipedia_uncased_v1': 'CamemBertEmbeddings', 'camembert_embeddings_wangst_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_weipeng_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_xkang_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_yancong_generic_model': 'CamemBertEmbeddings', 'camembert_embeddings_ysharma_generic_model_2': 'CamemBertEmbeddings', 'camembert_embeddings_zhenghuabin_generic_model': 'CamemBertEmbeddings', 'camembert_large': 'CamemBertEmbeddings', 'camembert_large_sequence_classifier_allocine': 'CamemBertForSequenceClassification', 'camembert_large_token_classifier_wikiner': 
'CamemBertForTokenClassification', 'chinese_bert_wwm': 'BertEmbeddings', 'chinese_xlnet_base': 'XlnetEmbeddings', 'chunk_embeddings': 'ChunkEmbeddings', 'classifierdl_ade_biobert': 'ClassifierDLModel', 'classifierdl_ade_clinicalbert': 'ClassifierDLModel', 'classifierdl_ade_conversational_biobert': 'ClassifierDLModel', 'classifierdl_bert_news': 'ClassifierDLModel', 'classifierdl_bert_sentiment': 'ClassifierDLModel', 'classifierdl_berturk_cyberbullying': 'ClassifierDLModel', 'classifierdl_distilbert_sentiment': 'ClassifierDLModel', 'classifierdl_electra_questionpair': 'ClassifierDLModel', 'classifierdl_gender_biobert': 'ClassifierDLModel', 'classifierdl_gender_sbert': 'ClassifierDLModel', 'classifierdl_pico_biobert': 'ClassifierDLModel', 'classifierdl_urduvec_fakenews': 'ClassifierDLModel', 'classifierdl_use_atis': 'ClassifierDLModel', 'classifierdl_use_cyberbullying': 'ClassifierDLModel', 'classifierdl_use_emotion': 'ClassifierDLModel', 'classifierdl_use_fakenews': 'ClassifierDLModel', 'classifierdl_use_sarcasm': 'ClassifierDLModel', 'classifierdl_use_sentiment': 'ClassifierDLModel', 'classifierdl_use_snips': 'ClassifierDLModel', 'classifierdl_use_spam': 'ClassifierDLModel', 'classifierdl_use_trec50': 'ClassifierDLModel', 'classifierdl_use_trec6': 'ClassifierDLModel', 'classifierdl_xlm_roberta_sentiment': 'ClassifierDLModel', 'clinical_deidentification': 'MedicalNerModel', 'clinical_longformer': 'LongformerEmbeddings', 'context_parser': 'ContextualParserModel', 'covidbert_large_uncased': 'BertEmbeddings', 'cvx_code_mapper': 'ChunkMapperModel', 'cvx_name_mapper': 'ChunkMapperModel', 'dane_ner_6B_100': 'NerDLModel', 'dane_ner_6B_300': 'NerDLModel', 'dane_ner_840B_300': 'NerDLModel', 'date_matcher': 'DateMatcher', 'deberta_v3_base': 'DeBertaEmbeddings', 'deberta_v3_base_sequence_classifier_imdb': 'DeBertaForSequenceClassification', 'deberta_v3_base_token_classifier_conll03': 'DeBertaForTokenClassification', 'deberta_v3_base_token_classifier_ontonotes': 
'DeBertaForTokenClassification', 'deberta_v3_large': 'DeBertaEmbeddings', 'deberta_v3_large_sequence_classifier_imdb': 'DeBertaForSequenceClassification', 'deberta_v3_large_token_classifier_conll03': 'DeBertaForTokenClassification', 'deberta_v3_large_token_classifier_ontonotes': 'DeBertaForTokenClassification', 'deberta_v3_small': 'DeBertaEmbeddings', 'deberta_v3_small_sequence_classifier_ag_news': 'DeBertaForSequenceClassification', 'deberta_v3_small_sequence_classifier_dbpedia_14': 'DeBertaForSequenceClassification', 'deberta_v3_small_sequence_classifier_imdb': 'DeBertaForSequenceClassification', 'deberta_v3_small_token_classifier_conll03': 'DeBertaForTokenClassification', 'deberta_v3_small_token_classifier_ontonotes': 'DeBertaForTokenClassification', 'deberta_v3_xsmall': 'DeBertaEmbeddings', 'deberta_v3_xsmall_qa_squad2': 'DeBertaForQuestionAnswering', 'deberta_v3_xsmall_sequence_classifier_ag_news': 'DeBertaForSequenceClassification', 'deberta_v3_xsmall_sequence_classifier_imdb': 'DeBertaForSequenceClassification', 'deberta_v3_xsmall_token_classifier_conll03': 'DeBertaForTokenClassification', 'deberta_v3_xsmall_token_classifier_ontonotes': 'DeBertaForTokenClassification', 'defaen.resolve.loinc.bluebertult_chunker': 'Chunker', 'default_chunker': 'Chunker', 'deid_rules': 'DeIdentificationModel', 'deidentify_enriched_clinical': 'DeIdentificationModel', 'deidentify_large': 'DeIdentificationModel', 'deidentify_rb': 'DeIdentificationModel', 'deidentify_rb_no_regex': 'DeIdentificationModel', 'dependency_conllu': 'DependencyParserModel', 'dependency_conllu.untyped': 'DependencyParserModel', 'dependency_typed_conllu': 'TypedDependencyParserModel', 'disease_mentions_tweet': 'MedicalNerModel', 'distilbert_base_cased': 'DistilBertEmbeddings', 'distilbert_base_cased_qa_squad2': 'DistilBertForQuestionAnswering', 'distilbert_base_multilingual_cased': 'DistilBertEmbeddings', 'distilbert_base_sequence_classifier_ag_news': 'DistilBertForSequenceClassification', 
'distilbert_base_sequence_classifier_airlines': 'DistilBertForSequenceClassification', 'distilbert_base_sequence_classifier_amazon_polarity': 'DistilBertForSequenceClassification', 'distilbert_base_sequence_classifier_food': 'DistilBertForSequenceClassification', 'distilbert_base_sequence_classifier_imdb': 'DistilBertForSequenceClassification', 'distilbert_base_sequence_classifier_qqp': 'DistilBertForSequenceClassification', 'distilbert_base_sequence_classifier_toxicity': 'DistilBertForSequenceClassification', 'distilbert_base_token_classifier_conll03': 'DistilBertForTokenClassification', 'distilbert_base_token_classifier_few_nerd': 'DistilBertForTokenClassification', 'distilbert_base_token_classifier_masakhaner': 'DistilBertForTokenClassification', 'distilbert_base_token_classifier_ontonotes': 'DistilBertForTokenClassification', 'distilbert_base_uncased': 'DistilBertEmbeddings', 'distilbert_classifier_base_uncased_newspop_student': 'DistilBertForSequenceClassification', 'distilbert_embeddings_BERTino': 'DistilBertEmbeddings', 'distilbert_embeddings_base_uncased': 'DistilBertEmbeddings', 'distilbert_embeddings_base_uncased_sparse_85_unstructured_pruneofa': 'DistilBertEmbeddings', 'distilbert_embeddings_base_uncased_sparse_90_unstructured_pruneofa': 'DistilBertEmbeddings', 'distilbert_embeddings_bertino': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_ar_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_de_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_en_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_es_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_es_multilingual_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_fr_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_german_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_hi_cased': 'DistilBertEmbeddings', 
'distilbert_embeddings_distilbert_base_indonesian': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_it_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_ja_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_nl_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_pl_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_pt_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_ro_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_ru_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_th_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_tr_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_uk_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_uncased_sparse_85_unstructured_pruneofa': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_uncased_sparse_90_unstructured_pruneofa': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_ur_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_base_zh_cased': 'DistilBertEmbeddings', 'distilbert_embeddings_distilbert_fa_zwnj_base': 'DistilBertEmbeddings', 'distilbert_embeddings_finetuned_sarcasm_classification': 'DistilBertEmbeddings', 'distilbert_embeddings_indic_transformers': 'DistilBertEmbeddings', 'distilbert_embeddings_indic_transformers_bn_distilbert': 'DistilBertEmbeddings', 'distilbert_embeddings_indic_transformers_hi_distilbert': 'DistilBertEmbeddings', 'distilbert_embeddings_javanese_distilbert_small': 'DistilBertEmbeddings', 'distilbert_embeddings_javanese_distilbert_small_imdb': 'DistilBertEmbeddings', 'distilbert_embeddings_javanese_small': 'DistilBertEmbeddings', 'distilbert_embeddings_javanese_small_imdb': 'DistilBertEmbeddings', 'distilbert_embeddings_malaysian_distilbert_small': 'DistilBertEmbeddings', 'distilbert_embeddings_malaysian_small': 'DistilBertEmbeddings', 
'distilbert_embeddings_marathi': 'DistilBertEmbeddings', 'distilbert_embeddings_marathi_distilbert': 'DistilBertEmbeddings', 'distilbert_embeddings_test_text': 'DistilBertEmbeddings', 'distilbert_multilingual_sequence_classifier_allocine': 'DistilBertForSequenceClassification', 'distilbert_ner_autotrain_defector_ner_846726994': 'DistilBertForTokenClassification', 'distilbert_ner_autotrain_lucifer_job_title_853727204': 'DistilBertForTokenClassification', 'distilbert_ner_autotrain_lucifer_morningstar_job_859227344': 'DistilBertForTokenClassification', 'distilbert_ner_autotrain_luicfer_company_861827409': 'DistilBertForTokenClassification', 'distilbert_ner_base_multi_cased_finetuned_typo_detection': 'DistilBertForTokenClassification', 'distilbert_ner_distilbert_base_cased_finetuned_conll03_english': 'DistilBertForTokenClassification', 'distilbert_ner_distilbert_base_german_europeana_cased_germeval_14': 'DistilBertForTokenClassification', 'distilbert_ner_distilbert_base_multilingual_cased_finetuned_conll2003_ner': 'DistilBertForTokenClassification', 'distilbert_ner_distilbert_base_multilingual_cased_masakhaner': 'DistilBertForTokenClassification', 'distilbert_ner_distilbert_base_multilingual_cased_ner_hrl': 'DistilBertForTokenClassification', 'distilbert_ner_distilbert_base_uncased_finetuned_conll03_english': 'DistilBertForTokenClassification', 'distilbert_ner_keyphrase_extraction_distilbert_inspec': 'DistilBertForTokenClassification', 'distilbert_ner_keyphrase_extraction_distilbert_kptimes': 'DistilBertForTokenClassification', 'distilbert_ner_keyphrase_extraction_distilbert_openkp': 'DistilBertForTokenClassification', 'distilbert_ner_ma_ner_v7_distil': 'DistilBertForTokenClassification', 'distilbert_qa_21iridescent_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_AQG_CV_Squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_Adrian_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 
'distilbert_qa_Ayoola_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_BERT': 'DistilBertForQuestionAnswering', 'distilbert_qa_BERT_ClinicalQA': 'DistilBertForQuestionAnswering', 'distilbert_qa_COVID_DistilBERTa': 'DistilBertForQuestionAnswering', 'distilbert_qa_COVID_DistilBERTb': 'DistilBertForQuestionAnswering', 'distilbert_qa_COVID_DistilBERTc': 'DistilBertForQuestionAnswering', 'distilbert_qa_FOFer_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_Firat_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_Gayathri_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_Hoang_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_HomayounSadri_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_MTL_base_uncased_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_MYX4567_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_Nadhiya_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_ParulChaudhari_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_Plimpton_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_Raphaelg9_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_Rocketknight1_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_SAE_base_uncased_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_SEISHIN_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_Shashidhar_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_Sourabh714_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_SupriyaArun_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_Thitaree_base_uncased_finetuned_squad': 
'DistilBertForQuestionAnswering', 'distilbert_qa_Tianle_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_V3RX2000_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_Wiam_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_aaraki_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_abhinavkulkarni_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_akr_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_andi611_base_uncased_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_anurag0077_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_anurag0077_base_uncased_finetuned_squad2': 'DistilBertForQuestionAnswering', 'distilbert_qa_arvalinno_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_avioo1_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_cased_distilled_chaii': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_cased_distilled_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_cased_distilled_squad_finetuned_squad_small': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_cased_distilled_squad_finetuned_squad_test': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_cased_sgd_qa_step5000': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_config1': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_config2': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_config3': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_squad2_custom_dataset': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_3feb_2022_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_distilled_chaii': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_distilled_squad': 
'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_distilled_squad_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_finetuned_advers': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_finetuned_duorc_': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_finetuned_indosquad_v2': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_finetuned_infovqa': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_finetuned_jumbling_squad_15': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_finetuned_natural_questions': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_finetuned_squad3': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_finetuned_squad_': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_finetuned_squad_colab': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_finetuned_squad_frozen_v2': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_finetuned_squad_v1': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_finetuned_triviaqa': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_gradient_clinic': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_holtin_finetuned_full_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_holtin_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_qa_with_ner': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_squad2': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_squad2_covid_qa_deepset': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_squad2_with_ner': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_squad2_with_ner_mit_restaurant_with_neg_with_repeat': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_squad2_with_ner_with_neg': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_squad2_with_ner_with_neg_with_multi': 
'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_squad2_with_ner_with_neg_with_multi_with_repeat': 'DistilBertForQuestionAnswering', 'distilbert_qa_base_uncased_squad2_with_ner_with_neg_with_repeat': 'DistilBertForQuestionAnswering', 'distilbert_qa_bdickson_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_bert_base_cased_finetuned_log_parser_winlogbeat': 'DistilBertForQuestionAnswering', 'distilbert_qa_bizlin_distil_model': 'DistilBertForQuestionAnswering', 'distilbert_qa_caiosantillo_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_checkpoint_500_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_custom': 'DistilBertForQuestionAnswering', 'distilbert_qa_custom3': 'DistilBertForQuestionAnswering', 'distilbert_qa_custom4': 'DistilBertForQuestionAnswering', 'distilbert_qa_custom5': 'DistilBertForQuestionAnswering', 'distilbert_qa_dbert_3epoch': 'DistilBertForQuestionAnswering', 'distilbert_qa_distilBertABSA': 'DistilBertForQuestionAnswering', 'distilbert_qa_distil_bert_finetuned_log_parser_1': 'DistilBertForQuestionAnswering', 'distilbert_qa_distil_bert_finetuned_log_parser_winlogbeat': 'DistilBertForQuestionAnswering', 'distilbert_qa_distillbert_base_spanish_uncased_finetuned_qa_mlqa': 'DistilBertForQuestionAnswering', 'distilbert_qa_distillbert_base_spanish_uncased_finetuned_qa_sqac': 'DistilBertForQuestionAnswering', 'distilbert_qa_distillbert_base_spanish_uncased_finetuned_qa_tar': 'DistilBertForQuestionAnswering', 'distilbert_qa_emre_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_en_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_en_de_es_model': 'DistilBertForQuestionAnswering', 'distilbert_qa_en_de_model': 'DistilBertForQuestionAnswering', 'distilbert_qa_en_de_vi_zh_es_model': 'DistilBertForQuestionAnswering', 'distilbert_qa_fadhilarkan_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 
'distilbert_qa_finetuned': 'DistilBertForQuestionAnswering', 'distilbert_qa_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_flat_N_max': 'DistilBertForQuestionAnswering', 'distilbert_qa_gokulkarthik_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_graviraja_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_guhuawuli_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_hark99_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_hcy11_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_hiiii23_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_holtin_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_huggingfaceepita_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_huxxx657_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_jgammack_base_uncased_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_jhoonk_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_jsunster_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_kaggleodin_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_model_QA_5_epoch_RU': 'DistilBertForQuestionAnswering', 'distilbert_qa_multi_finedtuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_multi_finetuned_for_xqua_on_chaii': 'DistilBertForQuestionAnswering', 'distilbert_qa_multi_finetuned_for_xqua_on_tydiqa': 'DistilBertForQuestionAnswering', 'distilbert_qa_mvonwyl_base_uncased_finetuned_squad2': 'DistilBertForQuestionAnswering', 'distilbert_qa_mysquadv2_8Jan22_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_mysquadv2_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_projectmodel_bert': 'DistilBertForQuestionAnswering', 'distilbert_qa_qa': 
'DistilBertForQuestionAnswering', 'distilbert_qa_robustqa_baseline_01': 'DistilBertForQuestionAnswering', 'distilbert_qa_robustqa_baseline_02': 'DistilBertForQuestionAnswering', 'distilbert_qa_robustqa_tapt': 'DistilBertForQuestionAnswering', 'distilbert_qa_sdsqna': 'DistilBertForQuestionAnswering', 'distilbert_qa_single_label_N_max': 'DistilBertForQuestionAnswering', 'distilbert_qa_single_label_N_max_long_training': 'DistilBertForQuestionAnswering', 'distilbert_qa_squad_en_de_es_model': 'DistilBertForQuestionAnswering', 'distilbert_qa_squad_en_de_es_vi_zh_model': 'DistilBertForQuestionAnswering', 'distilbert_qa_squad_slp': 'DistilBertForQuestionAnswering', 'distilbert_qa_squadv1': 'DistilBertForQuestionAnswering', 'distilbert_qa_tabo_base_uncased_finetuned_squad2': 'DistilBertForQuestionAnswering', 'distilbert_qa_test_squad_trained': 'DistilBertForQuestionAnswering', 'distilbert_qa_test_squad_trained_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_tiny_base_cased_distilled_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_transformers_base_uncased_finetuneQA_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_transformers_qa': 'DistilBertForQuestionAnswering', 'distilbert_qa_tucan9389_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_unique_N_max': 'DistilBertForQuestionAnswering', 'distilbert_qa_unqover_base_uncased_newsqa': 'DistilBertForQuestionAnswering', 'distilbert_qa_unqover_base_uncased_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_usami_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_vitusya_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_vkmr_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_qa_vkrishnamoorthy_base_uncased_finetuned_squad': 'DistilBertForQuestionAnswering', 'distilbert_sequence_classifier_ade': 'MedicalDistilBertForSequenceClassification', 
'distilbert_sequence_classifier_ane_distilbert_base_uncased_finetuned_sst_2_english': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_arxiv_topics_distilbert_base_cased': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autonlp_doctor_de_24595544': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autonlp_gibberish_detector_492513457': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autonlp_kaggledays_625717992': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autonlp_ks_530615016': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autonlp_mono_625317956': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autonlp_song_lyrics_18753423': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autonlp_test3_2101787': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autonlp_toxic_new_30516963': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autonlp_tweet_sentiment_extraction_20114061': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autonlp_tweets_classification_23044997': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autotrain_country_recognition_1059336697': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autotrain_finetunedmodelbert_1034335535': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autotrain_imdb_1166543171': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autotrain_kaggle_effective_arguments_1086739296': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autotrain_lucifer_multi_844026969': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autotrain_lucifer_multi_auto_all_837626708': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autotrain_mbtinlp_798824628': 
'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_autotrain_online_orders_755323156': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_banking77': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_bert_base_uncased_emotion': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_bhadresh_savani_distilbert_base_uncased_emotion': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_bias_detection_model': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_c4_zh_distilbert_base_uncased': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_clause_classification': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_cross_encoder_distilbert_it_efederici': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_cross_encoder_mmarco_german_distilbert_base': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_base_cased_trec_coarse': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_base_dutch_cased_toxic_comments': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_base_german_cased_toxic_comments': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_base_turkish_cased_emotion': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_base_uncased_agnews_student': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_base_uncased_emotion_2': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_base_uncased_finetuned_ag_news': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_base_uncased_finetuned_app': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_base_uncased_finetuned_emotion_en_tweets': 'DistilBertForSequenceClassification', 
'distilbert_sequence_classifier_distilbert_base_uncased_finetuned_sst_2_english': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_base_uncased_go_emotions_student': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_base_uncased_if': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_base_uncased_ner_agnews': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_base_uncased_qa_boolq': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_base_uncased_sentiment_sst2': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_multiclass_textclassification': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_political_tweets': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_quality': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_distilbert_tweet_eval_emotion': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_emotion': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_environmental_due_diligence_model': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_finetuned_distilbert_needmining': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_industry': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_industry_classification': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_is_legit_kwd_march_27': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_ma_mlc_v7_distil': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_ma_sa_v7_distil': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_markingmulticlass': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_philschmid_distilbert_base_uncased_emotion': 
'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_policy': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_policy_distilbert_7d': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_sentiment_analysis_sbcbi': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_sentimentanalysisdistillbert': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_speeqo_distilbert_base_uncased_finetuned_sst_2_english': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_sst2': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_state_op_detector': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_testing': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_toxic_comment_model': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_tweet_disaster_classifier': 'DistilBertForSequenceClassification', 'distilbert_sequence_classifier_wiki_complexity': 'DistilBertForSequenceClassification', 'distilbert_token_classifier_persian_ner': 'DistilBertForTokenClassification', 'distilbert_token_classifier_typo_detector': 'DistilBertForTokenClassification', 'distilbert_uncased': 'DistilBertEmbeddings', 'distilroberta_base': 'RoBertaEmbeddings', 'distilroberta_base_token_classifier_ontonotes': 'RoBertaForTokenClassification', 'doc2text': 'DocToText', 'doc2text_table': 'DocToTextTable', 'doc2vec_gigaword_300': 'Doc2VecModel', 'doc2vec_gigaword_wiki_300': 'Doc2VecModel', 'document_normalizer': 'DocumentNormalizer', 'drug_action_treatment_mapper': 'ChunkMapperModel', 'drug_ade_mapper': 'ChunkMapperModel', 'drug_brandname_ndc_mapper': 'ChunkMapperModel', 'drug_category_mapper': 'ChunkMapperModel', 'drug_normalizer': 'NormalizerModel', 'dutch_cc_300d': 'WordEmbeddingsModel', 'electra_base_uncased': 'BertEmbeddings', 'electra_classifier_base_discriminator_offenseval2019_downsample': 'BertForSequenceClassification', 
'electra_classifier_beep_kc_base_bias': 'BertForSequenceClassification', 'electra_classifier_beep_kc_base_hate': 'BertForSequenceClassification', 'electra_classifier_beep_ko_base_v3_discriminator_bias': 'BertForSequenceClassification', 'electra_classifier_beep_ko_base_v3_discriminator_hate': 'BertForSequenceClassification', 'electra_classifier_bertic_tweetsentiment': 'BertForSequenceClassification', 'electra_classifier_dv_small_news_classification': 'BertForSequenceClassification', 'electra_classifier_electricidad_base_finetuned_go_emotions': 'BertForSequenceClassification', 'electra_classifier_kc_base_bad_sentence': 'BertForSequenceClassification', 'electra_classifier_ko_base_bad_sentence': 'BertForSequenceClassification', 'electra_classifier_ko_base_bias': 'BertForSequenceClassification', 'electra_classifier_ko_base_finetuned_nsmc': 'BertForSequenceClassification', 'electra_classifier_ko_base_finetuned_sentiment': 'BertForSequenceClassification', 'electra_classifier_ko_base_gender_bias': 'BertForSequenceClassification', 'electra_classifier_ko_base_v3_bias': 'BertForSequenceClassification', 'electra_classifier_ko_base_v3_gender_bias': 'BertForSequenceClassification', 'electra_classifier_ko_base_v3_generalized_sentiment_analysis': 'BertForSequenceClassification', 'electra_classifier_ko_base_v3_hate_speech': 'BertForSequenceClassification', 'electra_classifier_ko_senti_1': 'BertForSequenceClassification', 'electra_classifier_ko_small_finetuned_intent_cls': 'BertForSequenceClassification', 'electra_classifier_ko_small_finetuned_nsmc': 'BertForSequenceClassification', 'electra_classifier_ko_small_finetuned_sentiment': 'BertForSequenceClassification', 'electra_classifier_korean_hatespeech': 'BertForSequenceClassification', 'electra_classifier_korean_hatespeech_multilabel': 'BertForSequenceClassification', 'electra_classifier_kote_for_easygoing_people': 'BertForSequenceClassification', 'electra_classifier_large_discriminator_nli_efl_hateval': 
'BertForSequenceClassification', 'electra_classifier_large_discriminator_nli_efl_tweeteval': 'BertForSequenceClassification', 'electra_classifier_large_discriminator_snli_mnli_fever_anli_r1_r2_r3_nli': 'BertForSequenceClassification', 'electra_classifier_mfma': 'BertForSequenceClassification', 'electra_classifier_mindlogic_ko_ai_citizen_base': 'BertForSequenceClassification', 'electra_classifier_mrm8488_electricidad_small_finetuned_amazon_review_classification': 'BertForSequenceClassification', 'electra_classifier_nsmc_ko_test_model': 'BertForSequenceClassification', 'electra_classifier_qd_dialog_base_turkish': 'BertForSequenceClassification', 'electra_classifier_qd_quora_base_turkish': 'BertForSequenceClassification', 'electra_classifier_qd_tweet_base_turkish': 'BertForSequenceClassification', 'electra_classifier_small_finetuned_imdb': 'BertForSequenceClassification', 'electra_classifier_tunib_base_bad_sentence': 'BertForSequenceClassification', 'electra_classifier_turkish_sentiment_analysis': 'BertForSequenceClassification', 'electra_embeddings_araelectra_base_generator': 'BertEmbeddings', 'electra_embeddings_delectra_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_french_europeana_cased_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_gc4_64k_0_cased_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_gc4_64k_1000000_cased_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_gc4_64k_100000_cased_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_gc4_64k_200000_cased_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_gc4_64k_300000_cased_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_gc4_64k_400000_cased_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_gc4_64k_500000_cased_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_gc4_64k_600000_cased_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_gc4_64k_700000_cased_generator': 
'BertEmbeddings', 'electra_embeddings_electra_base_gc4_64k_800000_cased_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_gc4_64k_900000_cased_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_italian_xxl_cased_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_japanese_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_turkish_mc4_cased_generator': 'BertEmbeddings', 'electra_embeddings_electra_base_turkish_mc4_uncased_generator': 'BertEmbeddings', 'electra_embeddings_electra_large_generator': 'BertEmbeddings', 'electra_embeddings_electra_small_generator': 'BertEmbeddings', 'electra_embeddings_electra_small_japanese_fin_generator': 'BertEmbeddings', 'electra_embeddings_electra_small_japanese_generator': 'BertEmbeddings', 'electra_embeddings_electra_small_paper_japanese_fin_generator': 'BertEmbeddings', 'electra_embeddings_electra_small_paper_japanese_generator': 'BertEmbeddings', 'electra_embeddings_electra_tagalog_base_cased_generator': 'BertEmbeddings', 'electra_embeddings_electra_tagalog_base_uncased_generator': 'BertEmbeddings', 'electra_embeddings_electra_tagalog_small_cased_generator': 'BertEmbeddings', 'electra_embeddings_electra_tagalog_small_uncased_generator': 'BertEmbeddings', 'electra_embeddings_electricidad_base_generator': 'BertEmbeddings', 'electra_embeddings_finance_koelectra_base_generator': 'BertEmbeddings', 'electra_embeddings_finance_koelectra_small_generator': 'BertEmbeddings', 'electra_embeddings_gelectra_base_generator': 'BertEmbeddings', 'electra_embeddings_gelectra_large_generator': 'BertEmbeddings', 'electra_embeddings_koelectra_base_generator': 'BertEmbeddings', 'electra_embeddings_koelectra_base_v2_generator': 'BertEmbeddings', 'electra_embeddings_koelectra_base_v3_generator': 'BertEmbeddings', 'electra_embeddings_koelectra_small_generator': 'BertEmbeddings', 'electra_embeddings_kr_electra_generator': 'BertEmbeddings', 
'electra_large_uncased': 'BertEmbeddings', 'electra_medal_acronym': 'BertEmbeddings', 'electra_qa_AraELECTRA_discriminator_SOQAL': 'BertForQuestionAnswering', 'electra_qa_AraElectra_base_finetuned_ARCD': 'BertForQuestionAnswering', 'electra_qa_BioM_Base_SQuAD2': 'BertForQuestionAnswering', 'electra_qa_BioM_Base_SQuAD2_BioASQ8B': 'BertForQuestionAnswering', 'electra_qa_BioM_Large_SQuAD2': 'BertForQuestionAnswering', 'electra_qa_BioM_Large_SQuAD2_BioASQ8B': 'BertForQuestionAnswering', 'electra_qa_DSPFirst_Finetuning_1': 'BertForQuestionAnswering', 'electra_qa_DSPFirst_Finetuning_2': 'BertForQuestionAnswering', 'electra_qa_DSPFirst_Finetuning_3': 'BertForQuestionAnswering', 'electra_qa_DSPFirst_Finetuning_4': 'BertForQuestionAnswering', 'electra_qa_DSPFirst_Finetuning_5': 'BertForQuestionAnswering', 'electra_qa_German_question_answer': 'BertForQuestionAnswering', 'electra_qa_TestQA2': 'BertForQuestionAnswering', 'electra_qa_araElectra_SQUAD_ARCD': 'BertForQuestionAnswering', 'electra_qa_araElectra_SQUAD_ARCD_768': 'BertForQuestionAnswering', 'electra_qa_ara_base_artydiqa': 'BertForQuestionAnswering', 'electra_qa_base_best_squad2': 'BertForQuestionAnswering', 'electra_qa_base_chaii': 'BertForQuestionAnswering', 'electra_qa_base_discriminator_finetuned_squad': 'BertForQuestionAnswering', 'electra_qa_base_discriminator_finetuned_squadv1': 'BertForQuestionAnswering', 'electra_qa_base_discriminator_finetuned_squadv2': 'BertForQuestionAnswering', 'electra_qa_base_finetuned_squadv1': 'BertForQuestionAnswering', 'electra_qa_base_finetuned_squadv2': 'BertForQuestionAnswering', 'electra_qa_base_squad2': 'BertForQuestionAnswering', 'electra_qa_base_squad2_covid_deepset': 'BertForQuestionAnswering', 'electra_qa_base_v2_finetuned_korquad': 'BertForQuestionAnswering', 'electra_qa_base_v2_finetuned_korquad_384': 'BertForQuestionAnswering', 'electra_qa_base_v3_discriminator_finetuned_klue_v4': 'BertForQuestionAnswering', 'electra_qa_base_v3_finetuned_korquad': 
'BertForQuestionAnswering', 'electra_qa_biomedtra_small_es_squad2': 'BertForQuestionAnswering', 'electra_qa_elctrafp': 'BertForQuestionAnswering', 'electra_qa_electricidad_small_finetuned_squadv1': 'BertForQuestionAnswering', 'electra_qa_enelpi_squad': 'BertForQuestionAnswering', 'electra_qa_g_base_germanquad': 'BertForQuestionAnswering', 'electra_qa_g_base_germanquad_distilled': 'BertForQuestionAnswering', 'electra_qa_g_large_germanquad': 'BertForQuestionAnswering', 'electra_qa_google_base_discriminator_squad': 'BertForQuestionAnswering', 'electra_qa_google_small_discriminator_squad': 'BertForQuestionAnswering', 'electra_qa_hankzhong_small_discriminator_finetuned_squad': 'BertForQuestionAnswering', 'electra_qa_klue_mrc_base': 'BertForQuestionAnswering', 'electra_qa_large_discriminator_squad2_512': 'BertForQuestionAnswering', 'electra_qa_large_discriminator_squad2_custom_dataset': 'BertForQuestionAnswering', 'electra_qa_large_finetuned_squadv1': 'BertForQuestionAnswering', 'electra_qa_large_squad': 'BertForQuestionAnswering', 'electra_qa_large_synqa': 'BertForQuestionAnswering', 'electra_qa_long': 'BertForQuestionAnswering', 'electra_qa_slp': 'BertForQuestionAnswering', 'electra_qa_small_discriminator_finetuned_squad_1': 'BertForQuestionAnswering', 'electra_qa_small_discriminator_finetuned_squad_2': 'BertForQuestionAnswering', 'electra_qa_small_finetuned_squadv1': 'BertForQuestionAnswering', 'electra_qa_small_finetuned_squadv2': 'BertForQuestionAnswering', 'electra_qa_small_turkish_uncased_discriminator_finetuned': 'BertForQuestionAnswering', 'electra_qa_small_v3_finetuned_korquad': 'BertForQuestionAnswering', 'electra_qa_squad_slp': 'BertForQuestionAnswering', 'electra_qa_turkish': 'BertForQuestionAnswering', 'electra_small_uncased': 'BertEmbeddings', 'elmo': 'ElmoEmbeddings', 'embeddings_biovec': 'WordEmbeddingsModel', 'embeddings_clinical': 'WordEmbeddingsModel', 'embeddings_healthcare': 'WordEmbeddingsModel', 'embeddings_healthcare_100d': 'WordEmbeddingsModel', 
'embeddings_icdoem': 'WordEmbeddingsModel', 'embeddings_icdoem_2ng': 'WordEmbeddingsModel', 'embeddings_scielo_150d': 'WordEmbeddingsModel', 'embeddings_scielo_300d': 'WordEmbeddingsModel', 'embeddings_scielo_50d': 'WordEmbeddingsModel', 'embeddings_scielowiki_150d': 'WordEmbeddingsModel', 'embeddings_scielowiki_300d': 'WordEmbeddingsModel', 'embeddings_scielowiki_50d': 'WordEmbeddingsModel', 'embeddings_sciwiki_150d': 'WordEmbeddingsModel', 'embeddings_sciwiki_300d': 'WordEmbeddingsModel', 'embeddings_sciwiki_50d': 'WordEmbeddingsModel', 'finnish_ner_6B_100': 'NerDLModel', 'finnish_ner_6B_300': 'NerDLModel', 'finnish_ner_840B_300': 'NerDLModel', 'genericclassifier_sdoh_alcohol_usage_binary_sbiobert_cased_mli': 'GenericClassifierModel', 'genericclassifier_sdoh_alcohol_usage_sbiobert_cased_mli': 'GenericClassifierModel', 'genericclassifier_sdoh_economics_binary_sbiobert_cased_mli': 'GenericClassifierModel', 'genericclassifier_sdoh_substance_usage_binary_sbiobert_cased_mli': 'GenericClassifierModel', 'genericclassifier_sdoh_tobacco_usage_sbiobert_cased_mli': 'GenericClassifierModel', 'glove_100d': 'WordEmbeddingsModel', 'glove_6B_100': 'WordEmbeddingsModel', 'glove_6B_300': 'WordEmbeddingsModel', 'glove_840B_300': 'WordEmbeddingsModel', 'google_t5_small_ssm_nq': 'T5Transformer', 'gpt2': 'GPT2Transformer', 'gpt2_distilled': 'GPT2Transformer', 'gpt2_medium': 'GPT2Transformer', 'gpt_large': 'GPT2Transformer', 'hebrew_cc_300d': 'WordEmbeddingsModel', 'hebrewner_cc_300d': 'NerDLModel', 'hindi_cc_300d': 'WordEmbeddingsModel', 'icd10_icd9_mapper': 'ChunkMapperModel', 'icd10cm_mapper': 'ChunkMapperModel', 'icd10cm_snomed_mapper': 'ChunkMapperModel', 'icd10cm_umls_mapper': 'ChunkMapperModel', 'icd9_icd10_mapper': 'ChunkMapperModel', 'icd9_mapper': 'ChunkMapperModel', 'icdo_snomed_mapper': 'ChunkMapperModel', 'image2text': 'ImageToText', 'image_classifier_vit_ALL': 'ViTForImageClassification', 'image_classifier_vit_CarViT': 'ViTForImageClassification', 
'image_classifier_vit_Check_Aligned_Teeth': 'ViTForImageClassification', 'image_classifier_vit_Check_GoodBad_Teeth': 'ViTForImageClassification', 'image_classifier_vit_Check_Gum_Teeth': 'ViTForImageClassification', 'image_classifier_vit_Check_Missing_Teeth': 'ViTForImageClassification', 'image_classifier_vit_Infrastructures': 'ViTForImageClassification', 'image_classifier_vit_Insectodoptera': 'ViTForImageClassification', 'image_classifier_vit_PANDA_ViT': 'ViTForImageClassification', 'image_classifier_vit_PanJuOffset_TwoClass': 'ViTForImageClassification', 'image_classifier_vit_SDO_VT1': 'ViTForImageClassification', 'image_classifier_vit_Teeth_A': 'ViTForImageClassification', 'image_classifier_vit_Teeth_B': 'ViTForImageClassification', 'image_classifier_vit_Teeth_C': 'ViTForImageClassification', 'image_classifier_vit_Test_Model': 'ViTForImageClassification', 'image_classifier_vit_Tomato_Leaf_Classifier': 'ViTForImageClassification', 'image_classifier_vit_VIT_Basic': 'ViTForImageClassification', 'image_classifier_vit_ViTFineTuned': 'ViTForImageClassification', 'image_classifier_vit_ViT_FaceMask_Finetuned': 'ViTForImageClassification', 'image_classifier_vit_Visual_transformer_chihuahua_cookies': 'ViTForImageClassification', 'image_classifier_vit_WEC_types': 'ViTForImageClassification', 'image_classifier_vit__beans': 'ViTForImageClassification', 'image_classifier_vit__flyswot_test': 'ViTForImageClassification', 'image_classifier_vit__spectrogram': 'ViTForImageClassification', 'image_classifier_vit_age_classifier': 'ViTForImageClassification', 'image_classifier_vit_airplanes': 'ViTForImageClassification', 'image_classifier_vit_ak__base_patch16_224_in21k_image_classification': 'ViTForImageClassification', 'image_classifier_vit_amgerindaf': 'ViTForImageClassification', 'image_classifier_vit_animal_classifier': 'ViTForImageClassification', 'image_classifier_vit_animal_classifier_huggingface': 'ViTForImageClassification', 'image_classifier_vit_animals_classifier': 
'ViTForImageClassification', 'image_classifier_vit_anomaly': 'ViTForImageClassification', 'image_classifier_vit_apes': 'ViTForImageClassification', 'image_classifier_vit_architectural_styles': 'ViTForImageClassification', 'image_classifier_vit_asl': 'ViTForImageClassification', 'image_classifier_vit_autotrain_cifar10__base': 'ViTForImageClassification', 'image_classifier_vit_autotrain_dog_vs_food': 'ViTForImageClassification', 'image_classifier_vit_autotrain_fashion_mnist__base': 'ViTForImageClassification', 'image_classifier_vit_baked_goods': 'ViTForImageClassification', 'image_classifier_vit_base_avengers_v1': 'ViTForImageClassification', 'image_classifier_vit_base_beans': 'ViTForImageClassification', 'image_classifier_vit_base_beans_demo': 'ViTForImageClassification', 'image_classifier_vit_base_beans_demo_v2': 'ViTForImageClassification', 'image_classifier_vit_base_beans_demo_v3': 'ViTForImageClassification', 'image_classifier_vit_base_beans_demo_v5': 'ViTForImageClassification', 'image_classifier_vit_base_cats_vs_dogs': 'ViTForImageClassification', 'image_classifier_vit_base_cifar10': 'ViTForImageClassification', 'image_classifier_vit_base_food101': 'ViTForImageClassification', 'image_classifier_vit_base_movie_scenes_v1': 'ViTForImageClassification', 'image_classifier_vit_base_mri': 'ViTForImageClassification', 'image_classifier_vit_base_patch16_224': 'ViTForImageClassification', 'image_classifier_vit_base_patch16_224_cifar10': 'ViTForImageClassification', 'image_classifier_vit_base_patch16_224_finetuned_eurosat': 'ViTForImageClassification', 'image_classifier_vit_base_patch16_224_finetuned_kvasirv2_colonoscopy': 'ViTForImageClassification', 'image_classifier_vit_base_patch16_224_finetuned_largerDataSet_docSeperator_more_labels_all_apache2': 'ViTForImageClassification', 'image_classifier_vit_base_patch16_224_finetuned_pneumothorax': 'ViTForImageClassification', 'image_classifier_vit_base_patch16_224_in21k_aidSat': 'ViTForImageClassification', 
'image_classifier_vit_base_patch16_224_in21k_bantai_v1': 'ViTForImageClassification', 'image_classifier_vit_base_patch16_224_in21k_classify_4scence': 'ViTForImageClassification', 'image_classifier_vit_base_patch16_224_in21k_euroSat': 'ViTForImageClassification', 'image_classifier_vit_base_patch16_224_in21k_finetuned_cifar10': 'ViTForImageClassification', 'image_classifier_vit_base_patch16_224_in21k_snacks': 'ViTForImageClassification', 'image_classifier_vit_base_patch16_224_in21k_ucSat': 'ViTForImageClassification', 'image_classifier_vit_base_patch16_224_recylce_ft': 'ViTForImageClassification', 'image_classifier_vit_base_patch16_384': 'ViTForImageClassification', 'image_classifier_vit_base_patch32_384': 'ViTForImageClassification', 'image_classifier_vit_base_patch32_384_finetuned_eurosat': 'ViTForImageClassification', 'image_classifier_vit_base_xray_pneumonia': 'ViTForImageClassification', 'image_classifier_vit_baseball_stadium_foods': 'ViTForImageClassification', 'image_classifier_vit_beer_vs_wine': 'ViTForImageClassification', 'image_classifier_vit_beer_whisky_wine_detection': 'ViTForImageClassification', 'image_classifier_vit_blocks': 'ViTForImageClassification', 'image_classifier_vit_cifar10': 'ViTForImageClassification', 'image_classifier_vit_cifar_10_2': 'ViTForImageClassification', 'image_classifier_vit_computer_stuff': 'ViTForImageClassification', 'image_classifier_vit_croupier_creature_classifier': 'ViTForImageClassification', 'image_classifier_vit_deit_base_patch16_224': 'ViTForImageClassification', 'image_classifier_vit_deit_flyswot': 'ViTForImageClassification', 'image_classifier_vit_deit_small_patch16_224': 'ViTForImageClassification', 'image_classifier_vit_deit_tiny_patch16_224': 'ViTForImageClassification', 'image_classifier_vit_demo': 'ViTForImageClassification', 'image_classifier_vit_denver_nyc_paris': 'ViTForImageClassification', 'image_classifier_vit_diam': 'ViTForImageClassification', 'image_classifier_vit_digital': 'ViTForImageClassification', 
'image_classifier_vit_dog': 'ViTForImageClassification', 'image_classifier_vit_dog_breed_classifier': 'ViTForImageClassification', 'image_classifier_vit_dog_food__base_patch16_224_in21k': 'ViTForImageClassification', 'image_classifier_vit_dog_races': 'ViTForImageClassification', 'image_classifier_vit_dog_vs_chicken': 'ViTForImageClassification', 'image_classifier_vit_doggos_lol': 'ViTForImageClassification', 'image_classifier_vit_dogs': 'ViTForImageClassification', 'image_classifier_vit_dwarf_goats': 'ViTForImageClassification', 'image_classifier_vit_electric_2': 'ViTForImageClassification', 'image_classifier_vit_electric_pole_type_classification': 'ViTForImageClassification', 'image_classifier_vit_ex_for_evan': 'ViTForImageClassification', 'image_classifier_vit_exper1_mesum5': 'ViTForImageClassification', 'image_classifier_vit_exper2_mesum5': 'ViTForImageClassification', 'image_classifier_vit_exper3_mesum5': 'ViTForImageClassification', 'image_classifier_vit_exper4_mesum5': 'ViTForImageClassification', 'image_classifier_vit_exper5_mesum5': 'ViTForImageClassification', 'image_classifier_vit_exper6_mesum5': 'ViTForImageClassification', 'image_classifier_vit_exper7_mesum5': 'ViTForImageClassification', 'image_classifier_vit_exper_batch_16_e4': 'ViTForImageClassification', 'image_classifier_vit_exper_batch_16_e8': 'ViTForImageClassification', 'image_classifier_vit_exper_batch_32_e4': 'ViTForImageClassification', 'image_classifier_vit_exper_batch_32_e8': 'ViTForImageClassification', 'image_classifier_vit_exper_batch_8_e4': 'ViTForImageClassification', 'image_classifier_vit_exper_batch_8_e8': 'ViTForImageClassification', 'image_classifier_vit_fancy_animales': 'ViTForImageClassification', 'image_classifier_vit_finetuned_cats_dogs': 'ViTForImageClassification', 'image_classifier_vit_finetuned_chest_xray_pneumonia': 'ViTForImageClassification', 'image_classifier_vit_finetuned_eurosat_kornia': 'ViTForImageClassification', 'image_classifier_vit_flowers': 
'ViTForImageClassification', 'image_classifier_vit_food': 'ViTForImageClassification', 'image_classifier_vit_fruits': 'ViTForImageClassification', 'image_classifier_vit_garbage_classification': 'ViTForImageClassification', 'image_classifier_vit_generation_xyz': 'ViTForImageClassification', 'image_classifier_vit_grain': 'ViTForImageClassification', 'image_classifier_vit_greens': 'ViTForImageClassification', 'image_classifier_vit_gtsrb_model': 'ViTForImageClassification', 'image_classifier_vit_hot_dog_or_sandwich': 'ViTForImageClassification', 'image_classifier_vit_hotdog_not_hotdog': 'ViTForImageClassification', 'image_classifier_vit_housing_categories': 'ViTForImageClassification', 'image_classifier_vit_hugging_geese': 'ViTForImageClassification', 'image_classifier_vit_huggingpics_package_demo_2': 'ViTForImageClassification', 'image_classifier_vit_ice_cream': 'ViTForImageClassification', 'image_classifier_vit_iiif_manuscript_': 'ViTForImageClassification', 'image_classifier_vit_indian_snacks': 'ViTForImageClassification', 'image_classifier_vit_koala_panda_wombat': 'ViTForImageClassification', 'image_classifier_vit_lawn_weeds': 'ViTForImageClassification', 'image_classifier_vit_llama_alpaca_guanaco_vicuna': 'ViTForImageClassification', 'image_classifier_vit_llama_alpaca_snake': 'ViTForImageClassification', 'image_classifier_vit_llama_or_potato': 'ViTForImageClassification', 'image_classifier_vit_llama_or_what': 'ViTForImageClassification', 'image_classifier_vit_lotr': 'ViTForImageClassification', 'image_classifier_vit_lucky_model': 'ViTForImageClassification', 'image_classifier_vit_lung_cancer': 'ViTForImageClassification', 'image_classifier_vit_mit_indoor_scenes': 'ViTForImageClassification', 'image_classifier_vit_modelversion01': 'ViTForImageClassification', 'image_classifier_vit_modeversion1_m6_e4': 'ViTForImageClassification', 'image_classifier_vit_modeversion1_m6_e4n': 'ViTForImageClassification', 'image_classifier_vit_modeversion1_m7_e4': 
'ViTForImageClassification', 'image_classifier_vit_modeversion28_7': 'ViTForImageClassification', 'image_classifier_vit_modeversion2_m7_e8': 'ViTForImageClassification', 'image_classifier_vit_my_bean_VIT': 'ViTForImageClassification', 'image_classifier_vit_new_exper3': 'ViTForImageClassification', 'image_classifier_vit_new_york_tokyo_london': 'ViTForImageClassification', 'image_classifier_vit_occupation_prediction': 'ViTForImageClassification', 'image_classifier_vit_opencampus_age_detection': 'ViTForImageClassification', 'image_classifier_vit_orcs_and_friends': 'ViTForImageClassification', 'image_classifier_vit_oz_fauna': 'ViTForImageClassification', 'image_classifier_vit_pasta_pizza_ravioli': 'ViTForImageClassification', 'image_classifier_vit_pasta_shapes': 'ViTForImageClassification', 'image_classifier_vit_places': 'ViTForImageClassification', 'image_classifier_vit_planes_airlines': 'ViTForImageClassification', 'image_classifier_vit_planes_trains_automobiles': 'ViTForImageClassification', 'image_classifier_vit_platzi__base_beans_omar_espejel': 'ViTForImageClassification', 'image_classifier_vit_pneumonia_bielefeld_dl_course': 'ViTForImageClassification', 'image_classifier_vit_pneumonia_test_attempt': 'ViTForImageClassification', 'image_classifier_vit_pond': 'ViTForImageClassification', 'image_classifier_vit_pond_image_classification_1': 'ViTForImageClassification', 'image_classifier_vit_pond_image_classification_10': 'ViTForImageClassification', 'image_classifier_vit_pond_image_classification_11': 'ViTForImageClassification', 'image_classifier_vit_pond_image_classification_12': 'ViTForImageClassification', 'image_classifier_vit_pond_image_classification_2': 'ViTForImageClassification', 'image_classifier_vit_pond_image_classification_3': 'ViTForImageClassification', 'image_classifier_vit_pond_image_classification_4': 'ViTForImageClassification', 'image_classifier_vit_pond_image_classification_5': 'ViTForImageClassification', 
'image_classifier_vit_pond_image_classification_6': 'ViTForImageClassification', 'image_classifier_vit_pond_image_classification_7': 'ViTForImageClassification', 'image_classifier_vit_pond_image_classification_8': 'ViTForImageClassification', 'image_classifier_vit_pond_image_classification_9': 'ViTForImageClassification', 'image_classifier_vit_puppies_classify': 'ViTForImageClassification', 'image_classifier_vit_rare_bottle': 'ViTForImageClassification', 'image_classifier_vit_rare_puppers': 'ViTForImageClassification', 'image_classifier_vit_rare_puppers2': 'ViTForImageClassification', 'image_classifier_vit_rare_puppers3': 'ViTForImageClassification', 'image_classifier_vit_rare_puppers_09_04_2021': 'ViTForImageClassification', 'image_classifier_vit_rare_puppers_demo': 'ViTForImageClassification', 'image_classifier_vit_rare_puppers_new_auth': 'ViTForImageClassification', 'image_classifier_vit_resnet_50_euroSat': 'ViTForImageClassification', 'image_classifier_vit_resnet_50_ucSat': 'ViTForImageClassification', 'image_classifier_vit_road_good_damaged_condition': 'ViTForImageClassification', 'image_classifier_vit_robot2': 'ViTForImageClassification', 'image_classifier_vit_robot22': 'ViTForImageClassification', 'image_classifier_vit_rock_challenge_DeiT_solo': 'ViTForImageClassification', 'image_classifier_vit_rock_challenge_DeiT_solo_2': 'ViTForImageClassification', 'image_classifier_vit_rock_challenge_ViT_two_by_two': 'ViTForImageClassification', 'image_classifier_vit_roomclassifier': 'ViTForImageClassification', 'image_classifier_vit_roomidentifier': 'ViTForImageClassification', 'image_classifier_vit_rust_image_classification_1': 'ViTForImageClassification', 'image_classifier_vit_rust_image_classification_10': 'ViTForImageClassification', 'image_classifier_vit_rust_image_classification_11': 'ViTForImageClassification', 'image_classifier_vit_rust_image_classification_12': 'ViTForImageClassification', 'image_classifier_vit_rust_image_classification_2': 
'ViTForImageClassification', 'image_classifier_vit_rust_image_classification_3': 'ViTForImageClassification', 'image_classifier_vit_rust_image_classification_4': 'ViTForImageClassification', 'image_classifier_vit_rust_image_classification_5': 'ViTForImageClassification', 'image_classifier_vit_rust_image_classification_6': 'ViTForImageClassification', 'image_classifier_vit_rust_image_classification_7': 'ViTForImageClassification', 'image_classifier_vit_rust_image_classification_8': 'ViTForImageClassification', 'image_classifier_vit_rust_image_classification_9': 'ViTForImageClassification', 'image_classifier_vit_sea_mammals': 'ViTForImageClassification', 'image_classifier_vit_shirt_identifier': 'ViTForImageClassification', 'image_classifier_vit_simple_kitchen': 'ViTForImageClassification', 'image_classifier_vit_skin_type': 'ViTForImageClassification', 'image_classifier_vit_snacks': 'ViTForImageClassification', 'image_classifier_vit_south_indian_foods': 'ViTForImageClassification', 'image_classifier_vit_string_instrument_detector': 'ViTForImageClassification', 'image_classifier_vit_taco_or_what': 'ViTForImageClassification', 'image_classifier_vit_teeth_test': 'ViTForImageClassification', 'image_classifier_vit_teeth_verify': 'ViTForImageClassification', 'image_classifier_vit_test': 'ViTForImageClassification', 'image_classifier_vit_test_model_a': 'ViTForImageClassification', 'image_classifier_vit_tiny__random': 'ViTForImageClassification', 'image_classifier_vit_tiny_patch16_224': 'ViTForImageClassification', 'image_classifier_vit_trainer_rare_puppers': 'ViTForImageClassification', 'image_classifier_vit_upside_down_classifier': 'ViTForImageClassification', 'image_classifier_vit_vc_bantai__withoutAMBI': 'ViTForImageClassification', 'image_classifier_vit_vc_bantai__withoutAMBI_adunest': 'ViTForImageClassification', 'image_classifier_vit_vc_bantai__withoutAMBI_adunest_trial': 'ViTForImageClassification', 'image_classifier_vit_vc_bantai__withoutAMBI_adunest_v1': 
'ViTForImageClassification', 'image_classifier_vit_vc_bantai__withoutAMBI_adunest_v2': 'ViTForImageClassification', 'image_classifier_vit_vc_bantai__withoutAMBI_adunest_v3': 'ViTForImageClassification', 'image_classifier_vit_violation_classification_bantai_': 'ViTForImageClassification', 'image_classifier_vit_violation_classification_bantai__v100ep': 'ViTForImageClassification', 'image_classifier_vit_violation_classification_bantai__v80ep': 'ViTForImageClassification', 'image_classifier_vit_violation_classification_bantai__withES': 'ViTForImageClassification', 'image_classifier_vit_vision_transformer_fmri_classification_ft': 'ViTForImageClassification', 'image_classifier_vit_vision_transformer_v3': 'ViTForImageClassification', 'image_classifier_vit_vision_transformers_spain_or_italy_fan': 'ViTForImageClassification', 'image_classifier_vit_vliegmachine': 'ViTForImageClassification', 'image_classifier_vit_where_am_I_hospital_balcony_hallway_airport_coffee_house': 'ViTForImageClassification', 'image_classifier_vit_where_am_I_hospital_balcony_hallway_airport_coffee_house_apartment_office': 'ViTForImageClassification', 'image_classifier_vit_world_landmarks': 'ViTForImageClassification', 'japanese_cc_300d': 'WordEmbeddingsModel', 'jsl_ner_wip_clinical': 'MedicalNerModel', 'jsl_ner_wip_greedy_clinical': 'MedicalNerModel', 'jsl_ner_wip_modifier_clinical': 'MedicalNerModel', 'jsl_rd_ner_wip_greedy_biobert': 'MedicalNerModel', 'jsl_rd_ner_wip_greedy_clinical': 'MedicalNerModel', 'jsl_sbert_medium_rxnorm': 'BertSentenceEmbeddings', 'kegg_disease_mapper': 'ChunkMapperModel', 'kegg_drug_mapper': 'ChunkMapperModel', 'labse': 'BertSentenceEmbeddings', 'ld_tatoeba_bigru_21': 'LanguageDetectorDL', 'ld_tatoeba_cnn_99': 'LanguageDetectorDL', 'ld_wiki_20': 'LanguageDetectorDL', 'ld_wiki_7': 'LanguageDetectorDL', 'ld_wiki_cnn_231': 'LanguageDetectorDL', 'ld_wiki_tatoeba_cnn_21': 'LanguageDetectorDL', 'ld_wiki_tatoeba_cnn_220': 'LanguageDetectorDL', 'ld_wiki_tatoeba_cnn_375': 
'LanguageDetectorDL', 'ld_wiki_tatoeba_cnn_43': 'LanguageDetectorDL', 'ld_wiki_tatoeba_cnn_95': 'LanguageDetectorDL', 'legal_longformer_base': 'LongformerEmbeddings', 'legalectra_base': 'BertEmbeddings', 'legalectra_small': 'BertEmbeddings', 'lemma': 'LemmatizerModel', 'lemma_afribooms': 'LemmatizerModel', 'lemma_alksnis': 'LemmatizerModel', 'lemma_alpino': 'LemmatizerModel', 'lemma_ancora': 'LemmatizerModel', 'lemma_antbnc': 'LemmatizerModel', 'lemma_arcosg': 'LemmatizerModel', 'lemma_armtdp': 'LemmatizerModel', 'lemma_atis': 'LemmatizerModel', 'lemma_bdt': 'LemmatizerModel', 'lemma_bokmaal': 'LemmatizerModel', 'lemma_bosque': 'LemmatizerModel', 'lemma_boun': 'LemmatizerModel', 'lemma_btb': 'LemmatizerModel', 'lemma_cac': 'LemmatizerModel', 'lemma_ccg': 'LemmatizerModel', 'lemma_cltt': 'LemmatizerModel', 'lemma_csui': 'LemmatizerModel', 'lemma_ctg': 'LemmatizerModel', 'lemma_ddt': 'LemmatizerModel', 'lemma_dxc': 'LemmatizerModel', 'lemma_edt': 'LemmatizerModel', 'lemma_esl': 'LemmatizerModel', 'lemma_ewt': 'LemmatizerModel', 'lemma_farpahc': 'LemmatizerModel', 'lemma_fictree': 'LemmatizerModel', 'lemma_framenet': 'LemmatizerModel', 'lemma_ftb': 'LemmatizerModel', 'lemma_gdt': 'LemmatizerModel', 'lemma_giella': 'LemmatizerModel', 'lemma_gsd': 'LemmatizerModel', 'lemma_gsdluw': 'LemmatizerModel', 'lemma_gsdsimp': 'LemmatizerModel', 'lemma_gum': 'LemmatizerModel', 'lemma_hdt': 'LemmatizerModel', 'lemma_hdtb': 'LemmatizerModel', 'lemma_hiencs': 'LemmatizerModel', 'lemma_hse': 'LemmatizerModel', 'lemma_htb': 'LemmatizerModel', 'lemma_icepahc': 'LemmatizerModel', 'lemma_idt': 'LemmatizerModel', 'lemma_imst': 'LemmatizerModel', 'lemma_isdt': 'LemmatizerModel', 'lemma_ittb': 'LemmatizerModel', 'lemma_iu': 'LemmatizerModel', 'lemma_kaist': 'LemmatizerModel', 'lemma_kenet': 'LemmatizerModel', 'lemma_kyoto': 'LemmatizerModel', 'lemma_lassysmall': 'LemmatizerModel', 'lemma_lfg': 'LemmatizerModel', 'lemma_lines': 'LemmatizerModel', 'lemma_llct': 'LemmatizerModel', 
'lemma_lvtb': 'LemmatizerModel', 'lemma_modern': 'LemmatizerModel', 'lemma_mtg': 'LemmatizerModel', 'lemma_mudt': 'LemmatizerModel', 'lemma_nonstandard': 'LemmatizerModel', 'lemma_nsc': 'LemmatizerModel', 'lemma_nynorsk': 'LemmatizerModel', 'lemma_nynorsklia': 'LemmatizerModel', 'lemma_padt': 'LemmatizerModel', 'lemma_parisstories': 'LemmatizerModel', 'lemma_partut': 'LemmatizerModel', 'lemma_pdb': 'LemmatizerModel', 'lemma_pdt': 'LemmatizerModel', 'lemma_penn': 'LemmatizerModel', 'lemma_perdt': 'LemmatizerModel', 'lemma_perseus': 'LemmatizerModel', 'lemma_postwita': 'LemmatizerModel', 'lemma_proiel': 'LemmatizerModel', 'lemma_rhapsodie': 'LemmatizerModel', 'lemma_rnc': 'LemmatizerModel', 'lemma_rrt': 'LemmatizerModel', 'lemma_sagt': 'LemmatizerModel', 'lemma_scriptorium': 'LemmatizerModel', 'lemma_sequoia': 'LemmatizerModel', 'lemma_seraji': 'LemmatizerModel', 'lemma_set': 'LemmatizerModel', 'lemma_simonero': 'LemmatizerModel', 'lemma_snk': 'LemmatizerModel', 'lemma_spacylookup': 'LemmatizerModel', 'lemma_srcmf': 'LemmatizerModel', 'lemma_ssj': 'LemmatizerModel', 'lemma_sst': 'LemmatizerModel', 'lemma_syntagrus': 'LemmatizerModel', 'lemma_szeged': 'LemmatizerModel', 'lemma_taiga': 'LemmatizerModel', 'lemma_talbanken': 'LemmatizerModel', 'lemma_tdt': 'LemmatizerModel', 'lemma_torot': 'LemmatizerModel', 'lemma_tourism': 'LemmatizerModel', 'lemma_treegal': 'LemmatizerModel', 'lemma_ttb': 'LemmatizerModel', 'lemma_twittiro': 'LemmatizerModel', 'lemma_udante': 'LemmatizerModel', 'lemma_udt': 'LemmatizerModel', 'lemma_udtb': 'LemmatizerModel', 'lemma_ufal': 'LemmatizerModel', 'lemma_vedic': 'LemmatizerModel', 'lemma_vit': 'LemmatizerModel', 'lemma_vtb': 'LemmatizerModel', 'lemma_wtb': 'LemmatizerModel', 'longformer_base_4096': 'LongformerEmbeddings', 'longformer_base_base_qa_squad2': 'LongformerForQuestionAnswering', 'longformer_base_sequence_classifier_ag_news': 'LongformerForSequenceClassification', 'longformer_base_sequence_classifier_imdb': 
'LongformerForSequenceClassification', 'longformer_base_token_classifier_conll03': 'LongformerForTokenClassification', 'longformer_large_4096': 'LongformerEmbeddings', 'longformer_large_token_classifier_conll03': 'LongformerForTokenClassification', 'longformer_legal_base_8192': 'LongformerEmbeddings', 'longformer_legal_embeddings': 'LongformerEmbeddings', 'longformer_qa_Chatbot': 'LongformerForQuestionAnswering', 'longformer_qa_base_4096_finetuned_squadv2': 'LongformerForQuestionAnswering', 'longformer_qa_covid': 'LongformerForQuestionAnswering', 'longformer_qa_large_4096_finetuned_triviaqa': 'LongformerForQuestionAnswering', 'longformer_qa_ponmari': 'LongformerForQuestionAnswering', 'longformer_qa_recruit': 'LongformerForQuestionAnswering', 'longformer_qa_recruit_large': 'LongformerForQuestionAnswering', 'longformer_qa_recruit_v2': 'LongformerForQuestionAnswering', 'mdeberta_v3_base': 'DeBertaEmbeddings', 'mdeberta_v3_base_sequence_classifier_allocine': 'DeBertaForSequenceClassification', 'mdeberta_v3_base_sequence_classifier_imdb': 'DeBertaForSequenceClassification', 'meddroprof_scielowiki': 'MedicalNerModel', 'mesh_umls_mapper': 'ChunkMapperModel', 'multiclassifierdl_use_e2e': 'MultiClassifierDLModel', 'multiclassifierdl_use_toxic': 'MultiClassifierDLModel', 'multiclassifierdl_use_toxic_sm': 'MultiClassifierDLModel', 'ner_abbreviation_clinical': 'MedicalNerModel', 'ner_ade_binary': 'MedicalNerModel', 'ner_ade_biobert': 'MedicalNerModel', 'ner_ade_clinical': 'MedicalNerModel', 'ner_ade_clinicalbert': 'MedicalNerModel', 'ner_ade_healthcare': 'MedicalNerModel', 'ner_anatomy': 'MedicalNerModel', 'ner_anatomy_biobert': 'MedicalNerModel', 'ner_anatomy_coarse': 'MedicalNerModel', 'ner_anatomy_coarse_biobert': 'MedicalNerModel', 'ner_aspect_based_sentiment': 'MedicalNerModel', 'ner_bacterial_species': 'MedicalNerModel', 'ner_biomarker': 'MedicalNerModel', 'ner_biomedical_bc2gm': 'MedicalNerModel', 'ner_bionlp': 'MedicalNerModel', 'ner_bionlp_biobert': 'MedicalNerModel', 
'ner_cancer_genetics': 'MedicalNerModel', 'ner_cellular': 'MedicalNerModel', 'ner_cellular_biobert': 'MedicalNerModel', 'ner_chemd_clinical': 'MedicalNerModel', 'ner_chemicals': 'MedicalNerModel', 'ner_chemprot_biobert': 'MedicalNerModel', 'ner_chemprot_clinical': 'MedicalNerModel', 'ner_chexpert': 'MedicalNerModel', 'ner_clinical': 'MedicalNerModel', 'ner_clinical_bert': 'MedicalNerModel', 'ner_clinical_biobert': 'MedicalNerModel', 'ner_clinical_trials_abstracts': 'MedicalNerModel', 'ner_conll_albert_base_uncased': 'NerDLModel', 'ner_conll_albert_large_uncased': 'NerDLModel', 'ner_conll_bert_base_cased': 'NerDLModel', 'ner_conll_distilbert_base_cased': 'NerDLModel', 'ner_conll_elmo': 'NerDLModel', 'ner_conll_longformer_large_4096': 'NerDLModel', 'ner_conll_roberta_base': 'NerDLModel', 'ner_conll_roberta_large': 'NerDLModel', 'ner_conll_xlm_roberta_base': 'NerDLModel', 'ner_conll_xlnet_base_cased': 'NerDLModel', 'ner_covid_trials': 'MedicalNerModel', 'ner_deid_augmented': 'MedicalNerModel', 'ner_deid_biobert': 'MedicalNerModel', 'ner_deid_enriched': 'MedicalNerModel', 'ner_deid_enriched_biobert': 'MedicalNerModel', 'ner_deid_generic': 'MedicalNerModel', 'ner_deid_generic_augmented': 'MedicalNerModel', 'ner_deid_generic_bert': 'MedicalNerModel', 'ner_deid_generic_roberta': 'MedicalNerModel', 'ner_deid_generic_roberta_augmented': 'MedicalNerModel', 'ner_deid_large': 'MedicalNerModel', 'ner_deid_sd': 'MedicalNerModel', 'ner_deid_sd_large': 'MedicalNerModel', 'ner_deid_subentity': 'MedicalNerModel', 'ner_deid_subentity_augmented': 'MedicalNerModel', 'ner_deid_subentity_augmented_i2b2': 'MedicalNerModel', 'ner_deid_subentity_bert': 'MedicalNerModel', 'ner_deid_subentity_roberta': 'MedicalNerModel', 'ner_deid_subentity_roberta_augmented': 'MedicalNerModel', 'ner_deid_synthetic': 'MedicalNerModel', 'ner_deidentify_dl': 'MedicalNerModel', 'ner_diag_proc': 'MedicalNerModel', 'ner_diseases': 'MedicalNerModel', 'ner_diseases_biobert': 'MedicalNerModel', 'ner_diseases_large': 
'MedicalNerModel', 'ner_dl': 'NerDLModel', 'ner_dl_bert': 'NerDLModel', 'ner_dl_bert_base_cased': 'NerDLModel', 'ner_dl_sentence': 'NerDLModel', 'ner_drugprot_clinical': 'MedicalNerModel', 'ner_drugs': 'MedicalNerModel', 'ner_drugs_greedy': 'MedicalNerModel', 'ner_drugs_large': 'MedicalNerModel', 'ner_eu_clinical_case': 'MedicalNerModel', 'ner_events_admission_clinical': 'MedicalNerModel', 'ner_events_biobert': 'MedicalNerModel', 'ner_events_clinical': 'MedicalNerModel', 'ner_events_healthcare': 'MedicalNerModel', 'ner_financial_contract': 'MedicalNerModel', 'ner_genetic_variants': 'MedicalNerModel', 'ner_healthcare': 'MedicalNerModel', 'ner_healthcare_slim': 'MedicalNerModel', 'ner_human_phenotype_gene_biobert': 'MedicalNerModel', 'ner_human_phenotype_gene_clinical': 'MedicalNerModel', 'ner_human_phenotype_go_biobert': 'MedicalNerModel', 'ner_human_phenotype_go_clinical': 'MedicalNerModel', 'ner_jifs_glove_840B_300d': 'NerDLModel', 'ner_jsl': 'MedicalNerModel', 'ner_jsl_biobert': 'MedicalNerModel', 'ner_jsl_enriched': 'MedicalNerModel', 'ner_jsl_enriched_biobert': 'MedicalNerModel', 'ner_jsl_greedy_biobert': 'MedicalNerModel', 'ner_jsl_slim': 'MedicalNerModel', 'ner_kmou_glove_840B_300d': 'NerDLModel', 'ner_legal': 'MedicalNerModel', 'ner_living_species': 'MedicalNerModel', 'ner_living_species_300': 'MedicalNerModel', 'ner_living_species_bert': 'MedicalNerModel', 'ner_living_species_biobert': 'MedicalNerModel', 'ner_living_species_roberta': 'MedicalNerModel', 'ner_lst20_glove_840B_300d': 'NerDLModel', 'ner_measurements_clinical': 'MedicalNerModel', 'ner_medmentions_coarse': 'MedicalNerModel', 'ner_mit_movie_complex_bert_base_cased': 'NerDLModel', 'ner_mit_movie_complex_distilbert_base_cased': 'NerDLModel', 'ner_mit_movie_simple_distilbert_base_cased': 'NerDLModel', 'ner_msra_bert_768d': 'NerDLModel', 'ner_nature_nero_clinical': 'MedicalNerModel', 'ner_negation_uncertainty': 'MedicalNerModel', 'ner_neoplasms': 'NerDLModel', 'ner_nihss': 'MedicalNerModel', 
'ner_oncology': 'MedicalNerModel', 'ner_oncology_anatomy_general': 'MedicalNerModel', 'ner_oncology_anatomy_general_healthcare': 'MedicalNerModel', 'ner_oncology_anatomy_general_wip': 'MedicalNerModel', 'ner_oncology_anatomy_granular': 'MedicalNerModel', 'ner_oncology_anatomy_granular_wip': 'MedicalNerModel', 'ner_oncology_biomarker': 'MedicalNerModel', 'ner_oncology_biomarker_healthcare': 'MedicalNerModel', 'ner_oncology_biomarker_wip': 'MedicalNerModel', 'ner_oncology_demographics': 'MedicalNerModel', 'ner_oncology_demographics_wip': 'MedicalNerModel', 'ner_oncology_diagnosis': 'MedicalNerModel', 'ner_oncology_diagnosis_wip': 'MedicalNerModel', 'ner_oncology_posology': 'MedicalNerModel', 'ner_oncology_posology_wip': 'MedicalNerModel', 'ner_oncology_response_to_treatment': 'MedicalNerModel', 'ner_oncology_response_to_treatment_wip': 'MedicalNerModel', 'ner_oncology_test': 'MedicalNerModel', 'ner_oncology_test_wip': 'MedicalNerModel', 'ner_oncology_therapy': 'MedicalNerModel', 'ner_oncology_therapy_wip': 'MedicalNerModel', 'ner_oncology_tnm': 'MedicalNerModel', 'ner_oncology_tnm_wip': 'MedicalNerModel', 'ner_oncology_unspecific_posology': 'MedicalNerModel', 'ner_oncology_unspecific_posology_healthcare': 'MedicalNerModel', 'ner_oncology_unspecific_posology_wip': 'MedicalNerModel', 'ner_oncology_wip': 'MedicalNerModel', 'ner_ontonotes_distilbert_base_cased': 'NerDLModel', 'ner_ontonotes_roberta_base': 'NerDLModel', 'ner_ontonotes_roberta_large': 'NerDLModel', 'ner_pathogen': 'MedicalNerModel', 'ner_pharmacology': 'MedicalNerModel', 'ner_posology': 'MedicalNerModel', 'ner_posology_biobert': 'MedicalNerModel', 'ner_posology_experimental': 'MedicalNerModel', 'ner_posology_greedy': 'MedicalNerModel', 'ner_posology_healthcare': 'MedicalNerModel', 'ner_posology_large': 'MedicalNerModel', 'ner_posology_large_biobert': 'MedicalNerModel', 'ner_posology_small': 'MedicalNerModel', 'ner_radiology': 'MedicalNerModel', 'ner_radiology_wip_clinical': 'MedicalNerModel', 
'ner_risk_factors': 'MedicalNerModel', 'ner_risk_factors_biobert': 'MedicalNerModel', 'ner_sdoh_mentions': 'MedicalNerModel', 'ner_sdoh_mentions_test': 'MedicalNerModel', 'ner_sdoh_slim_wip': 'MedicalNerModel', 'ner_supplement_clinical': 'MedicalNerModel', 'ner_traffic': 'MedicalNerModel', 'ner_ud_gsd_bert_base_japanese': 'NerDLModel', 'ner_ud_gsd_cc_300d': 'NerDLModel', 'ner_ud_gsd_glove_840B_300d': 'NerDLModel', 'ner_ud_gsd_xlm_roberta_base': 'NerDLModel', 'ner_weibo_bert_768d': 'NerDLModel', 'ner_wikiner_glove_840B_300': 'NerDLModel', 'ner_wikiner_xlm_roberta_base': 'NerDLModel', 'ner_xtreme_glove_840B_300': 'NerDLModel', 'ner_xtreme_xlm_roberta_xtreme_base': 'NerDLModel', 'nerdl_atis_840b_300d': 'NerDLModel', 'nerdl_conll_deberta_base': 'NerDLModel', 'nerdl_conll_deberta_large': 'NerDLModel', 'nerdl_conll_elmo': 'NerDLModel', 'nerdl_fewnerd_100d': 'NerDLModel', 'nerdl_fewnerd_subentity_100d': 'NerDLModel', 'nerdl_restaurant_100d': 'NerDLModel', 'nerdl_snips_100d': 'NerDLModel', 'nerdl_tumour_demo': 'MedicalNerModel', 'ngram': 'NGramGenerator', 'nl': 'RoBertaEmbeddings', 'norm': 'NormalizerModel', 'normalizer': 'NormalizerModel', 'norne_6B_100': 'NerDLModel', 'norne_6B_300': 'NerDLModel', 'norne_840B_300': 'NerDLModel', 'onto_100': 'NerDLModel', 'onto_300': 'NerDLModel', 'onto_bert_base_cased': 'NerDLModel', 'onto_bert_large_cased': 'NerDLModel', 'onto_electra_base_uncased': 'NerDLModel', 'onto_electra_large_uncased': 'NerDLModel', 'onto_electra_small_uncased': 'NerDLModel', 'onto_small_bert_L2_128': 'NerDLModel', 'onto_small_bert_L4_256': 'NerDLModel', 'onto_small_bert_L4_512': 'NerDLModel', 'onto_small_bert_L8_512': 'NerDLModel', 'opus_mt_aav_en': 'MarianTransformer', 'opus_mt_aed_es': 'MarianTransformer', 'opus_mt_af_de': 'MarianTransformer', 'opus_mt_af_en': 'MarianTransformer', 'opus_mt_af_eo': 'MarianTransformer', 'opus_mt_af_es': 'MarianTransformer', 'opus_mt_af_fi': 'MarianTransformer', 'opus_mt_af_fr': 'MarianTransformer', 'opus_mt_af_nl': 
'MarianTransformer', 'opus_mt_af_ru': 'MarianTransformer', 'opus_mt_af_sv': 'MarianTransformer', 'opus_mt_afa_afa': 'MarianTransformer', 'opus_mt_afa_en': 'MarianTransformer', 'opus_mt_alv_en': 'MarianTransformer', 'opus_mt_am_sv': 'MarianTransformer', 'opus_mt_ar_de': 'MarianTransformer', 'opus_mt_ar_el': 'MarianTransformer', 'opus_mt_ar_en': 'MarianTransformer', 'opus_mt_ar_eo': 'MarianTransformer', 'opus_mt_ar_es': 'MarianTransformer', 'opus_mt_ar_fr': 'MarianTransformer', 'opus_mt_ar_he': 'MarianTransformer', 'opus_mt_ar_it': 'MarianTransformer', 'opus_mt_ar_pl': 'MarianTransformer', 'opus_mt_ar_ru': 'MarianTransformer', 'opus_mt_ar_tr': 'MarianTransformer', 'opus_mt_art_en': 'MarianTransformer', 'opus_mt_ase_de': 'MarianTransformer', 'opus_mt_ase_en': 'MarianTransformer', 'opus_mt_ase_es': 'MarianTransformer', 'opus_mt_ase_fr': 'MarianTransformer', 'opus_mt_ase_sv': 'MarianTransformer', 'opus_mt_az_en': 'MarianTransformer', 'opus_mt_az_es': 'MarianTransformer', 'opus_mt_az_tr': 'MarianTransformer', 'opus_mt_bat_en': 'MarianTransformer', 'opus_mt_bcl_de': 'MarianTransformer', 'opus_mt_bcl_en': 'MarianTransformer', 'opus_mt_bcl_es': 'MarianTransformer', 'opus_mt_bcl_fi': 'MarianTransformer', 'opus_mt_bcl_fr': 'MarianTransformer', 'opus_mt_bcl_sv': 'MarianTransformer', 'opus_mt_be_es': 'MarianTransformer', 'opus_mt_bem_en': 'MarianTransformer', 'opus_mt_bem_es': 'MarianTransformer', 'opus_mt_bem_fi': 'MarianTransformer', 'opus_mt_bem_fr': 'MarianTransformer', 'opus_mt_bem_sv': 'MarianTransformer', 'opus_mt_ber_en': 'MarianTransformer', 'opus_mt_ber_es': 'MarianTransformer', 'opus_mt_ber_fr': 'MarianTransformer', 'opus_mt_bg_de': 'MarianTransformer', 'opus_mt_bg_en': 'MarianTransformer', 'opus_mt_bg_eo': 'MarianTransformer', 'opus_mt_bg_es': 'MarianTransformer', 'opus_mt_bg_fi': 'MarianTransformer', 'opus_mt_bg_fr': 'MarianTransformer', 'opus_mt_bg_it': 'MarianTransformer', 'opus_mt_bg_ru': 'MarianTransformer', 'opus_mt_bg_sv': 'MarianTransformer', 
'opus_mt_bg_tr': 'MarianTransformer', 'opus_mt_bg_uk': 'MarianTransformer', 'opus_mt_bi_en': 'MarianTransformer', 'opus_mt_bi_es': 'MarianTransformer', 'opus_mt_bi_fr': 'MarianTransformer', 'opus_mt_bi_sv': 'MarianTransformer', 'opus_mt_bn_en': 'MarianTransformer', 'opus_mt_bnt_en': 'MarianTransformer', 'opus_mt_bzs_en': 'MarianTransformer', 'opus_mt_bzs_es': 'MarianTransformer', 'opus_mt_bzs_fi': 'MarianTransformer', 'opus_mt_bzs_fr': 'MarianTransformer', 'opus_mt_bzs_sv': 'MarianTransformer', 'opus_mt_ca_de': 'MarianTransformer', 'opus_mt_ca_en': 'MarianTransformer', 'opus_mt_ca_es': 'MarianTransformer', 'opus_mt_ca_fr': 'MarianTransformer', 'opus_mt_ca_it': 'MarianTransformer', 'opus_mt_ca_nl': 'MarianTransformer', 'opus_mt_ca_pt': 'MarianTransformer', 'opus_mt_ca_uk': 'MarianTransformer', 'opus_mt_cau_en': 'MarianTransformer', 'opus_mt_ccs_en': 'MarianTransformer', 'opus_mt_ceb_en': 'MarianTransformer', 'opus_mt_ceb_es': 'MarianTransformer', 'opus_mt_ceb_fi': 'MarianTransformer', 'opus_mt_ceb_fr': 'MarianTransformer', 'opus_mt_ceb_sv': 'MarianTransformer', 'opus_mt_cel_en': 'MarianTransformer', 'opus_mt_chk_en': 'MarianTransformer', 'opus_mt_chk_es': 'MarianTransformer', 'opus_mt_chk_fr': 'MarianTransformer', 'opus_mt_chk_sv': 'MarianTransformer', 'opus_mt_cpf_en': 'MarianTransformer', 'opus_mt_cpp_cpp': 'MarianTransformer', 'opus_mt_cpp_en': 'MarianTransformer', 'opus_mt_crs_de': 'MarianTransformer', 'opus_mt_crs_en': 'MarianTransformer', 'opus_mt_crs_es': 'MarianTransformer', 'opus_mt_crs_fi': 'MarianTransformer', 'opus_mt_crs_fr': 'MarianTransformer', 'opus_mt_crs_sv': 'MarianTransformer', 'opus_mt_cs_de': 'MarianTransformer', 'opus_mt_cs_en': 'MarianTransformer', 'opus_mt_cs_eo': 'MarianTransformer', 'opus_mt_cs_fi': 'MarianTransformer', 'opus_mt_cs_fr': 'MarianTransformer', 'opus_mt_cs_sv': 'MarianTransformer', 'opus_mt_cs_uk': 'MarianTransformer', 'opus_mt_csg_es': 'MarianTransformer', 'opus_mt_csn_es': 'MarianTransformer', 'opus_mt_cus_en': 
'MarianTransformer', 'opus_mt_cy_en': 'MarianTransformer', 'opus_mt_da_de': 'MarianTransformer', 'opus_mt_da_en': 'MarianTransformer', 'opus_mt_da_eo': 'MarianTransformer', 'opus_mt_da_es': 'MarianTransformer', 'opus_mt_da_fi': 'MarianTransformer', 'opus_mt_da_fr': 'MarianTransformer', 'opus_mt_da_no': 'MarianTransformer', 'opus_mt_da_ru': 'MarianTransformer', 'opus_mt_de_af': 'MarianTransformer', 'opus_mt_de_ar': 'MarianTransformer', 'opus_mt_de_ase': 'MarianTransformer', 'opus_mt_de_bcl': 'MarianTransformer', 'opus_mt_de_bg': 'MarianTransformer', 'opus_mt_de_bi': 'MarianTransformer', 'opus_mt_de_bzs': 'MarianTransformer', 'opus_mt_de_ca': 'MarianTransformer', 'opus_mt_de_crs': 'MarianTransformer', 'opus_mt_de_cs': 'MarianTransformer', 'opus_mt_de_da': 'MarianTransformer', 'opus_mt_de_de': 'MarianTransformer', 'opus_mt_de_ee': 'MarianTransformer', 'opus_mt_de_efi': 'MarianTransformer', 'opus_mt_de_el': 'MarianTransformer', 'opus_mt_de_en': 'MarianTransformer', 'opus_mt_de_eo': 'MarianTransformer', 'opus_mt_de_es': 'MarianTransformer', 'opus_mt_de_et': 'MarianTransformer', 'opus_mt_de_eu': 'MarianTransformer', 'opus_mt_de_fi': 'MarianTransformer', 'opus_mt_de_fj': 'MarianTransformer', 'opus_mt_de_fr': 'MarianTransformer', 'opus_mt_de_gaa': 'MarianTransformer', 'opus_mt_de_gil': 'MarianTransformer', 'opus_mt_de_guw': 'MarianTransformer', 'opus_mt_de_ha': 'MarianTransformer', 'opus_mt_de_he': 'MarianTransformer', 'opus_mt_de_hil': 'MarianTransformer', 'opus_mt_de_ho': 'MarianTransformer', 'opus_mt_de_hr': 'MarianTransformer', 'opus_mt_de_ht': 'MarianTransformer', 'opus_mt_de_hu': 'MarianTransformer', 'opus_mt_de_ig': 'MarianTransformer', 'opus_mt_de_ilo': 'MarianTransformer', 'opus_mt_de_is': 'MarianTransformer', 'opus_mt_de_iso': 'MarianTransformer', 'opus_mt_de_it': 'MarianTransformer', 'opus_mt_de_kg': 'MarianTransformer', 'opus_mt_de_ln': 'MarianTransformer', 'opus_mt_de_loz': 'MarianTransformer', 'opus_mt_de_lt': 'MarianTransformer', 'opus_mt_de_lua': 
'MarianTransformer', 'opus_mt_de_ms': 'MarianTransformer', 'opus_mt_de_mt': 'MarianTransformer', 'opus_mt_de_niu': 'MarianTransformer', 'opus_mt_de_nl': 'MarianTransformer', 'opus_mt_de_no': 'MarianTransformer', 'opus_mt_de_nso': 'MarianTransformer', 'opus_mt_de_ny': 'MarianTransformer', 'opus_mt_de_pag': 'MarianTransformer', 'opus_mt_de_pap': 'MarianTransformer', 'opus_mt_de_pis': 'MarianTransformer', 'opus_mt_de_pl': 'MarianTransformer', 'opus_mt_de_pon': 'MarianTransformer', 'opus_mt_de_tl': 'MarianTransformer', 'opus_mt_de_uk': 'MarianTransformer', 'opus_mt_de_vi': 'MarianTransformer', 'opus_mt_dra_en': 'MarianTransformer', 'opus_mt_ee_de': 'MarianTransformer', 'opus_mt_ee_en': 'MarianTransformer', 'opus_mt_ee_es': 'MarianTransformer', 'opus_mt_ee_fi': 'MarianTransformer', 'opus_mt_ee_fr': 'MarianTransformer', 'opus_mt_ee_sv': 'MarianTransformer', 'opus_mt_efi_de': 'MarianTransformer', 'opus_mt_efi_en': 'MarianTransformer', 'opus_mt_efi_fi': 'MarianTransformer', 'opus_mt_efi_fr': 'MarianTransformer', 'opus_mt_efi_sv': 'MarianTransformer', 'opus_mt_el_ar': 'MarianTransformer', 'opus_mt_el_eo': 'MarianTransformer', 'opus_mt_el_fi': 'MarianTransformer', 'opus_mt_el_fr': 'MarianTransformer', 'opus_mt_el_sv': 'MarianTransformer', 'opus_mt_en_aav': 'MarianTransformer', 'opus_mt_en_af': 'MarianTransformer', 'opus_mt_en_afa': 'MarianTransformer', 'opus_mt_en_alv': 'MarianTransformer', 'opus_mt_en_ar': 'MarianTransformer', 'opus_mt_en_az': 'MarianTransformer', 'opus_mt_en_bat': 'MarianTransformer', 'opus_mt_en_bcl': 'MarianTransformer', 'opus_mt_en_bem': 'MarianTransformer', 'opus_mt_en_ber': 'MarianTransformer', 'opus_mt_en_bg': 'MarianTransformer', 'opus_mt_en_bi': 'MarianTransformer', 'opus_mt_en_bnt': 'MarianTransformer', 'opus_mt_en_bzs': 'MarianTransformer', 'opus_mt_en_ca': 'MarianTransformer', 'opus_mt_en_ceb': 'MarianTransformer', 'opus_mt_en_cel': 'MarianTransformer', 'opus_mt_en_chk': 'MarianTransformer', 'opus_mt_en_cpf': 'MarianTransformer', 
'opus_mt_en_cpp': 'MarianTransformer', 'opus_mt_en_crs': 'MarianTransformer', 'opus_mt_en_cs': 'MarianTransformer', 'opus_mt_en_cus': 'MarianTransformer', 'opus_mt_en_cy': 'MarianTransformer', 'opus_mt_en_da': 'MarianTransformer', 'opus_mt_en_de': 'MarianTransformer', 'opus_mt_en_dra': 'MarianTransformer', 'opus_mt_en_ee': 'MarianTransformer', 'opus_mt_en_efi': 'MarianTransformer', 'opus_mt_en_el': 'MarianTransformer', 'opus_mt_en_eo': 'MarianTransformer', 'opus_mt_en_es': 'MarianTransformer', 'opus_mt_en_et': 'MarianTransformer', 'opus_mt_en_eu': 'MarianTransformer', 'opus_mt_en_euq': 'MarianTransformer', 'opus_mt_en_fi': 'MarianTransformer', 'opus_mt_en_fiu': 'MarianTransformer', 'opus_mt_en_fj': 'MarianTransformer', 'opus_mt_en_fr': 'MarianTransformer', 'opus_mt_en_ga': 'MarianTransformer', 'opus_mt_en_gaa': 'MarianTransformer', 'opus_mt_en_gem': 'MarianTransformer', 'opus_mt_en_gil': 'MarianTransformer', 'opus_mt_en_gl': 'MarianTransformer', 'opus_mt_en_gmq': 'MarianTransformer', 'opus_mt_en_gmw': 'MarianTransformer', 'opus_mt_en_grk': 'MarianTransformer', 'opus_mt_en_guw': 'MarianTransformer', 'opus_mt_en_gv': 'MarianTransformer', 'opus_mt_en_ha': 'MarianTransformer', 'opus_mt_en_he': 'MarianTransformer', 'opus_mt_en_hi': 'MarianTransformer', 'opus_mt_en_hil': 'MarianTransformer', 'opus_mt_en_ho': 'MarianTransformer', 'opus_mt_en_ht': 'MarianTransformer', 'opus_mt_en_hu': 'MarianTransformer', 'opus_mt_en_hy': 'MarianTransformer', 'opus_mt_en_id': 'MarianTransformer', 'opus_mt_en_ig': 'MarianTransformer', 'opus_mt_en_iir': 'MarianTransformer', 'opus_mt_en_ilo': 'MarianTransformer', 'opus_mt_en_inc': 'MarianTransformer', 'opus_mt_en_ine': 'MarianTransformer', 'opus_mt_en_is': 'MarianTransformer', 'opus_mt_en_iso': 'MarianTransformer', 'opus_mt_en_it': 'MarianTransformer', 'opus_mt_en_itc': 'MarianTransformer', 'opus_mt_en_jap': 'MarianTransformer', 'opus_mt_en_kg': 'MarianTransformer', 'opus_mt_en_kj': 'MarianTransformer', 'opus_mt_en_kqn': 'MarianTransformer', 
'opus_mt_en_kwn': 'MarianTransformer', 'opus_mt_en_kwy': 'MarianTransformer', 'opus_mt_en_lg': 'MarianTransformer', 'opus_mt_en_ln': 'MarianTransformer', 'opus_mt_en_loz': 'MarianTransformer', 'opus_mt_en_lu': 'MarianTransformer', 'opus_mt_en_lua': 'MarianTransformer', 'opus_mt_en_lue': 'MarianTransformer', 'opus_mt_en_lun': 'MarianTransformer', 'opus_mt_en_luo': 'MarianTransformer', 'opus_mt_en_lus': 'MarianTransformer', 'opus_mt_en_map': 'MarianTransformer', 'opus_mt_en_mfe': 'MarianTransformer', 'opus_mt_en_mg': 'MarianTransformer', 'opus_mt_en_mh': 'MarianTransformer', 'opus_mt_en_mk': 'MarianTransformer', 'opus_mt_en_mkh': 'MarianTransformer', 'opus_mt_en_ml': 'MarianTransformer', 'opus_mt_en_mos': 'MarianTransformer', 'opus_mt_en_mr': 'MarianTransformer', 'opus_mt_en_mt': 'MarianTransformer', 'opus_mt_en_mul': 'MarianTransformer', 'opus_mt_en_ng': 'MarianTransformer', 'opus_mt_en_nic': 'MarianTransformer', 'opus_mt_en_niu': 'MarianTransformer', 'opus_mt_en_nl': 'MarianTransformer', 'opus_mt_en_nso': 'MarianTransformer', 'opus_mt_en_ny': 'MarianTransformer', 'opus_mt_en_nyk': 'MarianTransformer', 'opus_mt_en_om': 'MarianTransformer', 'opus_mt_en_pag': 'MarianTransformer', 'opus_mt_en_pap': 'MarianTransformer', 'opus_mt_en_phi': 'MarianTransformer', 'opus_mt_en_pis': 'MarianTransformer', 'opus_mt_en_pon': 'MarianTransformer', 'opus_mt_en_poz': 'MarianTransformer', 'opus_mt_en_pqe': 'MarianTransformer', 'opus_mt_en_pqw': 'MarianTransformer', 'opus_mt_en_rn': 'MarianTransformer', 'opus_mt_en_rnd': 'MarianTransformer', 'opus_mt_en_ro': 'MarianTransformer', 'opus_mt_en_roa': 'MarianTransformer', 'opus_mt_en_ru': 'MarianTransformer', 'opus_mt_en_run': 'MarianTransformer', 'opus_mt_en_rw': 'MarianTransformer', 'opus_mt_en_sal': 'MarianTransformer', 'opus_mt_en_sem': 'MarianTransformer', 'opus_mt_en_sg': 'MarianTransformer', 'opus_mt_en_sit': 'MarianTransformer', 'opus_mt_en_sk': 'MarianTransformer', 'opus_mt_en_sla': 'MarianTransformer', 'opus_mt_en_sm': 
'MarianTransformer', 'opus_mt_en_sn': 'MarianTransformer', 'opus_mt_en_sq': 'MarianTransformer', 'opus_mt_en_ss': 'MarianTransformer', 'opus_mt_en_st': 'MarianTransformer', 'opus_mt_en_sv': 'MarianTransformer', 'opus_mt_en_sw': 'MarianTransformer', 'opus_mt_en_swc': 'MarianTransformer', 'opus_mt_en_tdt': 'MarianTransformer', 'opus_mt_en_ti': 'MarianTransformer', 'opus_mt_en_tiv': 'MarianTransformer', 'opus_mt_en_tl': 'MarianTransformer', 'opus_mt_en_tll': 'MarianTransformer', 'opus_mt_en_tn': 'MarianTransformer', 'opus_mt_en_to': 'MarianTransformer', 'opus_mt_en_toi': 'MarianTransformer', 'opus_mt_en_tpi': 'MarianTransformer', 'opus_mt_en_trk': 'MarianTransformer', 'opus_mt_en_ts': 'MarianTransformer', 'opus_mt_en_tut': 'MarianTransformer', 'opus_mt_en_tvl': 'MarianTransformer', 'opus_mt_en_tw': 'MarianTransformer', 'opus_mt_en_ty': 'MarianTransformer', 'opus_mt_en_uk': 'MarianTransformer', 'opus_mt_en_umb': 'MarianTransformer', 'opus_mt_en_ur': 'MarianTransformer', 'opus_mt_en_urj': 'MarianTransformer', 'opus_mt_en_vi': 'MarianTransformer', 'opus_mt_en_xh': 'MarianTransformer', 'opus_mt_en_zh': 'MarianTransformer', 'opus_mt_en_zle': 'MarianTransformer', 'opus_mt_en_zls': 'MarianTransformer', 'opus_mt_en_zlw': 'MarianTransformer', 'opus_mt_eo_af': 'MarianTransformer', 'opus_mt_eo_bg': 'MarianTransformer', 'opus_mt_eo_cs': 'MarianTransformer', 'opus_mt_eo_da': 'MarianTransformer', 'opus_mt_eo_de': 'MarianTransformer', 'opus_mt_eo_el': 'MarianTransformer', 'opus_mt_eo_en': 'MarianTransformer', 'opus_mt_eo_es': 'MarianTransformer', 'opus_mt_eo_fi': 'MarianTransformer', 'opus_mt_eo_fr': 'MarianTransformer', 'opus_mt_eo_he': 'MarianTransformer', 'opus_mt_eo_hu': 'MarianTransformer', 'opus_mt_eo_it': 'MarianTransformer', 'opus_mt_eo_nl': 'MarianTransformer', 'opus_mt_eo_pl': 'MarianTransformer', 'opus_mt_eo_pt': 'MarianTransformer', 'opus_mt_eo_ro': 'MarianTransformer', 'opus_mt_eo_ru': 'MarianTransformer', 'opus_mt_eo_sh': 'MarianTransformer', 'opus_mt_eo_sv': 
'MarianTransformer', 'opus_mt_es_aed': 'MarianTransformer', 'opus_mt_es_af': 'MarianTransformer', 'opus_mt_es_ar': 'MarianTransformer', 'opus_mt_es_ase': 'MarianTransformer', 'opus_mt_es_bcl': 'MarianTransformer', 'opus_mt_es_ber': 'MarianTransformer', 'opus_mt_es_bg': 'MarianTransformer', 'opus_mt_es_bi': 'MarianTransformer', 'opus_mt_es_bzs': 'MarianTransformer', 'opus_mt_es_ca': 'MarianTransformer', 'opus_mt_es_ceb': 'MarianTransformer', 'opus_mt_es_crs': 'MarianTransformer', 'opus_mt_es_cs': 'MarianTransformer', 'opus_mt_es_csg': 'MarianTransformer', 'opus_mt_es_csn': 'MarianTransformer', 'opus_mt_es_da': 'MarianTransformer', 'opus_mt_es_de': 'MarianTransformer', 'opus_mt_es_ee': 'MarianTransformer', 'opus_mt_es_efi': 'MarianTransformer', 'opus_mt_es_el': 'MarianTransformer', 'opus_mt_es_en': 'MarianTransformer', 'opus_mt_es_eo': 'MarianTransformer', 'opus_mt_es_es': 'MarianTransformer', 'opus_mt_es_et': 'MarianTransformer', 'opus_mt_es_eu': 'MarianTransformer', 'opus_mt_es_fi': 'MarianTransformer', 'opus_mt_es_fj': 'MarianTransformer', 'opus_mt_es_fr': 'MarianTransformer', 'opus_mt_es_gaa': 'MarianTransformer', 'opus_mt_es_gil': 'MarianTransformer', 'opus_mt_es_gl': 'MarianTransformer', 'opus_mt_es_guw': 'MarianTransformer', 'opus_mt_es_ha': 'MarianTransformer', 'opus_mt_es_he': 'MarianTransformer', 'opus_mt_es_hil': 'MarianTransformer', 'opus_mt_es_ho': 'MarianTransformer', 'opus_mt_es_hr': 'MarianTransformer', 'opus_mt_es_ht': 'MarianTransformer', 'opus_mt_es_id': 'MarianTransformer', 'opus_mt_es_ig': 'MarianTransformer', 'opus_mt_es_ilo': 'MarianTransformer', 'opus_mt_es_is': 'MarianTransformer', 'opus_mt_es_iso': 'MarianTransformer', 'opus_mt_es_it': 'MarianTransformer', 'opus_mt_es_kg': 'MarianTransformer', 'opus_mt_es_ln': 'MarianTransformer', 'opus_mt_es_loz': 'MarianTransformer', 'opus_mt_es_lt': 'MarianTransformer', 'opus_mt_es_lua': 'MarianTransformer', 'opus_mt_es_lus': 'MarianTransformer', 'opus_mt_es_mfs': 'MarianTransformer', 'opus_mt_es_mk': 
'MarianTransformer', 'opus_mt_es_mt': 'MarianTransformer', 'opus_mt_es_niu': 'MarianTransformer', 'opus_mt_es_nl': 'MarianTransformer', 'opus_mt_es_no': 'MarianTransformer', 'opus_mt_es_nso': 'MarianTransformer', 'opus_mt_es_ny': 'MarianTransformer', 'opus_mt_es_pag': 'MarianTransformer', 'opus_mt_es_pap': 'MarianTransformer', 'opus_mt_es_pis': 'MarianTransformer', 'opus_mt_es_pl': 'MarianTransformer', 'opus_mt_es_pon': 'MarianTransformer', 'opus_mt_es_prl': 'MarianTransformer', 'opus_mt_es_rn': 'MarianTransformer', 'opus_mt_es_ro': 'MarianTransformer', 'opus_mt_es_ru': 'MarianTransformer', 'opus_mt_es_rw': 'MarianTransformer', 'opus_mt_es_sg': 'MarianTransformer', 'opus_mt_es_sl': 'MarianTransformer', 'opus_mt_es_sm': 'MarianTransformer', 'opus_mt_es_sn': 'MarianTransformer', 'opus_mt_es_srn': 'MarianTransformer', 'opus_mt_es_st': 'MarianTransformer', 'opus_mt_es_swc': 'MarianTransformer', 'opus_mt_es_tl': 'MarianTransformer', 'opus_mt_es_tll': 'MarianTransformer', 'opus_mt_es_tn': 'MarianTransformer', 'opus_mt_es_to': 'MarianTransformer', 'opus_mt_es_tpi': 'MarianTransformer', 'opus_mt_es_tvl': 'MarianTransformer', 'opus_mt_es_tw': 'MarianTransformer', 'opus_mt_es_ty': 'MarianTransformer', 'opus_mt_es_tzo': 'MarianTransformer', 'opus_mt_es_uk': 'MarianTransformer', 'opus_mt_es_ve': 'MarianTransformer', 'opus_mt_es_vi': 'MarianTransformer', 'opus_mt_es_war': 'MarianTransformer', 'opus_mt_es_wls': 'MarianTransformer', 'opus_mt_es_xh': 'MarianTransformer', 'opus_mt_es_yo': 'MarianTransformer', 'opus_mt_es_yua': 'MarianTransformer', 'opus_mt_es_zai': 'MarianTransformer', 'opus_mt_et_de': 'MarianTransformer', 'opus_mt_et_en': 'MarianTransformer', 'opus_mt_et_es': 'MarianTransformer', 'opus_mt_et_fi': 'MarianTransformer', 'opus_mt_et_fr': 'MarianTransformer', 'opus_mt_et_ru': 'MarianTransformer', 'opus_mt_et_sv': 'MarianTransformer', 'opus_mt_eu_de': 'MarianTransformer', 'opus_mt_eu_en': 'MarianTransformer', 'opus_mt_eu_es': 'MarianTransformer', 'opus_mt_eu_ru': 
'MarianTransformer', 'opus_mt_euq_en': 'MarianTransformer', 'opus_mt_fi_af': 'MarianTransformer', 'opus_mt_fi_bcl': 'MarianTransformer', 'opus_mt_fi_bem': 'MarianTransformer', 'opus_mt_fi_bg': 'MarianTransformer', 'opus_mt_fi_bzs': 'MarianTransformer', 'opus_mt_fi_ceb': 'MarianTransformer', 'opus_mt_fi_crs': 'MarianTransformer', 'opus_mt_fi_cs': 'MarianTransformer', 'opus_mt_fi_de': 'MarianTransformer', 'opus_mt_fi_ee': 'MarianTransformer', 'opus_mt_fi_efi': 'MarianTransformer', 'opus_mt_fi_el': 'MarianTransformer', 'opus_mt_fi_en': 'MarianTransformer', 'opus_mt_fi_eo': 'MarianTransformer', 'opus_mt_fi_es': 'MarianTransformer', 'opus_mt_fi_et': 'MarianTransformer', 'opus_mt_fi_fi': 'MarianTransformer', 'opus_mt_fi_fj': 'MarianTransformer', 'opus_mt_fi_fr': 'MarianTransformer', 'opus_mt_fi_fse': 'MarianTransformer', 'opus_mt_fi_gaa': 'MarianTransformer', 'opus_mt_fi_gil': 'MarianTransformer', 'opus_mt_fi_guw': 'MarianTransformer', 'opus_mt_fi_ha': 'MarianTransformer', 'opus_mt_fi_he': 'MarianTransformer', 'opus_mt_fi_hil': 'MarianTransformer', 'opus_mt_fi_ho': 'MarianTransformer', 'opus_mt_fi_hr': 'MarianTransformer', 'opus_mt_fi_ht': 'MarianTransformer', 'opus_mt_fi_hu': 'MarianTransformer', 'opus_mt_fi_id': 'MarianTransformer', 'opus_mt_fi_ig': 'MarianTransformer', 'opus_mt_fi_ilo': 'MarianTransformer', 'opus_mt_fi_is': 'MarianTransformer', 'opus_mt_fi_iso': 'MarianTransformer', 'opus_mt_fi_it': 'MarianTransformer', 'opus_mt_fi_kg': 'MarianTransformer', 'opus_mt_fi_kqn': 'MarianTransformer', 'opus_mt_fi_lg': 'MarianTransformer', 'opus_mt_fi_ln': 'MarianTransformer', 'opus_mt_fi_lu': 'MarianTransformer', 'opus_mt_fi_lua': 'MarianTransformer', 'opus_mt_fi_lue': 'MarianTransformer', 'opus_mt_fi_lus': 'MarianTransformer', 'opus_mt_fi_lv': 'MarianTransformer', 'opus_mt_fi_mfe': 'MarianTransformer', 'opus_mt_fi_mg': 'MarianTransformer', 'opus_mt_fi_mh': 'MarianTransformer', 'opus_mt_fi_mk': 'MarianTransformer', 'opus_mt_fi_mos': 'MarianTransformer', 'opus_mt_fi_mt': 
'MarianTransformer', 'opus_mt_fi_niu': 'MarianTransformer', 'opus_mt_fi_nl': 'MarianTransformer', 'opus_mt_fi_no': 'MarianTransformer', 'opus_mt_fi_nso': 'MarianTransformer', 'opus_mt_fi_ny': 'MarianTransformer', 'opus_mt_fi_pag': 'MarianTransformer', 'opus_mt_fi_pap': 'MarianTransformer', 'opus_mt_fi_pis': 'MarianTransformer', 'opus_mt_fi_pon': 'MarianTransformer', 'opus_mt_fi_ro': 'MarianTransformer', 'opus_mt_fi_ru': 'MarianTransformer', 'opus_mt_fi_run': 'MarianTransformer', 'opus_mt_fi_rw': 'MarianTransformer', 'opus_mt_fi_sg': 'MarianTransformer', 'opus_mt_fi_sk': 'MarianTransformer', 'opus_mt_fi_sl': 'MarianTransformer', 'opus_mt_fi_sm': 'MarianTransformer', 'opus_mt_fi_sn': 'MarianTransformer', 'opus_mt_fi_sq': 'MarianTransformer', 'opus_mt_fi_srn': 'MarianTransformer', 'opus_mt_fi_st': 'MarianTransformer', 'opus_mt_fi_sv': 'MarianTransformer', 'opus_mt_fi_sw': 'MarianTransformer', 'opus_mt_fi_swc': 'MarianTransformer', 'opus_mt_fi_tiv': 'MarianTransformer', 'opus_mt_fi_tll': 'MarianTransformer', 'opus_mt_fi_tn': 'MarianTransformer', 'opus_mt_fi_to': 'MarianTransformer', 'opus_mt_fi_toi': 'MarianTransformer', 'opus_mt_fi_tpi': 'MarianTransformer', 'opus_mt_fi_tr': 'MarianTransformer', 'opus_mt_fi_ts': 'MarianTransformer', 'opus_mt_fi_tvl': 'MarianTransformer', 'opus_mt_fi_tw': 'MarianTransformer', 'opus_mt_fi_ty': 'MarianTransformer', 'opus_mt_fi_uk': 'MarianTransformer', 'opus_mt_fi_ve': 'MarianTransformer', 'opus_mt_fi_war': 'MarianTransformer', 'opus_mt_fi_wls': 'MarianTransformer', 'opus_mt_fi_xh': 'MarianTransformer', 'opus_mt_fi_yap': 'MarianTransformer', 'opus_mt_fi_yo': 'MarianTransformer', 'opus_mt_fi_zne': 'MarianTransformer', 'opus_mt_fiu_en': 'MarianTransformer', 'opus_mt_fiu_fiu': 'MarianTransformer', 'opus_mt_fj_en': 'MarianTransformer', 'opus_mt_fj_fr': 'MarianTransformer', 'opus_mt_fr_af': 'MarianTransformer', 'opus_mt_fr_ar': 'MarianTransformer', 'opus_mt_fr_ase': 'MarianTransformer', 'opus_mt_fr_bcl': 'MarianTransformer', 'opus_mt_fr_bem': 
'MarianTransformer', 'opus_mt_fr_ber': 'MarianTransformer', 'opus_mt_fr_bg': 'MarianTransformer', 'opus_mt_fr_bi': 'MarianTransformer', 'opus_mt_fr_bzs': 'MarianTransformer', 'opus_mt_fr_ca': 'MarianTransformer', 'opus_mt_fr_ceb': 'MarianTransformer', 'opus_mt_fr_crs': 'MarianTransformer', 'opus_mt_fr_de': 'MarianTransformer', 'opus_mt_fr_ee': 'MarianTransformer', 'opus_mt_fr_efi': 'MarianTransformer', 'opus_mt_fr_el': 'MarianTransformer', 'opus_mt_fr_en': 'MarianTransformer', 'opus_mt_fr_eo': 'MarianTransformer', 'opus_mt_fr_es': 'MarianTransformer', 'opus_mt_fr_fj': 'MarianTransformer', 'opus_mt_fr_gaa': 'MarianTransformer', 'opus_mt_fr_gil': 'MarianTransformer', 'opus_mt_fr_guw': 'MarianTransformer', 'opus_mt_fr_ha': 'MarianTransformer', 'opus_mt_fr_he': 'MarianTransformer', 'opus_mt_fr_hil': 'MarianTransformer', 'opus_mt_fr_ho': 'MarianTransformer', 'opus_mt_fr_hr': 'MarianTransformer', 'opus_mt_fr_ht': 'MarianTransformer', 'opus_mt_fr_hu': 'MarianTransformer', 'opus_mt_fr_id': 'MarianTransformer', 'opus_mt_fr_ig': 'MarianTransformer', 'opus_mt_fr_ilo': 'MarianTransformer', 'opus_mt_fr_iso': 'MarianTransformer', 'opus_mt_fr_kg': 'MarianTransformer', 'opus_mt_fr_kqn': 'MarianTransformer', 'opus_mt_fr_kwy': 'MarianTransformer', 'opus_mt_fr_lg': 'MarianTransformer', 'opus_mt_fr_ln': 'MarianTransformer', 'opus_mt_fr_loz': 'MarianTransformer', 'opus_mt_fr_lu': 'MarianTransformer', 'opus_mt_fr_lua': 'MarianTransformer', 'opus_mt_fr_lue': 'MarianTransformer', 'opus_mt_fr_lus': 'MarianTransformer', 'opus_mt_fr_mfe': 'MarianTransformer', 'opus_mt_fr_mh': 'MarianTransformer', 'opus_mt_fr_mos': 'MarianTransformer', 'opus_mt_fr_ms': 'MarianTransformer', 'opus_mt_fr_mt': 'MarianTransformer', 'opus_mt_fr_niu': 'MarianTransformer', 'opus_mt_fr_no': 'MarianTransformer', 'opus_mt_fr_nso': 'MarianTransformer', 'opus_mt_fr_ny': 'MarianTransformer', 'opus_mt_fr_pag': 'MarianTransformer', 'opus_mt_fr_pap': 'MarianTransformer', 'opus_mt_fr_pis': 'MarianTransformer', 'opus_mt_fr_pl': 
'MarianTransformer', 'opus_mt_fr_pon': 'MarianTransformer', 'opus_mt_fr_rnd': 'MarianTransformer', 'opus_mt_fr_ro': 'MarianTransformer', 'opus_mt_fr_ru': 'MarianTransformer', 'opus_mt_fr_run': 'MarianTransformer', 'opus_mt_fr_rw': 'MarianTransformer', 'opus_mt_fr_sg': 'MarianTransformer', 'opus_mt_fr_sk': 'MarianTransformer', 'opus_mt_fr_sl': 'MarianTransformer', 'opus_mt_fr_sm': 'MarianTransformer', 'opus_mt_fr_sn': 'MarianTransformer', 'opus_mt_fr_srn': 'MarianTransformer', 'opus_mt_fr_st': 'MarianTransformer', 'opus_mt_fr_sv': 'MarianTransformer', 'opus_mt_fr_swc': 'MarianTransformer', 'opus_mt_fr_tiv': 'MarianTransformer', 'opus_mt_fr_tl': 'MarianTransformer', 'opus_mt_fr_tll': 'MarianTransformer', 'opus_mt_fr_tn': 'MarianTransformer', 'opus_mt_fr_to': 'MarianTransformer', 'opus_mt_fr_tpi': 'MarianTransformer', 'opus_mt_fr_ts': 'MarianTransformer', 'opus_mt_fr_tum': 'MarianTransformer', 'opus_mt_fr_tvl': 'MarianTransformer', 'opus_mt_fr_tw': 'MarianTransformer', 'opus_mt_fr_ty': 'MarianTransformer', 'opus_mt_fr_uk': 'MarianTransformer', 'opus_mt_fr_ve': 'MarianTransformer', 'opus_mt_fr_vi': 'MarianTransformer', 'opus_mt_fr_war': 'MarianTransformer', 'opus_mt_fr_wls': 'MarianTransformer', 'opus_mt_fr_xh': 'MarianTransformer', 'opus_mt_fr_yap': 'MarianTransformer', 'opus_mt_fr_yo': 'MarianTransformer', 'opus_mt_fr_zne': 'MarianTransformer', 'opus_mt_fse_fi': 'MarianTransformer', 'opus_mt_ga_en': 'MarianTransformer', 'opus_mt_gaa_de': 'MarianTransformer', 'opus_mt_gaa_en': 'MarianTransformer', 'opus_mt_gaa_es': 'MarianTransformer', 'opus_mt_gaa_fi': 'MarianTransformer', 'opus_mt_gaa_fr': 'MarianTransformer', 'opus_mt_gaa_sv': 'MarianTransformer', 'opus_mt_gem_en': 'MarianTransformer', 'opus_mt_gem_gem': 'MarianTransformer', 'opus_mt_gil_en': 'MarianTransformer', 'opus_mt_gil_es': 'MarianTransformer', 'opus_mt_gil_fi': 'MarianTransformer', 'opus_mt_gil_fr': 'MarianTransformer', 'opus_mt_gil_sv': 'MarianTransformer', 'opus_mt_gl_en': 'MarianTransformer', 
'opus_mt_gl_es': 'MarianTransformer', 'opus_mt_gl_pt': 'MarianTransformer', 'opus_mt_gmq_en': 'MarianTransformer', 'opus_mt_gmq_gmq': 'MarianTransformer', 'opus_mt_gmw_en': 'MarianTransformer', 'opus_mt_gmw_gmw': 'MarianTransformer', 'opus_mt_grk_en': 'MarianTransformer', 'opus_mt_guw_de': 'MarianTransformer', 'opus_mt_guw_en': 'MarianTransformer', 'opus_mt_guw_es': 'MarianTransformer', 'opus_mt_guw_fi': 'MarianTransformer', 'opus_mt_guw_fr': 'MarianTransformer', 'opus_mt_guw_sv': 'MarianTransformer', 'opus_mt_gv_en': 'MarianTransformer', 'opus_mt_ha_en': 'MarianTransformer', 'opus_mt_ha_es': 'MarianTransformer', 'opus_mt_ha_fi': 'MarianTransformer', 'opus_mt_ha_fr': 'MarianTransformer', 'opus_mt_ha_sv': 'MarianTransformer', 'opus_mt_he_ar': 'MarianTransformer', 'opus_mt_he_de': 'MarianTransformer', 'opus_mt_he_eo': 'MarianTransformer', 'opus_mt_he_es': 'MarianTransformer', 'opus_mt_he_fi': 'MarianTransformer', 'opus_mt_he_it': 'MarianTransformer', 'opus_mt_he_ru': 'MarianTransformer', 'opus_mt_he_sv': 'MarianTransformer', 'opus_mt_he_uk': 'MarianTransformer', 'opus_mt_hi_en': 'MarianTransformer', 'opus_mt_hi_ur': 'MarianTransformer', 'opus_mt_hil_de': 'MarianTransformer', 'opus_mt_hil_en': 'MarianTransformer', 'opus_mt_hil_fi': 'MarianTransformer', 'opus_mt_ho_en': 'MarianTransformer', 'opus_mt_hr_es': 'MarianTransformer', 'opus_mt_hr_fi': 'MarianTransformer', 'opus_mt_hr_fr': 'MarianTransformer', 'opus_mt_hr_sv': 'MarianTransformer', 'opus_mt_ht_en': 'MarianTransformer', 'opus_mt_ht_es': 'MarianTransformer', 'opus_mt_ht_fi': 'MarianTransformer', 'opus_mt_ht_fr': 'MarianTransformer', 'opus_mt_ht_sv': 'MarianTransformer', 'opus_mt_hu_de': 'MarianTransformer', 'opus_mt_hu_en': 'MarianTransformer', 'opus_mt_hu_eo': 'MarianTransformer', 'opus_mt_hu_fi': 'MarianTransformer', 'opus_mt_hu_fr': 'MarianTransformer', 'opus_mt_hu_sv': 'MarianTransformer', 'opus_mt_hu_uk': 'MarianTransformer', 'opus_mt_hy_en': 'MarianTransformer', 'opus_mt_hy_ru': 'MarianTransformer', 
'opus_mt_id_en': 'MarianTransformer', 'opus_mt_id_es': 'MarianTransformer', 'opus_mt_id_fi': 'MarianTransformer', 'opus_mt_id_fr': 'MarianTransformer', 'opus_mt_id_sv': 'MarianTransformer', 'opus_mt_ig_de': 'MarianTransformer', 'opus_mt_ig_en': 'MarianTransformer', 'opus_mt_ig_es': 'MarianTransformer', 'opus_mt_ig_fi': 'MarianTransformer', 'opus_mt_ig_fr': 'MarianTransformer', 'opus_mt_ig_sv': 'MarianTransformer', 'opus_mt_iir_en': 'MarianTransformer', 'opus_mt_iir_iir': 'MarianTransformer', 'opus_mt_ilo_de': 'MarianTransformer', 'opus_mt_ilo_en': 'MarianTransformer', 'opus_mt_ilo_es': 'MarianTransformer', 'opus_mt_ilo_fi': 'MarianTransformer', 'opus_mt_ilo_sv': 'MarianTransformer', 'opus_mt_inc_en': 'MarianTransformer', 'opus_mt_inc_inc': 'MarianTransformer', 'opus_mt_ine_en': 'MarianTransformer', 'opus_mt_ine_ine': 'MarianTransformer', 'opus_mt_is_de': 'MarianTransformer', 'opus_mt_is_en': 'MarianTransformer', 'opus_mt_is_eo': 'MarianTransformer', 'opus_mt_is_es': 'MarianTransformer', 'opus_mt_is_fi': 'MarianTransformer', 'opus_mt_is_fr': 'MarianTransformer', 'opus_mt_is_it': 'MarianTransformer', 'opus_mt_is_sv': 'MarianTransformer', 'opus_mt_iso_en': 'MarianTransformer', 'opus_mt_iso_es': 'MarianTransformer', 'opus_mt_iso_fi': 'MarianTransformer', 'opus_mt_iso_fr': 'MarianTransformer', 'opus_mt_iso_sv': 'MarianTransformer', 'opus_mt_it_ar': 'MarianTransformer', 'opus_mt_it_bg': 'MarianTransformer', 'opus_mt_it_ca': 'MarianTransformer', 'opus_mt_it_de': 'MarianTransformer', 'opus_mt_it_en': 'MarianTransformer', 'opus_mt_it_eo': 'MarianTransformer', 'opus_mt_it_es': 'MarianTransformer', 'opus_mt_it_fr': 'MarianTransformer', 'opus_mt_it_is': 'MarianTransformer', 'opus_mt_it_lt': 'MarianTransformer', 'opus_mt_it_ms': 'MarianTransformer', 'opus_mt_it_sv': 'MarianTransformer', 'opus_mt_it_uk': 'MarianTransformer', 'opus_mt_it_vi': 'MarianTransformer', 'opus_mt_itc_en': 'MarianTransformer', 'opus_mt_itc_itc': 'MarianTransformer', 'opus_mt_ja_ar': 'MarianTransformer', 
'opus_mt_ja_bg': 'MarianTransformer', 'opus_mt_ja_da': 'MarianTransformer', 'opus_mt_ja_de': 'MarianTransformer', 'opus_mt_ja_en': 'MarianTransformer', 'opus_mt_ja_es': 'MarianTransformer', 'opus_mt_ja_fi': 'MarianTransformer', 'opus_mt_ja_fr': 'MarianTransformer', 'opus_mt_ja_he': 'MarianTransformer', 'opus_mt_ja_hu': 'MarianTransformer', 'opus_mt_ja_it': 'MarianTransformer', 'opus_mt_ja_ms': 'MarianTransformer', 'opus_mt_ja_nl': 'MarianTransformer', 'opus_mt_ja_pl': 'MarianTransformer', 'opus_mt_ja_pt': 'MarianTransformer', 'opus_mt_ja_ru': 'MarianTransformer', 'opus_mt_ja_sh': 'MarianTransformer', 'opus_mt_ja_sv': 'MarianTransformer', 'opus_mt_ja_tr': 'MarianTransformer', 'opus_mt_ja_vi': 'MarianTransformer', 'opus_mt_jap_en': 'MarianTransformer', 'opus_mt_ka_en': 'MarianTransformer', 'opus_mt_ka_ru': 'MarianTransformer', 'opus_mt_kab_en': 'MarianTransformer', 'opus_mt_kg_en': 'MarianTransformer', 'opus_mt_kg_es': 'MarianTransformer', 'opus_mt_kg_fr': 'MarianTransformer', 'opus_mt_kg_sv': 'MarianTransformer', 'opus_mt_kj_en': 'MarianTransformer', 'opus_mt_kl_en': 'MarianTransformer', 'opus_mt_ko_de': 'MarianTransformer', 'opus_mt_ko_en': 'MarianTransformer', 'opus_mt_ko_es': 'MarianTransformer', 'opus_mt_ko_fi': 'MarianTransformer', 'opus_mt_ko_fr': 'MarianTransformer', 'opus_mt_ko_hu': 'MarianTransformer', 'opus_mt_ko_ru': 'MarianTransformer', 'opus_mt_ko_sv': 'MarianTransformer', 'opus_mt_kqn_en': 'MarianTransformer', 'opus_mt_kqn_es': 'MarianTransformer', 'opus_mt_kqn_fr': 'MarianTransformer', 'opus_mt_kqn_sv': 'MarianTransformer', 'opus_mt_kwn_en': 'MarianTransformer', 'opus_mt_kwy_en': 'MarianTransformer', 'opus_mt_kwy_fr': 'MarianTransformer', 'opus_mt_kwy_sv': 'MarianTransformer', 'opus_mt_lg_en': 'MarianTransformer', 'opus_mt_lg_es': 'MarianTransformer', 'opus_mt_lg_fi': 'MarianTransformer', 'opus_mt_lg_fr': 'MarianTransformer', 'opus_mt_lg_sv': 'MarianTransformer', 'opus_mt_ln_de': 'MarianTransformer', 'opus_mt_ln_en': 'MarianTransformer', 
'opus_mt_ln_es': 'MarianTransformer', 'opus_mt_ln_fr': 'MarianTransformer', 'opus_mt_loz_de': 'MarianTransformer', 'opus_mt_loz_en': 'MarianTransformer', 'opus_mt_loz_es': 'MarianTransformer', 'opus_mt_loz_fi': 'MarianTransformer', 'opus_mt_loz_fr': 'MarianTransformer', 'opus_mt_loz_sv': 'MarianTransformer', 'opus_mt_lt_de': 'MarianTransformer', 'opus_mt_lt_eo': 'MarianTransformer', 'opus_mt_lt_es': 'MarianTransformer', 'opus_mt_lt_fr': 'MarianTransformer', 'opus_mt_lt_it': 'MarianTransformer', 'opus_mt_lt_pl': 'MarianTransformer', 'opus_mt_lt_ru': 'MarianTransformer', 'opus_mt_lt_sv': 'MarianTransformer', 'opus_mt_lt_tr': 'MarianTransformer', 'opus_mt_lu_en': 'MarianTransformer', 'opus_mt_lu_es': 'MarianTransformer', 'opus_mt_lu_fi': 'MarianTransformer', 'opus_mt_lu_fr': 'MarianTransformer', 'opus_mt_lu_sv': 'MarianTransformer', 'opus_mt_lua_en': 'MarianTransformer', 'opus_mt_lua_es': 'MarianTransformer', 'opus_mt_lua_fi': 'MarianTransformer', 'opus_mt_lua_fr': 'MarianTransformer', 'opus_mt_lua_sv': 'MarianTransformer', 'opus_mt_lue_en': 'MarianTransformer', 'opus_mt_lue_es': 'MarianTransformer', 'opus_mt_lue_fi': 'MarianTransformer', 'opus_mt_lue_fr': 'MarianTransformer', 'opus_mt_lue_sv': 'MarianTransformer', 'opus_mt_lun_en': 'MarianTransformer', 'opus_mt_luo_en': 'MarianTransformer', 'opus_mt_lus_en': 'MarianTransformer', 'opus_mt_lus_es': 'MarianTransformer', 'opus_mt_lus_fi': 'MarianTransformer', 'opus_mt_lus_fr': 'MarianTransformer', 'opus_mt_lus_sv': 'MarianTransformer', 'opus_mt_lv_en': 'MarianTransformer', 'opus_mt_lv_es': 'MarianTransformer', 'opus_mt_lv_fi': 'MarianTransformer', 'opus_mt_lv_fr': 'MarianTransformer', 'opus_mt_lv_ru': 'MarianTransformer', 'opus_mt_lv_sv': 'MarianTransformer', 'opus_mt_mfe_en': 'MarianTransformer', 'opus_mt_mfe_es': 'MarianTransformer', 'opus_mt_mfs_es': 'MarianTransformer', 'opus_mt_mg_en': 'MarianTransformer', 'opus_mt_mg_es': 'MarianTransformer', 'opus_mt_mh_en': 'MarianTransformer', 'opus_mt_mh_es': 
'MarianTransformer', 'opus_mt_mh_fi': 'MarianTransformer', 'opus_mt_mk_en': 'MarianTransformer', 'opus_mt_mk_es': 'MarianTransformer', 'opus_mt_mk_fi': 'MarianTransformer', 'opus_mt_mk_fr': 'MarianTransformer', 'opus_mt_mkh_en': 'MarianTransformer', 'opus_mt_ml_en': 'MarianTransformer', 'opus_mt_mos_en': 'MarianTransformer', 'opus_mt_mr_en': 'MarianTransformer', 'opus_mt_ms_de': 'MarianTransformer', 'opus_mt_ms_fr': 'MarianTransformer', 'opus_mt_ms_it': 'MarianTransformer', 'opus_mt_ms_ms': 'MarianTransformer', 'opus_mt_mt_en': 'MarianTransformer', 'opus_mt_mt_es': 'MarianTransformer', 'opus_mt_mt_fi': 'MarianTransformer', 'opus_mt_mt_fr': 'MarianTransformer', 'opus_mt_mt_sv': 'MarianTransformer', 'opus_mt_mul_en': 'MarianTransformer', 'opus_mt_ng_en': 'MarianTransformer', 'opus_mt_nic_en': 'MarianTransformer', 'opus_mt_niu_de': 'MarianTransformer', 'opus_mt_niu_en': 'MarianTransformer', 'opus_mt_niu_es': 'MarianTransformer', 'opus_mt_niu_fi': 'MarianTransformer', 'opus_mt_niu_fr': 'MarianTransformer', 'opus_mt_niu_sv': 'MarianTransformer', 'opus_mt_nl_af': 'MarianTransformer', 'opus_mt_nl_ca': 'MarianTransformer', 'opus_mt_nl_en': 'MarianTransformer', 'opus_mt_nl_eo': 'MarianTransformer', 'opus_mt_nl_es': 'MarianTransformer', 'opus_mt_nl_fi': 'MarianTransformer', 'opus_mt_nl_fr': 'MarianTransformer', 'opus_mt_nl_no': 'MarianTransformer', 'opus_mt_nl_sv': 'MarianTransformer', 'opus_mt_nl_uk': 'MarianTransformer', 'opus_mt_no_da': 'MarianTransformer', 'opus_mt_no_de': 'MarianTransformer', 'opus_mt_no_es': 'MarianTransformer', 'opus_mt_no_fi': 'MarianTransformer', 'opus_mt_no_fr': 'MarianTransformer', 'opus_mt_no_nl': 'MarianTransformer', 'opus_mt_no_no': 'MarianTransformer', 'opus_mt_no_pl': 'MarianTransformer', 'opus_mt_no_ru': 'MarianTransformer', 'opus_mt_no_sv': 'MarianTransformer', 'opus_mt_no_uk': 'MarianTransformer', 'opus_mt_nso_de': 'MarianTransformer', 'opus_mt_nso_en': 'MarianTransformer', 'opus_mt_nso_es': 'MarianTransformer', 'opus_mt_nso_fi': 
'MarianTransformer', 'opus_mt_nso_fr': 'MarianTransformer', 'opus_mt_nso_sv': 'MarianTransformer', 'opus_mt_ny_de': 'MarianTransformer', 'opus_mt_ny_en': 'MarianTransformer', 'opus_mt_ny_es': 'MarianTransformer', 'opus_mt_nyk_en': 'MarianTransformer', 'opus_mt_om_en': 'MarianTransformer', 'opus_mt_pa_en': 'MarianTransformer', 'opus_mt_pag_de': 'MarianTransformer', 'opus_mt_pag_en': 'MarianTransformer', 'opus_mt_pag_es': 'MarianTransformer', 'opus_mt_pag_fi': 'MarianTransformer', 'opus_mt_pag_sv': 'MarianTransformer', 'opus_mt_pap_de': 'MarianTransformer', 'opus_mt_pap_en': 'MarianTransformer', 'opus_mt_pap_es': 'MarianTransformer', 'opus_mt_pap_fi': 'MarianTransformer', 'opus_mt_pap_fr': 'MarianTransformer', 'opus_mt_phi_en': 'MarianTransformer', 'opus_mt_pis_en': 'MarianTransformer', 'opus_mt_pis_es': 'MarianTransformer', 'opus_mt_pis_fi': 'MarianTransformer', 'opus_mt_pis_fr': 'MarianTransformer', 'opus_mt_pis_sv': 'MarianTransformer', 'opus_mt_pl_ar': 'MarianTransformer', 'opus_mt_pl_de': 'MarianTransformer', 'opus_mt_pl_en': 'MarianTransformer', 'opus_mt_pl_eo': 'MarianTransformer', 'opus_mt_pl_es': 'MarianTransformer', 'opus_mt_pl_fr': 'MarianTransformer', 'opus_mt_pl_lt': 'MarianTransformer', 'opus_mt_pl_no': 'MarianTransformer', 'opus_mt_pl_sv': 'MarianTransformer', 'opus_mt_pl_uk': 'MarianTransformer', 'opus_mt_pon_en': 'MarianTransformer', 'opus_mt_pon_es': 'MarianTransformer', 'opus_mt_pon_fi': 'MarianTransformer', 'opus_mt_pon_fr': 'MarianTransformer', 'opus_mt_pon_sv': 'MarianTransformer', 'opus_mt_pqe_en': 'MarianTransformer', 'opus_mt_prl_es': 'MarianTransformer', 'opus_mt_pt_ca': 'MarianTransformer', 'opus_mt_pt_eo': 'MarianTransformer', 'opus_mt_pt_gl': 'MarianTransformer', 'opus_mt_pt_tl': 'MarianTransformer', 'opus_mt_pt_uk': 'MarianTransformer', 'opus_mt_rn_de': 'MarianTransformer', 'opus_mt_rn_en': 'MarianTransformer', 'opus_mt_rn_es': 'MarianTransformer', 'opus_mt_rn_fr': 'MarianTransformer', 'opus_mt_rn_ru': 'MarianTransformer', 
'opus_mt_rnd_en': 'MarianTransformer', 'opus_mt_rnd_fr': 'MarianTransformer', 'opus_mt_rnd_sv': 'MarianTransformer', 'opus_mt_ro_eo': 'MarianTransformer', 'opus_mt_ro_fi': 'MarianTransformer', 'opus_mt_ro_fr': 'MarianTransformer', 'opus_mt_ro_sv': 'MarianTransformer', 'opus_mt_roa_en': 'MarianTransformer', 'opus_mt_ru_af': 'MarianTransformer', 'opus_mt_ru_ar': 'MarianTransformer', 'opus_mt_ru_bg': 'MarianTransformer', 'opus_mt_ru_da': 'MarianTransformer', 'opus_mt_ru_en': 'MarianTransformer', 'opus_mt_ru_eo': 'MarianTransformer', 'opus_mt_ru_es': 'MarianTransformer', 'opus_mt_ru_et': 'MarianTransformer', 'opus_mt_ru_eu': 'MarianTransformer', 'opus_mt_ru_fi': 'MarianTransformer', 'opus_mt_ru_fr': 'MarianTransformer', 'opus_mt_ru_he': 'MarianTransformer', 'opus_mt_ru_hy': 'MarianTransformer', 'opus_mt_ru_lt': 'MarianTransformer', 'opus_mt_ru_lv': 'MarianTransformer', 'opus_mt_ru_no': 'MarianTransformer', 'opus_mt_ru_sl': 'MarianTransformer', 'opus_mt_ru_sv': 'MarianTransformer', 'opus_mt_ru_uk': 'MarianTransformer', 'opus_mt_ru_vi': 'MarianTransformer', 'opus_mt_run_en': 'MarianTransformer', 'opus_mt_run_es': 'MarianTransformer', 'opus_mt_run_sv': 'MarianTransformer', 'opus_mt_rw_en': 'MarianTransformer', 'opus_mt_rw_es': 'MarianTransformer', 'opus_mt_rw_fr': 'MarianTransformer', 'opus_mt_rw_sv': 'MarianTransformer', 'opus_mt_sal_en': 'MarianTransformer', 'opus_mt_sem_en': 'MarianTransformer', 'opus_mt_sem_sem': 'MarianTransformer', 'opus_mt_sg_en': 'MarianTransformer', 'opus_mt_sg_es': 'MarianTransformer', 'opus_mt_sg_fi': 'MarianTransformer', 'opus_mt_sg_fr': 'MarianTransformer', 'opus_mt_sg_sv': 'MarianTransformer', 'opus_mt_sh_eo': 'MarianTransformer', 'opus_mt_sh_uk': 'MarianTransformer', 'opus_mt_sk_en': 'MarianTransformer', 'opus_mt_sk_es': 'MarianTransformer', 'opus_mt_sk_fi': 'MarianTransformer', 'opus_mt_sk_fr': 'MarianTransformer', 'opus_mt_sk_sv': 'MarianTransformer', 'opus_mt_sl_es': 'MarianTransformer', 'opus_mt_sl_fi': 'MarianTransformer', 
'opus_mt_sl_fr': 'MarianTransformer', 'opus_mt_sl_ru': 'MarianTransformer', 'opus_mt_sl_sv': 'MarianTransformer', 'opus_mt_sl_uk': 'MarianTransformer', 'opus_mt_sla_en': 'MarianTransformer', 'opus_mt_sla_sla': 'MarianTransformer', 'opus_mt_sm_en': 'MarianTransformer', 'opus_mt_sm_es': 'MarianTransformer', 'opus_mt_sm_fr': 'MarianTransformer', 'opus_mt_sn_en': 'MarianTransformer', 'opus_mt_sn_es': 'MarianTransformer', 'opus_mt_sn_fr': 'MarianTransformer', 'opus_mt_sn_sv': 'MarianTransformer', 'opus_mt_sq_en': 'MarianTransformer', 'opus_mt_sq_es': 'MarianTransformer', 'opus_mt_sq_sv': 'MarianTransformer', 'opus_mt_srn_en': 'MarianTransformer', 'opus_mt_srn_es': 'MarianTransformer', 'opus_mt_srn_fr': 'MarianTransformer', 'opus_mt_srn_sv': 'MarianTransformer', 'opus_mt_ss_en': 'MarianTransformer', 'opus_mt_ssp_es': 'MarianTransformer', 'opus_mt_st_en': 'MarianTransformer', 'opus_mt_st_es': 'MarianTransformer', 'opus_mt_st_fi': 'MarianTransformer', 'opus_mt_st_fr': 'MarianTransformer', 'opus_mt_st_sv': 'MarianTransformer', 'opus_mt_sv_af': 'MarianTransformer', 'opus_mt_sv_ase': 'MarianTransformer', 'opus_mt_sv_bcl': 'MarianTransformer', 'opus_mt_sv_bem': 'MarianTransformer', 'opus_mt_sv_bg': 'MarianTransformer', 'opus_mt_sv_bi': 'MarianTransformer', 'opus_mt_sv_bzs': 'MarianTransformer', 'opus_mt_sv_ceb': 'MarianTransformer', 'opus_mt_sv_chk': 'MarianTransformer', 'opus_mt_sv_crs': 'MarianTransformer', 'opus_mt_sv_cs': 'MarianTransformer', 'opus_mt_sv_ee': 'MarianTransformer', 'opus_mt_sv_efi': 'MarianTransformer', 'opus_mt_sv_el': 'MarianTransformer', 'opus_mt_sv_en': 'MarianTransformer', 'opus_mt_sv_eo': 'MarianTransformer', 'opus_mt_sv_es': 'MarianTransformer', 'opus_mt_sv_et': 'MarianTransformer', 'opus_mt_sv_fi': 'MarianTransformer', 'opus_mt_sv_fj': 'MarianTransformer', 'opus_mt_sv_fr': 'MarianTransformer', 'opus_mt_sv_gaa': 'MarianTransformer', 'opus_mt_sv_gil': 'MarianTransformer', 'opus_mt_sv_guw': 'MarianTransformer', 'opus_mt_sv_ha': 'MarianTransformer', 
'opus_mt_sv_he': 'MarianTransformer', 'opus_mt_sv_hil': 'MarianTransformer', 'opus_mt_sv_ho': 'MarianTransformer', 'opus_mt_sv_hr': 'MarianTransformer', 'opus_mt_sv_ht': 'MarianTransformer', 'opus_mt_sv_hu': 'MarianTransformer', 'opus_mt_sv_id': 'MarianTransformer', 'opus_mt_sv_ig': 'MarianTransformer', 'opus_mt_sv_ilo': 'MarianTransformer', 'opus_mt_sv_is': 'MarianTransformer', 'opus_mt_sv_iso': 'MarianTransformer', 'opus_mt_sv_kg': 'MarianTransformer', 'opus_mt_sv_kqn': 'MarianTransformer', 'opus_mt_sv_kwy': 'MarianTransformer', 'opus_mt_sv_lg': 'MarianTransformer', 'opus_mt_sv_ln': 'MarianTransformer', 'opus_mt_sv_lu': 'MarianTransformer', 'opus_mt_sv_lua': 'MarianTransformer', 'opus_mt_sv_lue': 'MarianTransformer', 'opus_mt_sv_lus': 'MarianTransformer', 'opus_mt_sv_lv': 'MarianTransformer', 'opus_mt_sv_mfe': 'MarianTransformer', 'opus_mt_sv_mh': 'MarianTransformer', 'opus_mt_sv_mos': 'MarianTransformer', 'opus_mt_sv_mt': 'MarianTransformer', 'opus_mt_sv_niu': 'MarianTransformer', 'opus_mt_sv_nl': 'MarianTransformer', 'opus_mt_sv_no': 'MarianTransformer', 'opus_mt_sv_nso': 'MarianTransformer', 'opus_mt_sv_ny': 'MarianTransformer', 'opus_mt_sv_pag': 'MarianTransformer', 'opus_mt_sv_pap': 'MarianTransformer', 'opus_mt_sv_pis': 'MarianTransformer', 'opus_mt_sv_pon': 'MarianTransformer', 'opus_mt_sv_rnd': 'MarianTransformer', 'opus_mt_sv_ro': 'MarianTransformer', 'opus_mt_sv_ru': 'MarianTransformer', 'opus_mt_sv_run': 'MarianTransformer', 'opus_mt_sv_rw': 'MarianTransformer', 'opus_mt_sv_sg': 'MarianTransformer', 'opus_mt_sv_sk': 'MarianTransformer', 'opus_mt_sv_sl': 'MarianTransformer', 'opus_mt_sv_sm': 'MarianTransformer', 'opus_mt_sv_sn': 'MarianTransformer', 'opus_mt_sv_sq': 'MarianTransformer', 'opus_mt_sv_srn': 'MarianTransformer', 'opus_mt_sv_st': 'MarianTransformer', 'opus_mt_sv_sv': 'MarianTransformer', 'opus_mt_sv_swc': 'MarianTransformer', 'opus_mt_sv_th': 'MarianTransformer', 'opus_mt_sv_tiv': 'MarianTransformer', 'opus_mt_sv_tll': 'MarianTransformer', 
'opus_mt_sv_tn': 'MarianTransformer', 'opus_mt_sv_to': 'MarianTransformer', 'opus_mt_sv_toi': 'MarianTransformer', 'opus_mt_sv_tpi': 'MarianTransformer', 'opus_mt_sv_ts': 'MarianTransformer', 'opus_mt_sv_tum': 'MarianTransformer', 'opus_mt_sv_tvl': 'MarianTransformer', 'opus_mt_sv_tw': 'MarianTransformer', 'opus_mt_sv_ty': 'MarianTransformer', 'opus_mt_sv_uk': 'MarianTransformer', 'opus_mt_sv_umb': 'MarianTransformer', 'opus_mt_sv_ve': 'MarianTransformer', 'opus_mt_sv_war': 'MarianTransformer', 'opus_mt_sv_wls': 'MarianTransformer', 'opus_mt_sv_xh': 'MarianTransformer', 'opus_mt_sv_yap': 'MarianTransformer', 'opus_mt_sv_yo': 'MarianTransformer', 'opus_mt_sv_zne': 'MarianTransformer', 'opus_mt_swc_en': 'MarianTransformer', 'opus_mt_swc_es': 'MarianTransformer', 'opus_mt_swc_fi': 'MarianTransformer', 'opus_mt_swc_fr': 'MarianTransformer', 'opus_mt_swc_sv': 'MarianTransformer', 'opus_mt_taw_en': 'MarianTransformer', 'opus_mt_th_en': 'MarianTransformer', 'opus_mt_th_fr': 'MarianTransformer', 'opus_mt_ti_en': 'MarianTransformer', 'opus_mt_tiv_en': 'MarianTransformer', 'opus_mt_tiv_fr': 'MarianTransformer', 'opus_mt_tiv_sv': 'MarianTransformer', 'opus_mt_tl_de': 'MarianTransformer', 'opus_mt_tl_en': 'MarianTransformer', 'opus_mt_tl_es': 'MarianTransformer', 'opus_mt_tl_pt': 'MarianTransformer', 'opus_mt_tll_en': 'MarianTransformer', 'opus_mt_tll_es': 'MarianTransformer', 'opus_mt_tll_fi': 'MarianTransformer', 'opus_mt_tll_fr': 'MarianTransformer', 'opus_mt_tll_sv': 'MarianTransformer', 'opus_mt_tn_en': 'MarianTransformer', 'opus_mt_tn_es': 'MarianTransformer', 'opus_mt_tn_fr': 'MarianTransformer', 'opus_mt_tn_sv': 'MarianTransformer', 'opus_mt_to_en': 'MarianTransformer', 'opus_mt_to_es': 'MarianTransformer', 'opus_mt_to_fr': 'MarianTransformer', 'opus_mt_to_sv': 'MarianTransformer', 'opus_mt_toi_en': 'MarianTransformer', 'opus_mt_toi_es': 'MarianTransformer', 'opus_mt_toi_fi': 'MarianTransformer', 'opus_mt_toi_fr': 'MarianTransformer', 'opus_mt_toi_sv': 
'MarianTransformer', 'opus_mt_tpi_en': 'MarianTransformer', 'opus_mt_tpi_sv': 'MarianTransformer', 'opus_mt_tr_ar': 'MarianTransformer', 'opus_mt_tr_az': 'MarianTransformer', 'opus_mt_tr_en': 'MarianTransformer', 'opus_mt_tr_eo': 'MarianTransformer', 'opus_mt_tr_es': 'MarianTransformer', 'opus_mt_tr_fr': 'MarianTransformer', 'opus_mt_tr_lt': 'MarianTransformer', 'opus_mt_tr_sv': 'MarianTransformer', 'opus_mt_tr_uk': 'MarianTransformer', 'opus_mt_trk_en': 'MarianTransformer', 'opus_mt_ts_en': 'MarianTransformer', 'opus_mt_ts_es': 'MarianTransformer', 'opus_mt_ts_fi': 'MarianTransformer', 'opus_mt_ts_fr': 'MarianTransformer', 'opus_mt_ts_sv': 'MarianTransformer', 'opus_mt_tum_en': 'MarianTransformer', 'opus_mt_tum_es': 'MarianTransformer', 'opus_mt_tum_fr': 'MarianTransformer', 'opus_mt_tum_sv': 'MarianTransformer', 'opus_mt_tvl_en': 'MarianTransformer', 'opus_mt_tvl_es': 'MarianTransformer', 'opus_mt_tvl_fi': 'MarianTransformer', 'opus_mt_tvl_fr': 'MarianTransformer', 'opus_mt_tvl_sv': 'MarianTransformer', 'opus_mt_tw_es': 'MarianTransformer', 'opus_mt_tw_fi': 'MarianTransformer', 'opus_mt_tw_fr': 'MarianTransformer', 'opus_mt_tw_sv': 'MarianTransformer', 'opus_mt_ty_es': 'MarianTransformer', 'opus_mt_ty_fi': 'MarianTransformer', 'opus_mt_ty_fr': 'MarianTransformer', 'opus_mt_ty_sv': 'MarianTransformer', 'opus_mt_tzo_es': 'MarianTransformer', 'opus_mt_uk_bg': 'MarianTransformer', 'opus_mt_uk_ca': 'MarianTransformer', 'opus_mt_uk_cs': 'MarianTransformer', 'opus_mt_uk_de': 'MarianTransformer', 'opus_mt_uk_en': 'MarianTransformer', 'opus_mt_uk_es': 'MarianTransformer', 'opus_mt_uk_fi': 'MarianTransformer', 'opus_mt_uk_fr': 'MarianTransformer', 'opus_mt_uk_he': 'MarianTransformer', 'opus_mt_uk_hu': 'MarianTransformer', 'opus_mt_uk_it': 'MarianTransformer', 'opus_mt_uk_nl': 'MarianTransformer', 'opus_mt_uk_no': 'MarianTransformer', 'opus_mt_uk_pl': 'MarianTransformer', 'opus_mt_uk_pt': 'MarianTransformer', 'opus_mt_uk_ru': 'MarianTransformer', 'opus_mt_uk_sh': 
'MarianTransformer', 'opus_mt_uk_sl': 'MarianTransformer', 'opus_mt_uk_sv': 'MarianTransformer', 'opus_mt_uk_tr': 'MarianTransformer', 'opus_mt_umb_en': 'MarianTransformer', 'opus_mt_ur_en': 'MarianTransformer', 'opus_mt_urj_en': 'MarianTransformer', 'opus_mt_urj_urj': 'MarianTransformer', 'opus_mt_ve_en': 'MarianTransformer', 'opus_mt_ve_es': 'MarianTransformer', 'opus_mt_vi_de': 'MarianTransformer', 'opus_mt_vi_en': 'MarianTransformer', 'opus_mt_vi_eo': 'MarianTransformer', 'opus_mt_vi_es': 'MarianTransformer', 'opus_mt_vi_fr': 'MarianTransformer', 'opus_mt_vi_it': 'MarianTransformer', 'opus_mt_vi_ru': 'MarianTransformer', 'opus_mt_vsl_es': 'MarianTransformer', 'opus_mt_wa_en': 'MarianTransformer', 'opus_mt_wal_en': 'MarianTransformer', 'opus_mt_war_en': 'MarianTransformer', 'opus_mt_war_es': 'MarianTransformer', 'opus_mt_war_fi': 'MarianTransformer', 'opus_mt_war_fr': 'MarianTransformer', 'opus_mt_war_sv': 'MarianTransformer', 'opus_mt_wls_en': 'MarianTransformer', 'opus_mt_wls_fr': 'MarianTransformer', 'opus_mt_wls_sv': 'MarianTransformer', 'opus_mt_xh_en': 'MarianTransformer', 'opus_mt_xh_es': 'MarianTransformer', 'opus_mt_xh_fr': 'MarianTransformer', 'opus_mt_xh_sv': 'MarianTransformer', 'opus_mt_yap_en': 'MarianTransformer', 'opus_mt_yap_fr': 'MarianTransformer', 'opus_mt_yap_sv': 'MarianTransformer', 'opus_mt_yo_en': 'MarianTransformer', 'opus_mt_yo_es': 'MarianTransformer', 'opus_mt_yo_fi': 'MarianTransformer', 'opus_mt_yo_fr': 'MarianTransformer', 'opus_mt_yo_sv': 'MarianTransformer', 'opus_mt_zai_es': 'MarianTransformer', 'opus_mt_zh_bg': 'MarianTransformer', 'opus_mt_zh_de': 'MarianTransformer', 'opus_mt_zh_en': 'MarianTransformer', 'opus_mt_zh_fi': 'MarianTransformer', 'opus_mt_zh_he': 'MarianTransformer', 'opus_mt_zh_it': 'MarianTransformer', 'opus_mt_zh_ms': 'MarianTransformer', 'opus_mt_zh_nl': 'MarianTransformer', 'opus_mt_zh_sv': 'MarianTransformer', 'opus_mt_zh_uk': 'MarianTransformer', 'opus_mt_zh_vi': 'MarianTransformer', 'opus_mt_zle_en': 
'MarianTransformer', 'opus_mt_zle_zle': 'MarianTransformer', 'opus_mt_zls_en': 'MarianTransformer', 'opus_mt_zls_zls': 'MarianTransformer', 'opus_mt_zlw_en': 'MarianTransformer', 'opus_mt_zlw_zlw': 'MarianTransformer', 'opus_mt_zne_es': 'MarianTransformer', 'opus_mt_zne_fi': 'MarianTransformer', 'opus_mt_zne_fr': 'MarianTransformer', 'opus_mt_zne_sv': 'MarianTransformer', 'opus_tatoeba_af_ru': 'MarianTransformer', 'opus_tatoeba_es_zh': 'MarianTransformer', 'opus_tatoeba_he_fr': 'MarianTransformer', 'opus_tatoeba_he_it': 'MarianTransformer', 'opus_tatoeba_it_he': 'MarianTransformer', 'pdf2table': 'PdfToTextTable', 'pdf2text': 'PdfToText', 'persian_w2v_cc_300d': 'WordEmbeddingsModel', 'personer_cc_300d': 'NerDLModel', 'pos_afribooms': 'PerceptronModel', 'pos_alksnis': 'PerceptronModel', 'pos_alpino': 'PerceptronModel', 'pos_anc': 'PerceptronModel', 'pos_ancora': 'PerceptronModel', 'pos_arcosg': 'PerceptronModel', 'pos_armtdp': 'PerceptronModel', 'pos_atis': 'PerceptronModel', 'pos_bdt': 'PerceptronModel', 'pos_bokmaal': 'PerceptronModel', 'pos_bosque': 'PerceptronModel', 'pos_boun': 'PerceptronModel', 'pos_btb': 'PerceptronModel', 'pos_cac': 'PerceptronModel', 'pos_ccg': 'PerceptronModel', 'pos_clinical': 'PerceptronModel', 'pos_cltt': 'PerceptronModel', 'pos_csui': 'PerceptronModel', 'pos_ctg': 'PerceptronModel', 'pos_ddt': 'PerceptronModel', 'pos_edt': 'PerceptronModel', 'pos_ewt': 'PerceptronModel', 'pos_farpahc': 'PerceptronModel', 'pos_fictree': 'PerceptronModel', 'pos_framenet': 'PerceptronModel', 'pos_ftb': 'PerceptronModel', 'pos_gdt': 'PerceptronModel', 'pos_giella': 'PerceptronModel', 'pos_gsd': 'PerceptronModel', 'pos_gsdluw': 'PerceptronModel', 'pos_gsdsimp': 'PerceptronModel', 'pos_gum': 'PerceptronModel', 'pos_hdt': 'PerceptronModel', 'pos_hdtb': 'PerceptronModel', 'pos_hiencs': 'PerceptronModel', 'pos_hse': 'PerceptronModel', 'pos_htb': 'PerceptronModel', 'pos_icepahc': 'PerceptronModel', 'pos_idt': 'PerceptronModel', 'pos_imst': 'PerceptronModel', 
'pos_isdt': 'PerceptronModel', 'pos_ittb': 'PerceptronModel', 'pos_iu': 'PerceptronModel', 'pos_kaist': 'PerceptronModel', 'pos_kenet': 'PerceptronModel', 'pos_kyoto': 'PerceptronModel', 'pos_lassysmall': 'PerceptronModel', 'pos_lfg': 'PerceptronModel', 'pos_lines': 'PerceptronModel', 'pos_llct': 'PerceptronModel', 'pos_lst20': 'PerceptronModel', 'pos_lvtb': 'PerceptronModel', 'pos_modern': 'PerceptronModel', 'pos_msri': 'PerceptronModel', 'pos_mtg': 'PerceptronModel', 'pos_mudt': 'PerceptronModel', 'pos_nonstandard': 'PerceptronModel', 'pos_nsc': 'PerceptronModel', 'pos_nynorsk': 'PerceptronModel', 'pos_nynorsklia': 'PerceptronModel', 'pos_padt': 'PerceptronModel', 'pos_parisstories': 'PerceptronModel', 'pos_partut': 'PerceptronModel', 'pos_pdb': 'PerceptronModel', 'pos_pdt': 'PerceptronModel', 'pos_penn': 'PerceptronModel', 'pos_perdt': 'PerceptronModel', 'pos_perseus': 'PerceptronModel', 'pos_postwita': 'PerceptronModel', 'pos_proiel': 'PerceptronModel', 'pos_rhapsodie': 'PerceptronModel', 'pos_rnc': 'PerceptronModel', 'pos_rrt': 'PerceptronModel', 'pos_sagt': 'PerceptronModel', 'pos_scriptorium': 'PerceptronModel', 'pos_sequoia': 'PerceptronModel', 'pos_seraji': 'PerceptronModel', 'pos_set': 'PerceptronModel', 'pos_simonero': 'PerceptronModel', 'pos_snk': 'PerceptronModel', 'pos_srcmf': 'PerceptronModel', 'pos_ssj': 'PerceptronModel', 'pos_sst': 'PerceptronModel', 'pos_syntagrus': 'PerceptronModel', 'pos_szeged': 'PerceptronModel', 'pos_taiga': 'PerceptronModel', 'pos_talbanken': 'PerceptronModel', 'pos_tdt': 'PerceptronModel', 'pos_torot': 'PerceptronModel', 'pos_tourism': 'PerceptronModel', 'pos_treegal': 'PerceptronModel', 'pos_ttb': 'PerceptronModel', 'pos_twittiro': 'PerceptronModel', 'pos_ud_alpino': 'PerceptronModel', 'pos_ud_ancora': 'PerceptronModel', 'pos_ud_armtdp': 'PerceptronModel', 'pos_ud_att': 'PerceptronModel', 'pos_ud_bdt': 'PerceptronModel', 'pos_ud_bhtb': 'PerceptronModel', 'pos_ud_bokmaal': 'PerceptronModel', 'pos_ud_bosque': 
'PerceptronModel', 'pos_ud_btb': 'PerceptronModel', 'pos_ud_ddt': 'PerceptronModel', 'pos_ud_ewt': 'PerceptronModel', 'pos_ud_gdt': 'PerceptronModel', 'pos_ud_gsd': 'PerceptronModel', 'pos_ud_gsd_trad': 'PerceptronModel', 'pos_ud_hdt': 'PerceptronModel', 'pos_ud_hdtb': 'PerceptronModel', 'pos_ud_htb': 'PerceptronModel', 'pos_ud_idt': 'PerceptronModel', 'pos_ud_imst': 'PerceptronModel', 'pos_ud_isdt': 'PerceptronModel', 'pos_ud_iu': 'PerceptronModel', 'pos_ud_kaist': 'PerceptronModel', 'pos_ud_keb': 'PerceptronModel', 'pos_ud_lfg': 'PerceptronModel', 'pos_ud_llct': 'PerceptronModel', 'pos_ud_lvtb': 'PerceptronModel', 'pos_ud_nynorsk': 'PerceptronModel', 'pos_ud_padt': 'PerceptronModel', 'pos_ud_pdt': 'PerceptronModel', 'pos_ud_perdt': 'PerceptronModel', 'pos_ud_rrt': 'PerceptronModel', 'pos_ud_snk': 'PerceptronModel', 'pos_ud_ssj': 'PerceptronModel', 'pos_ud_szeged': 'PerceptronModel', 'pos_ud_tal': 'PerceptronModel', 'pos_ud_tdt': 'PerceptronModel', 'pos_ud_treegal': 'PerceptronModel', 'pos_ud_udtb': 'PerceptronModel', 'pos_ud_ufal': 'PerceptronModel', 'pos_ud_ytb': 'PerceptronModel', 'pos_udante': 'PerceptronModel', 'pos_udt': 'PerceptronModel', 'pos_udtb': 'PerceptronModel', 'pos_ufal': 'PerceptronModel', 'pos_vedic': 'PerceptronModel', 'pos_vit': 'PerceptronModel', 'pos_vtb': 'PerceptronModel', 'pos_wtb': 'PerceptronModel', 'ppt2text_table': 'PptToTextTable', 'pragmatic_sentence_detector': 'SentenceDetector', 'rct_binary_classifier_biobert': 'ClassifierDLModel', 'rct_binary_classifier_use': 'ClassifierDLModel', 're_ade_biobert': 'RelationExtractionModel', 're_ade_clinical': 'RelationExtractionModel', 're_drugprot_clinical': 'RelationExtractionModel', 're_oncology_biomarker_result_wip': 'RelationExtractionModel', 're_oncology_granular_wip': 'RelationExtractionModel', 're_oncology_location_wip': 'RelationExtractionModel', 're_oncology_size_wip': 'RelationExtractionModel', 're_oncology_temporal_wip': 'RelationExtractionModel', 're_oncology_test_result_wip': 
'RelationExtractionModel', 're_oncology_wip': 'RelationExtractionModel', 're_temporal_events_clinical': 'RelationExtractionModel', 're_test_result_date': 'RelationExtractionModel', 're_zeroshot_biobert': 'ZeroShotRelationExtractionModel', 'redl_ade_biobert': 'RelationExtractionDLModel', 'redl_bodypart_direction_biobert': 'RelationExtractionDLModel', 'redl_bodypart_problem_biobert': 'RelationExtractionDLModel', 'redl_bodypart_procedure_test_biobert': 'RelationExtractionDLModel', 'redl_chemprot_biobert': 'RelationExtractionDLModel', 'redl_clinical_biobert': 'RelationExtractionDLModel', 'redl_date_clinical_biobert': 'RelationExtractionDLModel', 'redl_drug_drug_interaction_biobert': 'RelationExtractionDLModel', 'redl_drugprot_biobert': 'RelationExtractionDLModel', 'redl_human_phenotype_gene_biobert': 'RelationExtractionDLModel', 'redl_nihss_biobert': 'RelationExtractionDLModel', 'redl_oncology_biobert_wip': 'RelationExtractionDLModel', 'redl_oncology_biomarker_result_biobert_wip': 'RelationExtractionDLModel', 'redl_oncology_granular_biobert_wip': 'RelationExtractionDLModel', 'redl_oncology_location_biobert_wip': 'RelationExtractionDLModel', 'redl_oncology_size_biobert_wip': 'RelationExtractionDLModel', 'redl_oncology_temporal_biobert_wip': 'RelationExtractionDLModel', 'redl_oncology_test_result_biobert_wip': 'RelationExtractionDLModel', 'redl_temporal_events_biobert': 'RelationExtractionDLModel', 'regex_matcher': 'RegexMatcher', 'roberta_base': 'RoBertaEmbeddings', 'roberta_base_biomedical': 'RoBertaEmbeddings', 'roberta_base_qa_squad2': 'RoBertaForQuestionAnswering', 'roberta_base_sequence_classifier_ag_news': 'RoBertaForSequenceClassification', 'roberta_base_sequence_classifier_imdb': 'RoBertaForSequenceClassification', 'roberta_base_token_classifier_conll03': 'RoBertaForTokenClassification', 'roberta_base_token_classifier_ontonotes': 'RoBertaForTokenClassification', 'roberta_classifier_acts_feedback1': 'RoBertaForSequenceClassification', 
'roberta_classifier_argument': 'RoBertaForSequenceClassification', 'roberta_classifier_aristotletan_base_finetuned_sst2': 'RoBertaForSequenceClassification', 'roberta_classifier_attribute_classification': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_123_478412765': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_bbc_37249301': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_bert_covid_407910467': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_cml_412010597': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_covid_432211280': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_doctor_de_24595548': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_fa_473312409': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_fake_covid_news_36769078': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_formality_scoring_2_32597818': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_fred2_2682064': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_group_classification_441411446': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_imdb_base_3662644': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_imdb_demo_hf_16622775': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_imdb_rating_625417974': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_imdb_reviews_sentiment_329982': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_imdb_sentiment_classification_31154': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_imdb_test_21134453': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_large2_479012819': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_large_finetuned_467612250': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_lessons_tagging_606217261': 'RoBertaForSequenceClassification', 
'roberta_classifier_autonlp_my_own_imdb_sentiment_analysis_2131817': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_predict_roi_1_29797730': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_reading_prediction_172506': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_savesome_631818261': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_sst1_529214890': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_traffic_nlp_binary_537215209': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_trec_classification_522314623': 'RoBertaForSequenceClassification', 'roberta_classifier_autonlp_txc_17923124': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_amazon_shoe_reviews_classification_1104340243': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_app_review_train_1314150168': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_article_pred_1142742075': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_atc2': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_base_imdb_1275248775': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_base_imdb_1275248776': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_base_imdb_1275248777': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_base_imdb_1275248778': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_base_imdb_1275248779': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_basetweeteval_1281048986': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_basetweeteval_1281048987': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_basetweeteval_1281048988': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_basetweeteval_1281048989': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_basetweeteval_1281048990': 
'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_car_review_project_966432120': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_car_review_project_966432121': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_citizen_nlu_bn_1370652766': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_citizen_nlu_hindi_1370952776': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_commonsense_1_696121179': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_intentclassificationfilipino_715021714': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_lucifer_multi_auto_all_837626712': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_mental_health_analysis_752423172': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_mlsec_1013333734': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_neurips_chanllenge_1287149282': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_news_916530070': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_nlu_crypto_sentiment_analysis_754123133': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_not_interested_1_1213145894': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_pan_976832386': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_pan_977432399': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_qn_classification_1015534072': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_security_texts_classification_688020754': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_security_texts_classification_distil_688220764': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_sentiment_polarity_918130222': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_smm4h_large_clean_874027878': 'RoBertaForSequenceClassification', 
'roberta_classifier_autotrain_test_project_879428192': 'RoBertaForSequenceClassification', 'roberta_classifier_autotrain_yes_or_no_on_circa_1009033469': 'RoBertaForSequenceClassification', 'roberta_classifier_banking77': 'RoBertaForSequenceClassification', 'roberta_classifier_base_adr_smm4h2022': 'RoBertaForSequenceClassification', 'roberta_classifier_base_bne_finetuned_amazon_reviews_multi_finetuned_amazon_reviews_multi': 'RoBertaForSequenceClassification', 'roberta_classifier_base_bne_finetuned_catalonia_independence_detector': 'RoBertaForSequenceClassification', 'roberta_classifier_base_bne_finetuned_cyberbullying_spanish': 'RoBertaForSequenceClassification', 'roberta_classifier_base_bne_finetuned_hate_speech_offensive_spanish': 'RoBertaForSequenceClassification', 'roberta_classifier_base_ca_finetuned_catalonia_independence_detector': 'RoBertaForSequenceClassification', 'roberta_classifier_base_ca_finetuned_cyberbullying_catalan': 'RoBertaForSequenceClassification', 'roberta_classifier_base_ca_finetuned_tecla': 'RoBertaForSequenceClassification', 'roberta_classifier_base_ca_sts_cased': 'RoBertaForSequenceClassification', 'roberta_classifier_base_ca_tc_cased': 'RoBertaForSequenceClassification', 'roberta_classifier_base_ca_te_cased': 'RoBertaForSequenceClassification', 'roberta_classifier_base_ca_v2_sts_cased': 'RoBertaForSequenceClassification', 'roberta_classifier_base_ca_v2_tc_cased': 'RoBertaForSequenceClassification', 'roberta_classifier_base_ca_v2_te_cased': 'RoBertaForSequenceClassification', 'roberta_classifier_base_clickbait': 'RoBertaForSequenceClassification', 'roberta_classifier_base_cola': 'RoBertaForSequenceClassification', 'roberta_classifier_base_emotion': 'RoBertaForSequenceClassification', 'roberta_classifier_base_finetuned_osdg': 'RoBertaForSequenceClassification', 'roberta_classifier_base_formality_ranker': 'RoBertaForSequenceClassification', 'roberta_classifier_base_frenk_hate': 'RoBertaForSequenceClassification', 
'roberta_classifier_base_imdb': 'RoBertaForSequenceClassification', 'roberta_classifier_base_indonesian_1.5g_sentiment_analysis_smsa': 'RoBertaForSequenceClassification', 'roberta_classifier_base_indonesian_sentiment_analysis_smsa': 'RoBertaForSequenceClassification', 'roberta_classifier_base_stars': 'RoBertaForSequenceClassification', 'roberta_classifier_base_toxicity': 'RoBertaForSequenceClassification', 'roberta_classifier_bertin_base_sentiment_analysis': 'RoBertaForSequenceClassification', 'roberta_classifier_bertin_base_spanish_sem_eval_2018_task_1': 'RoBertaForSequenceClassification', 'roberta_classifier_bertin_base_xnli': 'RoBertaForSequenceClassification', 'roberta_classifier_bertin_exist22_task1': 'RoBertaForSequenceClassification', 'roberta_classifier_bne_sentiment_analysis': 'RoBertaForSequenceClassification', 'roberta_classifier_carer_2': 'RoBertaForSequenceClassification', 'roberta_classifier_carer_new': 'RoBertaForSequenceClassification', 'roberta_classifier_clasificacion_sentimientos': 'RoBertaForSequenceClassification', 'roberta_classifier_codeberta_language_identification': 'RoBertaForSequenceClassification', 'roberta_classifier_comments_el_toxic': 'RoBertaForSequenceClassification', 'roberta_classifier_concreteness_english_distil_base': 'RoBertaForSequenceClassification', 'roberta_classifier_covid_policy_21': 'RoBertaForSequenceClassification', 'roberta_classifier_cryptobert': 'RoBertaForSequenceClassification', 'roberta_classifier_ctrl44': 'RoBertaForSequenceClassification', 'roberta_classifier_ctrl44_clf': 'RoBertaForSequenceClassification', 'roberta_classifier_cuad_contract_type': 'RoBertaForSequenceClassification', 'roberta_classifier_cuenb_mnli': 'RoBertaForSequenceClassification', 'roberta_classifier_dbounds_large_finetuned_clinc': 'RoBertaForSequenceClassification', 'roberta_classifier_depression_detection': 'RoBertaForSequenceClassification', 'roberta_classifier_detect_acoso_twitter': 'RoBertaForSequenceClassification', 
'roberta_classifier_discord_nft_sentiment': 'RoBertaForSequenceClassification', 'roberta_classifier_distil_base_finetuned_fake_news_english': 'RoBertaForSequenceClassification', 'roberta_classifier_distil_base_sst2_distilled': 'RoBertaForSequenceClassification', 'roberta_classifier_distil_bias': 'RoBertaForSequenceClassification', 'roberta_classifier_distil_clickbait': 'RoBertaForSequenceClassification', 'roberta_classifier_distil_current': 'RoBertaForSequenceClassification', 'roberta_classifier_distil_finetuned_banking77': 'RoBertaForSequenceClassification', 'roberta_classifier_distil_finetuned_financial_text_classification': 'RoBertaForSequenceClassification', 'roberta_classifier_distil_finetuned_stereotype_detection': 'RoBertaForSequenceClassification', 'roberta_classifier_distil_hatespeech': 'RoBertaForSequenceClassification', 'roberta_classifier_distil_mbfc_bias': 'RoBertaForSequenceClassification', 'roberta_classifier_distil_mbfc_bias_4class': 'RoBertaForSequenceClassification', 'roberta_classifier_distil_news_small': 'RoBertaForSequenceClassification', 'roberta_classifier_distil_offensive': 'RoBertaForSequenceClassification', 'roberta_classifier_distil_propaganda_2class': 'RoBertaForSequenceClassification', 'roberta_classifier_distil_proppy': 'RoBertaForSequenceClassification', 'roberta_classifier_earning_call_transcript_classification': 'RoBertaForSequenceClassification', 'roberta_classifier_election_relevancy_best': 'RoBertaForSequenceClassification', 'roberta_classifier_emo': 'RoBertaForSequenceClassification', 'roberta_classifier_emoroberta': 'RoBertaForSequenceClassification', 'roberta_classifier_emotion': 'RoBertaForSequenceClassification', 'roberta_classifier_emotion_english_distil_base': 'RoBertaForSequenceClassification', 'roberta_classifier_emotion_english_large': 'RoBertaForSequenceClassification', 'roberta_classifier_environmental_claims': 'RoBertaForSequenceClassification', 'roberta_classifier_fake_news_classification': 
'RoBertaForSequenceClassification', 'roberta_classifier_fake_news_debunker': 'RoBertaForSequenceClassification', 'roberta_classifier_fake_news_detection_spanish': 'RoBertaForSequenceClassification', 'roberta_classifier_fakeddit': 'RoBertaForSequenceClassification', 'roberta_classifier_feedback_intent_test': 'RoBertaForSequenceClassification', 'roberta_classifier_fever': 'RoBertaForSequenceClassification', 'roberta_classifier_financial_large_sentiment': 'RoBertaForSequenceClassification', 'roberta_classifier_finetune_emotion_distilroberta': 'RoBertaForSequenceClassification', 'roberta_classifier_finetuned_sentence_itr0_2e_05_all_01_03_2022_02_53_51': 'RoBertaForSequenceClassification', 'roberta_classifier_finetuning_cardiffnlp_sentiment_model': 'RoBertaForSequenceClassification', 'roberta_classifier_finsent': 'RoBertaForSequenceClassification', 'roberta_classifier_for_multilabel_sentence_classification': 'RoBertaForSequenceClassification', 'roberta_classifier_fs_distil_fine_tuned': 'RoBertaForSequenceClassification', 'roberta_classifier_fyp2022': 'RoBertaForSequenceClassification', 'roberta_classifier_hate_speech_dynabench_r1_target': 'RoBertaForSequenceClassification', 'roberta_classifier_hate_speech_dynabench_r2_target': 'RoBertaForSequenceClassification', 'roberta_classifier_hate_speech_dynabench_r3_target': 'RoBertaForSequenceClassification', 'roberta_classifier_hate_speech_dynabench_r4_target': 'RoBertaForSequenceClassification', 'roberta_classifier_icebert_finetuned_grouped': 'RoBertaForSequenceClassification', 'roberta_classifier_indo_indonli': 'RoBertaForSequenceClassification', 'roberta_classifier_indonesia_emotion': 'RoBertaForSequenceClassification', 'roberta_classifier_indonesia_emotion_small': 'RoBertaForSequenceClassification', 'roberta_classifier_indonesia_sentiment': 'RoBertaForSequenceClassification', 'roberta_classifier_indonesian_base_emotion': 'RoBertaForSequenceClassification', 'roberta_classifier_indonesian_base_indonli': 
'RoBertaForSequenceClassification', 'roberta_classifier_indonesian_base_sentiment': 'RoBertaForSequenceClassification', 'roberta_classifier_indonesiasentiment': 'RoBertaForSequenceClassification', 'roberta_classifier_intel_base_mrpc': 'RoBertaForSequenceClassification', 'roberta_classifier_iqa_classification': 'RoBertaForSequenceClassification', 'roberta_classifier_isear_bert': 'RoBertaForSequenceClassification', 'roberta_classifier_iterater_intention': 'RoBertaForSequenceClassification', 'roberta_classifier_jeremiahz_base_mrpc': 'RoBertaForSequenceClassification', 'roberta_classifier_large': 'RoBertaForSequenceClassification', 'roberta_classifier_large_faithcritic': 'RoBertaForSequenceClassification', 'roberta_classifier_large_finetuned_clinc_1': 'RoBertaForSequenceClassification', 'roberta_classifier_large_finetuned_clinc_12': 'RoBertaForSequenceClassification', 'roberta_classifier_large_finetuned_clinc_123': 'RoBertaForSequenceClassification', 'roberta_classifier_large_finetuned_clinc_1234': 'RoBertaForSequenceClassification', 'roberta_classifier_large_finetuned_clinc_12345': 'RoBertaForSequenceClassification', 'roberta_classifier_large_finetuned_clinc_123456': 'RoBertaForSequenceClassification', 'roberta_classifier_large_finetuned_clinc_1234567': 'RoBertaForSequenceClassification', 'roberta_classifier_large_finetuned_clinc_314': 'RoBertaForSequenceClassification', 'roberta_classifier_large_finetuned_clinc_3141': 'RoBertaForSequenceClassification', 'roberta_classifier_large_finetuned_plutchik_emotion': 'RoBertaForSequenceClassification', 'roberta_classifier_large_mnli': 'RoBertaForSequenceClassification', 'roberta_classifier_large_mnli_finetuned_header': 'RoBertaForSequenceClassification', 'roberta_classifier_large_mnli_misogyny_sexism_4tweets_2e_05_0.05': 'RoBertaForSequenceClassification', 'roberta_classifier_large_mnli_misogyny_sexism_4tweets_3e_05_0.05': 'RoBertaForSequenceClassification', 
'roberta_classifier_large_mnli_misogyny_sexism_4tweets_3e_05_0.055_4': 'RoBertaForSequenceClassification', 'roberta_classifier_large_mnli_misogyny_sexism_4tweets_3e_05_0.05_singledt': 'RoBertaForSequenceClassification', 'roberta_classifier_large_mnli_misogyny_sexism_4tweets_3e_05_0.07': 'RoBertaForSequenceClassification', 'roberta_classifier_large_pyrxsum': 'RoBertaForSequenceClassification', 'roberta_classifier_large_realsumm': 'RoBertaForSequenceClassification', 'roberta_classifier_large_realsumm_by_examples_fold1': 'RoBertaForSequenceClassification', 'roberta_classifier_large_realsumm_by_examples_fold2': 'RoBertaForSequenceClassification', 'roberta_classifier_large_realsumm_by_examples_fold3': 'RoBertaForSequenceClassification', 'roberta_classifier_large_realsumm_by_examples_fold4': 'RoBertaForSequenceClassification', 'roberta_classifier_large_realsumm_by_examples_fold5': 'RoBertaForSequenceClassification', 'roberta_classifier_large_realsumm_by_systems_fold1': 'RoBertaForSequenceClassification', 'roberta_classifier_large_realsumm_by_systems_fold2': 'RoBertaForSequenceClassification', 'roberta_classifier_large_realsumm_by_systems_fold3': 'RoBertaForSequenceClassification', 'roberta_classifier_large_realsumm_by_systems_fold4': 'RoBertaForSequenceClassification', 'roberta_classifier_large_realsumm_by_systems_fold5': 'RoBertaForSequenceClassification', 'roberta_classifier_large_snli_mnli_fever_anli_r1_r2_r3_nli': 'RoBertaForSequenceClassification', 'roberta_classifier_large_tac08': 'RoBertaForSequenceClassification', 'roberta_classifier_large_tac08_tac09': 'RoBertaForSequenceClassification', 'roberta_classifier_large_tac09': 'RoBertaForSequenceClassification', 'roberta_classifier_large_wanli': 'RoBertaForSequenceClassification', 'roberta_classifier_large_winogrande': 'RoBertaForSequenceClassification', 'roberta_classifier_latam_question_quality': 'RoBertaForSequenceClassification', 'roberta_classifier_lewtun_large_finetuned_clinc': 
'RoBertaForSequenceClassification', 'roberta_classifier_lewtun_minilmv2_l12_h384_distilled_finetuned_clinc': 'RoBertaForSequenceClassification', 'roberta_classifier_live_demo_question_intimacy': 'RoBertaForSequenceClassification', 'roberta_classifier_lro_v1.0.2a': 'RoBertaForSequenceClassification', 'roberta_classifier_main_intent_test': 'RoBertaForSequenceClassification', 'roberta_classifier_malayalam_news': 'RoBertaForSequenceClassification', 'roberta_classifier_manibert': 'RoBertaForSequenceClassification', 'roberta_classifier_mediawatch_el_topics': 'RoBertaForSequenceClassification', 'roberta_classifier_minilm_l12_clinc_distilled': 'RoBertaForSequenceClassification', 'roberta_classifier_minilm_l12_h384_sst2_distilled': 'RoBertaForSequenceClassification', 'roberta_classifier_minilm_l6_clinc_distilled': 'RoBertaForSequenceClassification', 'roberta_classifier_minilmv2_l12_h384_distilled_finetuned_spam_detection': 'RoBertaForSequenceClassification', 'roberta_classifier_minilmv2_l12_h384_emotion': 'RoBertaForSequenceClassification', 'roberta_classifier_minilmv2_l12_h384_finetuned_clinc': 'RoBertaForSequenceClassification', 'roberta_classifier_minilmv2_l12_h384_sst2': 'RoBertaForSequenceClassification', 'roberta_classifier_minilmv2_l6_h384_emotion': 'RoBertaForSequenceClassification', 'roberta_classifier_minilmv2_l6_h384_sst2': 'RoBertaForSequenceClassification', 'roberta_classifier_minilmv2_l6_h768_sst2': 'RoBertaForSequenceClassification', 'roberta_classifier_mnli_base': 'RoBertaForSequenceClassification', 'roberta_classifier_mnli_distil_base': 'RoBertaForSequenceClassification', 'roberta_classifier_mnli_large': 'RoBertaForSequenceClassification', 'roberta_classifier_multiclass_textclassification': 'RoBertaForSequenceClassification', 'roberta_classifier_my_awesome_model': 'RoBertaForSequenceClassification', 'roberta_classifier_neutral_non_neutral': 'RoBertaForSequenceClassification', 'roberta_classifier_news_sentiment_analysis': 'RoBertaForSequenceClassification', 
'roberta_classifier_not_interested_v0': 'RoBertaForSequenceClassification', 'roberta_classifier_offensive': 'RoBertaForSequenceClassification', 'roberta_classifier_offensive_beta': 'RoBertaForSequenceClassification', 'roberta_classifier_optimum_large_finetuned_clinc': 'RoBertaForSequenceClassification', 'roberta_classifier_optimum_minilmv2_l12_h384_distilled_finetuned_clinc': 'RoBertaForSequenceClassification', 'roberta_classifier_paper_feedback_intent': 'RoBertaForSequenceClassification', 'roberta_classifier_paraphrase': 'RoBertaForSequenceClassification', 'roberta_classifier_parrot_adequacy_model': 'RoBertaForSequenceClassification', 'roberta_classifier_philschmid_large_finetuned_clinc': 'RoBertaForSequenceClassification', 'roberta_classifier_philschmid_large_sst2': 'RoBertaForSequenceClassification', 'roberta_classifier_philschmid_minilmv2_l12_h384_distilled_finetuned_clinc': 'RoBertaForSequenceClassification', 'roberta_classifier_platzi_distil_base_mrpc_glue_omar_espejel': 'RoBertaForSequenceClassification', 'roberta_classifier_policyberta_7d': 'RoBertaForSequenceClassification', 'roberta_classifier_programming_lang_identifier': 'RoBertaForSequenceClassification', 'roberta_classifier_pump_intent_test': 'RoBertaForSequenceClassification', 'roberta_classifier_purchase_intention_english_large': 'RoBertaForSequenceClassification', 'roberta_classifier_qandaclassifier': 'RoBertaForSequenceClassification', 'roberta_classifier_qnli_base': 'RoBertaForSequenceClassification', 'roberta_classifier_qqp_base': 'RoBertaForSequenceClassification', 'roberta_classifier_question_intimacy': 'RoBertaForSequenceClassification', 'roberta_classifier_reactiongif': 'RoBertaForSequenceClassification', 'roberta_classifier_readability_es_3class_paragraphs': 'RoBertaForSequenceClassification', 'roberta_classifier_readability_es_3class_sentences': 'RoBertaForSequenceClassification', 'roberta_classifier_readability_es_paragraphs': 'RoBertaForSequenceClassification', 
'roberta_classifier_readability_es_sentences': 'RoBertaForSequenceClassification', 'roberta_classifier_reranking_model': 'RoBertaForSequenceClassification', 'roberta_classifier_rile': 'RoBertaForSequenceClassification', 'roberta_classifier_robbert_dutch_base_toxic_comments': 'RoBertaForSequenceClassification', 'roberta_classifier_robbert_twitter_sentiment': 'RoBertaForSequenceClassification', 'roberta_classifier_robbert_twitter_sentiment_custom': 'RoBertaForSequenceClassification', 'roberta_classifier_robbert_v2_dutch_base_hebban_reviews': 'RoBertaForSequenceClassification', 'roberta_classifier_robbert_v2_dutch_sentiment': 'RoBertaForSequenceClassification', 'roberta_classifier_robbertje_merged_dutch_sentiment': 'RoBertaForSequenceClassification', 'roberta_classifier_rota': 'RoBertaForSequenceClassification', 'roberta_classifier_rte_base': 'RoBertaForSequenceClassification', 'roberta_classifier_ruperta_base_sentiment_analysis': 'RoBertaForSequenceClassification', 'roberta_classifier_sagemaker_base_emotion': 'RoBertaForSequenceClassification', 'roberta_classifier_scim_distilroberta': 'RoBertaForSequenceClassification', 'roberta_classifier_sentence_classification4designtutor': 'RoBertaForSequenceClassification', 'roberta_classifier_sentiment_analysis': 'RoBertaForSequenceClassification', 'roberta_classifier_sentiment_analysis_custom': 'RoBertaForSequenceClassification', 'roberta_classifier_sentiment_large_english': 'RoBertaForSequenceClassification', 'roberta_classifier_sentiment_large_english_3_classes': 'RoBertaForSequenceClassification', 'roberta_classifier_snli_base': 'RoBertaForSequenceClassification', 'roberta_classifier_snli_distil_base': 'RoBertaForSequenceClassification', 'roberta_classifier_snli_large': 'RoBertaForSequenceClassification', 'roberta_classifier_soongsil_bert_base_apeach': 'RoBertaForSequenceClassification', 'roberta_classifier_soongsil_bert_small_apeach': 'RoBertaForSequenceClassification', 'roberta_classifier_soongsil_bert_wellness': 
'RoBertaForSequenceClassification', 'roberta_classifier_soongsilbert_base_beep': 'RoBertaForSequenceClassification', 'roberta_classifier_spte_large_all_mnli': 'RoBertaForSequenceClassification', 'roberta_classifier_spte_large_mnli_200': 'RoBertaForSequenceClassification', 'roberta_classifier_stop_the_steal_relevancy_analysis_binary': 'RoBertaForSequenceClassification', 'roberta_classifier_stress_twitter': 'RoBertaForSequenceClassification', 'roberta_classifier_sundanese_base_emotion': 'RoBertaForSequenceClassification', 'roberta_classifier_superpal': 'RoBertaForSequenceClassification', 'roberta_classifier_ti_geezswitch': 'RoBertaForSequenceClassification', 'roberta_classifier_ti_sentiment': 'RoBertaForSequenceClassification', 'roberta_classifier_to_music_genre': 'RoBertaForSequenceClassification', 'roberta_classifier_toxic_detector_distilroberta': 'RoBertaForSequenceClassification', 'roberta_classifier_toxicity': 'RoBertaForSequenceClassification', 'roberta_classifier_tweet_offensive_eval': 'RoBertaForSequenceClassification', 'roberta_classifier_tweet_sentiment_eval': 'RoBertaForSequenceClassification', 'roberta_classifier_tweet_topic_19_multi': 'RoBertaForSequenceClassification', 'roberta_classifier_tweet_topic_19_single': 'RoBertaForSequenceClassification', 'roberta_classifier_tweet_topic_21_multi': 'RoBertaForSequenceClassification', 'roberta_classifier_tweet_topic_21_single': 'RoBertaForSequenceClassification', 'roberta_classifier_tweetclimateanalysis': 'RoBertaForSequenceClassification', 'roberta_classifier_twisent_siebert': 'RoBertaForSequenceClassification', 'roberta_classifier_twisent_twisent': 'RoBertaForSequenceClassification', 'roberta_classifier_twitch_base_sentiment_latest': 'RoBertaForSequenceClassification', 'roberta_classifier_twitter_base_dec2021_rbam_fine_tuned': 'RoBertaForSequenceClassification', 'roberta_classifier_twitter_base_emotion': 'RoBertaForSequenceClassification', 'roberta_classifier_twitter_base_jun2022_sem_eval_2018_task_1': 
'RoBertaForSequenceClassification', 'roberta_classifier_twitter_base_mar2022_finetuned_emotion': 'RoBertaForSequenceClassification', 'roberta_classifier_twitter_base_mar2022_finetuned_sentiment': 'RoBertaForSequenceClassification', 'roberta_classifier_twitter_base_sentiment_latest': 'RoBertaForSequenceClassification', 'roberta_classifier_twitter_base_sentiment_latest_finetuned_fg_concat_sentence_h_news': 'RoBertaForSequenceClassification', 'roberta_classifier_twitter_base_sentiment_latest_finetuned_fg_single_sentence_news': 'RoBertaForSequenceClassification', 'roberta_classifier_twitter_base_stance_abortionv2': 'RoBertaForSequenceClassification', 'roberta_classifier_uganda_labor_market_interview_text_classification': 'RoBertaForSequenceClassification', 'roberta_classifier_unbiased_toxic': 'RoBertaForSequenceClassification', 'roberta_classifier_unhappyzebra100': 'RoBertaForSequenceClassification', 'roberta_classifier_vent_emotion': 'RoBertaForSequenceClassification', 'roberta_classifier_verdict': 'RoBertaForSequenceClassification', 'roberta_classifier_yelp_rating_classification': 'RoBertaForSequenceClassification', 'roberta_classifier_zabanshenas_base_mix': 'RoBertaForSequenceClassification', 'roberta_embeddings_BR_BERTo': 'RoBertaEmbeddings', 'roberta_embeddings_Bible_roberta_base': 'RoBertaEmbeddings', 'roberta_embeddings_KNUBert': 'RoBertaEmbeddings', 'roberta_embeddings_KanBERTo': 'RoBertaEmbeddings', 'roberta_embeddings_MedRoBERTa.nl': 'RoBertaEmbeddings', 'roberta_embeddings_RoBERTa_hindi_guj_san': 'RoBertaEmbeddings', 'roberta_embeddings_RoBERTalex': 'RoBertaEmbeddings', 'roberta_embeddings_RuPERTa_base': 'RoBertaEmbeddings', 'roberta_embeddings_SecRoBERTa': 'RoBertaEmbeddings', 'roberta_embeddings_am_roberta': 'RoBertaEmbeddings', 'roberta_embeddings_base_bulgarian': 'RoBertaEmbeddings', 'roberta_embeddings_base_indonesian_522m': 'RoBertaEmbeddings', 'roberta_embeddings_bertin_base_gaussian': 'RoBertaEmbeddings', 
'roberta_embeddings_bertin_base_gaussian_exp_512seqlen': 'RoBertaEmbeddings', 'roberta_embeddings_bertin_base_random': 'RoBertaEmbeddings', 'roberta_embeddings_bertin_base_random_exp_512seqlen': 'RoBertaEmbeddings', 'roberta_embeddings_bertin_base_stepwise': 'RoBertaEmbeddings', 'roberta_embeddings_bertin_base_stepwise_exp_512seqlen': 'RoBertaEmbeddings', 'roberta_embeddings_bertin_roberta_base_spanish': 'RoBertaEmbeddings', 'roberta_embeddings_bertin_roberta_large_spanish': 'RoBertaEmbeddings', 'roberta_embeddings_chEMBL26_smiles_v2': 'RoBertaEmbeddings', 'roberta_embeddings_chEMBL_smiles_v1': 'RoBertaEmbeddings', 'roberta_embeddings_codeberta_small_v1': 'RoBertaEmbeddings', 'roberta_embeddings_cord19_1m7k': 'RoBertaEmbeddings', 'roberta_embeddings_distil_base': 'RoBertaEmbeddings', 'roberta_embeddings_distilroberta_base': 'RoBertaEmbeddings', 'roberta_embeddings_distilroberta_base_climate_d': 'RoBertaEmbeddings', 'roberta_embeddings_distilroberta_base_climate_d_s': 'RoBertaEmbeddings', 'roberta_embeddings_distilroberta_base_climate_f': 'RoBertaEmbeddings', 'roberta_embeddings_distilroberta_base_finetuned_jira_qt_issue_title': 'RoBertaEmbeddings', 'roberta_embeddings_distilroberta_base_finetuned_jira_qt_issue_titles_and_bodies': 'RoBertaEmbeddings', 'roberta_embeddings_fairlex_ecthr_minilm': 'RoBertaEmbeddings', 'roberta_embeddings_fairlex_scotus_minilm': 'RoBertaEmbeddings', 'roberta_embeddings_fernet_news': 'RoBertaEmbeddings', 'roberta_embeddings_financial': 'RoBertaEmbeddings', 'roberta_embeddings_french_roberta': 'RoBertaEmbeddings', 'roberta_embeddings_hindi': 'RoBertaEmbeddings', 'roberta_embeddings_hotelbert': 'RoBertaEmbeddings', 'roberta_embeddings_hotelbert_small': 'RoBertaEmbeddings', 'roberta_embeddings_indic_transformers': 'RoBertaEmbeddings', 'roberta_embeddings_indic_transformers_hi_roberta': 'RoBertaEmbeddings', 'roberta_embeddings_indic_transformers_te_roberta': 'RoBertaEmbeddings', 'roberta_embeddings_indo_roberta_small': 'RoBertaEmbeddings', 
'roberta_embeddings_indo_small': 'RoBertaEmbeddings', 'roberta_embeddings_indonesian_roberta_base': 'RoBertaEmbeddings', 'roberta_embeddings_indonesian_roberta_large': 'RoBertaEmbeddings', 'roberta_embeddings_javanese_roberta_small': 'RoBertaEmbeddings', 'roberta_embeddings_javanese_roberta_small_imdb': 'RoBertaEmbeddings', 'roberta_embeddings_javanese_small': 'RoBertaEmbeddings', 'roberta_embeddings_javanese_small_imdb': 'RoBertaEmbeddings', 'roberta_embeddings_jurisbert': 'RoBertaEmbeddings', 'roberta_embeddings_legal_base': 'RoBertaEmbeddings', 'roberta_embeddings_legal_roberta_base': 'RoBertaEmbeddings', 'roberta_embeddings_medroberta.nl': 'RoBertaEmbeddings', 'roberta_embeddings_mlm_spanish_roberta_base': 'RoBertaEmbeddings', 'roberta_embeddings_model_attribution_challenge_base': 'RoBertaEmbeddings', 'roberta_embeddings_muppet_roberta_base': 'RoBertaEmbeddings', 'roberta_embeddings_muppet_roberta_large': 'RoBertaEmbeddings', 'roberta_embeddings_palobert_base_greek_uncased_v1': 'RoBertaEmbeddings', 'roberta_embeddings_pdelobelle_robbert_v2_dutch_base': 'RoBertaEmbeddings', 'roberta_embeddings_pmc_med_bio_mlm_roberta_large': 'RoBertaEmbeddings', 'roberta_embeddings_robasqu': 'RoBertaEmbeddings', 'roberta_embeddings_robbert_v2_dutch_base': 'RoBertaEmbeddings', 'roberta_embeddings_robbertje_1_gb_bort': 'RoBertaEmbeddings', 'roberta_embeddings_robbertje_1_gb_merged': 'RoBertaEmbeddings', 'roberta_embeddings_robbertje_1_gb_non_shuffled': 'RoBertaEmbeddings', 'roberta_embeddings_robbertje_1_gb_shuffled': 'RoBertaEmbeddings', 'roberta_embeddings_roberta_base_bne': 'RoBertaEmbeddings', 'roberta_embeddings_roberta_base_indonesian_522M': 'RoBertaEmbeddings', 'roberta_embeddings_roberta_base_russian_v0': 'RoBertaEmbeddings', 'roberta_embeddings_roberta_base_wechsel_chinese': 'RoBertaEmbeddings', 'roberta_embeddings_roberta_base_wechsel_french': 'RoBertaEmbeddings', 'roberta_embeddings_roberta_base_wechsel_german': 'RoBertaEmbeddings', 
'roberta_embeddings_roberta_fa_zwnj_base': 'RoBertaEmbeddings', 'roberta_embeddings_roberta_ko_small': 'RoBertaEmbeddings', 'roberta_embeddings_roberta_large_bne': 'RoBertaEmbeddings', 'roberta_embeddings_roberta_pubmed': 'RoBertaEmbeddings', 'roberta_embeddings_roberta_tagalog_base': 'RoBertaEmbeddings', 'roberta_embeddings_roberta_tagalog_large': 'RoBertaEmbeddings', 'roberta_embeddings_roberta_urdu_small': 'RoBertaEmbeddings', 'roberta_embeddings_robertinh': 'RoBertaEmbeddings', 'roberta_embeddings_ruRoberta_large': 'RoBertaEmbeddings', 'roberta_embeddings_ruperta_base_finetuned_spa_constitution': 'RoBertaEmbeddings', 'roberta_embeddings_sinhalaberto': 'RoBertaEmbeddings', 'roberta_embeddings_slovakbert': 'RoBertaEmbeddings', 'roberta_embeddings_small_bulgarian': 'RoBertaEmbeddings', 'roberta_embeddings_sundanese_roberta_base': 'RoBertaEmbeddings', 'roberta_embeddings_tagalog_base': 'RoBertaEmbeddings', 'roberta_embeddings_tagalog_large': 'RoBertaEmbeddings', 'roberta_embeddings_tswanabert': 'RoBertaEmbeddings', 'roberta_embeddings_ukr_roberta_base': 'RoBertaEmbeddings', 'roberta_embeddings_zuberta': 'RoBertaEmbeddings', 'roberta_jurisbert_clas_art_convencion_americana_dh': 'RoBertaForSequenceClassification', 'roberta_jurisbert_class_tratados_internacionales_sistema_universal': 'RoBertaForSequenceClassification', 'roberta_large': 'RoBertaEmbeddings', 'roberta_large_token_classifier_conll03': 'RoBertaForTokenClassification', 'roberta_large_token_classifier_ontonotes': 'RoBertaForTokenClassification', 'roberta_ner_NER_LAW_MONEY4': 'RoBertaForTokenClassification', 'roberta_ner_RuPERTa_base_finetuned_ner': 'RoBertaForTokenClassification', 'roberta_ner_alekseykorshuk_roberta_base_finetuned_ner': 'RoBertaForTokenClassification', 'roberta_ner_autotrain_zuozhuan_1100540141': 'RoBertaForTokenClassification', 'roberta_ner_bayartsogt_roberta_base_ner_demo': 'RoBertaForTokenClassification', 'roberta_ner_bergurth_icebert_finetuned_ner': 'RoBertaForTokenClassification', 
'roberta_ner_bert_finetuned_protagonist_english': 'RoBertaForTokenClassification', 'roberta_ner_bert_finetuned_protagonist_english_pc': 'RoBertaForTokenClassification', 'roberta_ner_bertreach_finetuned_ner': 'RoBertaForTokenClassification', 'roberta_ner_bertweet_large_tweetner_2020': 'RoBertaForTokenClassification', 'roberta_ner_bsc_bio_ehr_es_cantemist': 'RoBertaForTokenClassification', 'roberta_ner_bsc_bio_ehr_es_pharmaconer': 'RoBertaForTokenClassification', 'roberta_ner_buyandelger_roberta_base_ner_demo': 'RoBertaForTokenClassification', 'roberta_ner_clulab_roberta_timex_semeval': 'RoBertaForTokenClassification', 'roberta_ner_codebert_base_finetuned_stackoverflow': 'RoBertaForTokenClassification', 'roberta_ner_codebert_base_finetuned_stackoverflow_ner': 'RoBertaForTokenClassification', 'roberta_ner_codebert_mt4ts': 'RoBertaForTokenClassification', 'roberta_ner_codeberta_mt4ts': 'RoBertaForTokenClassification', 'roberta_ner_company_segment_ner': 'RoBertaForTokenClassification', 'roberta_ner_deid_roberta_i2b2': 'RoBertaForTokenClassification', 'roberta_ner_diag_proc': 'MedicalNerModel', 'roberta_ner_distil_slovakbert_ner': 'RoBertaForTokenClassification', 'roberta_ner_distilroberta_base_ner_conll2003': 'RoBertaForTokenClassification', 'roberta_ner_distilroberta_base_ner_wikiann': 'RoBertaForTokenClassification', 'roberta_ner_distilroberta_base_ner_wikiann_conll2003_3_class': 'RoBertaForTokenClassification', 'roberta_ner_distilroberta_base_ner_wikiann_conll2003_4_class': 'RoBertaForTokenClassification', 'roberta_ner_domain_adapted_timex': 'RoBertaForTokenClassification', 'roberta_ner_egoitz_roberta_timex_semeval': 'RoBertaForTokenClassification', 'roberta_ner_eliasbe_icebert_finetuned_ner': 'RoBertaForTokenClassification', 'roberta_ner_finetuned_bioclinical': 'RoBertaForTokenClassification', 'roberta_ner_fnrbt': 'RoBertaForTokenClassification', 'roberta_ner_ganzorig_roberta_base_ner_demo': 'RoBertaForTokenClassification', 'roberta_ner_graphcodebert_mt4ts': 
'RoBertaForTokenClassification', 'roberta_ner_graphpolygot_mt4ts': 'RoBertaForTokenClassification', 'roberta_ner_icebert_ner': 'RoBertaForTokenClassification', 'roberta_ner_icelandic_ner_roberta': 'RoBertaForTokenClassification', 'roberta_ner_indonesian_roberta_base_bapos_tagger': 'RoBertaForTokenClassification', 'roberta_ner_jurisbert_finetuning_ner': 'RoBertaForTokenClassification', 'roberta_ner_keyphrase_extraction_kbir_inspec': 'RoBertaForTokenClassification', 'roberta_ner_keyphrase_extraction_kbir_kpcrowd': 'RoBertaForTokenClassification', 'roberta_ner_keyphrase_extraction_kbir_kptimes': 'RoBertaForTokenClassification', 'roberta_ner_keyphrase_extraction_kbir_openkp': 'RoBertaForTokenClassification', 'roberta_ner_keyphrase_extraction_kbir_semeval2017': 'RoBertaForTokenClassification', 'roberta_ner_ler_roberta': 'RoBertaForTokenClassification', 'roberta_ner_meds': 'RoBertaForTokenClassification', 'roberta_ner_medslabs': 'RoBertaForTokenClassification', 'roberta_ner_mimics_tagging_roberta_base': 'RoBertaForTokenClassification', 'roberta_ner_ner_chemical_bionlp_bc5cdr_pubmed': 'RoBertaForTokenClassification', 'roberta_ner_ner_disease_ncbi_bionlp_bc5cdr_pubmed': 'RoBertaForTokenClassification', 'roberta_ner_ner_gene_dna_rna_jnlpba_pubmed': 'RoBertaForTokenClassification', 'roberta_ner_ner_roberta_base_ontonotesv5_englishv4': 'RoBertaForTokenClassification', 'roberta_ner_nlp_cic_wfu_clinical_cases_ner_sents_tokenized_bertin_roberta_base_spanish_fine_tuned': 'RoBertaForTokenClassification', 'roberta_ner_omunkhuush_roberta_base_ner_demo': 'RoBertaForTokenClassification', 'roberta_ner_orri_icebert_finetuned_ner': 'RoBertaForTokenClassification', 'roberta_ner_polygot_mt4ts': 'RoBertaForTokenClassification', 'roberta_ner_robbert2_ner_cased_sonar1_nld': 'RoBertaForTokenClassification', 'roberta_ner_robbert_ner_cased_sonar1_nld': 'RoBertaForTokenClassification', 'roberta_ner_robbert_v2_dutch_ner': 'RoBertaForTokenClassification', 
'roberta_ner_roberta_base_biomedical_clinical_es_finetuned_ner_concat_craft_es_stivenlancheros': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_biomedical_clinical_es_finetuned_ner_craft': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_biomedical_clinical_es_finetuned_ner_craft_augmented_en': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_biomedical_clinical_es_finetuned_ner_craft_augmented_es': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_biomedical_clinical_es_finetuned_ner_craft_augmentedtransfer_en': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_biomedical_clinical_es_finetuned_ner_craft_augmentedtransfer_es': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_biomedical_clinical_es_finetuned_ner_craft_en_es_stivenlancheros': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_bne_capitel_ner': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_bne_capitel_ner_plus': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_ca_cased_ner': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_finetuned_abbr': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_finetuned_beer_ner': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_finetuned_ner_0': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_finetuned_ner_1': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_finetuned_ner_2': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_finetuned_ner_3': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_finetuned_ner_4': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_finetuned_ner_kmeans': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_kin': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_ner': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_pcm': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_swa': 'RoBertaForTokenClassification', 
'roberta_ner_roberta_base_tweetner_2020': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_tweetner_2020_2021_concat': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_tweetner_2020_2021_continuous': 'RoBertaForTokenClassification', 'roberta_ner_roberta_base_tweetner_2021': 'RoBertaForTokenClassification', 'roberta_ner_roberta_classics_ner': 'RoBertaForTokenClassification', 'roberta_ner_roberta_fa_zwnj_base_ner': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_bne_capitel_ner': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_finetuned_abbr': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_finetuned_ades_model_2': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_legal_act_extraction': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_ner_english': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_tweetner_2020': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_tweetner_2020_2021_concat': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_tweetner_2020_2021_continuous': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_tweetner_2020_selflabel2020_concat': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_tweetner_2020_selflabel2020_continuous': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_tweetner_2020_selflabel2021_concat': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_tweetner_2020_selflabel2021_continuous': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_tweetner_2021': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_tweetner_random': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_tweetner_selflabel2020': 'RoBertaForTokenClassification', 'roberta_ner_roberta_large_tweetner_selflabel2021': 'RoBertaForTokenClassification', 'roberta_ner_roberta_mt4ts': 'RoBertaForTokenClassification', 'roberta_ner_roberta_ticker': 'RoBertaForTokenClassification', 
'roberta_ner_romainlhardy_roberta_large_finetuned_ner': 'RoBertaForTokenClassification', 'roberta_ner_satellite_instrument_roberta_NER': 'RoBertaForTokenClassification', 'roberta_ner_spaceroberta_cr': 'RoBertaForTokenClassification', 'roberta_ner_spanberta_base_cased_ner_conll02': 'RoBertaForTokenClassification', 'roberta_ner_spanberta_base_cased_ner_conll02_finetuned_ner': 'RoBertaForTokenClassification', 'roberta_ner_spanish_disease_finder': 'RoBertaForTokenClassification', 'roberta_ner_sroberta_base_ner': 'RoBertaForTokenClassification', 'roberta_ner_sroberta_l_ner': 'RoBertaForTokenClassification', 'roberta_ner_sroberta_ner': 'RoBertaForTokenClassification', 'roberta_ner_sroberta_nlp': 'RoBertaForTokenClassification', 'roberta_ner_sroberta_xl_ner': 'RoBertaForTokenClassification', 'roberta_ner_thorduragust_icebert_finetuned_ner': 'RoBertaForTokenClassification', 'roberta_ner_tner_roberta_base_tweet_2020': 'RoBertaForTokenClassification', 'roberta_ner_tner_roberta_large_multiconer_en_asahi417': 'RoBertaForTokenClassification', 'roberta_ner_tner_roberta_large_tweet_2020': 'RoBertaForTokenClassification', 'roberta_ner_tner_roberta_large_tweet_st_2020': 'RoBertaForTokenClassification', 'roberta_ner_tner_roberta_large_tweet_st_asahi417': 'RoBertaForTokenClassification', 'roberta_ner_troberta': 'RoBertaForTokenClassification', 'roberta_ner_twitter_roberta_base_2019_90m_tweetner_2020': 'RoBertaForTokenClassification', 'roberta_ner_twitter_roberta_base_2019_90m_tweetner_2020_2021_concat': 'RoBertaForTokenClassification', 'roberta_ner_twitter_roberta_base_2019_90m_tweetner_2020_2021_continuous': 'RoBertaForTokenClassification', 'roberta_ner_twitter_roberta_base_2019_90m_tweetner_2021': 'RoBertaForTokenClassification', 'roberta_ner_twitter_roberta_base_2019_90m_tweetner_random': 'RoBertaForTokenClassification', 'roberta_ner_twitter_roberta_base_dec2020_tweetner_2020': 'RoBertaForTokenClassification', 'roberta_ner_twitter_roberta_base_dec2020_tweetner_2020_2021_concat': 
'RoBertaForTokenClassification', 'roberta_ner_twitter_roberta_base_dec2020_tweetner_2020_2021_continuous': 'RoBertaForTokenClassification', 'roberta_ner_twitter_roberta_base_dec2020_tweetner_2021': 'RoBertaForTokenClassification', 'roberta_ner_twitter_roberta_base_dec2020_tweetner_random': 'RoBertaForTokenClassification', 'roberta_ner_twitter_roberta_base_dec2021_tweetner_2020': 'RoBertaForTokenClassification', 'roberta_ner_twitter_roberta_base_dec2021_tweetner_2020_2021_concat': 'RoBertaForTokenClassification', 'roberta_ner_twitter_roberta_base_dec2021_tweetner_2020_2021_continuous': 'RoBertaForTokenClassification', 'roberta_ner_twitter_roberta_base_dec2021_tweetner_2021': 'RoBertaForTokenClassification', 'roberta_ner_twitter_roberta_base_dec2021_tweetner_random': 'RoBertaForTokenClassification', 'roberta_ner_veganuary_ner': 'RoBertaForTokenClassification', 'roberta_pos_RuPERTa_base_finetuned_pos': 'RoBertaForTokenClassification', 'roberta_pos_distil_slovakbert_upos': 'RoBertaForTokenClassification', 'roberta_pos_esperberto_small_pos': 'RoBertaForTokenClassification', 'roberta_pos_fullstop_dutch_punctuation_prediction': 'RoBertaForTokenClassification', 'roberta_pos_indonesian_roberta_base_posp_tagger': 'RoBertaForTokenClassification', 'roberta_pos_lao_roberta_base_pos_tagger': 'RoBertaForTokenClassification', 'roberta_pos_robbert2_pos_cased_deepfrog_nld': 'RoBertaForTokenClassification', 'roberta_pos_roberta_base_bne_capitel_pos': 'RoBertaForTokenClassification', 'roberta_pos_roberta_base_english_upos': 'RoBertaForTokenClassification', 'roberta_pos_roberta_large_bne_capitel_pos': 'RoBertaForTokenClassification', 'roberta_pos_roberta_large_english_upos': 'RoBertaForTokenClassification', 'roberta_pos_roberto_base_finetuned_pos': 'RoBertaForTokenClassification', 'roberta_pos_slovakbert_pos': 'RoBertaForTokenClassification', 'roberta_pos_veganuary_pos': 'RoBertaForTokenClassification', 'roberta_qa_ADDI_CH_RoBERTa': 'RoBertaForQuestionAnswering', 
'roberta_qa_ADDI_DE_RoBERTa': 'RoBertaForQuestionAnswering', 'roberta_qa_ADDI_FI_RoBERTa': 'RoBertaForQuestionAnswering', 'roberta_qa_ADDI_FR_RoBERTa': 'RoBertaForQuestionAnswering', 'roberta_qa_ADDI_FR_XLM_R': 'RoBertaForQuestionAnswering', 'roberta_qa_ADDI_IT_RoBERTa': 'RoBertaForQuestionAnswering', 'roberta_qa_BSC_TeMU_roberta_base_bne_sqac': 'RoBertaForQuestionAnswering', 'roberta_qa_BSC_TeMU_roberta_large_bne_sqac': 'RoBertaForQuestionAnswering', 'roberta_qa_CV_Custom_DS': 'RoBertaForQuestionAnswering', 'roberta_qa_CV_Merge_DS': 'RoBertaForQuestionAnswering', 'roberta_qa_EManuals_RoBERTa_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_Firat_roberta_base_finetuned_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_IceBERT_QA': 'RoBertaForQuestionAnswering', 'roberta_qa_PlanTL_GOB_ES_roberta_base_bne_sqac': 'RoBertaForQuestionAnswering', 'roberta_qa_PlanTL_GOB_ES_roberta_large_bne_sqac': 'RoBertaForQuestionAnswering', 'roberta_qa_QA_for_Event_Extraction': 'RoBertaForQuestionAnswering', 'roberta_qa_REQA_RoBERTa': 'RoBertaForQuestionAnswering', 'roberta_qa_RoBERTa_base_finetuned_squad2_lwt': 'RoBertaForQuestionAnswering', 'roberta_qa_RoBERTa_emotion_extraction': 'RoBertaForQuestionAnswering', 'roberta_qa_RoBERTa_for_seizureFrequency_QA': 'RoBertaForQuestionAnswering', 'roberta_qa_Roberta': 'RoBertaForQuestionAnswering', 'roberta_qa_Roberta_Custom_Squad_DS': 'RoBertaForQuestionAnswering', 'roberta_qa_RuPERTa_base_finetuned_squadv1': 'RoBertaForQuestionAnswering', 'roberta_qa_RuPERTa_base_finetuned_squadv2': 'RoBertaForQuestionAnswering', 'roberta_qa_SAE_roberta_base_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_TestQaV1': 'RoBertaForQuestionAnswering', 'roberta_qa_addi_ch_roberta': 'RoBertaForQuestionAnswering', 'roberta_qa_addi_de_roberta': 'RoBertaForQuestionAnswering', 'roberta_qa_addi_fi_roberta': 'RoBertaForQuestionAnswering', 'roberta_qa_addi_fr_roberta': 'RoBertaForQuestionAnswering', 'roberta_qa_addi_fr_xlm_r': 'RoBertaForQuestionAnswering', 
'roberta_qa_addi_it_roberta': 'RoBertaForQuestionAnswering', 'roberta_qa_ai_club_inductions_21_nlp_roBERTa': 'RoBertaForQuestionAnswering', 'roberta_qa_ai_club_inductions_21_nlp_roBERTa_base_squad_v2': 'RoBertaForQuestionAnswering', 'roberta_qa_akdeniz27_base_cuad': 'RoBertaForQuestionAnswering', 'roberta_qa_akdeniz27_roberta_base_cuad': 'RoBertaForQuestionAnswering', 'roberta_qa_autoevaluate_base_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_autotrain_test2_756523213': 'RoBertaForQuestionAnswering', 'roberta_qa_autotrain_test2_756523214': 'RoBertaForQuestionAnswering', 'roberta_qa_avioo1_roberta_base_squad2_finetuned_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_base_1b_1_finetuned_squadv1': 'RoBertaForQuestionAnswering', 'roberta_qa_base_1b_1_finetuned_squadv2': 'RoBertaForQuestionAnswering', 'roberta_qa_base_bne_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_base_bne_squad_2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_base_ca_cased': 'RoBertaForQuestionAnswering', 'roberta_qa_base_ca_v2_cased': 'RoBertaForQuestionAnswering', 'roberta_qa_base_cuad_finetuned': 'RoBertaForQuestionAnswering', 'roberta_qa_base_mitmovie_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_base_on_cuad': 'RoBertaForQuestionAnswering', 'roberta_qa_base_spanish_s_c': 'RoBertaForQuestionAnswering', 'roberta_qa_base_spanish_squades': 'RoBertaForQuestionAnswering', 'roberta_qa_base_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_base_squad2_distilled': 'RoBertaForQuestionAnswering', 'roberta_qa_base_squad_v1': 'RoBertaForQuestionAnswering', 'roberta_qa_base_squad_v2': 'RoBertaForQuestionAnswering', 'roberta_qa_bertin_large_finetuned_s_c': 'RoBertaForQuestionAnswering', 'roberta_qa_bertin_large_finetuned_sqac': 'RoBertaForQuestionAnswering', 'roberta_qa_bertserini_roberta_base': 'RoBertaForQuestionAnswering', 'roberta_qa_bsc_temu_base_bne_s_c': 'RoBertaForQuestionAnswering', 'roberta_qa_bsc_temu_large_bne_s_c': 'RoBertaForQuestionAnswering', 'roberta_qa_carbonblog': 
'RoBertaForQuestionAnswering', 'roberta_qa_cart': 'RoBertaForQuestionAnswering', 'roberta_qa_cline_emanuals_techqa': 'RoBertaForQuestionAnswering', 'roberta_qa_cline_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_cline_techqa': 'RoBertaForQuestionAnswering', 'roberta_qa_cv_custom_ds': 'RoBertaForQuestionAnswering', 'roberta_qa_cv_merge_ds': 'RoBertaForQuestionAnswering', 'roberta_qa_cyberlandr_door': 'RoBertaForQuestionAnswering', 'roberta_qa_declutr_emanuals_techqa': 'RoBertaForQuestionAnswering', 'roberta_qa_declutr_model_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_declutr_techqa': 'RoBertaForQuestionAnswering', 'roberta_qa_deepakvk_roberta_base_squad2_finetuned_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_deepset_base_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_deepset_large_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_distilrobert_base_squadv2_328seq_128stride_test': 'RoBertaForQuestionAnswering', 'roberta_qa_distilroberta_base_finetuned_squad2_lwt': 'RoBertaForQuestionAnswering', 'roberta_qa_distilroberta_base_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_distilroberta_base_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_distilroberta_base_squad_v2': 'RoBertaForQuestionAnswering', 'roberta_qa_distilroberta_finetuned_squadv1': 'RoBertaForQuestionAnswering', 'roberta_qa_dpr_nq_reader_roberta_base': 'RoBertaForQuestionAnswering', 'roberta_qa_dpr_nq_reader_roberta_base_v2': 'RoBertaForQuestionAnswering', 'roberta_qa_fin_RoBERTa_v1_finetuned_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_fin_roberta_v1_finetuned_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_fpdm_hier_roberta_FT_new_newsqa': 'RoBertaForQuestionAnswering', 'roberta_qa_fpdm_hier_roberta_FT_newsqa': 'RoBertaForQuestionAnswering', 'roberta_qa_fpdm_roberta_FT_newsqa': 'RoBertaForQuestionAnswering', 'roberta_qa_fpdm_triplet_roberta_FT_new_newsqa': 'RoBertaForQuestionAnswering', 'roberta_qa_fpdm_triplet_roberta_FT_newsqa': 
'RoBertaForQuestionAnswering', 'roberta_qa_ft_lr_cu_leolin12345': 'RoBertaForQuestionAnswering', 'roberta_qa_huxxx657_roberta_base_finetuned_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_icebert': 'RoBertaForQuestionAnswering', 'roberta_qa_icebert_texas_squad_is_saattrupdan': 'RoBertaForQuestionAnswering', 'roberta_qa_large_bne_squad_2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_large_cuad': 'RoBertaForQuestionAnswering', 'roberta_qa_large_finetuned_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_large_syn': 'RoBertaForQuestionAnswering', 'roberta_qa_large_syn_ext': 'RoBertaForQuestionAnswering', 'roberta_qa_legal_qa': 'RoBertaForQuestionAnswering', 'roberta_qa_longformer_base_4096_spanish_finetuned_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_marshmellow77_base_cuad': 'RoBertaForQuestionAnswering', 'roberta_qa_marshmellow77_roberta_base_cuad': 'RoBertaForQuestionAnswering', 'roberta_qa_movie_mitmovie_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_movie_roberta_MITmovie_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_movie_roberta_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_movie_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_mrm8488_base_bne_finetuned_s_c': 'RoBertaForQuestionAnswering', 'roberta_qa_mrm8488_roberta_base_bne_finetuned_sqac': 'RoBertaForQuestionAnswering', 'roberta_qa_navteca_base_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_navteca_large_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_navteca_roberta_base_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_negation_detector': 'RoBertaForQuestionAnswering', 'roberta_qa_news_pretrain_roberta_FT_new_newsqa': 'RoBertaForQuestionAnswering', 'roberta_qa_news_pretrain_roberta_FT_newsqa': 'RoBertaForQuestionAnswering', 'roberta_qa_nlp_en_es_base_bne_finetuned_s_c': 'RoBertaForQuestionAnswering', 'roberta_qa_nlp_en_es_roberta_base_bne_finetuned_sqac': 'RoBertaForQuestionAnswering', 'roberta_qa_nlpunibo_roberta': 'RoBertaForQuestionAnswering', 'roberta_qa_nlu': 
'RoBertaForQuestionAnswering', 'roberta_qa_plantl_gob_es_base_bne_s_c': 'RoBertaForQuestionAnswering', 'roberta_qa_plantl_gob_es_large_bne_s_c': 'RoBertaForQuestionAnswering', 'roberta_qa_plm': 'RoBertaForQuestionAnswering', 'roberta_qa_priv_qna': 'RoBertaForQuestionAnswering', 'roberta_qa_prk_roberta_base_squad2_finetuned_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_qaconv_roberta_large_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_qanlu': 'RoBertaForQuestionAnswering', 'roberta_qa_quales_iberlef': 'RoBertaForQuestionAnswering', 'roberta_qa_rahulchakwate_roberta_base_finetuned_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_robertaABSA': 'RoBertaForQuestionAnswering', 'roberta_qa_robertaBaseABSA': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_FT_new_newsqa': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_FT_newsqa': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_1B_1_finetuned_squadv1': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_1B_1_finetuned_squadv2': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_MITmovie_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_QnA_squad2_trained': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_best_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_biomedical_clinical_es_squad2_hackathon_pln': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_biomedical_es_squad2_hackathon_pln': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_bne_squad2_hackathon_pln': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_bne_squad_2.0_es_jamarju': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_ca_cased_qa': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_chaii': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_custom_QA': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_emr': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_1024_finetuned_squad_seed_0': 'RoBertaForQuestionAnswering', 
'roberta_qa_roberta_base_few_shot_k_1024_finetuned_squad_seed_10': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_1024_finetuned_squad_seed_2': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_1024_finetuned_squad_seed_4': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_1024_finetuned_squad_seed_42': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_1024_finetuned_squad_seed_6': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_1024_finetuned_squad_seed_8': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_128_finetuned_squad_seed_0': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_128_finetuned_squad_seed_10': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_128_finetuned_squad_seed_2': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_128_finetuned_squad_seed_4': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_128_finetuned_squad_seed_42': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_128_finetuned_squad_seed_6': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_128_finetuned_squad_seed_8': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_16_finetuned_squad_seed_0': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_16_finetuned_squad_seed_10': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_16_finetuned_squad_seed_2': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_16_finetuned_squad_seed_4': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_16_finetuned_squad_seed_42': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_16_finetuned_squad_seed_6': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_16_finetuned_squad_seed_8': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_256_finetuned_squad_seed_0': 
'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_256_finetuned_squad_seed_10': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_256_finetuned_squad_seed_2': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_256_finetuned_squad_seed_4': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_256_finetuned_squad_seed_6': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_256_finetuned_squad_seed_8': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_32_finetuned_squad_seed_0': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_32_finetuned_squad_seed_10': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_32_finetuned_squad_seed_2': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_32_finetuned_squad_seed_4': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_32_finetuned_squad_seed_6': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_32_finetuned_squad_seed_8': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_512_finetuned_squad_seed_0': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_512_finetuned_squad_seed_10': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_512_finetuned_squad_seed_2': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_512_finetuned_squad_seed_4': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_512_finetuned_squad_seed_6': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_512_finetuned_squad_seed_8': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_64_finetuned_squad_seed_0': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_64_finetuned_squad_seed_10': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_64_finetuned_squad_seed_2': 'RoBertaForQuestionAnswering', 
'roberta_qa_roberta_base_few_shot_k_64_finetuned_squad_seed_4': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_64_finetuned_squad_seed_6': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_few_shot_k_64_finetuned_squad_seed_8': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned_cuad': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned_cuad_gam': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned_deletion_squad_10': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned_deletion_squad_15': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned_scrambled_squad_10': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned_scrambled_squad_10_new': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned_scrambled_squad_15': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned_scrambled_squad_15_new': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned_scrambled_squad_5': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned_scrambled_squad_5_new': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned_squad2_lwt': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned_squad_1': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned_squad_2': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_finetuned_squad_3': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_on_cuad': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_spanish_sqac': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_spanish_squades': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_squad2_boolq_baseline': 
'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_squad2_covid': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_squad2_covid_qa_deepset': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_squad2_distilled': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_squad2_finetuned_selqa': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_squad2_nq': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_squad_v1': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_base_tweetqa_model': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_customds_finetune': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_eda_and_parav3': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_edav3': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_fine_tuned_tweet_sentiment_extractor': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_l_squadv1.1': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_large_bne_squad_2.0_es_jamarju': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_large_cuad': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_large_data_seed_0': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_large_data_seed_4': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_large_fine_tuned_squad_es_stevemobs': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_large_finetuned_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_large_finetuned_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_large_initialization_seed_0': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_large_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_large_squad2_covid_qa_deepset': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_large_squad2_hp': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_large_squad_v1': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_large_synqa': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_large_synqa_ext': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_large_tweetqa': 'RoBertaForQuestionAnswering', 
'roberta_qa_roberta_paraphrasev3': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_qasper': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_squadv1_base': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_train_json': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_tydiqa': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_unaugmentedv3': 'RoBertaForQuestionAnswering', 'roberta_qa_roberta_unaugv3': 'RoBertaForQuestionAnswering', 'roberta_qa_rule_based_roberta_bert_quadruplet_epochs_1_shard_1_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_rule_based_roberta_bert_triplet_epochs_1_shard_1_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_rule_based_roberta_hier_quadruplet_0.1_epochs_1_shard_1_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_rule_based_roberta_hier_quadruplet_epochs_1_shard_1_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_rule_based_roberta_hier_triplet_0.1_epochs_1_shard_1_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_rule_based_roberta_hier_triplet_epochs_1_shard_1_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_rule_based_roberta_only_classfn_epochs_1_shard_1_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_rule_based_roberta_only_classfn_twostage_epochs_1_shard_1_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_rule_based_roberta_twostage_quadruplet_epochs_1_shard_1_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_rule_based_roberta_twostagequadruplet_hier_epochs_1_shard_1_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_rule_based_roberta_twostagetriplet_epochs_1_shard_1_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_rule_based_roberta_twostagetriplet_hier_epochs_1_shard_1_squad2.0': 'RoBertaForQuestionAnswering', 'roberta_qa_ruperta_base_finetuned_squadv1': 'RoBertaForQuestionAnswering', 'roberta_qa_ruperta_base_finetuned_squadv2': 'RoBertaForQuestionAnswering', 'roberta_qa_sae_roberta_base_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_sper': 
'RoBertaForQuestionAnswering', 'roberta_qa_sumba_roberta_base_squad2_finetuned_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_testABSA': 'RoBertaForQuestionAnswering', 'roberta_qa_testABSA3': 'RoBertaForQuestionAnswering', 'roberta_qa_tiny_6l_768d': 'RoBertaForQuestionAnswering', 'roberta_qa_tiny_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_tinyroberta_6l_768d': 'RoBertaForQuestionAnswering', 'roberta_qa_tinyroberta_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_tinyroberta_squad2_step1': 'RoBertaForQuestionAnswering', 'roberta_qa_unqover_roberta_base_newsqa': 'RoBertaForQuestionAnswering', 'roberta_qa_unqover_roberta_base_squad': 'RoBertaForQuestionAnswering', 'roberta_qa_unqover_roberta_large_newsqa': 'RoBertaForQuestionAnswering', 'roberta_qa_ydshieh_base_squad2': 'RoBertaForQuestionAnswering', 'roberta_qa_ydshieh_roberta_base_squad2': 'RoBertaForQuestionAnswering', 'roberta_sequence_classifier_bsc_base_spanish_diagnostics': 'RoBertaForSequenceClassification', 'roberta_sequence_classifier_codebert2codebert_finetuned_code_defect_detection': 'RoBertaForSequenceClassification', 'roberta_sequence_classifier_codebert_base_finetuned_detect_insecure_cod': 'RoBertaForSequenceClassification', 'roberta_sequence_classifier_distilroberta_base_finetuned_suicide_depression': 'RoBertaForSequenceClassification', 'roberta_sequence_classifier_distilroberta_finetuned_age_news_classification': 'RoBertaForSequenceClassification', 'roberta_sequence_classifier_distilroberta_finetuned_financial_news_sentiment_analysis': 'RoBertaForSequenceClassification', 'roberta_sequence_classifier_distilroberta_finetuned_rotten_tomatoes_sentiment_analysis': 'RoBertaForSequenceClassification', 'roberta_sequence_classifier_distilroberta_finetuned_tweets_hate_speech': 'RoBertaForSequenceClassification', 'roberta_sequence_classifier_ruperta_base_finetuned_pawsx': 'RoBertaForSequenceClassification', 'roberta_token_classifier_bne_capitel_ner': 'RoBertaForTokenClassification', 
'roberta_token_classifier_icelandic_ner': 'RoBertaForTokenClassification', 'roberta_token_classifier_pos_tagger': 'RoBertaForTokenClassification', 'roberta_token_classifier_ticker': 'RoBertaForTokenClassification', 'roberta_token_classifier_timex_semeval': 'RoBertaForTokenClassification', 'roberta_token_classifier_zwnj_base_ner': 'RoBertaForTokenClassification', 'robertaresolve_snomed': 'SentenceEntityResolverModel', 'rxnorm_action_treatment_mapper': 'ChunkMapperModel', 'rxnorm_mapper': 'ChunkMapperModel', 'rxnorm_ndc_mapper': 'ChunkMapperModel', 'rxnorm_normalized_mapper': 'ChunkMapperModel', 'rxnorm_umls_mapper': 'ChunkMapperModel', 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.1': 'BertSentenceEmbeddings', 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.10': 'BertSentenceEmbeddings', 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.2': 'BertSentenceEmbeddings', 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.3': 'BertSentenceEmbeddings', 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.4': 'BertSentenceEmbeddings', 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.5': 'BertSentenceEmbeddings', 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.7': 'BertSentenceEmbeddings', 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.8': 'BertSentenceEmbeddings', 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v0.9': 'BertSentenceEmbeddings', 'sbert_bert_large_portuguese_cased_legal_mlm_sts_v1.0': 'BertSentenceEmbeddings', 'sbert_bert_large_portuguese_cased_legal_mlm_v0.11_gpl_nli_sts_v0': 'BertSentenceEmbeddings', 'sbert_bert_large_portuguese_cased_legal_mlm_v0.11_gpl_nli_sts_v1': 'BertSentenceEmbeddings', 'sbert_bert_large_portuguese_cased_legal_mlm_v0.11_nli_sts_v0': 'BertSentenceEmbeddings', 'sbert_bert_large_portuguese_cased_legal_mlm_v0.11_nli_sts_v1': 'BertSentenceEmbeddings', 'sbert_bert_large_portuguese_cased_legal_mlm_v0.11_sts_v0': 'BertSentenceEmbeddings', 'sbert_bert_large_portuguese_cased_legal_mlm_v0.11_sts_v1': 
'BertSentenceEmbeddings', 'sbert_chinese_qmc_finance_v1': 'BertSentenceEmbeddings', 'sbert_chinese_qmc_finance_v1_distill': 'BertSentenceEmbeddings', 'sbert_jsl_medium_rxnorm_uncased': 'BertSentenceEmbeddings', 'sbert_jsl_medium_umls_uncased': 'BertSentenceEmbeddings', 'sbert_jsl_medium_uncased': 'BertSentenceEmbeddings', 'sbert_jsl_mini_umls_uncased': 'BertSentenceEmbeddings', 'sbert_jsl_mini_uncased': 'BertSentenceEmbeddings', 'sbert_jsl_tiny_umls_uncased': 'BertSentenceEmbeddings', 'sbert_jsl_tiny_uncased': 'BertSentenceEmbeddings', 'sbert_legal_bertimbau_base_tsdae_sts': 'BertSentenceEmbeddings', 'sbert_legal_bertimbau_large_gpl_sts': 'BertSentenceEmbeddings', 'sbert_legal_bertimbau_large_tsdae_sts': 'BertSentenceEmbeddings', 'sbert_legal_bertimbau_large_tsdae_sts_v2': 'BertSentenceEmbeddings', 'sbert_legal_bertimbau_large_tsdae_sts_v4': 'BertSentenceEmbeddings', 'sbert_legal_bertimbau_large_tsdae_v4_gpl_sts': 'BertSentenceEmbeddings', 'sbert_legal_bertimbau_large_v2_sts': 'BertSentenceEmbeddings', 'sbert_legal_bertimbau_sts_base': 'BertSentenceEmbeddings', 'sbert_legal_bertimbau_sts_base_ma': 'BertSentenceEmbeddings', 'sbert_legal_bertimbau_sts_base_ma_v2': 'BertSentenceEmbeddings', 'sbert_legal_bertimbau_sts_large': 'BertSentenceEmbeddings', 'sbert_legal_bertimbau_sts_large_ma': 'BertSentenceEmbeddings', 'sbert_legal_bertimbau_sts_large_ma_v3': 'BertSentenceEmbeddings', 'sbert_legal_bertimbau_sts_large_v2': 'BertSentenceEmbeddings', 'sbert_setfit_finetuned_financial_text_classification': 'BertSentenceEmbeddings', 'sbertresolve_icd10cm_slim_billable_hcc_med': 'SentenceEntityResolverModel', 'sbertresolve_icd10gm': 'SentenceEntityResolverModel', 'sbertresolve_jsl_rxnorm_augmented_med': 'SentenceEntityResolverModel', 'sbertresolve_ner_model_finder': 'SentenceEntityResolverModel', 'sbertresolve_rxnorm_disposition': 'SentenceEntityResolverModel', 'sbertresolve_snomed': 'SentenceEntityResolverModel', 'sbertresolve_snomed_bodyStructure_med': 
'SentenceEntityResolverModel', 'sbertresolve_snomed_conditions': 'SentenceEntityResolverModel', 'sbiobert_base_cased_mli': 'BertSentenceEmbeddings', 'sbiobert_jsl_cased': 'BertSentenceEmbeddings', 'sbiobert_jsl_rxnorm_cased': 'BertSentenceEmbeddings', 'sbiobert_jsl_umls_cased': 'BertSentenceEmbeddings', 'sbiobertresolve_HPO': 'SentenceEntityResolverModel', 'sbiobertresolve_atc': 'SentenceEntityResolverModel', 'sbiobertresolve_clinical_abbreviation_acronym': 'SentenceEntityResolverModel', 'sbiobertresolve_clinical_snomed_procedures_measurements': 'SentenceEntityResolverModel', 'sbiobertresolve_cpt': 'SentenceEntityResolverModel', 'sbiobertresolve_cpt_augmented': 'SentenceEntityResolverModel', 'sbiobertresolve_cpt_procedures_augmented': 'SentenceEntityResolverModel', 'sbiobertresolve_cpt_procedures_measurements_augmented': 'SentenceEntityResolverModel', 'sbiobertresolve_cvx': 'SentenceEntityResolverModel', 'sbiobertresolve_hcc_augmented': 'SentenceEntityResolverModel', 'sbiobertresolve_hcpcs': 'SentenceEntityResolverModel', 'sbiobertresolve_icd10cm': 'SentenceEntityResolverModel', 'sbiobertresolve_icd10cm_augmented': 'SentenceEntityResolverModel', 'sbiobertresolve_icd10cm_augmented_billable_hcc': 'SentenceEntityResolverModel', 'sbiobertresolve_icd10cm_generalised': 'SentenceEntityResolverModel', 'sbiobertresolve_icd10cm_slim_billable_hcc': 'SentenceEntityResolverModel', 'sbiobertresolve_icd10cm_slim_normalized': 'SentenceEntityResolverModel', 'sbiobertresolve_icd10pcs': 'SentenceEntityResolverModel', 'sbiobertresolve_icd10pcs_augmented': 'SentenceEntityResolverModel', 'sbiobertresolve_icd9': 'SentenceEntityResolverModel', 'sbiobertresolve_icdo': 'SentenceEntityResolverModel', 'sbiobertresolve_icdo_augmented': 'SentenceEntityResolverModel', 'sbiobertresolve_icdo_base': 'SentenceEntityResolverModel', 'sbiobertresolve_jsl_rxnorm_augmented': 'SentenceEntityResolverModel', 'sbiobertresolve_loinc': 'SentenceEntityResolverModel', 'sbiobertresolve_loinc_augmented': 
'SentenceEntityResolverModel', 'sbiobertresolve_loinc_cased': 'SentenceEntityResolverModel', 'sbiobertresolve_mesh': 'SentenceEntityResolverModel', 'sbiobertresolve_ndc': 'SentenceEntityResolverModel', 'sbiobertresolve_rxcui': 'SentenceEntityResolverModel', 'sbiobertresolve_rxnorm': 'SentenceEntityResolverModel', 'sbiobertresolve_rxnorm_action_treatment': 'SentenceEntityResolverModel', 'sbiobertresolve_rxnorm_augmented': 'SentenceEntityResolverModel', 'sbiobertresolve_rxnorm_augmented_cased': 'SentenceEntityResolverModel', 'sbiobertresolve_rxnorm_augmented_re': 'SentenceEntityResolverModel', 'sbiobertresolve_rxnorm_disposition': 'SentenceEntityResolverModel', 'sbiobertresolve_rxnorm_ndc': 'SentenceEntityResolverModel', 'sbiobertresolve_snomed_auxConcepts': 'SentenceEntityResolverModel', 'sbiobertresolve_snomed_auxConcepts_int': 'SentenceEntityResolverModel', 'sbiobertresolve_snomed_bodyStructure': 'SentenceEntityResolverModel', 'sbiobertresolve_snomed_drug': 'SentenceEntityResolverModel', 'sbiobertresolve_snomed_findings': 'SentenceEntityResolverModel', 'sbiobertresolve_snomed_findings_aux_concepts': 'SentenceEntityResolverModel', 'sbiobertresolve_snomed_findings_int': 'SentenceEntityResolverModel', 'sbiobertresolve_snomed_procedures_measurements': 'SentenceEntityResolverModel', 'sbiobertresolve_umls_clinical_drugs': 'SentenceEntityResolverModel', 'sbiobertresolve_umls_disease_syndrome': 'SentenceEntityResolverModel', 'sbiobertresolve_umls_drug_substance': 'SentenceEntityResolverModel', 'sbiobertresolve_umls_findings': 'SentenceEntityResolverModel', 'sbiobertresolve_umls_major_concepts': 'SentenceEntityResolverModel', 'sbluebert_base_uncased_mli': 'BertSentenceEmbeddings', 'sbluebertresolve_loinc': 'SentenceEntityResolverModel', 'sbluebertresolve_loinc_uncased': 'SentenceEntityResolverModel', 'sbluebertresolve_rxnorm_augmented_uncased': 'SentenceEntityResolverModel', 'sent_bert_base_cased': 'BertSentenceEmbeddings', 'sent_bert_base_uncased': 
'BertSentenceEmbeddings', 'sent_bert_base_uncased_legal': 'BertSentenceEmbeddings', 'sent_bert_large_cased': 'BertSentenceEmbeddings', 'sent_bert_large_uncased': 'BertSentenceEmbeddings', 'sent_bert_multi_cased': 'BertSentenceEmbeddings', 'sent_bert_muril': 'BertSentenceEmbeddings', 'sent_bert_pubmed': 'BertSentenceEmbeddings', 'sent_bert_pubmed_squad2': 'BertSentenceEmbeddings', 'sent_bert_use_cmlm_en_base': 'BertSentenceEmbeddings', 'sent_bert_use_cmlm_en_large': 'BertSentenceEmbeddings', 'sent_bert_use_cmlm_multi_base': 'BertSentenceEmbeddings', 'sent_bert_use_cmlm_multi_base_br': 'BertSentenceEmbeddings', 'sent_bert_wiki_books': 'BertSentenceEmbeddings', 'sent_bert_wiki_books_mnli': 'BertSentenceEmbeddings', 'sent_bert_wiki_books_qnli': 'BertSentenceEmbeddings', 'sent_bert_wiki_books_qqp': 'BertSentenceEmbeddings', 'sent_bert_wiki_books_squad2': 'BertSentenceEmbeddings', 'sent_bert_wiki_books_sst2': 'BertSentenceEmbeddings', 'sent_biobert_clinical_base_cased': 'BertSentenceEmbeddings', 'sent_biobert_discharge_base_cased': 'BertSentenceEmbeddings', 'sent_biobert_pmc_base_cased': 'BertSentenceEmbeddings', 'sent_biobert_pubmed_base_cased': 'BertSentenceEmbeddings', 'sent_biobert_pubmed_large_cased': 'BertSentenceEmbeddings', 'sent_biobert_pubmed_pmc_base_cased': 'BertSentenceEmbeddings', 'sent_covidbert_large_uncased': 'BertSentenceEmbeddings', 'sent_distilroberta_base': 'RoBertaSentenceEmbeddings', 'sent_electra_base_uncased': 'BertSentenceEmbeddings', 'sent_electra_large_uncased': 'BertSentenceEmbeddings', 'sent_electra_small_uncased': 'BertSentenceEmbeddings', 'sent_roberta_base': 'RoBertaSentenceEmbeddings', 'sent_roberta_large': 'RoBertaSentenceEmbeddings', 'sent_small_bert_L10_128': 'BertSentenceEmbeddings', 'sent_small_bert_L10_256': 'BertSentenceEmbeddings', 'sent_small_bert_L10_512': 'BertSentenceEmbeddings', 'sent_small_bert_L10_768': 'BertSentenceEmbeddings', 'sent_small_bert_L12_128': 'BertSentenceEmbeddings', 'sent_small_bert_L12_256': 
'BertSentenceEmbeddings', 'sent_small_bert_L12_512': 'BertSentenceEmbeddings', 'sent_small_bert_L12_768': 'BertSentenceEmbeddings', 'sent_small_bert_L2_128': 'BertSentenceEmbeddings', 'sent_small_bert_L2_256': 'BertSentenceEmbeddings', 'sent_small_bert_L2_512': 'BertSentenceEmbeddings', 'sent_small_bert_L2_768': 'BertSentenceEmbeddings', 'sent_small_bert_L4_128': 'BertSentenceEmbeddings', 'sent_small_bert_L4_256': 'BertSentenceEmbeddings', 'sent_small_bert_L4_512': 'BertSentenceEmbeddings', 'sent_small_bert_L4_768': 'BertSentenceEmbeddings', 'sent_small_bert_L6_128': 'BertSentenceEmbeddings', 'sent_small_bert_L6_256': 'BertSentenceEmbeddings', 'sent_small_bert_L6_512': 'BertSentenceEmbeddings', 'sent_small_bert_L6_768': 'BertSentenceEmbeddings', 'sent_small_bert_L8_128': 'BertSentenceEmbeddings', 'sent_small_bert_L8_256': 'BertSentenceEmbeddings', 'sent_small_bert_L8_512': 'BertSentenceEmbeddings', 'sent_small_bert_L8_768': 'BertSentenceEmbeddings', 'sent_xlm_roberta_base': 'XlmRoBertaSentenceEmbeddings', 'sent_xlm_roberta_base_finetuned_amharic': 'XlmRoBertaSentenceEmbeddings', 'sent_xlm_roberta_base_finetuned_hausa': 'XlmRoBertaSentenceEmbeddings', 'sent_xlm_roberta_base_finetuned_igbo': 'XlmRoBertaSentenceEmbeddings', 'sent_xlm_roberta_base_finetuned_kinyarwanda': 'XlmRoBertaSentenceEmbeddings', 'sent_xlm_roberta_base_finetuned_luganda': 'XlmRoBertaSentenceEmbeddings', 'sent_xlm_roberta_base_finetuned_naija': 'XlmRoBertaSentenceEmbeddings', 'sent_xlm_roberta_base_finetuned_swahili': 'XlmRoBertaSentenceEmbeddings', 'sent_xlm_roberta_base_finetuned_wolof': 'XlmRoBertaSentenceEmbeddings', 'sent_xlm_roberta_base_finetuned_yoruba': 'XlmRoBertaSentenceEmbeddings', 'sentence_detector_dl': 'SentenceDetectorDLModel', 'sentence_detector_dl_healthcare': 'SentenceDetectorDLModel', 'sentiment_jager_use': 'SentimentDLModel', 'sentiment_vivekn': 'ViveknSentimentModel', 'sentimentdl_glove_imdb': 'SentimentDLModel', 'sentimentdl_urduvec_imdb': 'SentimentDLModel', 
'sentimentdl_use_imdb': 'SentimentDLModel', 'sentimentdl_use_twitter': 'SentimentDLModel', 'small_bert_L10_128': 'BertEmbeddings', 'small_bert_L10_256': 'BertEmbeddings', 'small_bert_L10_512': 'BertEmbeddings', 'small_bert_L10_768': 'BertEmbeddings', 'small_bert_L12_128': 'BertEmbeddings', 'small_bert_L12_256': 'BertEmbeddings', 'small_bert_L12_512': 'BertEmbeddings', 'small_bert_L12_768': 'BertEmbeddings', 'small_bert_L2_128': 'BertEmbeddings', 'small_bert_L2_256': 'BertEmbeddings', 'small_bert_L2_512': 'BertEmbeddings', 'small_bert_L2_768': 'BertEmbeddings', 'small_bert_L4_128': 'BertEmbeddings', 'small_bert_L4_256': 'BertEmbeddings', 'small_bert_L4_512': 'BertEmbeddings', 'small_bert_L4_768': 'BertEmbeddings', 'small_bert_L6_128': 'BertEmbeddings', 'small_bert_L6_256': 'BertEmbeddings', 'small_bert_L6_512': 'BertEmbeddings', 'small_bert_L6_768': 'BertEmbeddings', 'small_bert_L8_128': 'BertEmbeddings', 'small_bert_L8_256': 'BertEmbeddings', 'small_bert_L8_512': 'BertEmbeddings', 'small_bert_L8_768': 'BertEmbeddings', 'snomed_icd10cm_mapper': 'ChunkMapperModel', 'snomed_icdo_mapper': 'ChunkMapperModel', 'snomed_umls_mapper': 'ChunkMapperModel', 'spanbert_base_coref': 'SpanBertCorefModel', 'spark_nlp_tokenizer': 'TokenizerModel', 'spellcheck_dl': 'ContextSpellCheckerModel', 'spellcheck_drug_norvig': 'NorvigSweetingModel', 'spellcheck_norvig': 'NorvigSweetingModel', 'spellcheck_sd': 'SymmetricDeleteModel', 'stemmer': 'Stemmer', 'stopwords_af': 'StopWordsCleaner', 'stopwords_ar': 'StopWordsCleaner', 'stopwords_bg': 'StopWordsCleaner', 'stopwords_bn': 'StopWordsCleaner', 'stopwords_br': 'StopWordsCleaner', 'stopwords_ca': 'StopWordsCleaner', 'stopwords_cs': 'StopWordsCleaner', 'stopwords_de': 'StopWordsCleaner', 'stopwords_el': 'StopWordsCleaner', 'stopwords_en': 'StopWordsCleaner', 'stopwords_eo': 'StopWordsCleaner', 'stopwords_es': 'StopWordsCleaner', 'stopwords_eu': 'StopWordsCleaner', 'stopwords_fa': 'StopWordsCleaner', 'stopwords_fi': 'StopWordsCleaner', 
'stopwords_fr': 'StopWordsCleaner', 'stopwords_ga': 'StopWordsCleaner', 'stopwords_gl': 'StopWordsCleaner', 'stopwords_ha': 'StopWordsCleaner', 'stopwords_he': 'StopWordsCleaner', 'stopwords_hi': 'StopWordsCleaner', 'stopwords_hu': 'StopWordsCleaner', 'stopwords_hy': 'StopWordsCleaner', 'stopwords_id': 'StopWordsCleaner', 'stopwords_iso': 'StopWordsCleaner', 'stopwords_it': 'StopWordsCleaner', 'stopwords_ja': 'StopWordsCleaner', 'stopwords_la': 'StopWordsCleaner', 'stopwords_lv': 'StopWordsCleaner', 'stopwords_mr': 'StopWordsCleaner', 'stopwords_pl': 'StopWordsCleaner', 'stopwords_pt': 'StopWordsCleaner', 'stopwords_ro': 'StopWordsCleaner', 'stopwords_ru': 'StopWordsCleaner', 'stopwords_sk': 'StopWordsCleaner', 'stopwords_sl': 'StopWordsCleaner', 'stopwords_so': 'StopWordsCleaner', 'stopwords_st': 'StopWordsCleaner', 'stopwords_sv': 'StopWordsCleaner', 'stopwords_sw': 'StopWordsCleaner', 'stopwords_th': 'StopWordsCleaner', 'stopwords_tr': 'StopWordsCleaner', 'stopwords_yo': 'StopWordsCleaner', 'stopwords_zu': 'StopWordsCleaner', 'swedish_ner_6B_100': 'NerDLModel', 'swedish_ner_6B_300': 'NerDLModel', 'swedish_ner_840B_300': 'NerDLModel', 't5_active_to_passive_styletransfer': 'T5Transformer', 't5_base': 'T5Transformer', 't5_base_mediqa_mnli': 'T5Transformer', 't5_base_pubmedqa': 'T5Transformer', 't5_formal_to_informal_styletransfer': 'T5Transformer', 't5_grammar_error_corrector': 'T5Transformer', 't5_informal_to_formal_styletransfer': 'T5Transformer', 't5_passive_to_active_styletransfer': 'T5Transformer', 't5_question_generation_small': 'T5Transformer', 't5_small': 'T5Transformer', 't5_small_wikiSQL': 'T5Transformer', 'table_qa_table_question_answering_tapas': 'TapasForQuestionAnswering', 'table_qa_tapas_base_finetuned_sqa': 'TapasForQuestionAnswering', 'table_qa_tapas_base_finetuned_wikisql_supervised': 'TapasForQuestionAnswering', 'table_qa_tapas_large_finetuned_sqa': 'TapasForQuestionAnswering', 'table_qa_tapas_large_finetuned_wikisql_supervised': 
'TapasForQuestionAnswering', 'table_qa_tapas_large_finetuned_wtq': 'TapasForQuestionAnswering', 'table_qa_tapas_medium_finetuned_sqa': 'TapasForQuestionAnswering', 'table_qa_tapas_medium_finetuned_wikisql_supervised': 'TapasForQuestionAnswering', 'table_qa_tapas_medium_finetuned_wtq': 'TapasForQuestionAnswering', 'table_qa_tapas_mini_finetuned_sqa': 'TapasForQuestionAnswering', 'table_qa_tapas_mini_finetuned_wtq': 'TapasForQuestionAnswering', 'table_qa_tapas_small_finetuned_sqa': 'TapasForQuestionAnswering', 'table_qa_tapas_small_finetuned_wikisql_supervised': 'TapasForQuestionAnswering', 'table_qa_tapas_small_finetuned_wtq': 'TapasForQuestionAnswering', 'table_qa_tapas_temporary_repo': 'TapasForQuestionAnswering', 'table_qa_tapas_tiny_finetuned_sqa': 'TapasForQuestionAnswering', 'table_qa_tapas_tiny_finetuned_wtq': 'TapasForQuestionAnswering', 'text_matcher': 'TextMatcher', 'tfhub_use': 'UniversalSentenceEncoder', 'tfhub_use_lg': 'UniversalSentenceEncoder', 'tfhub_use_multi': 'UniversalSentenceEncoder', 'tfhub_use_multi_lg': 'UniversalSentenceEncoder', 'turkish_ner_840B_300': 'NerDLModel', 'turkish_ner_bert': 'NerDLModel', 'twitter_xlm_roberta_base': 'XlmRoBertaEmbeddings', 'uner_mk_140M_300d': 'NerDLModel', 'urduvec_140M_300d': 'WordEmbeddingsModel', 'visual_document_classifier': 'VisualDocumentClassifier', 'visual_document_classifier_tobacco3482': 'VisualDocumentClassifier', 'w2v_cc_300d': 'WordEmbeddingsModel', 'wikiner_6B_100': 'NerDLModel', 'wikiner_6B_300': 'NerDLModel', 'wikiner_840B_300': 'NerDLModel', 'word2vec_cbow_legal_d100_cased': 'WordEmbeddingsModel', 'word2vec_cbow_legal_d100_uncased': 'WordEmbeddingsModel', 'word2vec_cbow_legal_d300_cased': 'WordEmbeddingsModel', 'word2vec_cbow_legal_d300_uncased': 'WordEmbeddingsModel', 'word2vec_cbow_legal_d50_cased': 'WordEmbeddingsModel', 'word2vec_cbow_legal_d50_uncased': 'WordEmbeddingsModel', 'word2vec_gigaword_300': 'Word2VecModel', 'word2vec_gigaword_wiki_300': 'Word2VecModel', 
'word2vec_osf_lemmatized_legal': 'WordEmbeddingsModel', 'word2vec_osf_raw_legal': 'WordEmbeddingsModel', 'word2vec_osf_replaced_lemmatized_legal': 'WordEmbeddingsModel', 'word2vec_osf_replaced_raw_legal': 'WordEmbeddingsModel', 'word2vec_skipgram_legal_d100_cased': 'WordEmbeddingsModel', 'word2vec_skipgram_legal_d100_uncased': 'WordEmbeddingsModel', 'word2vec_skipgram_legal_d300_cased': 'WordEmbeddingsModel', 'word2vec_skipgram_legal_d300_uncased': 'WordEmbeddingsModel', 'word2vec_skipgram_legal_d50_cased': 'WordEmbeddingsModel', 'word2vec_skipgram_legal_d50_uncased': 'WordEmbeddingsModel', 'word2vec_wac_200': 'WordEmbeddingsModel', 'word2vec_wiki_1000': 'WordEmbeddingsModel', 'wordseg_best': 'WordSegmenterModel', 'wordseg_ctb9': 'WordSegmenterModel', 'wordseg_gsd_ud': 'WordSegmenterModel', 'wordseg_gsd_ud_trad': 'WordSegmenterModel', 'wordseg_kaist_ud': 'WordSegmenterModel', 'wordseg_large': 'WordSegmenterModel', 'wordseg_msra': 'WordSegmenterModel', 'wordseg_pku': 'WordSegmenterModel', 'wordseg_weibo': 'WordSegmenterModel', 'xlm_roberta_base': 'XlmRoBertaEmbeddings', 'xlm_roberta_base_finetuned_amharic': 'XlmRoBertaEmbeddings', 'xlm_roberta_base_finetuned_hausa': 'XlmRoBertaEmbeddings', 'xlm_roberta_base_finetuned_igbo': 'XlmRoBertaEmbeddings', 'xlm_roberta_base_finetuned_kinyarwanda': 'XlmRoBertaEmbeddings', 'xlm_roberta_base_finetuned_luganda': 'XlmRoBertaEmbeddings', 'xlm_roberta_base_finetuned_luo': 'XlmRoBertaEmbeddings', 'xlm_roberta_base_finetuned_naija': 'XlmRoBertaEmbeddings', 'xlm_roberta_base_finetuned_swahili': 'XlmRoBertaEmbeddings', 'xlm_roberta_base_finetuned_wolof': 'XlmRoBertaEmbeddings', 'xlm_roberta_base_finetuned_yoruba': 'XlmRoBertaEmbeddings', 'xlm_roberta_base_qa_squad2': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_base_sequence_classifier_ag_news': 'XlmRoBertaForSequenceClassification', 'xlm_roberta_base_sequence_classifier_allocine': 'XlmRoBertaForSequenceClassification', 'xlm_roberta_base_sequence_classifier_imdb': 
'XlmRoBertaForSequenceClassification', 'xlm_roberta_base_token_classifier_conll03': 'XlmRoBertaForTokenClassification', 'xlm_roberta_base_token_classifier_ner': 'XlmRoBertaForTokenClassification', 'xlm_roberta_base_token_classifier_ontonotes': 'XlmRoBertaForTokenClassification', 'xlm_roberta_large': 'XlmRoBertaEmbeddings', 'xlm_roberta_large_token_classification_ner': 'XlmRoBertaForTokenClassification', 'xlm_roberta_large_token_classifier_conll03': 'XlmRoBertaForTokenClassification', 'xlm_roberta_large_token_classifier_hrl': 'XlmRoBertaForTokenClassification', 'xlm_roberta_large_token_classifier_masakhaner': 'XlmRoBertaForTokenClassification', 'xlm_roberta_qa_ADDI_CH_XLM_R': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_ADDI_DE_XLM_R': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_ADDI_FI_XLM_R': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_ADDI_IT_XLM_R': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_Part_1_XLM_Model_E1': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_Part_2_XLM_Model_E1': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_TQA': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_XLM_Turkish': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_XLMr_ENIS_QA_Is': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_XLMr_ENIS_QA_IsQ_EnA': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_afriberta_base_finetuned_tydiqa': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_hindi_question_answering_23865268': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265897': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265898': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265899': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265900': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265901': 'XlmRoBertaForQuestionAnswering', 
'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265902': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265903': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265904': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265905': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265906': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265907': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265908': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265909': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265910': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_more_fine_tune_24465520_26265911': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465514': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465515': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465516': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465517': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465518': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465519': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465520': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465521': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465522': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465523': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465524': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_autonlp_roberta_base_squad2_24465525': 'XlmRoBertaForQuestionAnswering', 
'xlm_roberta_qa_distill_xlm_mrc': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_farm2tran': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_hitalmqa_finetuned_squad': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_klue_mrc_roberta_base': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_mrc2reader': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_squadv2_xlm_roberta_base': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_thai_xlm_roberta_base_squad2': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_3lang': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_all': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_l_uetqa': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_multi_roberta_large_chaii': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_base_arabic': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_base_chaii': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_base_chinese': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_base_finetune_qa': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_base_finetuned_chaii': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_base_german': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_base_hindi': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_base_spanish': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_base_squad2_distilled': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_base_squad2_distilled_finetuned_chaii': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_base_squad2_distilled_finetuned_chaii_small': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_base_vietnamese': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_base_xquad': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_ckpt_95000': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_est_qa': 'XlmRoBertaForQuestionAnswering', 
'xlm_roberta_qa_xlm_roberta_large_arabic_qa': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_large_chaii': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_large_fa_qa': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_large_korquad_mask': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_large_qa_multilingual_finedtuned_ru_ru_AlexKay': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_large_qa_multilingual_finedtuned_ru_ru_alexkay': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_large_squad2': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_large_vi_qa': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_large_xquad': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_qa_chaii': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_squad_tamil': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlm_roberta_squad_v1.1': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlmr_base_texas_squad_da_da_saattrupdan': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlmr_base_texas_squad_de_de_saattrupdan': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlmr_base_texas_squad_es_es_saattrupdan': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlmr_base_texas_squad_fr_fr_saattrupdan': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlmr_base_texas_squad_is_is_saattrupdan': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlmr_large_qa_sv_sv_m3hrdadfi': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_qa_xlmroberta_large_tweetqa': 'XlmRoBertaForQuestionAnswering', 'xlm_roberta_token_classifier_ner_40_lang': 'XlmRoBertaForTokenClassification', 'xlm_roberta_xtreme_base': 'XlmRoBertaEmbeddings', 'xlmroberta_classifier_autonlp_fake_news_detection_system_29906863': 'XlmRoBertaForSequenceClassification', 'xlmroberta_classifier_base_mrpc': 'XlmRoBertaForSequenceClassification', 'xlmroberta_classifier_base_snli_mnli_anli_xnli': 
'XlmRoBertaForSequenceClassification', 'xlmroberta_classifier_da_xlmr_ned': 'XlmRoBertaForSequenceClassification', 'xlmroberta_classifier_deoffxlmr_mono_kannada': 'XlmRoBertaForSequenceClassification', 'xlmroberta_classifier_deoffxlmr_mono_malyalam': 'XlmRoBertaForSequenceClassification', 'xlmroberta_classifier_deoffxlmr_mono_tamil': 'XlmRoBertaForSequenceClassification', 'xlmroberta_classifier_large_en_ru_mnli': 'XlmRoBertaForSequenceClassification', 'xlmroberta_classifier_twitter_emotion': 'XlmRoBertaForSequenceClassification', 'xlmroberta_classifier_verdict': 'XlmRoBertaForSequenceClassification', 'xlmroberta_embeddings_afriberta_base': 'XlmRoBertaEmbeddings', 'xlmroberta_embeddings_afriberta_large': 'XlmRoBertaEmbeddings', 'xlmroberta_embeddings_afriberta_small': 'XlmRoBertaEmbeddings', 'xlmroberta_embeddings_cino_base_v2': 'XlmRoBertaEmbeddings', 'xlmroberta_embeddings_cino_large': 'XlmRoBertaEmbeddings', 'xlmroberta_embeddings_cino_large_v2': 'XlmRoBertaEmbeddings', 'xlmroberta_embeddings_cino_small_v2': 'XlmRoBertaEmbeddings', 'xlmroberta_embeddings_fairlex_cail_minilm': 'XlmRoBertaEmbeddings', 'xlmroberta_embeddings_fairlex_fscs_minilm': 'XlmRoBertaEmbeddings', 'xlmroberta_embeddings_indic_transformers_bn_xlmroberta': 'XlmRoBertaEmbeddings', 'xlmroberta_embeddings_indic_transformers_hi_xlmroberta': 'XlmRoBertaEmbeddings', 'xlmroberta_embeddings_indic_transformers_te_xlmroberta': 'XlmRoBertaEmbeddings', 'xlmroberta_embeddings_litlat_bert': 'XlmRoBertaEmbeddings', 'xlmroberta_embeddings_marathi_roberta': 'XlmRoBertaEmbeddings', 'xlmroberta_embeddings_roberta_large_eng_ara_128k': 'XlmRoBertaEmbeddings', 'xlmroberta_embeddings_xlm_roberta_base': 'XlmRoBertaEmbeddings', 'xlmroberta_ner_akshat_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_andyjennings_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_ardallie_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_arned_base_finetuned_panx': 
'XlmRoBertaForTokenClassification', 'xlmroberta_ner_arrandi_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_atlantis_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_aytugkaya_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_bc5cdr': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_bionlp2004': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_fin': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_amharic_finetuned_ner_swahili': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_arman': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_hausa_finetuned_ner_swahili': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_igbo_finetuned_ner_swahili': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_kinyarwanda_finetuned_ner_kinyarwand': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_kinyarwanda_finetuned_ner_swahili': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_luganda': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_luganda_finetuned_luganda': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_luganda_finetuned_ner_swahili': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_luo_finetuned_ner': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_luo_finetuned_ner_swahili': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_naija': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_naija_finetuned_naija': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_naija_finetuned_ner_swahili': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_ner': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_ner_kinyarwand': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_ner_swahili': 
'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_ner_wolof': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_panx_de_data': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_peyma': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_recipe': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_recipe_all': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_swahili_finetuned_luganda': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_swahili_finetuned_naija': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_swahili_finetuned_ner': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_swahili_finetuned_ner_kinyarwand': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_swahili_finetuned_ner_swahili': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_swahili_finetuned_ner_wolof': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_wolof_finetuned_ner_swahili': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_wolof_finetuned_ner_wolof': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_finetuned_yoruba_finetuned_ner_swahili': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_indonesian': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_masakhan': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_panx_dataset': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_sadilar': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_uncased_all_english': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_uncased_bc5cdr': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_uncased_bionlp2004': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_uncased_conll2003': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_uncased_fin': 
'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_uncased_mit_movie_trivia': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_uncased_mit_restaurant': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_uncased_panx_dataset': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_uncased_wnut2017': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_base_wnut2017': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_be4rr_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_cj_mills_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_cj_mills_base_finetuned_panx_all': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_claytonsamples_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_clisi2000_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_cole_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_coolzhao_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_cyner_base': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_danhsf_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_davinam_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_dfsj_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_dkasti_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_dkasti_base_finetuned_panx_all': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_edwardjross_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_edwardjross_base_finetuned_panx_all': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_employment_contract_ner': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_ericklerouge123_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_evs_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_extract_names': 'XlmRoBertaForTokenClassification', 
'xlmroberta_ner_flood_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_flood_base_finetuned_panx_all': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_fullstop_punctuation_multilang_larg': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_furyhawk_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_fvector_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_gbennett_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_gpt2_large_detector_de_v1': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_hadxu_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_haesun_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_haesun_base_finetuned_panx_all': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_harish3110_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_hiner_original_large': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_hugsao123_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_iis2009002_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_iis2009002_base_finetuned_panx_all': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_imyday_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_jamesmarcel_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_jamie613_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_jasonyim2_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_jboever_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_jdang_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_jgriffi_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_jgriffi_base_finetuned_panx_all': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_jonfrank_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 
'xlmroberta_ner_jplu_r_40_lang': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_k3nneth_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_kaykozaronek_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_bionlp2004': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_conll2003': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_fin': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_finetuned_conll03_english': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_indonesian': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_panx_dataset': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_uncased_all_english': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_uncased_bc5cdr': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_uncased_bionlp2004': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_uncased_conll2003': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_uncased_fin': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_uncased_mit_movie_trivia': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_uncased_mit_restaurant': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_uncased_panx_dataset': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_uncased_wnut2017': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_large_wnut2017': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_leixu_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_leizhang_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_lge_panx_dataset': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_lijingxin_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_lijingxin_base_finetuned_panx_all': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_manqingliu_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_mertyrgn_base_finetuned_panx': 
'XlmRoBertaForTokenClassification', 'xlmroberta_ner_miyagawaorj_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_moghis_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_mubikan_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_naam_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_naomiyjchen_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_neha2608_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_neha2608_base_finetuned_panx_all': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_netoass_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_ninh_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_novarac23_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_olpa_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_osanseviero_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_pdroberts_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_pglauner_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_pitspits_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_rav_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_rgl73_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_rishav_hub_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_rishiyoung_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_roberta_multilingual_medieval': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_robkayinto_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_robkayinto_base_finetuned_panx_all': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_selamatpagi_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_shopee': 
'XlmRoBertaForTokenClassification', 'xlmroberta_ner_simulst_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_skr3178_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_skr3178_base_finetuned_panx_all': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_skyr_wikineural_multilingual': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_tf_r_40_lang': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_tner_base_all_english': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_tner_base_conll2003': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_tner_base_ontonotes5': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_tner_base_uncased_ontonotes5': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_tner_large_all_english': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_tner_large_bc5cdr': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_tner_large_multiconer_multi': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_tner_large_ontonotes5': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_tner_large_uncased_ontonotes5': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_transformersbook_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_transformersbook_base_finetuned_panx_all': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_uk_ner': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_v3rx2000_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_v3rx2000_base_finetuned_panx_all': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_venturaville_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_victen_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xliu128_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_finetuned_amharic_finetuned_ner_amharic': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_finetuned_hausa_finetuned_ner_hausa': 
'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_finetuned_igbo_finetuned_ner_igbo': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_finetuned_ner_amharic': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_finetuned_ner_hausa': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_finetuned_ner_igbo': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_finetuned_ner_yoruba': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_finetuned_panx_ner': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_finetuned_swahili_finetuned_ner_amharic': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_finetuned_swahili_finetuned_ner_hausa': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_finetuned_swahili_finetuned_ner_igbo': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_finetuned_swahili_finetuned_ner_yoruba': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_finetuned_yoruba_finetuned_ner_yoruba': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_ner_hrl': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_nl_emoji_ner': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_turkish_ner': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_base_wikiann_ner': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_large_finetuned_conll03_german': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_large_ner_hrl': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xlm_roberta_large_ner_spanish': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xml_roberta_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_xugenpeng_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_yaxin_base_conll2003': 'XlmRoBertaForTokenClassification', 
'xlmroberta_ner_yomexa_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_ner_zpablo_base_finetuned_panx': 'XlmRoBertaForTokenClassification', 'xlmroberta_pos_uk_morph': 'XlmRoBertaForTokenClassification', 'xlmroberta_pos_xlm_roberta_base_english_upos': 'XlmRoBertaForTokenClassification', 'xlmroberta_qa_ukrainian': 'XlmRoBertaForQuestionAnswering', 'xlmroberta_qa_xlmr_large': 'XlmRoBertaForQuestionAnswering', 'xlnet_base_cased': 'XlnetEmbeddings', 'xlnet_base_sequence_classifier_ag_news': 'XlnetForSequenceClassification', 'xlnet_base_sequence_classifier_imdb': 'XlnetForSequenceClassification', 'xlnet_base_token_classifier_conll03': 'XlnetForTokenClassification', 'xlnet_large_cased': 'XlnetEmbeddings', 'xlnet_large_token_classifier_conll03': 'XlnetForTokenClassification', 'yake': 'YakeKeywordExtraction', 'zero_shot_ner_roberta': 'ZeroShotNerModel', 'zero_shot_re': 'ZeroShotRelationExtractionModel', 'asr_hubert_large_ls960': 'HubertForCTC', 'image_classifier_swin_tiny_patch4_window7_224':'SwinForImageClassification', 'camembert_base_qa_fquad': 'CamemBertForQuestionAnswering', }
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/spellbook.py
spellbook.py
import glob
import json
import logging
import os
import sys
from dataclasses import dataclass

import nlu

# Name of the JSON metadata file expected inside every component directory.
COMPONENT_INFO_FILE_NAME = 'component_infos.json'

logger = logging.getLogger('nlu')


class AllComponentsInfo:
    """Registry of every NLU component info object plus language metadata.

    Holds one dict per component type (classifiers, embeddings, ...) and the
    hard-coded language lists for pretrained pipelines and models.
    """

    def __init__(self):
        """Initialize every NLU component_to_resolve info object and provide access to them."""
        self.all_components = {}
        self.classifiers = {}
        self.embeddings = {}
        self.normalizers = {}
        self.pretrained_pipelines = {}
        self.selectors = {}
        self.spell_checkers = {}
        self.stemmers = {}
        self.tokenizers = {}
        self.utils = {}
        self.all_multi_lang_base_ner_languages = ['en', 'fr', 'de', 'it', 'pl', 'pt', 'ru', 'es']
        self.all_multi_lang_xtreme_ner_languages = ['af', 'ar', 'bg', 'bn', 'de', 'el', 'en', 'es', 'et',
                                                    'eu', 'fa', 'fi', 'fr', 'he', 'hi', 'hu', 'id', 'it',
                                                    'ja', 'jv', 'ka', 'kk', 'ko', 'ml', 'mr', 'ms', 'my',
                                                    'nl', 'pt', 'ru', 'sw', 'ta', 'te', 'th', 'tl', 'tr',
                                                    'ur', 'vi', 'yo', 'zh']
        self.all_right_to_left_langs_with_pretrained_tokenizer = ['zh', 'ko', 'ja']
        self.all_pretrained_pipe_languages = ['en', 'nl', 'fr', 'de', 'it', 'no', 'pl', 'pt', 'ru', 'es',
                                              'xx', ]
        # NOTE(review): the duplicates in this list ('af', 'uk', 'ta', 'mr',
        # 'id') come from the original source; they are harmless because the
        # list is converted to a set below, and removing them would change the
        # exposed list value.
        self.all_pretrained_model_languages = ['vi', 'mt', 'ta', 'af', 'cy', 'et', 'bh', 'am', 'da', 'fr',
                                               'de', 'it', 'nb', 'no', 'nn', 'pl', 'pt', 'ru', 'es', 'af',
                                               'ar', 'hy', 'eu', 'bn', 'br', 'bg', 'ca', 'cs', 'eo', 'fi',
                                               'gl', 'el', 'ha', 'he', 'hi', 'hu', 'id', 'ga', 'ja', 'la',
                                               'lv', 'mr', 'fa', 'ro', 'sk', 'sl', 'so', 'st', 'sw', 'sv',
                                               'th', 'tr', 'uk', 'yo', 'zu', 'zh', 'xx', 'ur', 'ko', 'yi',
                                               'uk', 'te', 'ta', 'sd', 'pa', 'ne', 'ml', 'mr', 'kn', 'id',
                                               'gu', 'bs', 'ig', 'lg', 'lou', 'pcm', 'wo', 'rw',
                                               'is', ] + self.all_multi_lang_xtreme_ner_languages
        self.all_languages = set(self.all_pretrained_pipe_languages).union(
            set(self.all_pretrained_model_languages))
        self.all_classifier_classes = []
        # This maps a requested token to a class.
        # BUG FIX: the original list was missing a comma between 'stopwords'
        # and 'labled_dependency', so implicit string concatenation produced a
        # single bogus action 'stopwordslabled_dependency'.
        self.all_nlu_actions = ['tokenize', 'pos', 'ner', 'embed', 'classify', 'sentiment', 'emotion',
                                'spell', 'dependency', 'dep', 'dep.untyped', 'match',
                                'sentence_detector', 'spell', 'stopwords', 'labled_dependency', 'lemma',
                                'norm', 'select', 'pretrained_pipe', 'util', 'embed_sentence',
                                'embed_chunk', 'ngram']

    def list_all_components(self):
        """Print the name of every registered component."""
        # Typo fix in the printed header: 'Avaiable' -> 'Available'.
        print("--------------Available Components in NLU :--------------")
        for name in self.all_components.keys():
            print(name)

    def get_component_info_by_name(self, name):
        """Return the ComponentInfo registered under ``name``.

        Raises:
            KeyError: if ``name`` is not a registered component.
        """
        return self.all_components[name]

    # NOTE(review): the methods below were stubs in the original source and
    # are kept as stubs (names unchanged, including the original typos
    # 'languageand' and 'avaiable') so callers keep working.
    def list_all_components_of_type(self, component_type='embeddings'):
        pass

    @staticmethod
    def list_all_components_of_language(component_lang='ger'):
        pass

    @staticmethod
    def list_all_components_of_languageand_type(component_lang='ger', component_type='embeddings'):
        pass

    @staticmethod
    def get_default_component_of_type():
        pass

    @staticmethod
    def list_avaiable_output_types():
        pass

    @staticmethod
    def get_all_component_info_obj():
        pass


@dataclass
class ComponentInfo:
    name: str
    description: str  # general annotator/model_anno_obj/component_to_resolve/pipeline info
    outputs: list  # which columns/output types this component_to_resolve provides
    inputs: list  # which columns/input types the component_to_resolve depends on
    type: str  # which kind of component_to_resolve this is
    output_level: str  # document, sentence, token, chunk, input_dependent or model_dependent
    spark_input_column_names: list  # default expected input column names for Spark NLP annotators on Spark DFs
    spark_output_column_names: list  # default expected output column names for Spark NLP annotators on Spark DFs
    provider: str  # who provides the implementation of this annotator, Spark-NLP for base
    license: str  # open source or private
    computation_context: str  # whether computation happens in Spark or in another engine (Tensorflow, Numpy, HuggingFace, ...)
    output_context: str  # where this component's final result lives
    trainable: bool

    @classmethod
    def from_directory(cls, component_info_dir):
        """Create a ComponentInfo from the component_infos.json provided for every component.

        @param component_info_dir: `str` The directory containing the metadata
            file. This should be the root directory of a specific component.
        Raises:
            ValueError: if ``component_info_dir`` is falsy.
            TypeError: (re-raised) if the JSON has missing or extra fields.
        """
        if not component_info_dir:
            # BUG FIX: the original message referenced the wrong class/param
            # ('DatasetInfo.from_directory()' / 'dataset_info_dir').
            raise ValueError("Calling ComponentInfo.from_directory() with undefined component_info_dir.")
        component_info_dir = component_info_dir.replace('//', '/')
        with open(os.path.join(component_info_dir, COMPONENT_INFO_FILE_NAME), "r", encoding="utf8") as f:
            dataset_info_dict = json.load(f)
        try:
            return cls(**dataset_info_dict)
        except Exception:
            # Narrowed from a bare `except:` so BaseException (e.g.
            # KeyboardInterrupt) is no longer intercepted. A failure here
            # usually means the JSON keys do not match the dataclass fields.
            print(" Exception Occured! For Path", component_info_dir,
                  " Json file most likely has missing features. Todo nicer output error info",
                  sys.exc_info()[0])
            raise
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/info.py
info.py
__version__ = '4.0.0'

import nlu.utils.environment.env_utils as env_utils

# Hard requirements: Pyspark and Spark NLP must be importable before anything else.
if not env_utils.try_import_pyspark_in_streamlit():
    # Typo fix in the error message: 'ned' -> 'need'.
    raise ImportError("You need to install Pyspark to run nlu. Run pip install pyspark==3.0.1")
if not env_utils.try_import_spark_nlp():
    raise ImportError("You need Spark NLP to run NLU. run pip install spark-nlp")

import sparknlp
import warnings
import nlu.utils.environment.authentication as auth_utils
import nlu.utils.environment.offline_load_utils as offline_utils
from nlu.universe.universes import Licenses
from nlu.utils.environment.authentication import *
from nlu.pipe.pipeline import NLUPipeline
from nlu.pipe.pipe_logic import PipelineCompleter
from nlu.discovery import Discoverer
from nlu.pipe.component_resolution import *


def version():
    """Return the NLU version string."""
    return __version__


warnings.filterwarnings("ignore")
logger = logging.getLogger('nlu')
logger.setLevel(logging.CRITICAL)
ch = logging.StreamHandler()
ch.setLevel(logging.CRITICAL)
logger.addHandler(ch)

st_cache_enabled = False
nlu_package_location = nlu.__file__[:-11]

discoverer = Discoverer()

slack_link = 'https://join.slack.com/t/spark-nlp/shared_invite/zt-lutct9gm-kuUazcyFKhuGY3_0AMkxqA'
github_issues_link = 'https://github.com/JohnSnowLabs/nlu/issues'


def viz(nlp_pipe: Union[Pipeline, LightPipeline, PipelineModel, List],
        data: str,
        viz_type: str = '',
        labels_to_viz=None,
        viz_colors=None,
        return_html=False,
        ner_col: str = None,
        pos_col: str = None,
        dep_untyped_col: str = None,
        dep_typed_col: str = None,
        resolution_col: str = None,
        relation_col: str = None,
        assertion_col: str = None,
        ):
    """Visualize input data with an already configured Spark NLP pipeline,
    for algorithms of type (Ner, Assertion, Relation, Resolution, Dependency)
    using Spark-NLP-Display.

    Automatically infers the applicable viz type and output columns to use for
    visualization. Data may only be a string. If a pipeline has multiple model
    candidates usable for a viz, the first vizzable annotator is used.
    Output columns are deduced from the pipeline by taking the first pipe that
    provides the correct output type for a specific viz; they can be
    overridden via the ``*_col`` parameters.

    :param nlp_pipe: One of [Pipeline, LightPipeline, PipelineModel, List[Annotator]]
    :param data: String to viz
    :param viz_type: Viz type, one of [ner, dep, resolution, relation, assert].
        If none defined, nlu will infer and apply all applicable viz
    :param labels_to_viz: Subset of NER labels to viz, i.e. ['PER']; by default
        all labels are displayed. Applicable only for NER viz
    :param viz_colors: Applicable for [ner, resolution, assert]; dict of
        label -> hex color, i.e. viz_colors={'TREATMENT':'#008080'}.
        BUG FIX: the original used a mutable default argument ``{}``; ``None``
        is now the sentinel and is normalized to ``{}`` below.
    :param return_html: whether to return the rendered HTML instead of displaying
    :param ner_col: Specify NER column for NER/Resolution/Assertion viz
    :param pos_col: Specify POS column for Dependency Tree viz
    :param dep_untyped_col: Specify Untyped Dependency Column for Tree Viz
    :param dep_typed_col: Specify Typed Dependency Column for Tree Viz
    :param resolution_col: Specify Resolution col for resolution viz
    :param relation_col: Specify Relation col for Relation viz
    :param assertion_col: Specify Assertion col for Assertion viz
    :return: viz result (HTML string when return_html=True)
    """
    if viz_colors is None:
        viz_colors = {}
    return to_nlu_pipe(nlp_pipe, True).viz(text_to_viz=data, viz_type=viz_type,
                                           labels_to_viz=labels_to_viz,
                                           return_html=return_html, viz_colors=viz_colors,
                                           ner_col=ner_col, pos_col=pos_col,
                                           dep_untyped_col=dep_untyped_col,
                                           dep_typed_col=dep_typed_col,
                                           resolution_col=resolution_col,
                                           relation_col=relation_col,
                                           assertion_col=assertion_col,
                                           )


def autocomplete_pipeline(pipe: Union[Pipeline, LightPipeline, PipelineModel, List], lang='en'):
    """Auto-complete a pipeline or single annotator into a runnable pipeline
    via NLU's DAG autocompletion algorithm and return it as an NLU pipeline.

    The standard Spark pipeline is available on the ``.vanilla_transformer_pipe``
    attribute of the returned nlu pipe. NLU completes an incomplete DAG by
    finding or creating a path between the first input node (almost always
    DocumentAssembler/MultiDocumentAssembler) and the last node(s), resolving
    input features of annotators to providers with matching storage references.

    :param pipe: Pipeline, list of annotators or single annotator to complete
    :param lang: Language of the elements in pipe, used to resolve correct
        language for dependencies of the pipe. If unset, storage ref errors
        may occur!
    :return: NLU pipeline with completed pipeline
    """
    if isinstance(pipe, List):
        pipe = to_nlu_pipe(pipe, is_pre_configured=False)
    else:
        pipe = to_nlu_pipe([pipe], is_pre_configured=False)
    pipe = PipelineCompleter.check_and_fix_nlu_pipeline(pipe)
    return pipe


def to_pretty_df(nlp_pipe: Union[Pipeline, LightPipeline, PipelineModel, List], data, positions=False,
                 output_level='', metadata=False, ):
    """Annotate data with a given, complete and runnable, Spark NLP pipeline
    and return a pretty Pandas DataFrame.

    Annotators are grouped internally by NLU into output levels, which are
    zipped and exploded together to create the final output df. Most metadata
    keys of the result annotations are expanded into their own columns; some
    are omitted to reduce the column count and can be re-enabled with
    ``metadata=True``.

    :param nlp_pipe: Pipeline or list of annotators to use for prediction
    :param data: Data to predict on
    :param positions: whether to output indexes that map predictions back to
        position in origin string
    :param output_level: output level, either document/sentence/chunk/token/relation
    :param metadata: whether to keep additional metadata (e.g. per-class
        confidences) in the final df
    :return: Pandas DataFrame of predictions
    """
    return to_nlu_pipe(nlp_pipe, True).predict(data, positions=positions, output_level=output_level,
                                               metadata=metadata)


def to_nlu_pipe(nlp_pipe: Union[Pipeline, LightPipeline, PipelineModel, List],
                is_pre_configured=True) -> NLUPipeline:
    """Convert a pipeline or list of sparknlp/sparknlp_jsl annotators into an
    NLU pipeline, maintaining the original configuration.

    The pipeline does not need to be pre-fitted.

    :param nlp_pipe: the pipeline to convert; must have an iterable attribute
        of pipe stages
    :param is_pre_configured: whether input/output cols are already properly
        matched between annotators. If True, NLU will not run any pipeline
        autocompletion or configuration
    :return: nlu pipe wrapping
    """
    pipe = NLUPipeline()
    components = get_nlu_pipe_for_nlp_pipe(nlp_pipe, is_pre_configured)
    for c in components:
        pipe.add(c, is_pre_configured)
        if c.license == Licenses.hc:
            pipe.has_licensed_components = True
    return pipe


def load(request: str = 'from_disk', path: Optional[str] = None, verbose: bool = False,
         gpu: bool = False,
         streamlit_caching: bool = False,
         m1_chip: bool = False
         ) -> NLUPipeline:
    """Load either a prebuilt pipeline or a set of components identified by a
    whitespace-separated list of component references.

    You must call nlu.auth() BEFORE calling nlu.load() to access licensed
    models. If you called nlu.load() first you must RESTART your Python
    process, because the Spark context is already created.

    :param request: A NLU model_anno_obj/pipeline/component_to_resolve
        reference. Multiple components can be requested separated by whitespace,
        i.e. nlu.load('elmo bert albert')
    :param path: if not None, the model/component_list is loaded from this
        path (offline mode). Only entire NLU pipelines are supported
    :param verbose: whether to output debug prints
    :param gpu: whether to leverage GPU
    :param streamlit_caching: whether streamlit caching should be used in
        Streamlit visualizations (trades memory for speed on repeated requests)
    :param m1_chip: whether to start Spark with Apple M1 support
    :return: a non-fitted NLU pipeline object
    """
    if streamlit_caching and not nlu.st_cache_enabled:
        enable_streamlit_caching()
        return nlu.load(request, path, verbose, gpu, streamlit_caching)
    # Check if secrets are in default loc; if yes load them and create licensed context automatically.
    auth(gpu=gpu)
    spark = get_open_source_spark_context(gpu, m1_chip)
    spark.catalog.clearCache()

    if verbose:
        enable_verbose()
    else:
        disable_verbose()

    try:
        if path is not None:
            logger.info(f'Trying to load nlu pipeline from local hard drive, located at {path}')
            pipe = load_nlu_pipe_from_hdd(path, request)
            pipe.nlu_ref = request
            return pipe
    except Exception as err:
        if verbose:
            log_verbose_error(err)
        # BUG FIX: original message read "Something while loading the pipe".
        raise Exception(
            f"Something went wrong while loading the pipe in {path}. Is the path correct? use nlu.load(verbose=True) for more info.")

    # Try to manifest SparkNLP annotators from each nlu_ref in the request.
    components_requested = request.split(' ')
    pipe = NLUPipeline()
    language = parse_language_from_nlu_ref(request)
    pipe.lang = language
    pipe.nlu_ref = request

    try:
        for nlu_ref in components_requested:
            # BUG FIX: the original called nlu_ref.replace(' ', '') without
            # assigning the result, making the call a no-op.
            nlu_ref = nlu_ref.replace(' ', '')
            if nlu_ref == '':
                continue
            nlu_component = nlu_ref_to_component(nlu_ref)
            # A list result means the NLU reference is a pipeline; order need not be checked.
            if isinstance(nlu_component, list):
                for c in nlu_component:
                    pipe.add(c, nlu_ref, pretrained_pipe_component=True)
            else:
                # Just a single component requested.
                pipe.add(nlu_component, nlu_ref)
    except Exception as err:
        if verbose:
            log_verbose_error(err)
        raise Exception(
            f"Something went wrong during creating the Spark NLP model_anno_obj for your request = {request} "
            f"Did you use a NLU Spell?")

    # Complete the Spark NLP pipeline, which is a DAG given by the starting annotators.
    try:
        pipe = PipelineCompleter.check_and_fix_nlu_pipeline(pipe)
        pipe.nlu_ref = request
        return pipe
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt etc. propagate.
        if verbose:
            e = sys.exc_info()
            print(e[0])
            print(e[1])
        raise Exception(f"Something went wrong during completing the DAG for the Spark NLP Pipeline."
                        f"If this error persists, please contact us in Slack {slack_link} "
                        f"Or open an issue on Github {github_issues_link}")


def auth(HEALTHCARE_LICENSE_OR_JSON_PATH='/content/spark_nlp_for_healthcare.json',
         AWS_ACCESS_KEY_ID='', AWS_SECRET_ACCESS_KEY='', HEALTHCARE_SECRET='', OCR_LICENSE='',
         OCR_SECRET='', gpu=False):
    """Authenticate environment for JSL licensed models.

    Installs NLP-Healthcare if not detected in the environment. Either provide
    the path to a spark_nlp_for_healthcare.json file as first param or pass
    HEALTHCARE_LICENSE_OR_JSON_PATH, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY
    and HEALTHCARE_SECRET manually. Set gpu=True to enable GPU mode.
    """
    auth_utils.auth(HEALTHCARE_LICENSE_OR_JSON_PATH=HEALTHCARE_LICENSE_OR_JSON_PATH,
                    AWS_SECRET_ACCESS_KEY=AWS_SECRET_ACCESS_KEY,
                    AWS_ACCESS_KEY_ID=AWS_ACCESS_KEY_ID,
                    HEALTHCARE_SECRET=HEALTHCARE_SECRET,
                    OCR_LICENSE=OCR_LICENSE,
                    OCR_SECRET=OCR_SECRET,
                    gpu=gpu)
    return nlu


def load_nlu_pipe_from_hdd(pipe_path, request) -> NLUPipeline:
    """Load either a pipeline of models or a single model from ``pipe_path``.

    If the path is a component_list, load and return it; if it is a single
    model, resolve it to the correct AnnotatorClass/NLU component and generate
    a pipeline for it.

    Raises:
        ValueError: when the path does not contain a loadable pipeline/model.
    """
    pipe = NLUPipeline()
    nlu_ref = request  # pipe_path
    if os.path.exists(pipe_path):
        # Resource in path is a pipeline.
        if offline_utils.is_pipe(pipe_path):
            # language, nlp_ref, nlu_ref, path=None, is_licensed=False
            # TODO: deduce lang and whether licensed or not
            pipe_components = get_trained_component_list_for_nlp_pipe_ref('en', nlu_ref, nlu_ref,
                                                                          pipe_path, False)
        # Resource in path is a single model.
        elif offline_utils.is_model(pipe_path):
            c = offline_utils.verify_and_create_model(pipe_path)
            c.nlu_ref = nlu_ref
            pipe.add(c, nlu_ref, pretrained_pipe_component=True)
            return PipelineCompleter.check_and_fix_nlu_pipeline(pipe)
        else:
            print(
                f"Could not load model_anno_obj in path {pipe_path}. Make sure the jsl_folder contains either a stages subfolder or a metadata subfolder.")
            raise ValueError
        for c in pipe_components:
            pipe.add(c, nlu_ref, pretrained_pipe_component=True)
        return pipe
    else:
        print(
            f"Could not load model_anno_obj in path {pipe_path}. Make sure the jsl_folder contains either a stages subfolder or a metadata subfolder.")
        raise ValueError


def get_open_source_spark_context(gpu, m1_chip):
    """Start (or get) an open-source Spark context, optionally with GPU/M1 support.

    Raises:
        ValueError: when the installed Pyspark version is not 3.x.
    """
    if env_utils.is_env_pyspark_3_x():
        if m1_chip:
            return sparknlp.start(gpu=gpu, m1=True)
        else:
            return sparknlp.start(gpu=gpu)
    raise ValueError(f"Failure starting Spark Context! Current Spark version {get_pyspark_version()} not supported! "
                     f"Please install any of Pyspark 3.X versions.")


def _set_verbosity(level) -> None:
    """Set the nlu logger and all of its handlers to ``level``.

    BUG FIX: the original enable/disable_verbose appended a brand-new
    StreamHandler on every call, so repeated nlu.load() calls produced
    duplicated log lines. We now reuse existing handlers and only attach one
    if the logger has none.
    """
    logger.setLevel(level)
    if not logger.handlers:
        logger.addHandler(logging.StreamHandler())
    for handler in logger.handlers:
        handler.setLevel(level)


def enable_verbose() -> None:
    """Enable INFO-level debug output on the nlu logger."""
    _set_verbosity(logging.INFO)


def disable_verbose() -> None:
    """Restrict nlu logger output to ERROR level."""
    _set_verbosity(logging.ERROR)


def enable_streamlit_caching():
    """Monkeypatch nlu.load() with an st.cache-wrapped version."""
    nlu.st_cache_enabled = True
    nlu.non_caching_load = load
    nlu.load = wrap_with_st_cache_if_available_and_set_layout_to_wide(nlu.load)


# def disable_streamlit_caching(): # WIP not working
#     if hasattr(nlu, 'non_caching_load'): nlu.load = nlu.non_caching_load
#     else: print("Could not disable caching.")


def wrap_with_st_cache_if_available_and_set_layout_to_wide(f):
    """Wrap function with the streamlit cache decorator if streamlit is importable."""
    try:
        import streamlit as st
        st.set_page_config(layout='wide')
        logger.info("Using streamlit cache for load")
        return st.cache(f, allow_output_mutation=True, show_spinner=False)
    except Exception:
        # Narrowed from a bare `except:`; streamlit is an optional dependency.
        logger.exception("Could not import streamlit and apply caching")
        print("You need streamlit to run use this method")
        return f


# Discovery
def print_all_languages():
    """Print all languages which are available in NLU Spark NLP pointer."""
    discoverer.print_all_languages()


def print_all_nlu_components_for_lang(lang='en', c_type='classifier'):
    """Print all NLU components available for a language Spark NLP pointer."""
    discoverer.print_all_nlu_components_for_lang(lang, c_type)


def print_components(lang='', action=''):
    """Print every single NLU reference for models and pipelines and their Spark NLP pointer.

    :param lang: Language requirements for the components filtered. See
        nlu.languages() for supported languages
    :param action: Components that will be filtered.
    """
    discoverer.print_components(lang, action)


def print_component_types():
    """Print all unique component_to_resolve types in NLU."""
    discoverer.print_component_types()


def print_all_model_kinds_for_action(action):
    discoverer.print_all_model_kinds_for_action(action)


def print_all_model_kinds_for_action_and_lang(lang, action):
    discoverer.print_all_model_kinds_for_action_and_lang(lang, action)


def print_trainable_components():
    """Print every trainable Algorithm/Model."""
    discoverer.print_trainable_components()


def get_components(m_type='', include_pipes=False, lang='', licensed=False, get_all=False):
    return discoverer.get_components(m_type, include_pipes, lang, licensed, get_all)


def log_verbose_error(err):
    """Print the current traceback followed by ``err`` for debugging."""
    import traceback
    print(traceback.format_exc())
    print(err)
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/__init__.py
__init__.py
from dataclasses import dataclass
from typing import List, Union

from nlu.universe.atoms import JslAnnoId, JslFeature, ExternalFeature
from nlu.universe.feature_node_ids import NLP_NODE_IDS, OCR_NODE_IDS, NLP_HC_NODE_IDS
from nlu.universe.feature_universes import NLP_FEATURES, OCR_FEATURES, NLP_HC_FEATURES


### ____ Pipeline Graph Representation Logic Building Blocks ______


@dataclass
class FeatureNode:
    """Defines a node in an ML dependency feature graph.

    Anno = node, ins = incoming arrows, outs = outgoing arrows.
    Every NLU component outputs one of these triplets:
        NODE = annotator class, INS = list of consumed ML features,
        OUTS = list of produced ML features.
    Used to cast the pipeline dependency resolution algorithm into an
    abstract graph.
    """
    JSL_generator_anno_class: JslAnnoId  # JSL annotator that can generate this triplet; may come from OCR / JSL-internal / Spark NLP
    ins: List[JslFeature]  # ML features this annotator consumes
    outs: List[JslFeature]  # ML features this annotator produces


@dataclass
class NlpFeatureNode:
    """A node representation for a Spark NLP annotator.

    NOTE(review): the original docstring said "Spark OCR Annotator"; usage in
    NLP_FEATURE_NODES below shows these wrap Spark NLP annotators.
    Used to cast the pipeline dependency resolution algorithm into an
    abstract graph.
    """
    # The original annotation was Union[JslAnnoId], which is equivalent to the
    # plain type.
    node: JslAnnoId  # JSL annotator that can generate this triplet
    ins: List[JslFeature]  # ML features this annotator consumes
    outs: List[JslFeature]  # ML features this annotator produces


class NlpHcFeatureNode(FeatureNode):
    """FeatureNode specialization for Spark NLP for Healthcare annotators."""
    pass


class OcrFeatureNode(FeatureNode):
    """FeatureNode specialization for Spark OCR annotators."""
    pass


class EXTERNAL_NODES:
    """Start-node definitions for the NLU pipeline graph completion logic.

    These are analogous to the various input types NLU may accept.
    """
    RAW_TEXT = ExternalFeature('text')
    NON_WHITESPACED_TEXT = ExternalFeature('non_whitespaced_text')  # i.e. Chinese, Russian, etc..
# TODO define how its derivable, i.e Accepted input types that can be converted to spark DF types # str_array = 'str_array' # # pandas_df = 'pandas_df' # pd_series = 'pandas_series' # # np_array = 'pandas_series' # # img_path = 'pandas_series' # file_path = 'file_path' # todo more granuar, i.e. by file type? @dataclass class NLP_FEATURE_NODES: # or Mode Node? """All avaiable Feature nodes in Spark NLP Used to cast the pipeline dependency resolution algorithm into an abstract grpah """ # High Level NLP Feature Nodes E = EXTERNAL_NODES A = NLP_NODE_IDS F = NLP_FEATURES nodes = { A.PARTIALLY_IMPLEMENTED: NlpFeatureNode(A.PARTIALLY_IMPLEMENTED, [F.UNKOWN], [F.UNKOWN]), A.COREF_SPAN_BERT: NlpFeatureNode(A.COREF_SPAN_BERT, [F.DOCUMENT, F.TOKEN], [F.COREF_TOKEN]), A.BIG_TEXT_MATCHER: NlpFeatureNode(A.BIG_TEXT_MATCHER, [F.DOCUMENT, F.TOKEN], [F.CHUNK]), A.CHUNK2DOC: NlpFeatureNode(A.CHUNK2DOC, [F.NAMED_ENTITY_CONVERTED], [F.DOCUMENT_FROM_CHUNK]), A.CHUNK_EMBEDDINGS_CONVERTER: NlpFeatureNode(A.CHUNK_EMBEDDINGS_CONVERTER, [F.CHUNK, F.WORD_EMBEDDINGS], [F.CHUNK_EMBEDDINGS]), A.CHUNK_TOKENIZER: NlpFeatureNode(A.CHUNK_TOKENIZER, [F.CHUNK], [F.TOKEN_CHUNKED]), A.CHUNKER: NlpFeatureNode(A.CHUNKER, [F.DOCUMENT, F.POS], [F.CHUNK]), A.CLASSIFIER_DL: NlpFeatureNode(A.CLASSIFIER_DL, [F.SENTENCE_EMBEDDINGS], [F.CATEGORY]), A.TRAINABLE_CLASSIFIER_DL: NlpFeatureNode(A.CLASSIFIER_DL, [F.SENTENCE_EMBEDDINGS], [F.CATEGORY]), A.CONTEXT_SPELL_CHECKER: NlpFeatureNode(A.CONTEXT_SPELL_CHECKER, [F.TOKEN], [F.TOKEN_SPELL_CHECKED]), A.DATE_MATCHER: NlpFeatureNode(A.DATE_MATCHER, [F.DOCUMENT], [F.DATE]), A.UNTYPED_DEPENDENCY_PARSER: NlpFeatureNode(A.UNTYPED_DEPENDENCY_PARSER, [F.DOCUMENT, F.POS, F.TOKEN], [F.UNLABLED_DEPENDENCY]), A.TYPED_DEPENDENCY_PARSER: NlpFeatureNode(A.TYPED_DEPENDENCY_PARSER, [F.TOKEN, F.POS, F.UNLABLED_DEPENDENCY], [F.LABELED_DEPENDENCY]), A.DOC2CHUNK: NlpFeatureNode(A.DOC2CHUNK, [F.DOCUMENT], [F.DOCUMENT_FROM_CHUNK]), A.MULTI_DOCUMENT_ASSEMBLER: 
NlpFeatureNode(A.MULTI_DOCUMENT_ASSEMBLER, [F.RAW_QUESTION, F.RAW_QUESTION_CONTEXT], [F.DOCUMENT_QUESTION, F.DOCUMENT_QUESTION_CONTEXT]), A.TABLE_ASSEMBLER: NlpFeatureNode(A.TABLE_ASSEMBLER, [F.DOCUMENT_QUESTION_CONTEXT], [F.ASSEMBLED_TABULAR_DATA]), A.TAPAS_FOR_QA: NlpFeatureNode(A.TAPAS_FOR_QA, [F.DOCUMENT_QUESTION, F.ASSEMBLED_TABULAR_DATA, ], [F.TABULAR_ANSWER]), A.ALBERT_FOR_QUESTION_ANSWERING: NlpFeatureNode(A.ALBERT_FOR_QUESTION_ANSWERING, [F.DOCUMENT_QUESTION, F.DOCUMENT_QUESTION_CONTEXT], [F.CLASSIFIED_SPAN]), A.CAMEMBERT_FOR_QUESTION_ANSWERING: NlpFeatureNode(A.CAMEMBERT_FOR_QUESTION_ANSWERING, [F.DOCUMENT_QUESTION, F.DOCUMENT_QUESTION_CONTEXT], [F.CLASSIFIED_SPAN]), A.BERT_FOR_QUESTION_ANSWERING: NlpFeatureNode(A.BERT_FOR_QUESTION_ANSWERING, [F.DOCUMENT_QUESTION, F.DOCUMENT_QUESTION_CONTEXT], [F.CLASSIFIED_SPAN]), A.DE_BERTA_FOR_QUESTION_ANSWERING: NlpFeatureNode(A.DE_BERTA_FOR_QUESTION_ANSWERING, [F.DOCUMENT_QUESTION, F.DOCUMENT_QUESTION_CONTEXT], [F.CLASSIFIED_SPAN]), A.DISTIL_BERT_FOR_QUESTION_ANSWERING: NlpFeatureNode(A.DISTIL_BERT_FOR_QUESTION_ANSWERING, [F.DOCUMENT_QUESTION, F.DOCUMENT_QUESTION_CONTEXT], [F.CLASSIFIED_SPAN]), A.LONGFORMER_FOR_QUESTION_ANSWERING: NlpFeatureNode(A.LONGFORMER_FOR_QUESTION_ANSWERING, [F.DOCUMENT_QUESTION, F.DOCUMENT_QUESTION_CONTEXT], [F.CLASSIFIED_SPAN]), A.ROBERTA_FOR_QUESTION_ANSWERING: NlpFeatureNode(A.ROBERTA_FOR_QUESTION_ANSWERING, [F.DOCUMENT_QUESTION, F.DOCUMENT_QUESTION_CONTEXT], [F.CLASSIFIED_SPAN]), A.XLM_ROBERTA_FOR_QUESTION_ANSWERING: NlpFeatureNode(A.XLM_ROBERTA_FOR_QUESTION_ANSWERING, [F.DOCUMENT_QUESTION, F.DOCUMENT_QUESTION_CONTEXT], [F.CLASSIFIED_SPAN]), A.DOCUMENT_ASSEMBLER: NlpFeatureNode(A.DOCUMENT_ASSEMBLER, [E.RAW_TEXT], [F.DOCUMENT]), A.AUDIO_ASSEMBLER: NlpFeatureNode(A.AUDIO_ASSEMBLER, [F.RAW_AUDIO], [F.AUDIO]), # A.WAV2VEC_FOR_CTC: NlpFeatureNode(A.PARTIALLY_IMPLEMENTED, [F.AUDIO], [F.RECOGNIZED_SPEECH_TEXT]), A.WAV2VEC_FOR_CTC: NlpFeatureNode(A.WAV2VEC_FOR_CTC, [F.AUDIO], [E.RAW_TEXT]), 
A.HUBERT_FOR_CTC: NlpFeatureNode(A.HUBERT_FOR_CTC, [F.AUDIO], [E.RAW_TEXT]), A.IMAGE_ASSEMBLER: NlpFeatureNode(A.IMAGE_ASSEMBLER, [F.SPARK_NLP_IMAGE, F.SPARK_NLP_FILE_PATH], [F.IMAGE]), A.DOCUMENT_NORMALIZER: NlpFeatureNode(A.DOCUMENT_NORMALIZER, [F.DOCUMENT], [F.DOCUMENT_GENERATED]), A.EMBEDDINGS_FINISHER: NlpFeatureNode(A.EMBEDDINGS_FINISHER, [F.ANY_EMBEDDINGS], [F.FINISHED_EMBEDDINGS]), # A.# ENTITY_RULER : NlpFeatureNode(A.ENTITY_RULER, [F.], [F.]) # TODO? , A.FINISHER: NlpFeatureNode(A.FINISHER, [F.ANY], [F.ANY_FINISHED]), A.GRAPH_EXTRACTION: NlpFeatureNode(A.GRAPH_EXTRACTION, [F.DOCUMENT, F.TOKEN, F.NAMED_ENTITY_IOB], [F.NODE]), # A.# GRAPH_FINISHER : NlpFeatureNode(A.GRAPH_FINISHER, [F.], [F.]) , A.LANGUAGE_DETECTOR_DL: NlpFeatureNode(A.LANGUAGE_DETECTOR_DL, [F.DOCUMENT], [F.LANGUAGE]), A.LEMMATIZER: NlpFeatureNode(A.LEMMATIZER, [F.TOKEN], [F.TOKEN_LEMATIZED]), A.MULTI_CLASSIFIER_DL: NlpFeatureNode(A.MULTI_CLASSIFIER_DL, [F.SENTENCE_EMBEDDINGS], [F.MULTI_DOCUMENT_CLASSIFICATION]), A.TRAINABLE_MULTI_CLASSIFIER_DL: NlpFeatureNode(A.MULTI_CLASSIFIER_DL, [F.SENTENCE_EMBEDDINGS], [F.MULTI_DOCUMENT_CLASSIFICATION]), A.MULTI_DATE_MATCHER: NlpFeatureNode(A.MULTI_DATE_MATCHER, [F.DOCUMENT], [F.DATE]), A.N_GRAMM_GENERATOR: NlpFeatureNode(A.N_GRAMM_GENERATOR, [F.TOKEN], [F.CHUNK]), A.NER_CONVERTER: NlpFeatureNode(A.NER_CONVERTER, [F.TOKEN, F.DOCUMENT, F.NAMED_ENTITY_IOB], [F.NAMED_ENTITY_CONVERTED]), A.NER_CRF: NlpFeatureNode(A.NER_CRF, [F.DOCUMENT, F.TOKEN, F.WORD_EMBEDDINGS], [F.NAMED_ENTITY_IOB]), A.NER_DL: NlpFeatureNode(A.NER_DL, [F.DOCUMENT, F.TOKEN, F.WORD_EMBEDDINGS], [F.NAMED_ENTITY_IOB]), A.TRAINABLE_NER_DL: NlpFeatureNode(A.TRAINABLE_NER_DL, [F.DOCUMENT, F.TOKEN, F.WORD_EMBEDDINGS], [F.NAMED_ENTITY_IOB]), A.NER_OVERWRITER: NlpFeatureNode(A.NER_OVERWRITER, [F.NAMED_ENTITY_IOB], [F.NAMED_ENTITY_IOB]), A.NORMALIZER: NlpFeatureNode(A.NORMALIZER, [F.TOKEN], [F.TOKEN_NORMALIZED]), A.NORVIG_SPELL_CHECKER: NlpFeatureNode(A.NORVIG_SPELL_CHECKER, [F.TOKEN], 
[F.TOKEN_SPELL_CHECKED]), A.POS: NlpFeatureNode(A.POS, [F.TOKEN, F.DOCUMENT], [F.POS]), A.TRAINABLE_POS: NlpFeatureNode(A.POS, [F.TOKEN, F.DOCUMENT], [F.POS]), A.RECURISVE_TOKENIZER: NlpFeatureNode(A.RECURISVE_TOKENIZER, [F.DOCUMENT], [F.TOKEN]), A.REGEX_MATCHER: NlpFeatureNode(A.REGEX_MATCHER, [F.DOCUMENT], [F.NAMED_ENTITY_CONVERTED]), A.REGEX_TOKENIZER: NlpFeatureNode(A.REGEX_TOKENIZER, [F.DOCUMENT], [F.TOKEN]), A.SENTENCE_DETECTOR: NlpFeatureNode(A.SENTENCE_DETECTOR, [F.DOCUMENT], [F.SENTENCE]), A.SENTENCE_DETECTOR_DL: NlpFeatureNode(A.SENTENCE_DETECTOR_DL, [F.DOCUMENT], [F.SENTENCE]), A.SENTENCE_EMBEDDINGS_CONVERTER: NlpFeatureNode(A.SENTENCE_EMBEDDINGS_CONVERTER, [F.DOCUMENT, F.WORD_EMBEDDINGS], [F.SENTENCE_EMBEDDINGS]), A.SENTIMENT_DL: NlpFeatureNode(A.SENTIMENT_DL, [F.SENTENCE_EMBEDDINGS], [F.DOCUMENT_CLASSIFICATION]), A.TRAINABLE_SENTIMENT_DL: NlpFeatureNode(A.TRAINABLE_SENTIMENT_DL, [F.SENTENCE_EMBEDDINGS], [F.DOCUMENT_CLASSIFICATION]), # A.# SENTENCE_DETECTOR : NlpFeatureNode(A.SENTENCE_DETECTOR, [F.TOKEN, F.DOCUMENT], [F.DOCUMENT_CLASSIFICATION] , A.STEMMER: NlpFeatureNode(A.STEMMER, [F.TOKEN], [F.TOKEN_STEMMED]), A.STOP_WORDS_CLEANER: NlpFeatureNode(A.STOP_WORDS_CLEANER, [F.TOKEN], [F.TOKEN_STOP_WORD_REMOVED]), A.SYMMETRIC_DELETE_SPELLCHECKER: NlpFeatureNode(A.SYMMETRIC_DELETE_SPELLCHECKER, [F.TOKEN], [F.TOKEN_SPELL_CHECKED]), A.TEXT_MATCHER: NlpFeatureNode(A.TEXT_MATCHER, [F.DOCUMENT, F.TOKEN], [F.CHUNK]), A.TOKEN2CHUNK: NlpFeatureNode(A.TOKEN2CHUNK, [F.TOKEN], [F.CHUNK]), A.TOKEN_ASSEMBLER: NlpFeatureNode(A.TOKEN_ASSEMBLER, [F.DOCUMENT, F.TOKEN], [F.DOCUMENT]), A.TOKENIZER: NlpFeatureNode(A.TOKENIZER, [F.DOCUMENT], [F.TOKEN]), A.VIVEKN_SENTIMENT: NlpFeatureNode(A.VIVEKN_SENTIMENT, [F.TOKEN, F.DOCUMENT], [F.DOCUMENT_CLASSIFICATION]), A.SENTIMENT_DETECTOR: NlpFeatureNode(A.SENTIMENT_DETECTOR, [F.TOKEN, F.DOCUMENT], [F.DOCUMENT_CLASSIFICATION]), A.WORD_EMBEDDINGS: NlpFeatureNode(A.WORD_EMBEDDINGS, [F.DOCUMENT, F.TOKEN], [F.WORD_EMBEDDINGS]), 
A.WORD_SEGMENTER: NlpFeatureNode(A.WORD_SEGMENTER, [F.DOCUMENT], [F.TOKEN]), A.YAKE_KEYWORD_EXTRACTION: NlpFeatureNode(A.YAKE_KEYWORD_EXTRACTION, [F.TOKEN], [F.CHUNK]), A.ALBERT_EMBEDDINGS: NlpFeatureNode(A.ALBERT_EMBEDDINGS, [F.DOCUMENT, F.TOKEN], [F.WORD_EMBEDDINGS]), A.CAMEMBERT_FOR_TOKEN_CLASSIFICATION: NlpFeatureNode(A.CAMEMBERT_FOR_TOKEN_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.TOKEN_CLASSIFICATION]), A.DEBERTA_FOR_TOKEN_CLASSIFICATION: NlpFeatureNode(A.DEBERTA_FOR_TOKEN_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.TOKEN_CLASSIFICATION]), A.ALBERT_FOR_TOKEN_CLASSIFICATION: NlpFeatureNode(A.ALBERT_FOR_TOKEN_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.TOKEN_CLASSIFICATION]), A.BERT_EMBEDDINGS: NlpFeatureNode(A.BERT_EMBEDDINGS, [F.DOCUMENT, F.TOKEN], [F.WORD_EMBEDDINGS]), A.CAMEMBERT_EMBEDDINGS: NlpFeatureNode(A.CAMEMBERT_EMBEDDINGS, [F.DOCUMENT, F.TOKEN], [F.WORD_EMBEDDINGS]), A.DEBERTA_WORD_EMBEDDINGS: NlpFeatureNode(A.BERT_EMBEDDINGS, [F.DOCUMENT, F.TOKEN], [F.WORD_EMBEDDINGS]), A.BERT_FOR_TOKEN_CLASSIFICATION: NlpFeatureNode(A.BERT_FOR_TOKEN_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.TOKEN_CLASSIFICATION]), A.BERT_SENTENCE_EMBEDDINGS: NlpFeatureNode(A.BERT_SENTENCE_EMBEDDINGS, [F.DOCUMENT], [F.SENTENCE_EMBEDDINGS]), A.DISTIL_BERT_EMBEDDINGS: NlpFeatureNode(A.DISTIL_BERT_EMBEDDINGS, [F.DOCUMENT, F.TOKEN], [F.WORD_EMBEDDINGS]), A.DISTIL_BERT_FOR_TOKEN_CLASSIFICATION: NlpFeatureNode(A.DISTIL_BERT_FOR_TOKEN_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.TOKEN_CLASSIFICATION]), A.ELMO_EMBEDDINGS: NlpFeatureNode(A.ELMO_EMBEDDINGS, [F.DOCUMENT, F.TOKEN], [F.WORD_EMBEDDINGS]), A.LONGFORMER_EMBEDDINGS: NlpFeatureNode(A.LONGFORMER_EMBEDDINGS, [F.DOCUMENT, F.TOKEN], [F.WORD_EMBEDDINGS]), A.LONGFORMER_FOR_TOKEN_CLASSIFICATION: NlpFeatureNode(A.LONGFORMER_FOR_TOKEN_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.TOKEN_CLASSIFICATION]), A.MARIAN_TRANSFORMER: NlpFeatureNode(A.MARIAN_TRANSFORMER, [F.DOCUMENT], [F.DOCUMENT_TRANSLATED]), A.ROBERTA_EMBEDDINGS: 
NlpFeatureNode(A.ROBERTA_EMBEDDINGS, [F.DOCUMENT, F.TOKEN], [F.WORD_EMBEDDINGS]), A.ROBERTA_FOR_TOKEN_CLASSIFICATION: NlpFeatureNode(A.ROBERTA_FOR_TOKEN_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.TOKEN_CLASSIFICATION]), A.ROBERTA_SENTENCE_EMBEDDINGS: NlpFeatureNode(A.ROBERTA_SENTENCE_EMBEDDINGS, [F.DOCUMENT], [F.SENTENCE_EMBEDDINGS]), A.T5_TRANSFORMER: NlpFeatureNode(A.T5_TRANSFORMER, [F.DOCUMENT], [F.DOCUMENT_GENERATED]), A.UNIVERSAL_SENTENCE_ENCODER: NlpFeatureNode(A.UNIVERSAL_SENTENCE_ENCODER, [F.DOCUMENT], [F.SENTENCE_EMBEDDINGS]), A.XLM_ROBERTA_EMBEDDINGS: NlpFeatureNode(A.XLM_ROBERTA_EMBEDDINGS, [F.DOCUMENT, F.TOKEN], [F.WORD_EMBEDDINGS]), A.XLM_ROBERTA_FOR_TOKEN_CLASSIFICATION: NlpFeatureNode(A.XLM_ROBERTA_FOR_TOKEN_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.TOKEN_CLASSIFICATION]), A.XLM_ROBERTA_SENTENCE_EMBEDDINGS: NlpFeatureNode(A.XLM_ROBERTA_SENTENCE_EMBEDDINGS, [F.DOCUMENT], [F.SENTENCE_EMBEDDINGS]), A.XLNET_EMBEDDINGS: NlpFeatureNode(A.XLNET_EMBEDDINGS, [F.DOCUMENT, F.TOKEN], [F.WORD_EMBEDDINGS]), A.XLNET_FOR_TOKEN_CLASSIFICATION: NlpFeatureNode(A.XLNET_FOR_TOKEN_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.TOKEN_CLASSIFICATION]), A.DOC2VEC: NlpFeatureNode(A.DOC2VEC, [F.TOKEN], [F.WORD_EMBEDDINGS]), A.TRAIANBLE_DOC2VEC: NlpFeatureNode(A.TRAIANBLE_DOC2VEC, [F.TOKEN], [F.WORD_EMBEDDINGS]), A.BERT_FOR_SEQUENCE_CLASSIFICATION: NlpFeatureNode(A.BERT_FOR_SEQUENCE_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.SEQUENCE_CLASSIFICATION]), A.BERT_FOR_ZERO_SHOT_CLASSIFICATION: NlpFeatureNode(A.BERT_FOR_ZERO_SHOT_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.SEQUENCE_CLASSIFICATION]), A.CAMEMBERT_FOR_SEQUENCE_CLASSIFICATION : NlpFeatureNode(A.CAMEMBERT_FOR_SEQUENCE_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.SEQUENCE_CLASSIFICATION]), A.DEBERTA_FOR_SEQUENCE_CLASSIFICATION: NlpFeatureNode(A.BERT_FOR_SEQUENCE_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.SEQUENCE_CLASSIFICATION]), A.DISTIL_BERT_FOR_SEQUENCE_CLASSIFICATION: NlpFeatureNode(A.DISTIL_BERT_FOR_SEQUENCE_CLASSIFICATION, 
[F.DOCUMENT, F.TOKEN], [F.SEQUENCE_CLASSIFICATION]), A.DISTIL_BERT_FOR_ZERO_SHOT_CLASSIFICATION: NlpFeatureNode(A.DISTIL_BERT_FOR_ZERO_SHOT_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.SEQUENCE_CLASSIFICATION]), A.XLM_ROBERTA_FOR_SEQUENCE_CLASSIFICATION: NlpFeatureNode(A.XLM_ROBERTA_FOR_SEQUENCE_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.SEQUENCE_CLASSIFICATION]), A.ROBERTA_FOR_SEQUENCE_CLASSIFICATION: NlpFeatureNode(A.ROBERTA_FOR_SEQUENCE_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.SEQUENCE_CLASSIFICATION]), A.ROBERTA_FOR_ZERO_SHOT_CLASSIFICATION: NlpFeatureNode(A.ROBERTA_FOR_ZERO_SHOT_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.SEQUENCE_CLASSIFICATION]), A.LONGFORMER_FOR_SEQUENCE_CLASSIFICATION: NlpFeatureNode(A.LONGFORMER_FOR_SEQUENCE_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.SEQUENCE_CLASSIFICATION]), A.ALBERT_FOR_SEQUENCE_CLASSIFICATION: NlpFeatureNode(A.ALBERT_FOR_SEQUENCE_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.SEQUENCE_CLASSIFICATION]), A.XLNET_FOR_SEQUENCE_CLASSIFICATION: NlpFeatureNode(A.XLNET_FOR_SEQUENCE_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.SEQUENCE_CLASSIFICATION]), A.GPT2: NlpFeatureNode(A.GPT2, [F.DOCUMENT], [F.DOCUMENT_GENERATED]), A.WORD_2_VEC: NlpFeatureNode(A.WORD_2_VEC, [F.TOKEN], [F.WORD_EMBEDDINGS]), A.BERT_SENTENCE_CHUNK_EMBEDDINGS: NlpFeatureNode(A.BERT_SENTENCE_CHUNK_EMBEDDINGS, [F.DOCUMENT], [F.NAMED_ENTITY_CONVERTED]), A.VIT_IMAGE_CLASSIFICATION: NlpFeatureNode(A.VIT_IMAGE_CLASSIFICATION, [F.IMAGE], [F.CLASSIFIED_IMAGE]), A.SWIN_IMAGE_CLASSIFICATION: NlpFeatureNode(A.SWIN_IMAGE_CLASSIFICATION, [F.IMAGE], [F.CLASSIFIED_IMAGE]), } @dataclass class OCR_FEATURE_NODES: """All avaiable Feature nodes in OCR Used to cast the pipeline dependency resolution algorithm into an abstract grpah """ # Visual Document UnderstandingBINARY2IMAGE A = OCR_NODE_IDS F = OCR_FEATURES nodes = { A.VISUAL_DOCUMENT_CLASSIFIER: OcrFeatureNode(A.VISUAL_DOCUMENT_CLASSIFIER, [F.HOCR], [F.VISUAL_CLASSIFIER_PREDICTION, F.VISUAL_CLASSIFIER_CONFIDENCE]), A.IMAGE2HOCR: 
OcrFeatureNode(A.IMAGE2HOCR, [F.OCR_IMAGE], [F.HOCR]), # VISUAL_DOCUMENT_NER : OcrFeatureNode(A.VISUAL_DOCUMENT_NER, [OcrFeature.HOCR, OcrFeature.FILE_PATH], [NlpFeature.NER_Annotation]), # TODO NlpFeature Space! # Object Detection A.IMAGE_HANDWRITTEN_DETECTOR: OcrFeatureNode(A.IMAGE_HANDWRITTEN_DETECTOR, [F.OCR_IMAGE, ], [F.OCR_REGION]), # TABLE Processors/Recognition TODO REGION::CELL>?? A.IMAGE_TABLE_DETECTOR: OcrFeatureNode(A.IMAGE_TABLE_DETECTOR, [F.OCR_IMAGE, ], [F.OCR_TABLE]), # TODO REGION or TABLE??? IS IT THE SAME??? A.IMAGE_TABLE_CELL_DETECTOR: OcrFeatureNode(A.IMAGE_TABLE_CELL_DETECTOR, [F.OCR_IMAGE, ], [F.OCR_TABLE_CELLS]), # TODO REGION or TABLE??? IS IT THE SAME??? A.IMAGE_TABLE_CELL2TEXT_TABLE: OcrFeatureNode(A.IMAGE_TABLE_CELL2TEXT_TABLE, [F.OCR_IMAGE, F.OCR_TABLE_CELLS], [F.OCR_TABLE]), # TODO OUPUT!! REGION or TABLE??? IS IT THE SAME??? # TODO are POSITIOns and REGIONS the same??? Regions is an ARRAY of PSOTISIONS. BUT is REGION::: TABLE??? Samefor CELLs # PDF Processing A.PDF2TEXT: OcrFeatureNode(A.PDF2TEXT, [F.BINARY_PDF, F.FILE_PATH], [F.TEXT, F.PAGE_NUM]), A.PDF2IMAGE: OcrFeatureNode(A.PDF2IMAGE, [F.BINARY_PDF, F.FILE_PATH, F.FALL_BACK], [F.OCR_IMAGE, F.PAGE_NUM]), A.IMAGE2PDF: OcrFeatureNode(A.IMAGE2PDF, [F.OCR_IMAGE, F.FILE_PATH], [F.BINARY_PDF]), A.TEXT2PDF: OcrFeatureNode(A.TEXT2PDF, [F.OCR_POSITIONS, F.OCR_IMAGE, F.OCR_TEXT, F.FILE_PATH, F.BINARY_PDF], [F.BINARY_PDF]), A.PDF_ASSEMBLER: OcrFeatureNode(A.PDF_ASSEMBLER, [F.BINARY_PDF_PAGE, F.FILE_PATH, F.PAGE_NUM], [F.BINARY_PDF]), A.PDF_DRAW_REGIONS: OcrFeatureNode(A.PDF_DRAW_REGIONS, [F.BINARY_PDF, F.FILE_PATH, F.OCR_POSITIONS], [F.BINARY_PDF]), A.PDF2TEXT_TABLE: OcrFeatureNode(A.PDF2TEXT_TABLE, [F.BINARY_DOCX, F.FILE_PATH, ], [F.OCR_TABLE]), # DOCX Processing A.DOC2TEXT: OcrFeatureNode(A.DOC2TEXT, [F.BINARY_DOCX, F.FILE_PATH, ], [F.TEXT, F.PAGE_NUM]), A.DOC2TEXT_TABLE: OcrFeatureNode(A.DOC2TEXT_TABLE, [F.BINARY_DOCX, F.FILE_PATH], [F.OCR_TABLE]), A.DOC2PDF: OcrFeatureNode(A.DOC2PDF, 
[F.BINARY_DOCX, F.FILE_PATH], [F.BINARY_PDF]), A.PPT2TEXT_TABLE: OcrFeatureNode(A.PPT2TEXT_TABLE, [F.BINARY_DOCX, F.FILE_PATH], [F.OCR_TABLE]), A.PPT2PDF: OcrFeatureNode(A.PPT2PDF, [F.BINARY_PPT, F.FILE_PATH], [F.BINARY_PDF]), # DICOM Processing A.DICOM2IMAGE: OcrFeatureNode(A.DICOM2IMAGE, [F.BINARY_DICOM, F.FILE_PATH], [F.OCR_IMAGE, F.PAGE_NUM, F.DICOM_METADATA]), A.IMAGE2DICOM: OcrFeatureNode(A.IMAGE2DICOM, [F.OCR_IMAGE, F.FILE_PATH, F.DICOM_METADATA], [F.BINARY_DICOM]), # Image Pre-Processing A.BINARY2IMAGE: OcrFeatureNode(A.BINARY2IMAGE, [F.BINARY_IMG, F.FILE_PATH], [F.OCR_IMAGE]), A.GPU_IMAGE_TRANSFORMER: OcrFeatureNode(A.GPU_IMAGE_TRANSFORMER, [F.OCR_IMAGE], [F.OCR_IMAGE]), A.IMAGE_BINARIZER: OcrFeatureNode(A.IMAGE_BINARIZER, [F.OCR_IMAGE], [F.OCR_IMAGE]), A.IMAGE_ADAPTIVE_BINARIZER: OcrFeatureNode(A.IMAGE_ADAPTIVE_BINARIZER, [F.OCR_IMAGE], [F.OCR_IMAGE]), A.IMAGE_ADAPTIVE_THRESHOLDING: OcrFeatureNode(A.IMAGE_ADAPTIVE_THRESHOLDING, [F.OCR_IMAGE], [F.OCR_IMAGE]), A.IMAGE_SCALER: OcrFeatureNode(A.IMAGE_SCALER, [F.OCR_IMAGE], [F.OCR_IMAGE]), A.IMAGE_ADAPTIVE_SCALER: OcrFeatureNode(A.IMAGE_ADAPTIVE_SCALER, [F.OCR_IMAGE], [F.OCR_IMAGE]), A.IMAGE_SKEW_CORRECTOR: OcrFeatureNode(A.IMAGE_SKEW_CORRECTOR, [F.OCR_IMAGE], [F.OCR_IMAGE]), # TODO THESE ALL BLOW??? Region??? A.IMAGE_NOISE_SCORER: OcrFeatureNode(A.IMAGE_NOISE_SCORER, [F.OCR_IMAGE, F.OCR_REGION], [F.OCR_IMAGE]), # TODO WHAT IS REGION???? 
There is no schema for that A.IMAGE_REMOVE_OBJECTS: OcrFeatureNode(A.IMAGE_REMOVE_OBJECTS, [F.OCR_IMAGE], [F.OCR_IMAGE]), # TODO A.IMAGE_MORPHOLOGY_OPERATION: OcrFeatureNode(A.IMAGE_MORPHOLOGY_OPERATION, [F.OCR_IMAGE], [F.OCR_IMAGE]), # TODO A.IMAGE_CROPPER: OcrFeatureNode(A.IMAGE_CROPPER, [F.OCR_IMAGE], [F.OCR_IMAGE]), # TODO A.IMAGE2REGION: OcrFeatureNode(A.IMAGE2PDF, [F.OCR_IMAGE], [F.OCR_IMAGE]), # TODO A.IMAGE_LAYOUT_ANALZYER: OcrFeatureNode(A.IMAGE_LAYOUT_ANALZYER, [F.OCR_IMAGE], [F.OCR_IMAGE]), # TODO A.IMAGE_SPLIT_REGIONS: OcrFeatureNode(A.IMAGE_SPLIT_REGIONS, [F.OCR_IMAGE], [F.OCR_IMAGE]), # TODO A.IMAGE_DRAW_REGIONS: OcrFeatureNode(A.IMAGE_DRAW_REGIONS, [F.OCR_IMAGE], [F.OCR_IMAGE]), # TODO # Character Recognition .. TODO these should be correct but not 100% sure about the positions A.IMAGE2TEXT: OcrFeatureNode(A.IMAGE2TEXT, [F.OCR_IMAGE], [F.TEXT, F.OCR_POSITIONS]), A.IMAGE2TEXTPDF: OcrFeatureNode(A.IMAGE2TEXTPDF, [F.OCR_IMAGE, F.FILE_PATH, F.PAGE_NUM], [F.BINARY_PDF]), # TODO is ouput HOCR format as in HOCR_DOCUMENT_ASSAMBLER??? A.IMAGE_BRANDS2TEXT: OcrFeatureNode(A.IMAGE_BRANDS2TEXT, [F.OCR_IMAGE], [F.OCR_POSITIONS, F.TEXT, F.OCR_IMAGE]), # TODO what is the STRUCTURE of output image_brand ??? OCR_IE?? A.POSITION_FINDER: OcrFeatureNode(A.POSITION_FINDER, [F.TEXT_ENTITY, F.OCR_PAGE_MATRIX], [F.OCR_POSITIONS]), # TODO COORDINATE::POSITION?? ## TODO Updates text at a position? I.e. Change the text at given corodinates BUT THEN why is output position??? A.UPDATE_TEXT_POSITION: OcrFeatureNode(A.POSITION_FINDER, [F.OCR_POSITIONS, F.TEXT_ENTITY], [F.OCR_POSITIONS]), # TODO COORDINATE::POSITION?? ## Cancer Document Test parser. 
Required Text of Header Field of something A.FOUNDATION_ONE_REPORT_PARSER: OcrFeatureNode(A.FOUNDATION_ONE_REPORT_PARSER, [F.OCR_TEXT, F.FILE_PATH], [F.JSON_FOUNDATION_ONE_REPORT]), # HOCR A.HOCR_DOCUMENT_ASSEMBLER: OcrFeatureNode(A.HOCR_TOKENIZER, [F.HOCR], [F.TEXT_DOCUMENT]), A.HOCR_TOKENIZER: OcrFeatureNode(A.HOCR_TOKENIZER, [F.HOCR], [F.TEXT_DOCUMENT_TOKENIZED]), } @dataclass class NLP_HC_FEATURE_NODES(): """All avaiable Feature nodes in NLP Healthcare Used to cast the pipeline dependency resolution algorithm into an abstract grpah """ # Visual Document Understanding A = NLP_HC_NODE_IDS F = NLP_FEATURES H_F = NLP_HC_FEATURES # HC Feature Nodes nodes = { A.ZERO_SHOT_NER : NlpHcFeatureNode(A.ZERO_SHOT_NER, [F.TOKEN, F.DOCUMENT], [F.NAMED_ENTITY_IOB]), A.CHUNK_MAPPER_MODEL: NlpHcFeatureNode(A.CHUNK_MAPPER_MODEL, [F.NAMED_ENTITY_CONVERTED], [H_F.MAPPED_CHUNK]), A.ASSERTION_DL: NlpHcFeatureNode(A.ASSERTION_DL, [F.DOCUMENT, F.NAMED_ENTITY_CONVERTED, F.WORD_EMBEDDINGS], [H_F.ASSERTION]), A.TRAINABLE_ASSERTION_DL: NlpHcFeatureNode(A.TRAINABLE_ASSERTION_DL, [F.DOCUMENT, F.NAMED_ENTITY_CONVERTED, F.WORD_EMBEDDINGS], [H_F.ASSERTION]), A.ASSERTION_FILTERER: NlpHcFeatureNode(A.ASSERTION_FILTERER, [F.DOCUMENT, F.CHUNK, H_F.ASSERTION], [F.CHUNK]), A.ASSERTION_LOG_REG: NlpHcFeatureNode(A.ASSERTION_LOG_REG, [F.DOCUMENT, F.CHUNK, F.WORD_EMBEDDINGS], [H_F.ASSERTION]), A.TRAINABLE_ASSERTION_LOG_REG: NlpHcFeatureNode(A.TRAINABLE_ASSERTION_LOG_REG, [F.DOCUMENT, F.CHUNK, F.WORD_EMBEDDINGS], [H_F.ASSERTION]), A.CHUNK2TOKEN: NlpHcFeatureNode(A.CHUNK2TOKEN, [F.CHUNK], [F.TOKEN]), A.CHUNK_ENTITY_RESOLVER: NlpHcFeatureNode(A.CHUNK_ENTITY_RESOLVER, [F.TOKEN, F.WORD_EMBEDDINGS], [H_F.RESOLVED_ENTITY]), A.TRAINABLE_CHUNK_ENTITY_RESOLVER: NlpHcFeatureNode(A.TRAINABLE_CHUNK_ENTITY_RESOLVER, [F.TOKEN, F.WORD_EMBEDDINGS], [H_F.RESOLVED_ENTITY]), A.CHUNK_FILTERER: NlpHcFeatureNode(A.CHUNK_FILTERER, [F.DOCUMENT, F.CHUNK], [F.CHUNK]), # TODO chunk subtype?, A.CHUNK_KEY_PHRASE_EXTRACTION: 
NlpHcFeatureNode(A.CHUNK_KEY_PHRASE_EXTRACTION, [F.DOCUMENT, F.CHUNK], [F.CHUNK]), A.CHUNK_MERGE: NlpHcFeatureNode(A.CHUNK_MERGE, [F.CHUNK, F.CHUNK], [F.CHUNK]), A.CONTEXTUAL_PARSER: NlpHcFeatureNode(A.CONTEXTUAL_PARSER, [F.DOCUMENT, F.TOKEN], [F.CHUNK]), A.DE_IDENTIFICATION: NlpHcFeatureNode(A.DE_IDENTIFICATION, [F.DOCUMENT, F.TOKEN, F.NAMED_ENTITY_CONVERTED], [F.DOCUMENT_DE_IDENTIFIED]), A.TRAINABLE_DE_IDENTIFICATION: NlpHcFeatureNode(A.DE_IDENTIFICATION, [F.DOCUMENT, F.TOKEN, F.CHUNK], [F.DOCUMENT]), A.DOCUMENT_LOG_REG_CLASSIFIER: NlpHcFeatureNode(A.DOCUMENT_LOG_REG_CLASSIFIER, [F.TOKEN], [F.DOCUMENT_CLASSIFICATION]), A.TRAINABLE_DOCUMENT_LOG_REG_CLASSIFIER: NlpHcFeatureNode(A.TRAINABLE_DOCUMENT_LOG_REG_CLASSIFIER, [F.TOKEN], [F.DOCUMENT_CLASSIFICATION]), A.DRUG_NORMALIZER: NlpHcFeatureNode(A.DRUG_NORMALIZER, [F.DOCUMENT], [F.DOCUMENT_NORMALIZED]), # A.# FEATURES_ASSEMBLER : NlpHcFeatureNode( [H_F.FEATURE_VECTOR]) # TODO data types?, A.GENERIC_CLASSIFIER: NlpHcFeatureNode(A.GENERIC_CLASSIFIER, [H_F.FEATURE_VECTOR], [F.DOCUMENT_CLASSIFICATION]), A.TRAINABLE_GENERIC_CLASSIFIER: NlpHcFeatureNode(A.TRAINABLE_GENERIC_CLASSIFIER, [H_F.FEATURE_VECTOR], [F.DOCUMENT_CLASSIFICATION]), A.IOB_TAGGER: NlpHcFeatureNode(A.IOB_TAGGER, [F.TOKEN, F.CHUNK], [F.NAMED_ENTITY_IOB]), A.MEDICAL_NER: NlpHcFeatureNode(A.MEDICAL_NER, [F.DOCUMENT, F.TOKEN, F.WORD_EMBEDDINGS], [F.NAMED_ENTITY_IOB]), A.TRAINABLE_MEDICAL_NER: NlpHcFeatureNode(A.TRAINABLE_MEDICAL_NER, [F.DOCUMENT, F.TOKEN, F.WORD_EMBEDDINGS], [F.NAMED_ENTITY_IOB]), A.NER_CHUNKER: NlpHcFeatureNode(A.NER_CHUNKER, [F.DOCUMENT, F.NAMED_ENTITY_IOB], [F.CHUNK]), A.NER_CONVERTER_INTERNAL: NlpHcFeatureNode(A.NER_CONVERTER_INTERNAL, [F.DOCUMENT, F.TOKEN, F.NAMED_ENTITY_IOB], [F.NAMED_ENTITY_CONVERTED]), A.NER_DISAMBIGUATOR: NlpHcFeatureNode(A.NER_DISAMBIGUATOR, [F.CHUNK, F.SENTENCE_EMBEDDINGS], [H_F.DISAMBIGUATION]), A.RELATION_NER_CHUNKS_FILTERER: NlpHcFeatureNode(A.RELATION_NER_CHUNKS_FILTERER, [F.CHUNK, F.UNLABLED_DEPENDENCY], 
[F.CHUNK]), A.RE_IDENTIFICATION: NlpHcFeatureNode(A.RE_IDENTIFICATION, [F.DOCUMENT, F.CHUNK], [F.DOCUMENT_RE_IDENTIFIED]), A.RELATION_EXTRACTION: NlpHcFeatureNode(A.RELATION_EXTRACTION, [F.NAMED_ENTITY_CONVERTED, F.WORD_EMBEDDINGS, F.POS, F.UNLABLED_DEPENDENCY], [H_F.RELATION]), A.ZERO_SHOT_RELATION_EXTRACTION: NlpHcFeatureNode(A.ZERO_SHOT_RELATION_EXTRACTION, [F.NAMED_ENTITY_CONVERTED, F.DOCUMENT, ], [H_F.RELATION]), A.TRAINABLE_RELATION_EXTRACTION: NlpHcFeatureNode(A.TRAINABLE_RELATION_EXTRACTION, [F.NAMED_ENTITY_CONVERTED, F.WORD_EMBEDDINGS, F.POS, F.UNLABLED_DEPENDENCY], [H_F.RELATION]), A.RELATION_EXTRACTION_DL: NlpHcFeatureNode(A.RELATION_EXTRACTION_DL, [F.NAMED_ENTITY_CONVERTED, F.DOCUMENT], [H_F.RELATION]), A.SENTENCE_ENTITY_RESOLVER: NlpHcFeatureNode(A.SENTENCE_ENTITY_RESOLVER, [F.DOCUMENT_FROM_CHUNK, F.SENTENCE_EMBEDDINGS], [H_F.RESOLVED_ENTITY]), A.TRAINABLE_SENTENCE_ENTITY_RESOLVER: NlpHcFeatureNode(A.TRAINABLE_SENTENCE_ENTITY_RESOLVER, [F.SENTENCE_EMBEDDINGS], [H_F.ASSERTION]), A.MEDICAL_BERT_FOR_TOKEN_CLASSIFICATION: NlpFeatureNode(A.MEDICAL_BERT_FOR_TOKEN_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.TOKEN_CLASSIFICATION]), A.MEDICAL_BERT_FOR_SEQUENCE_CLASSIFICATION: NlpFeatureNode(A.MEDICAL_BERT_FOR_SEQUENCE_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.SEQUENCE_CLASSIFICATION]), A.MEDICAL_DISTILBERT_FOR_SEQUENCE_CLASSIFICATION: NlpFeatureNode( A.MEDICAL_DISTILBERT_FOR_SEQUENCE_CLASSIFICATION, [F.DOCUMENT, F.TOKEN], [F.SEQUENCE_CLASSIFICATION]), }
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/universe/feature_node_universes.py
feature_node_universes.py
from dataclasses import dataclass

from nlu.pipe.nlu_component import NluComponent
from nlu.universe.component_universes import ComponentUniverse
from nlu.universe.feature_node_ids import NLP_NODE_IDS, NLP_HC_NODE_IDS, OCR_NODE_IDS
from nlu.universe.feature_universes import NLP_FEATURES, OCR_FEATURES


### ____ Annotator Feature Resolutions ____

@dataclass
class ResolvedFeature:
    # One concrete resolution for a missing pipeline feature: which component
    # (and which pretrained reference, if any) should be injected to produce it.
    nlu_ref: str  # nlu reference this resolution corresponds to
    nlp_ref: str  # Spark NLP model/annotator reference
    language: str  # language code of the resolution ('xx' = language independent)
    get_pretrained: bool  # Call get_pretrained(nlp_ref, lang, bucket) or get_default() on the AnnotatorClass
    nlu_component: NluComponent  # Resolving component_to_resolve


class FeatureResolutions:
    """Default feature resolutions used by the pipeline dependency resolver.

    Maps each requested feature to a pre-defined optimal resolution, given by a
    FeatureNode, for the open-source, healthcare, training and OCR universes.
    """
    # Also we need an alternative default depending on whether licensed or not!!
    # Ideally we define an nlu_ref for each of these.
    # default_resolutions: Dict[JslFeature,JslAnnoId] = None

    # TODO use lang families, i.e. en.tokenize works for all Latin style languages but not Chinese,
    #  i.e. not actually multi lingual
    default_OS_resolutions = {
        NLP_FEATURES.DOCUMENT_QUESTION: ResolvedFeature('multi_document_assembler', 'multi_document_assembler', 'xx',
                                                        False,
                                                        ComponentUniverse.components[
                                                            NLP_NODE_IDS.MULTI_DOCUMENT_ASSEMBLER]),
        NLP_FEATURES.AUDIO: ResolvedFeature('audio_assembler', 'audio_assembler', 'xx', False,
                                            ComponentUniverse.components[NLP_NODE_IDS.AUDIO_ASSEMBLER]),
        NLP_FEATURES.ASSEMBLED_TABULAR_DATA: ResolvedFeature('table_assembler', 'table_assembler', 'xx', False,
                                                             ComponentUniverse.components[
                                                                 NLP_NODE_IDS.TABLE_ASSEMBLER]),
        # NLP_FEATURES.DOCUMENT_QUESTION_CONTEXT: ResolvedFeature('multi_document_assembler', 'multi_document_assembler', 'xx', False,
        #                                                         ComponentUniverse.components[NLP_NODE_IDS.MULTI_DOCUMENT_ASSEMBLER]),
        NLP_FEATURES.DOCUMENT: ResolvedFeature('document_assembler', 'document_assembler', 'xx', False,
                                               ComponentUniverse.components[NLP_NODE_IDS.DOCUMENT_ASSEMBLER]),
        NLP_FEATURES.TOKEN: ResolvedFeature('en.tokenize', 'spark_nlp_tokenizer', 'en', False,
                                            ComponentUniverse.components[NLP_NODE_IDS.TOKENIZER]),
        NLP_FEATURES.SENTENCE: ResolvedFeature('detect_sentence', 'sentence_detector_dl', 'en', False,
                                               ComponentUniverse.components[NLP_NODE_IDS.SENTENCE_DETECTOR_DL]),
        NLP_FEATURES.SENTENCE_EMBEDDINGS: ResolvedFeature('en.embed_sentence.small_bert_L2_128',
                                                          'sent_small_bert_L2_128', 'en', True,
                                                          ComponentUniverse.components[
                                                              NLP_NODE_IDS.BERT_SENTENCE_EMBEDDINGS]),
        NLP_FEATURES.WORD_EMBEDDINGS: ResolvedFeature('en.embed.bert.small_L2_128', 'small_bert_L2_128', 'en', True,
                                                      ComponentUniverse.components[NLP_NODE_IDS.BERT_EMBEDDINGS]),
        NLP_FEATURES.POS: ResolvedFeature('en.pos', 'pos_anc', 'en', True,
                                          ComponentUniverse.components[NLP_NODE_IDS.POS]),
        NLP_FEATURES.NAMED_ENTITY_IOB: ResolvedFeature('en.ner.onto.bert.cased_base', 'onto_bert_base_cased', 'en',
                                                       True,
                                                       ComponentUniverse.components[NLP_NODE_IDS.NER_DL]),
        NLP_FEATURES.NAMED_ENTITY_CONVERTED: ResolvedFeature('ner_converter', 'ner_converter', 'xx', False,
                                                             ComponentUniverse.components[
                                                                 NLP_NODE_IDS.NER_CONVERTER]),
        NLP_FEATURES.UNLABLED_DEPENDENCY: ResolvedFeature('en.dep.untyped', 'dependency_conllu', 'en', True,
                                                          ComponentUniverse.components[
                                                              NLP_NODE_IDS.UNTYPED_DEPENDENCY_PARSER]),
        NLP_FEATURES.LABELED_DEPENDENCY: ResolvedFeature('en.dep.typed', 'dependency_typed_conllu', 'en', True,
                                                         ComponentUniverse.components[
                                                             NLP_NODE_IDS.TYPED_DEPENDENCY_PARSER]),
        NLP_FEATURES.CHUNK: ResolvedFeature('en.chunk', 'default_chunker', 'xx', False,
                                            ComponentUniverse.components[NLP_NODE_IDS.CHUNKER]),
        NLP_FEATURES.DOCUMENT_FROM_CHUNK: ResolvedFeature(NLP_NODE_IDS.CHUNK2DOC, NLP_NODE_IDS.CHUNK2DOC, 'xx', False,
                                                          ComponentUniverse.components[NLP_NODE_IDS.CHUNK2DOC]),
        NLP_FEATURES.CHUNK_EMBEDDINGS: ResolvedFeature('en.embed_chunk', 'chunk_embeddings', 'xx', False,
                                                       ComponentUniverse.components[
                                                           NLP_NODE_IDS.CHUNK_EMBEDDINGS_CONVERTER]),
        NLP_FEATURES.IMAGE: ResolvedFeature('image_assembler', 'image_assembler', 'xx', False,
                                            ComponentUniverse.components[
                                                NLP_NODE_IDS.IMAGE_ASSEMBLER]),
    }

    # Licensed/healthcare overrides of the open-source defaults.
    default_HC_resolutions = {
        # TODO we need ideal resolution for each lang and domain...!
        NLP_FEATURES.NAMED_ENTITY_IOB: ResolvedFeature('en.med_ner.jsl', 'ner_jsl', 'en', True,
                                                       ComponentUniverse.components[NLP_HC_NODE_IDS.MEDICAL_NER]),
        NLP_FEATURES.NAMED_ENTITY_CONVERTED: ResolvedFeature(NLP_HC_NODE_IDS.NER_CONVERTER_INTERNAL,
                                                             NLP_HC_NODE_IDS.NER_CONVERTER_INTERNAL, 'xx', False,
                                                             ComponentUniverse.components[
                                                                 NLP_HC_NODE_IDS.NER_CONVERTER_INTERNAL]),
    }

    # Resolutions used while *training* healthcare models.
    default_HC_train_resolutions = {
        NLP_FEATURES.NAMED_ENTITY_CONVERTED: ResolvedFeature(NLP_NODE_IDS.DOC2CHUNK, NLP_NODE_IDS.DOC2CHUNK, 'xx',
                                                             False,
                                                             ComponentUniverse.components[NLP_NODE_IDS.DOC2CHUNK]),
    }

    # Resolutions for OCR features.
    default_OCR_resolutions = {
        OCR_FEATURES.OCR_IMAGE: ResolvedFeature(OCR_NODE_IDS.BINARY2IMAGE, OCR_NODE_IDS.BINARY2IMAGE, 'xx', False,
                                                ComponentUniverse.components[OCR_NODE_IDS.BINARY2IMAGE]),
        OCR_FEATURES.HOCR: ResolvedFeature(OCR_NODE_IDS.IMAGE2HOCR, OCR_NODE_IDS.IMAGE2HOCR, 'xx', False,
                                           ComponentUniverse.components[OCR_NODE_IDS.IMAGE2HOCR]),
    }
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/universe/feature_resolutions.py
feature_resolutions.py
from functools import partial from nlu.components.assertions.assertion_dl.assertion_dl import AssertionDL from nlu.components.assertions.assertion_log_reg.assertion_log_reg import AssertionLogReg from nlu.components.chunkers.chunk_mapper.chunk_mapper import ChunkMapper from nlu.components.chunkers.contextual_parser.contextual_parser import ContextualParser from nlu.components.chunkers.default_chunker.default_chunker import DefaultChunker from nlu.components.chunkers.ngram.ngram import NGram from nlu.components.classifiers.asr.wav2Vec import Wav2Vec from nlu.components.classifiers.asr_hubert.hubert import Hubert from nlu.components.classifiers.bert_zero_shot_classification.bert_zero_shot import BertZeroShotClassifier from nlu.components.classifiers.classifier_dl.classifier_dl import ClassifierDl from nlu.components.classifiers.distil_bert_zero_shot_classification.distil_bert_zero_shot import \ DistilBertZeroShotClassifier from nlu.components.classifiers.generic_classifier.generic_classifier import GenericClassifier from nlu.components.classifiers.image_classification_swin.swin import SwinImageClassifier from nlu.components.classifiers.image_classification_vit.vit_image_classifier import VitImageClassifier from nlu.components.classifiers.language_detector.language_detector import LanguageDetector from nlu.components.classifiers.multi_classifier.multi_classifier import MultiClassifier from nlu.components.classifiers.named_entity_recognizer_crf.ner_crf import NERDLCRF from nlu.components.classifiers.ner.ner_dl import NERDL from nlu.components.classifiers.ner_healthcare.ner_dl_healthcare import NERDLHealthcare from nlu.components.classifiers.ner_zero_shot.ner_zero_shot import ZeroShotNer from nlu.components.classifiers.pos.part_of_speech_jsl import PartOfSpeechJsl from nlu.components.classifiers.roberta_zero_shot_classification.roberta_zero_shot import \ RoBertaForZeroShotClassification from nlu.components.classifiers.sentiment_detector.sentiment_detector import 
Sentiment from nlu.components.classifiers.sentiment_dl.sentiment_dl import SentimentDl from nlu.components.classifiers.seq_albert.seq_albert import SeqAlbertClassifier from nlu.components.classifiers.seq_bert.seq_bert_classifier import SeqBertClassifier from nlu.components.classifiers.seq_bert_medical.seq_bert_medical_classifier import SeqBertMedicalClassifier from nlu.components.classifiers.seq_camembert.seq_camembert import SeqCamembertClassifier from nlu.components.classifiers.seq_deberta.seq_deberta_classifier import SeqDebertaClassifier from nlu.components.classifiers.seq_distilbert.seq_distilbert_classifier import SeqDilstilBertClassifier from nlu.components.classifiers.seq_distilbert_medical.seq_distilbert_medical_classifier import \ SeqDilstilBertMedicalClassifier from nlu.components.classifiers.seq_longformer.seq_longformer import SeqLongformerClassifier from nlu.components.classifiers.seq_roberta.seq_roberta import SeqRobertaClassifier from nlu.components.classifiers.seq_xlm_roberta.seq_xlm_roberta import SeqXlmRobertaClassifier from nlu.components.classifiers.seq_xlnet.seq_xlnet import SeqXlnetClassifier from nlu.components.classifiers.span_bert.span_bert import SpanBertClassifier from nlu.components.classifiers.span_camembert.span_camembert import SpanCamemBert from nlu.components.classifiers.span_deberta.span_deberta import SpanDeBertaClassifier from nlu.components.classifiers.span_distilbert.span_distilbert import SpanDistilBertClassifier from nlu.components.classifiers.span_longformer.span_longformer import SpanLongFormerClassifier from nlu.components.classifiers.span_roberta.span_roberta import SpanRobertaClassifier from nlu.components.classifiers.span_xlm_roberta.span_xlm_roberta import SpanXlmRobertaClassifier from nlu.components.classifiers.token_albert.token_albert import TokenAlbert from nlu.components.classifiers.token_bert.token_bert import TokenBert from nlu.components.classifiers.token_bert_healthcare.token_bert_healthcare import 
TokenBertHealthcare from nlu.components.classifiers.token_camembert.token_camembert import TokenCamembert from nlu.components.classifiers.token_deberta.token_deberta import TokenDeBerta from nlu.components.classifiers.token_distilbert.token_distilbert import TokenDistilBert from nlu.components.classifiers.token_longformer.token_longformer import TokenLongFormer from nlu.components.classifiers.token_roberta.token_roberta import TokenRoBerta from nlu.components.classifiers.token_xlm_roberta.token_xlmroberta import TokenXlmRoBerta from nlu.components.classifiers.token_xlnet.token_xlnet import TokenXlnet from nlu.components.classifiers.vivekn_sentiment.vivekn_sentiment_detector import ViveknSentiment from nlu.components.classifiers.yake.yake import Yake from nlu.components.coref.coref_bert.coref_bert import CorefBert from nlu.components.deidentifiers.deidentifier.deidentifier import Deidentifier from nlu.components.dependency_typeds.labeled_dependency_parser.labeled_dependency_parser import \ LabeledDependencyParser from nlu.components.dependency_untypeds.unlabeled_dependency_parser.unlabeled_dependency_parser import \ UnlabeledDependencyParser from nlu.components.embeddings.albert.spark_nlp_albert import SparkNLPAlbert from nlu.components.embeddings.bert.spark_nlp_bert import SparkNLPBert from nlu.components.embeddings.bert_sentence_chunk.bert_sentence_chunk import BertSentenceChunkEmbeds from nlu.components.embeddings.camenbert.camenbert import CamemBert from nlu.components.embeddings.deberta.deberta import Deberta from nlu.components.embeddings.distil_bert.distilbert import DistilBert from nlu.components.embeddings.doc2vec.doc2vec import Doc2Vec from nlu.components.embeddings.elmo.spark_nlp_elmo import SparkNLPElmo from nlu.components.embeddings.glove.glove import Glove from nlu.components.embeddings.longformer.longformer import Longformer from nlu.components.embeddings.roberta.roberta import Roberta from nlu.components.embeddings.sentence_bert.BertSentenceEmbedding 
import BertSentence from nlu.components.embeddings.sentence_xlm.sentence_xlm import Sentence_XLM from nlu.components.embeddings.use.spark_nlp_use import SparkNLPUse from nlu.components.embeddings.word2vec.word2vec import Word2Vec from nlu.components.embeddings.xlm.xlm import XLM from nlu.components.embeddings.xlnet.spark_nlp_xlnet import SparkNLPXlnet from nlu.components.embeddings_chunks.chunk_embedder.chunk_embedder import ChunkEmbedder from nlu.components.lemmatizers.lemmatizer.spark_nlp_lemmatizer import SparkNLPLemmatizer from nlu.components.matchers.regex_matcher.regex_matcher import RegexMatcher from nlu.components.normalizers.document_normalizer.spark_nlp_document_normalizer import SparkNLPDocumentNormalizer from nlu.components.normalizers.drug_normalizer.drug_normalizer import DrugNorm from nlu.components.normalizers.normalizer.spark_nlp_normalizer import SparkNLPNormalizer from nlu.components.relation_extractors.relation_extractor.relation_extractor import RelationExtraction from nlu.components.relation_extractors.relation_extractor_dl.relation_extractor_dl import RelationExtractionDL from nlu.components.relation_extractors.zero_shot_relation_extractor.zero_shot_relation_extractor import \ ZeroShotRelationExtractor from nlu.components.resolutions.sentence_entity_resolver.sentence_resolver import SentenceResolver from nlu.components.sentence_detectors.deep_sentence_detector.deep_sentence_detector import SentenceDetectorDeep from nlu.components.sentence_detectors.pragmatic_sentence_detector.sentence_detector import PragmaticSentenceDetector from nlu.components.seq2seqs.gpt2.gpt2 import GPT2 from nlu.components.seq2seqs.marian.marian import Marian from nlu.components.seq2seqs.t5.t5 import T5 # from nlu.components.seq2seqs.med_summarizer.med_summarizer import MedSummarizer from nlu.components.seq2seqs.tapas_qa.tapas_qa import TapasQA from nlu.components.spell_checkers.context_spell.context_spell_checker import ContextSpellChecker from 
nlu.components.spell_checkers.norvig_spell.norvig_spell_checker import NorvigSpellChecker from nlu.components.spell_checkers.symmetric_spell.symmetric_spell_checker import SymmetricSpellChecker from nlu.components.stemmers.stemmer.spark_nlp_stemmer import SparkNLPStemmer from nlu.components.stopwordscleaners.stopwordcleaner.nlustopwordcleaner import NLUStopWordcleaner from nlu.components.tokenizers.default_tokenizer.default_tokenizer import DefaultTokenizer from nlu.components.tokenizers.regex_tokenizer.regex_tokenizer import RegexTokenizer from nlu.components.tokenizers.word_segmenter.word_segmenter import WordSegmenter from nlu.components.utils.audio_assembler.audio_assembler import AudioAssembler_ from nlu.components.utils.chunk_2_doc.doc_2_chunk import Chunk_2_Doc from nlu.components.utils.doc2chunk.doc_2_chunk import Doc_2_Chunk from nlu.components.utils.document_assembler.spark_nlp_document_assembler import SparkNlpDocumentAssembler from nlu.components.utils.image_assembler.spark_nlp_image_assembler import SparkNlpImageAssembler from nlu.components.utils.multi_document_assembler.spark_nlp_multi_document_assembler import \ SparkNlpMultiDocumentAssembler from nlu.components.utils.ner_to_chunk_converter.ner_to_chunk_converter import NerToChunkConverter from nlu.components.utils.ner_to_chunk_converter_licensed.ner_to_chunk_converter_licensed import \ NerToChunkConverterLicensed from nlu.components.utils.sdf_finisher.sdf_finisher import SdfFinisher from nlu.components.utils.sentence_embeddings.spark_nlp_sentence_embedding import SparkNLPSentenceEmbeddings from nlu.components.utils.table_assembler.spark_nlp_multi_document_assembler import SparkNlpTableAssembler from nlu.ocr_components.table_extractors.doc_table_extractor.doc2table import Doc2TextTable from nlu.ocr_components.table_extractors.pdf_table_extractor.pdf2table import PDF2TextTable from nlu.ocr_components.table_extractors.ppt_table_extractor.ppt2table import PPT2TextTable from 
nlu.ocr_components.text_recognizers.doc2text.doc2text import Doc2Text from nlu.ocr_components.text_recognizers.img2text.img2text import Img2Text from nlu.ocr_components.text_recognizers.pdf2text.pdf2text import Pdf2Text from nlu.ocr_components.utils.binary2image.binary2image import Binary2Image from nlu.ocr_components.utils.image2hocr.image2hocr import Image2Hocr # from nlu.ocr_components.visual_classifiers.visual_doc_classifier.visual_doc_classifier import VisualDocClassifier from nlu.pipe.col_substitution.col_substitution_HC import * from nlu.pipe.col_substitution.col_substitution_OCR import substitute_recognized_text_cols from nlu.pipe.col_substitution.col_substitution_OS import * from nlu.pipe.extractors.extractor_configs_HC import * from nlu.pipe.extractors.extractor_configs_OCR import default_text_recognizer_config, default_binary_to_image_config from nlu.pipe.extractors.extractor_configs_OS import * from nlu.pipe.nlu_component import NluComponent from nlu.universe.annotator_class_universe import AnnoClassRef from nlu.universe.atoms import JslAnnoId, LicenseType, JslAnnoPyClass from nlu.universe.feature_node_ids import NLP_NODE_IDS, NLP_HC_NODE_IDS from nlu.universe.feature_node_ids import OCR_NODE_IDS from nlu.universe.feature_node_universes import NLP_FEATURE_NODES from nlu.universe.feature_node_universes import NLP_HC_FEATURE_NODES, OCR_FEATURE_NODES from nlu.universe.feature_universes import NLP_FEATURES from nlu.universe.logic_universes import NLP_LEVELS, AnnoTypes from nlu.universe.universes import ComponentBackends from nlu.universe.universes import Licenses, ComputeContexts def anno_class_to_empty_component(anno_class) -> NluComponent: """ For a given anno-class returns NLU-Component which wraps the corresponding pipe class but has no model_anno_obj yet loaded onto it. 
def anno_class_to_empty_component(anno_class) -> NluComponent:
    """Return the NLU component wrapping *anno_class* with no model loaded onto it.

    :param anno_class: annotator class name to find a compatible nlu-component for
    :return: NluComponent which can load anno_class models
    :raises ValueError: if anno_class maps to no known JSL-Anno-ID or if
        constructing the component fails
    """
    jsl_anno_id = anno_class_to_jsl_id(anno_class)
    # Validate the id OUTSIDE the try block: previously the 'Invalid JSL-Anno-ID'
    # ValueError was raised inside the try, caught by the broad `except Exception`
    # below, and re-wrapped as a misleading 'Failed to create annotator' error.
    if jsl_anno_id not in ComponentUniverse.components:
        raise ValueError(f'Invalid JSL-Anno-ID={jsl_anno_id}')
    try:
        # Each universe entry is a functools.partial over NluComponent; calling it
        # builds the component shell without any model_anno_obj attached yet.
        component = ComponentUniverse.components[jsl_anno_id]()
    except Exception as err:
        # Chain the original cause so the real construction failure stays visible.
        raise ValueError(
            f'Failed to create annotator for JSL-Anno-ID={jsl_anno_id}, error={err}') from err
    return component


def jsl_id_to_empty_component(jsl_id) -> NluComponent:
    """Get NLU component with given JSL-ID with no model_anno_obj loaded onto it.

    :param jsl_id: identifier of component/pipe type
    :return: NluComponent for jsl_id
    """
    return anno_class_to_empty_component(jsl_id_to_anno_class(jsl_id))


def jsl_id_to_anno_class(jsl_id) -> JslAnnoPyClass:
    """Return the annotator class name registered for *jsl_id*.

    The open-source, healthcare and OCR registries are searched in that order.

    :param jsl_id: id of anno
    :return: class of annotator
    :raises ValueError: if jsl_id is present in none of the registries
    """
    if jsl_id in AnnoClassRef.JSL_anno2_py_class:
        return AnnoClassRef.JSL_anno2_py_class[jsl_id]
    if jsl_id in AnnoClassRef.JSL_anno_HC_ref_2_py_class:
        return AnnoClassRef.JSL_anno_HC_ref_2_py_class[jsl_id]
    if jsl_id in AnnoClassRef.JSL_anno_OCR_ref_2_py_class:
        return AnnoClassRef.JSL_anno_OCR_ref_2_py_class[jsl_id]
    raise ValueError(f'Cannot find anno_class for jsl-id={jsl_id}')


def anno_class_to_jsl_id(anno_class) -> JslAnnoId:
    """Return the JSL-Anno-ID registered for the given anno_class name.

    Note that an anno which maps to a component with default OS_license may load
    a HC model_anno_obj, in which case the nlu component must be updated to the
    HC license afterwards.

    :param anno_class: class name of the annotator
    :return: JslAnnoId of anno class
    :raises ValueError: if anno_class is present in none of the registries
    """
    if anno_class in AnnoClassRef.get_os_pyclass_2_anno_id_dict():
        return AnnoClassRef.get_os_pyclass_2_anno_id_dict()[anno_class]
    if anno_class in AnnoClassRef.get_hc_pyclass_2_anno_id_dict():
        return AnnoClassRef.get_hc_pyclass_2_anno_id_dict()[anno_class]
    if anno_class in AnnoClassRef.get_ocr_pyclass_2_anno_id_dict():
        return AnnoClassRef.get_ocr_pyclass_2_anno_id_dict()[anno_class]
    raise ValueError(f'Cannot get class metadata for invalid anno_class={anno_class}')
def get_anno_class_metadata(anno_class) -> Tuple[JslAnnoId, LicenseType]:
    """Return JSL-Anno-ID and default license type for the given anno_class name.

    Note that an anno which maps to a component with default OS_license may load
    a HC model_anno_obj, in which case the nlu component must be updated to the
    HC license afterwards.

    :param anno_class: class name of the annotator
    :return: Tuple, first entry JslAnnoID, second entry default LicenseType
    :raises ValueError: if anno_class is present in none of the registries
    """
    if anno_class in AnnoClassRef.JSL_OS_py_class_2_anno_id:
        jsl_anno_id = AnnoClassRef.JSL_OS_py_class_2_anno_id[anno_class]
        license_type = Licenses.open_source
    elif anno_class in AnnoClassRef.JSL_HC_py_class_2_anno_id:
        jsl_anno_id = AnnoClassRef.JSL_HC_py_class_2_anno_id[anno_class]
        # BUG FIX: previously Licenses.open_source (copy-paste from the OS branch),
        # which defeated the purpose of returning a default license. Healthcare
        # annotators default to the HC license. TODO(review): confirm the
        # attribute name against nlu.universe.universes.Licenses.
        license_type = Licenses.hc
    elif anno_class in AnnoClassRef.JSL_OCR_py_class_2_anno_id:
        jsl_anno_id = AnnoClassRef.JSL_OCR_py_class_2_anno_id[anno_class]
        # BUG FIX: previously Licenses.open_source (copy-paste); OCR annotators
        # default to the OCR license. TODO(review): confirm attribute name.
        license_type = Licenses.ocr
    else:
        raise ValueError(f'Cannot get class metadata for invalid anno_class={anno_class}')
    return jsl_anno_id, license_type
= NLP_FEATURES L = NLP_LEVELS ACR = AnnoClassRef # os_components = {} # hc_components = {} # ocr_components = {} components = { #### Partially Implemented A.PARTIALLY_IMPLEMENTED: partial(NluComponent, name=A.PARTIALLY_IMPLEMENTED, jsl_anno_class_id=A.PARTIALLY_IMPLEMENTED, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIALLY_IMPLEMENTED], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_Router: partial(NluComponent, name=A.PARTIAL_Router, jsl_anno_class_id=A.PARTIAL_Router, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_Router], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_full_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.CHUNK, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_ChunkMergeApproach: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_ChunkMergeApproach, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_ChunkMergeApproach], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.CHUNK, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, 
computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_AssertionFilterer: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_AssertionFilterer, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_AssertionFilterer], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.CHUNK, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_ChunkConverter: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_ChunkConverter, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_ChunkConverter], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_ChunkKeyPhraseExtraction: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_ChunkKeyPhraseExtraction, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.PARTIAL_ChunkKeyPhraseExtraction], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, 
license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_ChunkSentenceSplitter: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_ChunkSentenceSplitter, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.PARTIAL_ChunkSentenceSplitter], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_ChunkFiltererApproach: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_ChunkFiltererApproach, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.PARTIAL_ChunkFiltererApproach], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_ChunkFilterer: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_ChunkFilterer, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_ChunkFilterer], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', 
provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_ChunkMapperApproach: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_ChunkMapperApproach, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_ChunkMapperApproach], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_ChunkMapperFilterer: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_ChunkMapperFilterer, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_ChunkMapperFilterer], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_DocumentLogRegClassifierApproach: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_DocumentLogRegClassifierApproach, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.PARTIAL_DocumentLogRegClassifierApproach], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={ 'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, 
output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_DocumentLogRegClassifierModel: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_DocumentLogRegClassifierModel, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.PARTIAL_DocumentLogRegClassifierModel], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={ 'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_ContextualParserApproach: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_ContextualParserApproach, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.PARTIAL_ContextualParserApproach], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_ReIdentification: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_ReIdentification, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_ReIdentification], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, 
pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_NerDisambiguator: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_NerDisambiguator, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_NerDisambiguator], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_NerDisambiguatorModel: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_NerDisambiguatorModel, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.PARTIAL_NerDisambiguatorModel], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_AverageEmbeddings: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_AverageEmbeddings, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_AverageEmbeddings], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': 
default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_EntityChunkEmbeddings: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_EntityChunkEmbeddings, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.PARTIAL_EntityChunkEmbeddings], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_IOBTagger: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_IOBTagger, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_IOBTagger], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_NerChunker: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_NerChunker, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_NerChunker], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, 
pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_DateNormalizer: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_DateNormalizer, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_DateNormalizer], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_RENerChunksFilter: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_RENerChunksFilter, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_RENerChunksFilter], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_ResolverMerger: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_ResolverMerger, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_ResolverMerger], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, 
pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_AnnotationMerger: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_AnnotationMerger, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_AnnotationMerger], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_Word2VecApproach: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_Word2VecApproach, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_Word2VecApproach], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_WordEmbeddings: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_WordEmbeddings, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_WordEmbeddings], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, 
pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_EntityRulerApproach: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_EntityRulerApproach, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_EntityRulerApproach], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_EntityRulerModel: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_EntityRulerModel, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_EntityRulerModel], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_TextMatcherModel: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_TextMatcherModel, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_TextMatcherModel], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, 
pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_BigTextMatcher: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_BigTextMatcher, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_BigTextMatcher], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_BigTextMatcherModel: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_BigTextMatcherModel, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_BigTextMatcherModel], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_DateMatcher: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_DateMatcher, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_DateMatcher], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, 
pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_MultiDateMatcher: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_MultiDateMatcher, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_MultiDateMatcher], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_RegexMatcher: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_RegexMatcher, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_RegexMatcher], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_TextMatcher: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_TextMatcher, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_TextMatcher], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, 
pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_NerApproach: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_NerApproach, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_NerApproach], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_NerCrfApproach: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_NerCrfApproach, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_NerCrfApproach], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_NerOverwriter: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_NerOverwriter, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_NerOverwriter], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, 
pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_DependencyParserApproach: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_DependencyParserApproach, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.PARTIAL_DependencyParserApproach], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_TypedDependencyParserApproach: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_TypedDependencyParserApproach, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.PARTIAL_TypedDependencyParserApproach], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={ 'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_SentenceDetectorDLApproach: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_SentenceDetectorDLApproach, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.PARTIAL_SentenceDetectorDLApproach], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, 
pdf_extractor_methods={ 'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_SentimentDetector: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_SentimentDetector, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_SentimentDetector], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_ViveknSentimentApproach: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_ViveknSentimentApproach, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.PARTIAL_ViveknSentimentApproach], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_SymmetricDeleteApproach: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_SymmetricDeleteApproach, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.PARTIAL_SymmetricDeleteApproach], 
node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_ChunkTokenizer: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_ChunkTokenizer, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_ChunkTokenizer], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_ChunkTokenizerModel: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_ChunkTokenizerModel, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_ChunkTokenizerModel], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_RecursiveTokenizer: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_RecursiveTokenizer, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_RecursiveTokenizer], 
node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_RecursiveTokenizerModel: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_RecursiveTokenizerModel, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.PARTIAL_RecursiveTokenizerModel], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_Token2Chunk: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_Token2Chunk, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_Token2Chunk], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_WordSegmenterApproach: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_WordSegmenterApproach, jsl_anno_py_class=ACR.JSL_anno2_py_class[ 
A.PARTIAL_WordSegmenterApproach], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_GraphExtraction: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_GraphExtraction, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_GraphExtraction], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_Lemmatizer: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_Lemmatizer, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_Lemmatizer], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.PARTIAL_Normalizer: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_Normalizer, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_Normalizer], 
node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.PARTIALLY_READY, pdf_extractor_methods={'default': default_partial_implement_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=partially_implemented_substitutor, output_level=L.DOCUMENT, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), #### Open Source A.CHUNK2DOC: partial(NluComponent, name=A.CHUNK2DOC, type=T.HELPER_ANNO, get_default_model=Chunk_2_Doc.get_default_model, pdf_extractor_methods={'default_full': default_full_config, }, # 'default': '', TODO no extractor pdf_col_name_substitutor=substitute_doc2chunk_cols, output_level=L.DOCUMENT, node=NLP_FEATURE_NODES.nodes[A.CHUNK2DOC], description='TODO', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.CHUNK2DOC, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.CHUNK2DOC], ), A.CHUNK_EMBEDDINGS_CONVERTER: partial(NluComponent, name=A.CHUNK_EMBEDDINGS_CONVERTER, type=T.HELPER_ANNO, get_default_model=ChunkEmbedder.get_default_model, pdf_extractor_methods={'default': default_chunk_embedding_config, 'default_full': default_full_config, }, # TODO no extractor pdf_col_name_substitutor=substitute_chunk_embed_cols, output_level=L.CHUNK, node=NLP_FEATURE_NODES.nodes[A.CHUNK_EMBEDDINGS_CONVERTER], description='Convert Chunks to Doc type col', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.CHUNK_EMBEDDINGS_CONVERTER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.CHUNK_EMBEDDINGS_CONVERTER], is_storage_ref_producer=True, has_storage_ref=True, ), A.BERT_SENTENCE_CHUNK_EMBEDDINGS: partial(NluComponent, name=A.BERT_SENTENCE_CHUNK_EMBEDDINGS, type=T.CHUNK_EMBEDDING, 
get_default_model=BertSentenceChunkEmbeds.get_default_model, get_pretrained_model=BertSentenceChunkEmbeds.get_pretrained_model, pdf_extractor_methods={'default': default_chunk_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_chunk_embed_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.BERT_SENTENCE_CHUNK_EMBEDDINGS], description='Converts NER chunks into Chunk Embeddings generated from sentence embedder', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.BERT_SENTENCE_CHUNK_EMBEDDINGS, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.BERT_SENTENCE_CHUNK_EMBEDDINGS], is_storage_ref_producer=True, has_storage_ref=True, ), # TODO just placeholder A.TRAINABLE_TOKENIZER: partial(NluComponent, name=A.POS, type=T.TOKEN_CLASSIFIER, get_default_model=RegexTokenizer.get_default_model, pdf_extractor_methods={'default': default_tokenizer_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_tokenizer_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.POS], description='todo', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.REGEX_TOKENIZER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.REGEX_TOKENIZER], ), A.CHUNKER: partial(NluComponent, name=A.CHUNKER, type=T.CHUNK_CLASSIFIER, get_default_model=DefaultChunker.get_default_model, pdf_extractor_methods={'default': default_chunk_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_chunk_cols, output_level=L.CHUNK, node=NLP_FEATURE_NODES.nodes[A.CHUNKER], description='Regex matcher that matches patters defined by part-of-speech (POS) tags', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, 
output_context=ComputeContexts.spark, jsl_anno_class_id=A.CHUNKER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.CHUNKER], ), A.CLASSIFIER_DL: partial(NluComponent, name=A.CLASSIFIER_DL, type=T.DOCUMENT_CLASSIFIER, get_default_model=ClassifierDl.get_default_model, get_pretrained_model=ClassifierDl.get_pretrained_model, get_trainable_model=ClassifierDl.get_trainable_model, pdf_extractor_methods={'default': default_classifier_dl_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_classifier_dl_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[A.CLASSIFIER_DL], description='Deep Learning based general classifier for many problems', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.CLASSIFIER_DL, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.CLASSIFIER_DL], has_storage_ref=True, is_storage_ref_consumer=True, trainable_mirror_anno=A.TRAINABLE_CLASSIFIER_DL, ), A.TRAINABLE_CLASSIFIER_DL: partial(NluComponent, name=A.TRAINABLE_CLASSIFIER_DL, type=T.DOCUMENT_CLASSIFIER, get_default_model=ClassifierDl.get_default_model, get_pretrained_model=ClassifierDl.get_pretrained_model, get_trainable_model=ClassifierDl.get_trainable_model, pdf_extractor_methods={'default': default_classifier_dl_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_classifier_dl_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[A.TRAINABLE_CLASSIFIER_DL], description='Deep Learning based general classifier for many problems', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.TRAINABLE_CLASSIFIER_DL, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.TRAINABLE_CLASSIFIER_DL], has_storage_ref=True, is_storage_ref_consumer=True, trainable=True, 
trained_mirror_anno=A.CLASSIFIER_DL, ), A.CONTEXT_SPELL_CHECKER: partial(NluComponent, name=A.CONTEXT_SPELL_CHECKER, type=T.SPELL_CHECKER, get_default_model=ContextSpellChecker.get_default_model, get_pretrained_model=ContextSpellChecker.get_pretrained_model, get_trainable_model=ContextSpellChecker.get_default_trainable_model, pdf_extractor_methods={'default': default_spell_context_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_spell_context_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.CONTEXT_SPELL_CHECKER], description='Deep Learning based spell checker that uses context to predict correct corrections.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.CONTEXT_SPELL_CHECKER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.CONTEXT_SPELL_CHECKER], trainable_mirror_anno=A.TRAINABLE_CONTEXT_SPELL_CHECKER, ), A.UNTYPED_DEPENDENCY_PARSER: partial(NluComponent, name=A.UNTYPED_DEPENDENCY_PARSER, type=T.TOKEN_CLASSIFIER, get_default_model=LabeledDependencyParser.get_default_model, get_pretrained_model=LabeledDependencyParser.get_pretrained_model, get_trainable_model=LabeledDependencyParser.get_default_trainable_model, pdf_extractor_methods={'default': default_dep_typed_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_labled_dependency_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.UNTYPED_DEPENDENCY_PARSER], description='todo', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.UNTYPED_DEPENDENCY_PARSER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.UNTYPED_DEPENDENCY_PARSER], trainable_mirror_anno=A.TRAINABLE_DEP_PARSE_UN_TYPED, ), A.TYPED_DEPENDENCY_PARSER: partial(NluComponent, name=A.TYPED_DEPENDENCY_PARSER, type=T.TOKEN_CLASSIFIER, 
get_default_model=UnlabeledDependencyParser.get_default_model, get_pretrained_model=UnlabeledDependencyParser.get_pretrained_model, get_trainable_model=UnlabeledDependencyParser.get_default_trainable_model, pdf_extractor_methods={'default': default_dep_untyped_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_un_labled_dependency_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.TYPED_DEPENDENCY_PARSER], description='todo', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.TYPED_DEPENDENCY_PARSER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.TYPED_DEPENDENCY_PARSER], trainable_mirror_anno=A.TRAINABLE_DEP_PARSE_TYPED, ), A.DOC2CHUNK: partial(NluComponent, name=A.DOC2CHUNK, type=T.HELPER_ANNO, get_default_model=Doc_2_Chunk.get_default_model, pdf_extractor_methods={'default': default_doc2chunk_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_doc2chunk_cols, output_level=L.CHUNK, node=NLP_FEATURE_NODES.nodes[A.DOC2CHUNK], description='Converts Document type col to Chunk type col', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.DOC2CHUNK, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.DOC2CHUNK], ), A.DOCUMENT_ASSEMBLER: partial(NluComponent, name=A.DOCUMENT_ASSEMBLER, type=T.HELPER_ANNO, get_default_model=SparkNlpDocumentAssembler.get_default_model, pdf_extractor_methods={'default': default_document_config, 'default_full': default_full_config}, pdf_col_name_substitutor=substitute_doc_assembler_cols, output_level=L.DOCUMENT, node=NLP_FEATURE_NODES.nodes[A.DOCUMENT_ASSEMBLER], description='todo', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, 
jsl_anno_class_id=A.DOCUMENT_ASSEMBLER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.DOCUMENT_ASSEMBLER], ), A.AUDIO_ASSEMBLER: partial(NluComponent, name=A.AUDIO_ASSEMBLER, type=T.HELPER_ANNO, get_default_model=AudioAssembler_.get_default_model, pdf_extractor_methods={'default': default_only_result_popped_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=audio_assembler_cols, output_level=L.AUDIO_SERIES, node=NLP_FEATURE_NODES.nodes[A.AUDIO_ASSEMBLER], description='todo', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.AUDIO_ASSEMBLER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.AUDIO_ASSEMBLER], ), A.WAV2VEC_FOR_CTC: partial(NluComponent, name=A.WAV2VEC_FOR_CTC, type=T.SPEECH_RECOGNIZER, get_default_model=Wav2Vec.get_default_model, get_pretrained_model=Wav2Vec.get_pretrained_model, pdf_extractor_methods={'default': default_only_result_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_wav2vec_cols, output_level=L.DOCUMENT, node=NLP_FEATURE_NODES.nodes[A.WAV2VEC_FOR_CTC], description='todo', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.WAV2VEC_FOR_CTC, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.WAV2VEC_FOR_CTC], # Bas on Librosa which uses http://www.mega-nerd.com/libsndfile/ applicable_file_types=['wav', 'mp3', 'flac', 'aiff', 'aifc', 'ogg', 'aflac', 'alac', 'dsd', 'pcm', ] ), A.HUBERT_FOR_CTC: partial(NluComponent, name=A.HUBERT_FOR_CTC, type=T.SPEECH_RECOGNIZER, get_default_model=Hubert.get_default_model, get_pretrained_model=Hubert.get_pretrained_model, pdf_extractor_methods={'default': default_only_result_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_wav2vec_cols, output_level=L.DOCUMENT, 
node=NLP_FEATURE_NODES.nodes[A.HUBERT_FOR_CTC], description='todo', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.HUBERT_FOR_CTC, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.HUBERT_FOR_CTC], # Bas on Librosa which uses http://www.mega-nerd.com/libsndfile/ applicable_file_types=['wav', 'mp3', 'flac', 'aiff', 'aifc', 'ogg', 'aflac', 'alac', 'dsd', 'pcm', ] ), A.TAPAS_FOR_QA: partial(NluComponent, name=A.TAPAS_FOR_QA, type=T.QUESTION_TABLE_ANSWERER, get_default_model=TapasQA.get_default_model, get_pretrained_model=TapasQA.get_pretrained_model, pdf_extractor_methods={ 'default': default_tapas_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_tapas_qa_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[A.TAPAS_FOR_QA], description='todo', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.TAPAS_FOR_QA, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.TAPAS_FOR_QA], ), A.TABLE_ASSEMBLER: partial(NluComponent, name=A.TABLE_ASSEMBLER, type=T.HELPER_ANNO, get_default_model=SparkNlpTableAssembler.get_default_model, pdf_extractor_methods={'default': default_only_result_config, # TODO 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_wav2vec_cols, # TODO output_level=L.DOCUMENT, node=NLP_FEATURE_NODES.nodes[A.TABLE_ASSEMBLER], description='todo', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.TABLE_ASSEMBLER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.TABLE_ASSEMBLER], applicable_file_types=['csv', 'json'] # or str/pd format ), A.DOCUMENT_NORMALIZER: partial(NluComponent, name=A.DOCUMENT_NORMALIZER, type=T.TEXT_NORMALIZER, 
get_default_model=SparkNLPDocumentNormalizer.get_default_model, pdf_extractor_methods={'default': default_norm_document_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_doc_norm_cols, output_level=L.DOCUMENT, node=NLP_FEATURE_NODES.nodes[A.DOCUMENT_NORMALIZER], description='todo', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.DOCUMENT_NORMALIZER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.DOCUMENT_NORMALIZER], ), A.FINISHER: partial(NluComponent, # TODO WIP name=A.FINISHER, type=T.HELPER_ANNO, get_default_model=SdfFinisher.get_default_model, # TODO EXTRACTOR pdf_extractor_methods={'default': default_full_config, 'default_full': default_full_config, }, # TODO SUBSTITOR pdf_col_name_substitutor=None, # TODO no sub defined output_level=L.DOCUMENT, # TODO sub-token actually(?) node=NLP_FEATURE_NODES.nodes[A.FINISHER], description='Get lemmatized base version of tokens', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.FINISHER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.FINISHER], ), A.LANGUAGE_DETECTOR_DL: partial(NluComponent, name=A.LANGUAGE_DETECTOR_DL, type=T.DOCUMENT_CLASSIFIER, get_default_model=LanguageDetector.get_default_model, get_pretrained_model=LanguageDetector.get_pretrained_model, pdf_extractor_methods={'default': default_lang_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=None, # TODO no sub defined output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, # TODO sub-token actually(?) 
node=NLP_FEATURE_NODES.nodes[A.LANGUAGE_DETECTOR_DL], description='Get lemmatized base version of tokens', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.LANGUAGE_DETECTOR_DL, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.LANGUAGE_DETECTOR_DL], ), A.LEMMATIZER: partial(NluComponent, name=A.LEMMATIZER, type=T.TOKEN_NORMALIZER, output_context=ComputeContexts.spark, get_default_model=SparkNLPLemmatizer.get_default_model, get_pretrained_model=SparkNLPLemmatizer.get_pretrained_model, get_trainable_model=SparkNLPLemmatizer.get_default_trainable_model, pdf_extractor_methods={'default': default_lemma_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_lem_cols, output_level=L.TOKEN, # TODO sub-token actually(?) node=NLP_FEATURE_NODES.nodes[A.LEMMATIZER], description='Get lemmatized base version of tokens', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, jsl_anno_class_id=A.LEMMATIZER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.LEMMATIZER], trainable_mirror_anno=A.TRAINABLE_LEMMATIZER ), A.MULTI_CLASSIFIER_DL: partial(NluComponent, name=A.MULTI_CLASSIFIER_DL, type=T.DOCUMENT_CLASSIFIER, output_level=L.MULTI_TOKEN_CLASSIFIER, get_default_model=MultiClassifier.get_default_model, get_pretrained_model=MultiClassifier.get_pretrained_model, get_trainable_model=MultiClassifier.get_default_trainable_model, pdf_extractor_methods={'default': default_multi_classifier_dl_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_multi_classifier_dl_cols, node=NLP_FEATURE_NODES.nodes[A.MULTI_CLASSIFIER_DL], description='Deep Learning based general classifier for multi-label classification problem. I.e. 
problems, where one document may be labled with multiple labels at the same time.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.MULTI_CLASSIFIER_DL, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.MULTI_CLASSIFIER_DL], has_storage_ref=True, is_storage_ref_consumer=True, trainable_mirror_anno=A.TRAINABLE_MULTI_CLASSIFIER_DL, ), A.TRAINABLE_MULTI_CLASSIFIER_DL: partial(NluComponent, name=A.TRAINABLE_MULTI_CLASSIFIER_DL, type=T.DOCUMENT_CLASSIFIER, output_level=L.MULTI_TOKEN_CLASSIFIER, get_default_model=MultiClassifier.get_default_model, get_pretrained_model=MultiClassifier.get_pretrained_model, get_trainable_model=MultiClassifier.get_default_trainable_model, pdf_extractor_methods={'default': default_multi_classifier_dl_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_multi_classifier_dl_cols, node=NLP_FEATURE_NODES.nodes[A.TRAINABLE_MULTI_CLASSIFIER_DL], description='Trainable Deep Learning based general classifier for multi-label classification problem. I.e. 
problems, where one document may be labled with multiple labels at the same time.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.TRAINABLE_MULTI_CLASSIFIER_DL, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.TRAINABLE_MULTI_CLASSIFIER_DL], has_storage_ref=True, is_storage_ref_consumer=True, trainable=True, trained_mirror_anno=A.CLASSIFIER_DL, # Should be A.MULTI_CLASSIFIER_DL, but fitted class is actually classifier DL, special edge case ), A.N_GRAMM_GENERATOR: partial(NluComponent, name=A.N_GRAMM_GENERATOR, type=T.CHUNK_CLASSIFIER, # Classify each n-gram wether they match Pattern or not get_default_model=NGram.get_default_model, pdf_extractor_methods={'default': default_ngram_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_ngram_cols, output_level=L.CHUNK, node=NLP_FEATURE_NODES.nodes[A.N_GRAMM_GENERATOR], description='Extract N-Gram chunks from texts', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.N_GRAMM_GENERATOR, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.N_GRAMM_GENERATOR], ), A.NER_CONVERTER: partial(NluComponent, name=A.NER_CONVERTER, type=T.HELPER_ANNO, get_default_model=NerToChunkConverter.get_default_model, pdf_extractor_methods={'default': default_ner_converter_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_ner_converter_cols, output_level=L.CHUNK, node=NLP_FEATURE_NODES.nodes[A.NER_CONVERTER], description='Convert NER-IOB tokens into concatenated strings (aka chunks)', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.NER_CONVERTER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.NER_CONVERTER], ), A.NER_CRF: 
partial(NluComponent, name=A.NER_CRF, type=T.TOKEN_CLASSIFIER, output_level=L.TOKEN, get_default_model=NERDLCRF.get_default_model, get_pretrained_model=NERDLCRF.get_pretrained_model, get_trainable_model=NERDLCRF.get_default_trainable_model, pdf_extractor_methods={'default': '', 'default_full': default_full_config, }, pdf_col_name_substitutor=None, # TODO node=NLP_FEATURE_NODES.nodes[A.NER_CRF], description='Classical NER model_anno_obj based on conditional random fields (CRF). Predicts IOB tags ', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.NER_CRF, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.NER_CRF], trainable_mirror_anno=A.TRAINABLE_NER_CRF, ), A.NER_DL: partial(NluComponent, name=A.NER_DL, type=T.TOKEN_CLASSIFIER, output_level=L.TOKEN, get_default_model=NERDL.get_default_model, get_pretrained_model=NERDL.get_pretrained_model, get_trainable_model=NERDL.get_default_trainable_model, pdf_extractor_methods={'default': default_NER_config, 'meta': meta_NER_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_ner_dl_cols, node=NLP_FEATURE_NODES.nodes[A.NER_DL], description='Deep Learning based NER model_anno_obj that predicts IOB tags. 
', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.NER_DL, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.NER_DL], trainable_mirror_anno=A.TRAINABLE_NER_DL, has_storage_ref=True, is_storage_ref_consumer=True ), A.TRAINABLE_NER_DL: partial(NluComponent, name=A.TRAINABLE_NER_DL, type=T.TOKEN_CLASSIFIER, get_default_model=NERDL.get_default_model, get_pretrained_model=NERDL.get_pretrained_model, get_trainable_model=NERDL.get_default_trainable_model, pdf_extractor_methods={'default': default_NER_config, 'meta': meta_NER_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_ner_dl_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.TRAINABLE_NER_DL], description='Deep Learning based NER model_anno_obj that predicts IOB tags. ', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.TRAINABLE_NER_DL, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.TRAINABLE_NER_DL], trained_mirror_anno=A.NER_DL, trainable=True, has_storage_ref=True, is_storage_ref_consumer=True ), A.NORMALIZER: partial(NluComponent, name=A.NORMALIZER, type=T.TOKEN_NORMALIZER, get_default_model=SparkNLPNormalizer.get_default_model, get_pretrained_model=SparkNLPNormalizer.get_pretrained_model, # get_trainable_model=SparkNLPLemmatizer.get_default_trainable_model, pdf_extractor_methods={'default': default_norm_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_norm_cols, output_level=L.TOKEN, # TODO sub-token actually(?) 
node=NLP_FEATURE_NODES.nodes[A.NORMALIZER], description='Get lemmatized base version of tokens', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.NORMALIZER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.NORMALIZER], trainable_mirror_anno=A.TRAINABLE_NORMALIZER ), A.NORVIG_SPELL_CHECKER: partial(NluComponent, name=A.NORVIG_SPELL_CHECKER, type=T.SPELL_CHECKER, get_default_model=NorvigSpellChecker.get_default_model, get_pretrained_model=NorvigSpellChecker.get_pretrained_model, get_trainable_model=NorvigSpellChecker.get_default_trainable_model, pdf_extractor_methods={'default': default_spell_norvig_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_spell_norvig_cols, output_level=L.TOKEN, # TODO sub-token actually node=NLP_FEATURE_NODES.nodes[A.NORVIG_SPELL_CHECKER], description='Norvig algorithm based Spell Checker', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.NORVIG_SPELL_CHECKER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.NORVIG_SPELL_CHECKER], trainable_mirror_anno=A.TRAINABLE_NORVIG_SPELL_CHECKER ), A.POS: partial(NluComponent, name=A.POS, type=T.TOKEN_CLASSIFIER, get_default_model=PartOfSpeechJsl.get_default_model, get_pretrained_model=PartOfSpeechJsl.get_pretrained_model, get_trainable_model=PartOfSpeechJsl.get_default_trainable_model, pdf_extractor_methods={'default': default_POS_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_pos_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.POS], description='todo', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.POS, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.POS], 
trainable_mirror_anno=A.TRAINABLE_POS, ), A.TRAINABLE_POS: partial(NluComponent, name=A.TRAINABLE_POS, type=T.TOKEN_CLASSIFIER, get_default_model=PartOfSpeechJsl.get_default_model, get_pretrained_model=PartOfSpeechJsl.get_pretrained_model, get_trainable_model=PartOfSpeechJsl.get_default_trainable_model, pdf_extractor_methods={'default': default_POS_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_pos_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.TRAINABLE_POS], description='todo', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.TRAINABLE_POS, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.TRAINABLE_POS], trained_mirror_anno=A.POS, trainable=True ), A.REGEX_MATCHER: partial(NluComponent, # TODO , type as ner_converted ok ? name=A.REGEX_MATCHER, type=T.HELPER_ANNO, get_default_model=RegexMatcher.get_default_model, # TODO extractor?? pdf_extractor_methods={'default': default_ner_converter_config, 'default_full': default_full_config, }, # TODO substitor?? 
pdf_col_name_substitutor=substitute_ner_converter_cols, output_level=L.CHUNK, node=NLP_FEATURE_NODES.nodes[A.REGEX_MATCHER], description='Matches chunks in text based on regex rules', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.REGEX_MATCHER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.REGEX_MATCHER], ), A.REGEX_TOKENIZER: partial(NluComponent, name=A.POS, type=T.TOKEN_CLASSIFIER, get_default_model=RegexTokenizer.get_default_model, pdf_extractor_methods={'default': default_tokenizer_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_tokenizer_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.POS], description='todo', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.REGEX_TOKENIZER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.REGEX_TOKENIZER], ), A.SENTENCE_DETECTOR: partial(NluComponent, name=A.SENTENCE_DETECTOR, type=T.SENTENCE_DETECTOR, get_default_model=PragmaticSentenceDetector.get_default_model, pdf_extractor_methods={'default': default_sentence_detector_DL_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_sentence_detector_dl_cols, output_level=L.SENTENCE, node=NLP_FEATURE_NODES.nodes[A.SENTENCE_DETECTOR], description='Classical rule based Sentence Detector', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.SENTENCE_DETECTOR, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.SENTENCE_DETECTOR], ), A.SENTENCE_DETECTOR_DL: partial(NluComponent, name=A.SENTENCE_DETECTOR_DL, type=T.SENTENCE_DETECTOR, get_default_model=SentenceDetectorDeep.get_default_model, get_pretrained_model=SentenceDetectorDeep.get_pretrained_model, # 
get_trainable_model=SentenceDetectorDeep.get_trainable_model, pdf_extractor_methods={'default': default_sentence_detector_DL_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_sentence_detector_dl_cols, output_level=L.SENTENCE, node=NLP_FEATURE_NODES.nodes[A.SENTENCE_DETECTOR_DL], description='Deep Learning based sentence Detector', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.SENTENCE_DETECTOR_DL, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.SENTENCE_DETECTOR_DL], trainable_mirror_anno=A.TRAINABLE_SENTENCE_DETECTOR_DL ), A.SENTENCE_EMBEDDINGS_CONVERTER: partial(NluComponent, name=A.SENTENCE_EMBEDDINGS_CONVERTER, type=T.DOCUMENT_EMBEDDING, get_default_model=SparkNLPSentenceEmbeddings.get_default_model, pdf_extractor_methods={'default': default_sentence_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_sent_embed_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_EMBEDDING, node=NLP_FEATURE_NODES.nodes[A.SENTENCE_EMBEDDINGS_CONVERTER], description='Converts Word Embeddings to Sentence/Document Embeddings', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.SENTENCE_EMBEDDINGS_CONVERTER, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.SENTENCE_EMBEDDINGS_CONVERTER], is_storage_ref_producer=True, has_storage_ref=True ), A.STEMMER: partial(NluComponent, name=A.STEMMER, type=T.TOKEN_NORMALIZER, get_default_model=SparkNLPStemmer.get_default_model, pdf_extractor_methods={'default': default_stemm_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_stem_cols, output_level=L.TOKEN, # TODO sub-token actually(?) 
node=NLP_FEATURE_NODES.nodes[A.STEMMER], description='Get stemmed base version of tokens', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.STEMMER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.STEMMER], ), A.STOP_WORDS_CLEANER: partial(NluComponent, name=A.STOP_WORDS_CLEANER, type=T.TEXT_NORMALIZER, get_default_model=NLUStopWordcleaner.get_default_model, get_pretrained_model=NLUStopWordcleaner.get_pretrained_model, pdf_extractor_methods={'default': default_stopwords_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_stopwords_cols, output_level=L.TOKEN, # TODO sub-token actually node=NLP_FEATURE_NODES.nodes[A.STOP_WORDS_CLEANER], description='Removes stopwords from text based on internal list of stop words.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.STOP_WORDS_CLEANER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.STOP_WORDS_CLEANER], ), A.SYMMETRIC_DELETE_SPELLCHECKER: partial(NluComponent, name=A.SYMMETRIC_DELETE_SPELLCHECKER, type=T.SPELL_CHECKER, get_default_model=SymmetricSpellChecker.get_default_model, get_pretrained_model=SymmetricSpellChecker.get_pretrained_model, get_trainable_model=SymmetricSpellChecker.get_default_trainable_model, pdf_extractor_methods={'default': default_spell_symmetric_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_spell_symm_cols, output_level=L.TOKEN, # TODO sub-token actually node=NLP_FEATURE_NODES.nodes[A.SYMMETRIC_DELETE_SPELLCHECKER], description='Symmetric Spell Checker', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.SYMMETRIC_DELETE_SPELLCHECKER, jsl_anno_py_class=ACR.JSL_anno2_py_class[ 
A.SYMMETRIC_DELETE_SPELLCHECKER], trainable_mirror_anno=A.TRAINABLE_SYMMETRIC_DELETE_SPELLCHECKER ), A.TOKENIZER: partial(NluComponent, name=A.TOKENIZER, type=T.TOKENIZER, get_default_model=DefaultTokenizer.get_default_model, pdf_extractor_methods={'default': default_tokenizer_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_tokenizer_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.TOKENIZER], description='Default tokenizer', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.TOKENIZER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.TOKENIZER], ), A.SENTIMENT_DL: partial(NluComponent, name=A.SENTIMENT_DL, type=T.DOCUMENT_CLASSIFIER, get_default_model=SentimentDl.get_default_model, get_pretrained_model=SentimentDl.get_pretrained_model, get_trainable_model=SentimentDl.get_default_trainable_model, pdf_extractor_methods={'default': default_sentiment_dl_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_sentiment_dl_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[A.SENTIMENT_DL], description='Deep Learning based Sentiment Detector', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.SENTIMENT_DL, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.SENTIMENT_DL], trainable_mirror_anno=A.TRAINABLE_SENTIMENT_DL, is_storage_ref_consumer=True, has_storage_ref=True ), A.TRAINABLE_SENTIMENT_DL: partial(NluComponent, name=A.TRAINABLE_SENTIMENT_DL, type=T.DOCUMENT_CLASSIFIER, get_default_model=SentimentDl.get_default_model, get_pretrained_model=SentimentDl.get_pretrained_model, get_trainable_model=SentimentDl.get_default_trainable_model, pdf_extractor_methods={'default': default_sentiment_dl_config, 'default_full': default_full_config, }, 
pdf_col_name_substitutor=substitute_sentiment_dl_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[A.TRAINABLE_SENTIMENT_DL], description='Deep Learning based Sentiment Detector', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.TRAINABLE_SENTIMENT_DL, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.TRAINABLE_SENTIMENT_DL], trained_mirror_anno=A.SENTIMENT_DL, is_storage_ref_consumer=True, has_storage_ref=True, trainable=True ), A.SENTIMENT_DETECTOR: partial(NluComponent, name=A.SENTIMENT_DETECTOR, type=T.DOCUMENT_CLASSIFIER, get_default_model=Sentiment.get_default_model, # get_pretrained_model = Sentiment.get_pretrained_model, get_trainable_model=Sentiment.get_default_trainable_model, pdf_extractor_methods={'default': default_sentiment_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_sentiment_dl_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[A.SENTIMENT_DETECTOR], description='Rule based sentiment detector, which calculates a score based on predefined keywords', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.SENTIMENT_DETECTOR, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.SENTIMENT_DETECTOR], trainable_mirror_anno=A.TRAINABLE_SENTIMENT, ), A.VIVEKN_SENTIMENT: partial(NluComponent, name=A.VIVEKN_SENTIMENT, type=T.DOCUMENT_CLASSIFIER, get_default_model=ViveknSentiment.get_default_model, get_pretrained_model=ViveknSentiment.get_pretrained_model, get_trainable_model=ViveknSentiment.get_default_trainable_model, pdf_extractor_methods={'default': default_sentiment_vivk_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_sentiment_vivk_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, 
node=NLP_FEATURE_NODES.nodes[A.VIVEKN_SENTIMENT], description='Sentiment detector based on the vivekn algorithm', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.VIVEKN_SENTIMENT, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.VIVEKN_SENTIMENT], trainable_mirror_anno=A.TRAINABLE_VIVEKN_SENTIMENT ), A.WORD_EMBEDDINGS: partial(NluComponent, name=A.WORD_EMBEDDINGS, type=T.TOKEN_EMBEDDING, get_default_model=Glove.get_default_model, get_pretrained_model=Glove.get_pretrained_model, pdf_extractor_methods={'default': default_word_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_word_embed_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.WORD_EMBEDDINGS], description='Static Word Embeddings generator, i.e. Glove, etc..', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.WORD_EMBEDDINGS, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.WORD_EMBEDDINGS], is_storage_ref_producer=True, has_storage_ref=True, ), A.WORD_SEGMENTER: partial(NluComponent, name=A.WORD_SEGMENTER, type=T.TOKENIZER, get_default_model=WordSegmenter.get_default_model, get_pretrained_model=WordSegmenter.get_pretrained_model, get_trainable_model=WordSegmenter.get_default_model_for_lang, pdf_extractor_methods={'default': default_word_segmenter_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_word_seg_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.WORD_SEGMENTER], description='Segments non white space seperated text into tokens, like Chinese or Japanese. 
', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.WORD_SEGMENTER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.WORD_SEGMENTER], trainable_mirror_anno=A.TRAINABLE_WORD_SEGMENTER ), A.YAKE_KEYWORD_EXTRACTION: partial(NluComponent, name=A.YAKE_KEYWORD_EXTRACTION, type=T.CHUNK_CLASSIFIER, # TODO??? Classifies each chunks/ngram likelyhood of beeing a Ketyword get_default_model=Yake.get_default_model, pdf_extractor_methods={'default': default_yake_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_YAKE_cols, output_level=L.CHUNK, # Actual sub-ngram/ngram filter node=NLP_FEATURE_NODES.nodes[A.YAKE_KEYWORD_EXTRACTION], description='Calculates probability of each n-gram beeing a keyword. Yields a selection of these n-grams with specific filters,i.e. length, probability, etc..', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.YAKE_KEYWORD_EXTRACTION, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.YAKE_KEYWORD_EXTRACTION], has_storage_ref=False, is_storage_ref_consumer=False, is_storage_ref_producer=False, ), A.DOC2VEC: partial(NluComponent, name=A.DOC2VEC, type=T.TOKEN_EMBEDDING, get_default_model=Doc2Vec.get_default_model, get_trainable_model=Doc2Vec.get_trainable_model, get_pretrained_model=Doc2Vec.get_pretrained_model, pdf_extractor_methods={'default': default_sentence_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_sent_embed_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.DOC2VEC], description='Trains a Word2Vec model_anno_obj that creates vector representations of words in a text corpus. The algorithm first constructs a vocabulary from the corpus and then learns vector representation of words in the vocabulary. 
The vector representation can be used as features in natural language processing and machine learning algorithms.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.DOC2VEC, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.DOC2VEC], has_storage_ref=True, is_storage_ref_producer=True, trainable_mirror_anno=A.TRAINABLE_DOC2VEC ), A.TRAINABLE_DOC2VEC: partial(NluComponent, name=A.TRAINABLE_DOC2VEC, type=T.TOKEN_EMBEDDING, get_default_model=Doc2Vec.get_default_model, get_trainable_model=Doc2Vec.get_trainable_model, get_pretrained_model=Doc2Vec.get_pretrained_model, pdf_extractor_methods={'default': default_sentence_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_sent_embed_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.TRAINABLE_DOC2VEC], description='Trains a Word2Vec model_anno_obj that creates vector representations of words in a text corpus. The algorithm first constructs a vocabulary from the corpus and then learns vector representation of words in the vocabulary. 
The vector representation can be used as features in natural language processing and machine learning algorithms.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.TRAINABLE_DOC2VEC, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.TRAINABLE_DOC2VEC], has_storage_ref=True, is_storage_ref_producer=True, trained_mirror_anno=A.DOC2VEC, trainable=True ), ### ________ TRANSFORMERS BELOW _________ A.ALBERT_EMBEDDINGS: partial(NluComponent, name=A.ALBERT_EMBEDDINGS, type=T.TOKEN_EMBEDDING, get_default_model=SparkNLPAlbert.get_default_model, get_pretrained_model=SparkNLPAlbert.get_pretrained_model, pdf_extractor_methods={'default': default_word_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_word_embed_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.ALBERT_EMBEDDINGS], description='ALBERT: A LITE BERT FOR SELF-SUPERVISED LEARNING OF LANGUAGE REPRESENTATIONS - Google Research, Toyota Technological Institute at Chicago', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.ALBERT_EMBEDDINGS, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.ALBERT_EMBEDDINGS], has_storage_ref=True, is_storage_ref_producer=True, ), A.COREF_SPAN_BERT: partial(NluComponent, name=A.COREF_SPAN_BERT, type=T.TOKEN_CLASSIFIER, get_default_model=CorefBert.get_default_model, get_pretrained_model=CorefBert.get_pretrained_model, pdf_extractor_methods={'default': default_coref_spanbert_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_coref_cols, output_level=L.CO_REFERENCE, node=NLP_FEATURE_NODES.nodes[A.COREF_SPAN_BERT], description='Spanbert for coreference ', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, 
output_context=ComputeContexts.spark, jsl_anno_class_id=A.COREF_SPAN_BERT, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.COREF_SPAN_BERT], ), A.DEBERTA_FOR_TOKEN_CLASSIFICATION: partial(NluComponent, name=A.DEBERTA_FOR_TOKEN_CLASSIFICATION, type=T.TRANSFORMER_TOKEN_CLASSIFIER, get_default_model=TokenDeBerta.get_default_model, get_pretrained_model=TokenDeBerta.get_pretrained_model, pdf_extractor_methods={'default': default_token_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_transformer_token_classifier_cols, output_level=L.TOKEN, # Handled like NER model_anno_obj node=NLP_FEATURE_NODES.nodes[A.DEBERTA_FOR_TOKEN_CLASSIFICATION], description='AlbertForTokenClassification can load ALBERT Models with a token classification head on top (a linear layer on top of the hidden-states output) e.g. for Named-Entity-Recognition (NER) tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.DEBERTA_FOR_TOKEN_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.DEBERTA_FOR_TOKEN_CLASSIFICATION], ), A.CAMEMBERT_FOR_TOKEN_CLASSIFICATION: partial(NluComponent, name=A.CAMEMBERT_FOR_TOKEN_CLASSIFICATION, type=T.TRANSFORMER_TOKEN_CLASSIFIER, get_default_model=TokenCamembert.get_default_model, get_pretrained_model=TokenCamembert.get_pretrained_model, pdf_extractor_methods={'default': default_token_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_transformer_token_classifier_cols, output_level=L.TOKEN, # Handled like NER model_anno_obj node=NLP_FEATURE_NODES.nodes[ A.CAMEMBERT_FOR_TOKEN_CLASSIFICATION], description='CamemBertForTokenClassification can load ALBERT Models with a token classification head on top (a linear layer on top of the hidden-states output) e.g. 
for Named-Entity-Recognition (NER) tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.CAMEMBERT_FOR_TOKEN_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.CAMEMBERT_FOR_TOKEN_CLASSIFICATION], ), A.ALBERT_FOR_TOKEN_CLASSIFICATION: partial(NluComponent, name=A.ALBERT_FOR_TOKEN_CLASSIFICATION, type=T.TRANSFORMER_TOKEN_CLASSIFIER, get_default_model=TokenAlbert.get_default_model, get_pretrained_model=TokenAlbert.get_pretrained_model, pdf_extractor_methods={'default': default_token_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_transformer_token_classifier_cols, output_level=L.TOKEN, # Handled like NER model_anno_obj node=NLP_FEATURE_NODES.nodes[A.ALBERT_FOR_TOKEN_CLASSIFICATION], description='AlbertForTokenClassification can load ALBERT Models with a token classification head on top (a linear layer on top of the hidden-states output) e.g. 
for Named-Entity-Recognition (NER) tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.ALBERT_FOR_TOKEN_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.ALBERT_FOR_TOKEN_CLASSIFICATION], ), # A.CAMEMBERT_EMBEDDINGS: partial(NluComponent, name=A.CAMEMBERT_EMBEDDINGS, type=T.TOKEN_EMBEDDING, get_default_model=CamemBert.get_default_model, get_pretrained_model=CamemBert.get_pretrained_model, pdf_extractor_methods={'default': default_word_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_word_embed_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.CAMEMBERT_EMBEDDINGS], description='Token-level embeddings using CAMEN-BERT', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.CAMEMBERT_EMBEDDINGS, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.CAMEMBERT_EMBEDDINGS], has_storage_ref=True, is_storage_ref_producer=True, ), A.BERT_EMBEDDINGS: partial(NluComponent, name=A.BERT_EMBEDDINGS, type=T.TOKEN_EMBEDDING, get_default_model=SparkNLPBert.get_default_model, get_pretrained_model=SparkNLPBert.get_pretrained_model, pdf_extractor_methods={'default': default_word_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_word_embed_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.BERT_EMBEDDINGS], description='Token-level embeddings using BERT. 
BERT (Bidirectional Encoder Representations from Transformers) provides dense vector representations for natural language by using a deep, pre-trained neural network with the Transformer architecture.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.BERT_EMBEDDINGS, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.BERT_EMBEDDINGS], has_storage_ref=True, is_storage_ref_producer=True, ), A.BERT_SENTENCE_EMBEDDINGS: partial(NluComponent, name=A.BERT_SENTENCE_EMBEDDINGS, type=T.DOCUMENT_EMBEDDING, get_default_model=BertSentence.get_default_model, get_pretrained_model=BertSentence.get_pretrained_model, pdf_extractor_methods={'default': default_sentence_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_sent_embed_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_EMBEDDING, node=NLP_FEATURE_NODES.nodes[A.BERT_SENTENCE_EMBEDDINGS], description='Sentence-level embeddings using BERT. 
BERT (Bidirectional Encoder Representations from Transformers) provides dense vector representations for natural language by using a deep, pre-trained neural network with the Transformer architecture.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.BERT_SENTENCE_EMBEDDINGS, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.BERT_SENTENCE_EMBEDDINGS], has_storage_ref=True, is_storage_ref_producer=True, ), A.BERT_FOR_TOKEN_CLASSIFICATION: partial(NluComponent, name=A.BERT_FOR_TOKEN_CLASSIFICATION, type=T.TRANSFORMER_TOKEN_CLASSIFIER, get_default_model=TokenBert.get_default_model, get_pretrained_model=TokenBert.get_pretrained_model, pdf_extractor_methods={'default': default_token_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_transformer_token_classifier_cols, output_level=L.TOKEN, # Handled like NER model_anno_obj node=NLP_FEATURE_NODES.nodes[A.BERT_FOR_TOKEN_CLASSIFICATION], description='BertForTokenClassification can load Bert Models with a token classification head on top (a linear layer on top of the hidden-states output) e.g. 
for Named-Entity-Recognition (NER) tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.BERT_FOR_TOKEN_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.BERT_FOR_TOKEN_CLASSIFICATION], ), A.BERT_FOR_SEQUENCE_CLASSIFICATION: partial(NluComponent, name=A.BERT_FOR_SEQUENCE_CLASSIFICATION, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=SeqBertClassifier.get_default_model, get_pretrained_model=SeqBertClassifier.get_pretrained_model, pdf_extractor_methods={'default': default_classifier_dl_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_seq_bert_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[A.BERT_FOR_SEQUENCE_CLASSIFICATION], description='BertForSequenceClassification can load Bert Models with sequence classification/regression head on top (a linear layer on top of the pooled output) e.g. 
for multi-class document classification tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.BERT_FOR_SEQUENCE_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.BERT_FOR_SEQUENCE_CLASSIFICATION], ), A.CAMEMBERT_FOR_SEQUENCE_CLASSIFICATION: partial(NluComponent, name=A.CAMEMBERT_FOR_SEQUENCE_CLASSIFICATION, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=SeqCamembertClassifier.get_default_model, get_pretrained_model=SeqCamembertClassifier.get_pretrained_model, pdf_extractor_methods={'default': default_classifier_dl_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_seq_bert_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[ A.CAMEMBERT_FOR_SEQUENCE_CLASSIFICATION], description='BertForSequenceClassification can load Bert Models with sequence classification/regression head on top (a linear layer on top of the pooled output) e.g. for multi-class document classification tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.CAMEMBERT_FOR_SEQUENCE_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.CAMEMBERT_FOR_SEQUENCE_CLASSIFICATION], ), A.DISTIL_BERT_EMBEDDINGS: partial(NluComponent, name=A.DISTIL_BERT_EMBEDDINGS, type=T.TOKEN_EMBEDDING, get_default_model=DistilBert.get_default_model, get_pretrained_model=DistilBert.get_pretrained_model, pdf_extractor_methods={'default': default_word_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_word_embed_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.DISTIL_BERT_EMBEDDINGS], description='DistilBERT is a small, fast, cheap and light Transformer model_anno_obj trained by distilling BERT base. 
It has 40% less parameters than bert-base-uncased, runs 60% faster while preserving over 95% of BERT’s performances as measured on the GLUE language understanding benchmark.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.DISTIL_BERT_EMBEDDINGS, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.DISTIL_BERT_EMBEDDINGS], has_storage_ref=True, is_storage_ref_producer=True, ), A.DISTIL_BERT_FOR_SEQUENCE_CLASSIFICATION: partial(NluComponent, name=A.DISTIL_BERT_FOR_SEQUENCE_CLASSIFICATION, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=SeqDilstilBertClassifier.get_default_model, get_pretrained_model=SeqDilstilBertClassifier.get_pretrained_model, pdf_extractor_methods={ 'default': default_classifier_dl_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_seq_bert_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[ A.DISTIL_BERT_FOR_SEQUENCE_CLASSIFICATION], description='DistilBertForSequenceClassification can load DistilBERT Models with sequence classification/regression head on top (a linear layer on top of the pooled output) e.g. 
for multi-class document classification tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.DISTIL_BERT_FOR_SEQUENCE_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.DISTIL_BERT_FOR_SEQUENCE_CLASSIFICATION], ), A.DISTIL_BERT_FOR_TOKEN_CLASSIFICATION: partial(NluComponent, name=A.DISTIL_BERT_FOR_TOKEN_CLASSIFICATION, type=T.TRANSFORMER_TOKEN_CLASSIFIER, get_default_model=TokenDistilBert.get_default_model, get_pretrained_model=TokenDistilBert.get_pretrained_model, pdf_extractor_methods={ 'default': default_token_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_transformer_token_classifier_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[ A.DISTIL_BERT_FOR_TOKEN_CLASSIFICATION], description='DistilBertForTokenClassification can load Bert Models with a token classification head on top (a linear layer on top of the hidden-states output) e.g. 
for Named-Entity-Recognition (NER) tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.DISTIL_BERT_FOR_TOKEN_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.DISTIL_BERT_FOR_TOKEN_CLASSIFICATION], ), A.ELMO_EMBEDDINGS: partial(NluComponent, name=A.ELMO_EMBEDDINGS, type=T.TOKEN_EMBEDDING, get_default_model=SparkNLPElmo.get_default_model, get_pretrained_model=SparkNLPElmo.get_pretrained_model, pdf_extractor_methods={'default': default_word_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_word_embed_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.ELMO_EMBEDDINGS], description='Word embeddings from ELMo (Embeddings from Language Models), a language model_anno_obj trained on the 1 Billion Word Benchmark.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.ELMO_EMBEDDINGS, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.ELMO_EMBEDDINGS], has_storage_ref=True, is_storage_ref_producer=True, ), A.LONGFORMER_EMBEDDINGS: partial(NluComponent, name=A.LONGFORMER_EMBEDDINGS, type=T.TOKEN_EMBEDDING, get_default_model=Longformer.get_default_model, get_pretrained_model=Longformer.get_pretrained_model, pdf_extractor_methods={'default': default_word_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_word_embed_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.LONGFORMER_EMBEDDINGS], description='Longformer is a transformer model_anno_obj for long documents. The Longformer model_anno_obj was presented in Longformer: The Long-Document Transformer by Iz Beltagy, Matthew E. Peters, Arman Cohan. longformer-base-4096 is a BERT-like model_anno_obj started from the RoBERTa checkpoint and pretrained for MLM on long documents. 
It supports sequences of length up to 4,096.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.LONGFORMER_EMBEDDINGS, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.LONGFORMER_EMBEDDINGS], has_storage_ref=True, is_storage_ref_producer=True, ), A.LONGFORMER_FOR_TOKEN_CLASSIFICATION: partial(NluComponent, name=A.LONGFORMER_FOR_TOKEN_CLASSIFICATION, type=T.TRANSFORMER_TOKEN_CLASSIFIER, get_default_model=TokenLongFormer.get_default_model, get_pretrained_model=TokenLongFormer.get_pretrained_model, pdf_extractor_methods={ 'default': default_token_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_transformer_token_classifier_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[ A.LONGFORMER_FOR_TOKEN_CLASSIFICATION], description='LongformerForTokenClassification can load Longformer Models with a token classification head on top (a linear layer on top of the hidden-states output) e.g. for Named-Entity-Recognition (NER) tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.LONGFORMER_FOR_TOKEN_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.LONGFORMER_FOR_TOKEN_CLASSIFICATION], ), A.MARIAN_TRANSFORMER: partial(NluComponent, name=A.MARIAN_TRANSFORMER, type=T.DOCUMENT_CLASSIFIER, get_default_model=Marian.get_default_model, get_pretrained_model=Marian.get_pretrained_model, pdf_extractor_methods={'default': default_marian_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_marian_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[A.MARIAN_TRANSFORMER], description='Marian is an efficient, free Neural Machine Translation framework written in pure C++ with minimal dependencies. 
It is mainly being developed by the Microsoft Translator team. Many academic (most notably the University of Edinburgh and in the past the Adam Mickiewicz University in Poznań) and commercial contributors help with its development. MarianTransformer uses the models trained by MarianNMT.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.MARIAN_TRANSFORMER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.MARIAN_TRANSFORMER], ), A.ROBERTA_EMBEDDINGS: partial(NluComponent, name=A.ROBERTA_EMBEDDINGS, type=T.TOKEN_EMBEDDING, get_default_model=Roberta.get_default_model, get_pretrained_model=Roberta.get_pretrained_model, pdf_extractor_methods={'default': default_word_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_word_embed_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.ROBERTA_EMBEDDINGS], description='The RoBERTa model_anno_obj was proposed in RoBERTa: A Robustly Optimized BERT Pretraining Approach by Yinhan Liu, Myle Ott, Naman Goyal, Jingfei Du, Mandar Joshi, Danqi Chen, Omer Levy, Mike Lewis, Luke Zettlemoyer, Veselin Stoyanov. 
It is based on Google’s BERT model_anno_obj released in 2018.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.ROBERTA_EMBEDDINGS, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.ROBERTA_EMBEDDINGS], has_storage_ref=True, is_storage_ref_producer=True, ), A.ROBERTA_FOR_TOKEN_CLASSIFICATION: partial(NluComponent, name=A.ROBERTA_FOR_TOKEN_CLASSIFICATION, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=TokenRoBerta.get_default_model, get_pretrained_model=TokenRoBerta.get_pretrained_model, pdf_extractor_methods={'default': default_token_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_transformer_token_classifier_cols, output_level=L.TOKEN, # Handled like NER model_anno_obj node=NLP_FEATURE_NODES.nodes[A.ROBERTA_FOR_TOKEN_CLASSIFICATION], description='RoBertaForTokenClassification can load RoBERTa Models with a token classification head on top (a linear layer on top of the hidden-states output) e.g. for Named-Entity-Recognition (NER) tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.ROBERTA_FOR_TOKEN_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.ROBERTA_FOR_TOKEN_CLASSIFICATION], ), A.ROBERTA_SENTENCE_EMBEDDINGS: partial(NluComponent, name=A.ROBERTA_SENTENCE_EMBEDDINGS, type=T.DOCUMENT_EMBEDDING, get_default_model=BertSentence.get_default_model, get_pretrained_model=BertSentence.get_pretrained_model, pdf_extractor_methods={'default': default_sentence_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_sent_embed_cols, pipe_prediction_output_level=L.INPUT_DEPENDENT_DOCUMENT_EMBEDDING, node=NLP_FEATURE_NODES.nodes[A.ROBERTA_SENTENCE_EMBEDDINGS], description='Sentence-level embeddings using BERT. 
BERT (Bidirectional Encoder Representations from Transformers) provides dense vector representations for natural language by using a deep, pre-trained neural network with the Transformer architecture.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id_id=A.ROBERTA_SENTENCE_EMBEDDINGS, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.ROBERTA_SENTENCE_EMBEDDINGS], has_storage_ref=True, is_is_storage_ref_producer=True, ), A.T5_TRANSFORMER: partial(NluComponent, # TODO task based construction, i.e. get_preconfigured_model name=A.T5_TRANSFORMER, type=T.DOCUMENT_CLASSIFIER, get_default_model=T5.get_default_model, get_pretrained_model=T5.get_pretrained_model, pdf_extractor_methods={'default': default_T5_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_T5_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[A.T5_TRANSFORMER], description='T5 reconsiders all NLP tasks into a unified text-to-text-format where the input and output are always text strings, in contrast to BERT-style models that can only output either a class label or a span of the input. The text-to-text framework is able to use the same model_anno_obj, loss function, and hyper-parameters on any NLP task, including machine translation, document summarization, question answering, and classification tasks (e.g., sentiment analysis). 
T5 can even apply to regression tasks by training it to predict the string representation of a number instead of the number itself.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.T5_TRANSFORMER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.T5_TRANSFORMER], ), A.UNIVERSAL_SENTENCE_ENCODER: partial(NluComponent, name=A.UNIVERSAL_SENTENCE_ENCODER, type=T.DOCUMENT_EMBEDDING, get_default_model=SparkNLPUse.get_default_model, get_pretrained_model=SparkNLPUse.get_pretrained_model, pdf_extractor_methods={'default': default_sentence_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_sent_embed_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_EMBEDDING, node=NLP_FEATURE_NODES.nodes[A.UNIVERSAL_SENTENCE_ENCODER], description='The Universal Sentence Encoder encodes text into high dimensional vectors that can be used for text classification, semantic similarity, clustering and other natural language tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.UNIVERSAL_SENTENCE_ENCODER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.UNIVERSAL_SENTENCE_ENCODER], has_storage_ref=True, is_storage_ref_producer=True, ), A.XLM_ROBERTA_EMBEDDINGS: partial(NluComponent, name=A.XLM_ROBERTA_EMBEDDINGS, type=T.TOKEN_EMBEDDING, get_default_model=XLM.get_default_model, get_pretrained_model=XLM.get_pretrained_model, pdf_extractor_methods={'default': default_word_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_word_embed_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.XLM_ROBERTA_EMBEDDINGS], description='The XLM-RoBERTa model_anno_obj was proposed in Unsupervised Cross-lingual Representation Learning at Scale by Alexis Conneau, Kartikay Khandelwal, Naman Goyal, 
Vishrav Chaudhary, Guillaume Wenzek, Francisco Guzmán, Edouard Grave, Myle Ott, Luke Zettlemoyer and Veselin Stoyanov. It is based on Facebook’s RoBERTa model_anno_obj released in 2019. It is a large multi-lingual language model_anno_obj, trained on 2.5TB of filtered CommonCrawl data.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.XLM_ROBERTA_EMBEDDINGS, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.XLM_ROBERTA_EMBEDDINGS], has_storage_ref=True, is_storage_ref_producer=True, ), A.XLM_ROBERTA_FOR_TOKEN_CLASSIFICATION: partial(NluComponent, name=A.XLM_ROBERTA_FOR_TOKEN_CLASSIFICATION, type=T.TRANSFORMER_TOKEN_CLASSIFIER, get_default_model=TokenXlmRoBerta.get_default_model, get_pretrained_model=TokenXlmRoBerta.get_pretrained_model, pdf_extractor_methods={ 'default': default_token_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_transformer_token_classifier_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[ A.XLM_ROBERTA_FOR_TOKEN_CLASSIFICATION], description='XlmRoBertaForTokenClassification can load XLM-RoBERTa Models with a token classification head on top (a linear layer on top of the hidden-states output) e.g. 
for Named-Entity-Recognition (NER) tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.XLM_ROBERTA_FOR_TOKEN_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.XLM_ROBERTA_FOR_TOKEN_CLASSIFICATION], ), A.XLM_ROBERTA_SENTENCE_EMBEDDINGS: partial(NluComponent, name=A.XLM_ROBERTA_SENTENCE_EMBEDDINGS, type=T.DOCUMENT_EMBEDDING, get_default_model=Sentence_XLM.get_default_model, get_pretrained_model=Sentence_XLM.get_pretrained_model, pdf_extractor_methods={'default': default_sentence_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_sent_embed_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_EMBEDDING, node=NLP_FEATURE_NODES.nodes[A.XLM_ROBERTA_SENTENCE_EMBEDDINGS], description='Sentence-level embeddings using XLM-RoBERTa. The XLM-RoBERTa model_anno_obj was proposed in Unsupervised Cross-lingual Representation Learning at Scale by Alexis Conneau, Kartikay Khandelwal, Naman Goyal, Vishrav Chaudhary, Guillaume Wenzek, Francisco Guzmán, Edouard Grave, Myle Ott, Luke Zettlemoyer and Veselin Stoyanov. It is based on Facebook’s RoBERTa model_anno_obj released in 2019. 
It is a large multi-lingual language model_anno_obj, trained on 2.5TB of filtered CommonCrawl data.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.XLM_ROBERTA_SENTENCE_EMBEDDINGS, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.XLM_ROBERTA_SENTENCE_EMBEDDINGS], has_storage_ref=True, is_storage_ref_producer=True, ), A.XLNET_EMBEDDINGS: partial(NluComponent, name=A.XLNET_EMBEDDINGS, type=T.TOKEN_EMBEDDING, get_default_model=SparkNLPXlnet.get_default_model, get_pretrained_model=SparkNLPXlnet.get_pretrained_model, pdf_extractor_methods={'default': default_word_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_word_embed_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.XLNET_EMBEDDINGS], description='XLNet is a new unsupervised language representation learning method based on a novel generalized permutation language modeling objective. Additionally, XLNet employs Transformer-XL as the backbone model_anno_obj, exhibiting excellent performance for language tasks involving long context. 
Overall, XLNet achieves state-of-the-art (SOTA) results on various downstream language tasks including question answering, natural language inference, sentiment analysis, and document ranking.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.XLNET_EMBEDDINGS, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.XLNET_EMBEDDINGS], has_storage_ref=True, is_storage_ref_producer=True, ), A.XLNET_FOR_TOKEN_CLASSIFICATION: partial(NluComponent, name=A.XLNET_FOR_TOKEN_CLASSIFICATION, type=T.TRANSFORMER_TOKEN_CLASSIFIER, get_default_model=TokenXlnet.get_default_model, get_pretrained_model=TokenXlnet.get_pretrained_model, pdf_extractor_methods={'default': default_token_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_transformer_token_classifier_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.XLNET_FOR_TOKEN_CLASSIFICATION], description='XlnetForTokenClassification can load XLNet Models with a token classification head on top (a linear layer on top of the hidden-states output) e.g. 
for Named-Entity-Recognition (NER) tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.XLNET_FOR_TOKEN_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.XLNET_FOR_TOKEN_CLASSIFICATION], ), A.XLM_ROBERTA_FOR_SEQUENCE_CLASSIFICATION: partial(NluComponent, name=A.XLM_ROBERTA_FOR_SEQUENCE_CLASSIFICATION, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=SeqXlmRobertaClassifier.get_default_model, get_pretrained_model=SeqXlmRobertaClassifier.get_pretrained_model, pdf_extractor_methods={ 'default': default_classifier_dl_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_seq_bert_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[ A.XLM_ROBERTA_FOR_SEQUENCE_CLASSIFICATION], description='XlmRoBertaForSequenceClassification can load XLM-RoBERTa Models with sequence classification/regression head on top (a linear layer on top of the pooled output) e.g. 
for multi-class document classification task', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.XLM_ROBERTA_FOR_SEQUENCE_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.XLM_ROBERTA_FOR_SEQUENCE_CLASSIFICATION], ), A.ROBERTA_FOR_SEQUENCE_CLASSIFICATION: partial(NluComponent, name=A.ROBERTA_FOR_SEQUENCE_CLASSIFICATION, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=SeqRobertaClassifier.get_default_model, get_pretrained_model=SeqRobertaClassifier.get_pretrained_model, pdf_extractor_methods={'default': default_classifier_dl_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_seq_bert_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[ A.ROBERTA_FOR_SEQUENCE_CLASSIFICATION], description='RoBertaForSequenceClassification can load RoBERTa Models with sequence classification/regression head on top (a linear layer on top of the pooled output) e.g. 
for multi-class document classification tasks', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.ROBERTA_FOR_SEQUENCE_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.ROBERTA_FOR_SEQUENCE_CLASSIFICATION], ), A.LONGFORMER_FOR_SEQUENCE_CLASSIFICATION: partial(NluComponent, name=A.LONGFORMER_FOR_SEQUENCE_CLASSIFICATION, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=SeqLongformerClassifier.get_default_model, get_pretrained_model=SeqLongformerClassifier.get_pretrained_model, pdf_extractor_methods={ 'default': default_seq_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_seq_bert_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[ A.LONGFORMER_FOR_SEQUENCE_CLASSIFICATION], description='RoBertaForSequenceClassification can load RoBERTa Models with sequence classification/regression head on top (a linear layer on top of the pooled output) e.g. 
for multi-class document classification tasks', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.LONGFORMER_FOR_SEQUENCE_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.LONGFORMER_FOR_SEQUENCE_CLASSIFICATION], ), A.ALBERT_FOR_SEQUENCE_CLASSIFICATION: partial(NluComponent, name=A.ALBERT_FOR_SEQUENCE_CLASSIFICATION, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=SeqAlbertClassifier.get_default_model, get_pretrained_model=SeqAlbertClassifier.get_pretrained_model, pdf_extractor_methods={'default': default_seq_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_seq_bert_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[ A.ALBERT_FOR_SEQUENCE_CLASSIFICATION], description='AlbertForSequenceClassification can load ALBERT Models with sequence classification/regression head on top (a linear layer on top of the pooled output) e.g. 
for multi-class document classification tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.ALBERT_FOR_SEQUENCE_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.ALBERT_FOR_SEQUENCE_CLASSIFICATION], ), A.BERT_FOR_ZERO_SHOT_CLASSIFICATION: partial(NluComponent, name=A.BERT_FOR_ZERO_SHOT_CLASSIFICATION, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=BertZeroShotClassifier.get_default_model, get_pretrained_model=BertZeroShotClassifier.get_pretrained_model, pdf_extractor_methods={'default': default_seq_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_seq_bert_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[ A.BERT_FOR_ZERO_SHOT_CLASSIFICATION], description='Bert Zero Shot Classifier.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.BERT_FOR_ZERO_SHOT_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.BERT_FOR_ZERO_SHOT_CLASSIFICATION], ), A.DISTIL_BERT_FOR_ZERO_SHOT_CLASSIFICATION: partial(NluComponent, name=A.DISTIL_BERT_FOR_ZERO_SHOT_CLASSIFICATION, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=DistilBertZeroShotClassifier.get_default_model, get_pretrained_model=DistilBertZeroShotClassifier.get_pretrained_model, pdf_extractor_methods={ 'default': default_seq_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_seq_bert_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[ A.DISTIL_BERT_FOR_ZERO_SHOT_CLASSIFICATION], description='Distil Bert Zero Shot Classifier.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, 
output_context=ComputeContexts.spark, jsl_anno_class_id=A.DISTIL_BERT_FOR_ZERO_SHOT_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.DISTIL_BERT_FOR_ZERO_SHOT_CLASSIFICATION], ), A.ROBERTA_FOR_ZERO_SHOT_CLASSIFICATION: partial(NluComponent, name=A.ROBERTA_FOR_ZERO_SHOT_CLASSIFICATION, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=RoBertaForZeroShotClassification.get_default_model, get_pretrained_model=RoBertaForZeroShotClassification.get_pretrained_model, pdf_extractor_methods={ 'default': default_seq_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_seq_bert_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[ A.ROBERTA_FOR_ZERO_SHOT_CLASSIFICATION], description='Roberta Zero shot Classifier.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.ROBERTA_FOR_ZERO_SHOT_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.ROBERTA_FOR_ZERO_SHOT_CLASSIFICATION], ), A.XLNET_FOR_SEQUENCE_CLASSIFICATION: partial(NluComponent, name=A.XLNET_FOR_SEQUENCE_CLASSIFICATION, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=SeqXlnetClassifier.get_default_model, get_pretrained_model=SeqXlnetClassifier.get_pretrained_model, pdf_extractor_methods={'default': default_seq_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_seq_bert_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[A.XLNET_FOR_SEQUENCE_CLASSIFICATION], description='AlbertForSequenceClassification can load ALBERT Models with sequence classification/regression head on top (a linear layer on top of the pooled output) e.g. 
for multi-class document classification tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.XLNET_FOR_SEQUENCE_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.XLNET_FOR_SEQUENCE_CLASSIFICATION], ), A.GPT2: partial(NluComponent, name=A.GPT2, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=GPT2.get_default_model, get_pretrained_model=GPT2.get_pretrained_model, pdf_extractor_methods={'default': default_gpt2_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_gpt2_cols, # TIODO TESt output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[A.GPT2], description='AlbertForSequenceClassification can load ALBERT Models with sequence classification/regression head on top (a linear layer on top of the pooled output) e.g. for multi-class document classification tasks.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.GPT2, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.GPT2], ), A.WORD_2_VEC: partial(NluComponent, # TOOD name=A.WORD_2_VEC, type=T.TOKEN_EMBEDDING, get_default_model=Word2Vec.get_default_model, get_pretrained_model=Word2Vec.get_pretrained_model, get_trainable_model=Word2Vec.get_trainable_model, pdf_extractor_methods={'default': default_word_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_word_embed_cols, # TODO? output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.WORD_2_VEC], description='We use Word2Vec implemented in Spark ML. It uses skip-gram model_anno_obj in our implementation and a hierarchical softmax method to train the model_anno_obj. 
The variable names in the implementation match the original C implementation.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.WORD_2_VEC, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.WORD_2_VEC], has_storage_ref=True, is_storage_ref_producer=True, ), A.DEBERTA_WORD_EMBEDDINGS: partial(NluComponent, name=A.DEBERTA_WORD_EMBEDDINGS, type=T.TOKEN_EMBEDDING, get_default_model=Deberta.get_default_model, get_pretrained_model=Deberta.get_pretrained_model, pdf_extractor_methods={'default': default_word_embedding_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_word_embed_cols, output_level=L.TOKEN, node=NLP_FEATURE_NODES.nodes[A.DEBERTA_WORD_EMBEDDINGS], description='Token-level embeddings using DeBERTa. The DeBERTa model_anno_obj was proposed in DeBERTa: Decoding-enhanced BERT with Disentangled Attention by Pengcheng He, Xiaodong Liu, Jianfeng Gao, Weizhu Chen. 
It is based on Google’s BERT model_anno_obj released in 2018 and Facebook’s RoBERTa model_anno_obj released in 2019.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.DEBERTA_WORD_EMBEDDINGS, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.DEBERTA_WORD_EMBEDDINGS], has_storage_ref=True, is_storage_ref_producer=True, ), A.DEBERTA_FOR_SEQUENCE_CLASSIFICATION: partial(NluComponent, name=A.DEBERTA_FOR_SEQUENCE_CLASSIFICATION, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=SeqDebertaClassifier.get_default_model, get_pretrained_model=SeqDebertaClassifier.get_pretrained_model, pdf_extractor_methods={ 'default': default_seq_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_seq_bert_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, node=NLP_FEATURE_NODES.nodes[ A.DEBERTA_FOR_SEQUENCE_CLASSIFICATION], description='The DeBERTa model_anno_obj was proposed in DeBERTa: Decoding-enhanced BERT with Disentangled Attention by Pengcheng He, Xiaodong Liu, Jianfeng Gao, Weizhu Chen. It is based on Google’s BERT model_anno_obj released in 2018 and Facebook’s RoBERTa model_anno_obj released in 2019. 
This classifier uses DeBERTa embeddingss with a linear classification head ontop.', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.DEBERTA_FOR_SEQUENCE_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.DEBERTA_FOR_SEQUENCE_CLASSIFICATION], ), A.CAMEMBERT_FOR_QUESTION_ANSWERING: partial(NluComponent, name=A.CAMEMBERT_FOR_QUESTION_ANSWERING, jsl_anno_class_id=A.CAMEMBERT_FOR_QUESTION_ANSWERING, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.CAMEMBERT_FOR_QUESTION_ANSWERING], node=NLP_FEATURE_NODES.nodes[A.CAMEMBERT_FOR_QUESTION_ANSWERING], get_default_model=SpanCamemBert.get_default_model, get_pretrained_model=SpanCamemBert.get_pretrained_model, type=T.QUESTION_SPAN_CLASSIFIER, pdf_extractor_methods={ 'default': default_span_classifier_config, 'default_full': default_full_span_classifier_config, }, pdf_col_name_substitutor=substitute_span_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, description='TODO', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.BERT_FOR_QUESTION_ANSWERING: partial(NluComponent, name=A.BERT_FOR_QUESTION_ANSWERING, jsl_anno_class_id=A.BERT_FOR_QUESTION_ANSWERING, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.BERT_FOR_QUESTION_ANSWERING], node=NLP_FEATURE_NODES.nodes[A.BERT_FOR_QUESTION_ANSWERING], get_default_model=SpanBertClassifier.get_default_model, get_pretrained_model=SpanBertClassifier.get_pretrained_model, type=T.QUESTION_SPAN_CLASSIFIER, pdf_extractor_methods={ 'default': default_span_classifier_config, 'default_full': default_full_span_classifier_config, }, pdf_col_name_substitutor=substitute_span_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, description='TODO', provider=ComponentBackends.open_source, license=Licenses.open_source, 
computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.DE_BERTA_FOR_QUESTION_ANSWERING: partial(NluComponent, name=A.DE_BERTA_FOR_QUESTION_ANSWERING, jsl_anno_class_id=A.DE_BERTA_FOR_QUESTION_ANSWERING, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.DE_BERTA_FOR_QUESTION_ANSWERING], node=NLP_FEATURE_NODES.nodes[A.DE_BERTA_FOR_QUESTION_ANSWERING], get_default_model=SpanDeBertaClassifier.get_default_model, get_pretrained_model=SpanDeBertaClassifier.get_pretrained_model, type=T.QUESTION_SPAN_CLASSIFIER, pdf_extractor_methods={ 'default': default_span_classifier_config, 'default_full': default_full_span_classifier_config, }, pdf_col_name_substitutor=substitute_span_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, description='TODO', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.DISTIL_BERT_FOR_QUESTION_ANSWERING: partial(NluComponent, name=A.DISTIL_BERT_FOR_QUESTION_ANSWERING, jsl_anno_class_id=A.DISTIL_BERT_FOR_QUESTION_ANSWERING, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.DISTIL_BERT_FOR_QUESTION_ANSWERING], node=NLP_FEATURE_NODES.nodes[ A.DISTIL_BERT_FOR_QUESTION_ANSWERING], get_default_model=SpanDistilBertClassifier.get_default_model, get_pretrained_model=SpanDistilBertClassifier.get_pretrained_model, type=T.QUESTION_SPAN_CLASSIFIER, pdf_extractor_methods={ 'default': default_span_classifier_config, 'default_full': default_full_span_classifier_config, }, pdf_col_name_substitutor=substitute_span_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, description='TODO', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.LONGFORMER_FOR_QUESTION_ANSWERING: partial(NluComponent, name=A.LONGFORMER_FOR_QUESTION_ANSWERING, jsl_anno_class_id=A.LONGFORMER_FOR_QUESTION_ANSWERING, 
jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.LONGFORMER_FOR_QUESTION_ANSWERING], node=NLP_FEATURE_NODES.nodes[A.LONGFORMER_FOR_QUESTION_ANSWERING], get_default_model=SpanLongFormerClassifier.get_default_model, get_pretrained_model=SpanLongFormerClassifier.get_pretrained_model, type=T.QUESTION_SPAN_CLASSIFIER, pdf_extractor_methods={ 'default': default_span_classifier_config, 'default_full': default_full_span_classifier_config, }, pdf_col_name_substitutor=substitute_span_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, description='TODO', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.ROBERTA_FOR_QUESTION_ANSWERING: partial(NluComponent, name=A.ROBERTA_FOR_QUESTION_ANSWERING, jsl_anno_class_id=A.ROBERTA_FOR_QUESTION_ANSWERING, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.ROBERTA_FOR_QUESTION_ANSWERING], node=NLP_FEATURE_NODES.nodes[A.ROBERTA_FOR_QUESTION_ANSWERING], get_default_model=SpanRobertaClassifier.get_default_model, get_pretrained_model=SpanRobertaClassifier.get_pretrained_model, type=T.QUESTION_SPAN_CLASSIFIER, pdf_extractor_methods={ 'default': default_span_classifier_config, 'default_full': default_full_span_classifier_config, }, pdf_col_name_substitutor=substitute_span_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, description='TODO', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.XLM_ROBERTA_FOR_QUESTION_ANSWERING: partial(NluComponent, name=A.XLM_ROBERTA_FOR_QUESTION_ANSWERING, jsl_anno_class_id=A.XLM_ROBERTA_FOR_QUESTION_ANSWERING, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.XLM_ROBERTA_FOR_QUESTION_ANSWERING], node=NLP_FEATURE_NODES.nodes[ A.XLM_ROBERTA_FOR_QUESTION_ANSWERING], get_default_model=SpanXlmRobertaClassifier.get_default_model, 
get_pretrained_model=SpanXlmRobertaClassifier.get_pretrained_model, type=T.QUESTION_SPAN_CLASSIFIER, pdf_extractor_methods={ 'default': default_span_classifier_config, 'default_full': default_full_span_classifier_config, }, pdf_col_name_substitutor=substitute_span_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, description='TODO', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), A.MULTI_DOCUMENT_ASSEMBLER: partial(NluComponent, name=A.MULTI_DOCUMENT_ASSEMBLER, type=T.HELPER_ANNO, get_default_model=SparkNlpMultiDocumentAssembler.get_default_model, pdf_extractor_methods={'default': default_binary_to_image_config}, pdf_col_name_substitutor=substitute_recognized_text_cols, output_level=L.DOCUMENT, node=NLP_FEATURE_NODES.nodes[ A.MULTI_DOCUMENT_ASSEMBLER], description='TODO', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.MULTI_DOCUMENT_ASSEMBLER, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.MULTI_DOCUMENT_ASSEMBLER], ), A.VIT_IMAGE_CLASSIFICATION: partial(NluComponent, name=A.VIT_IMAGE_CLASSIFICATION, type=T.IMAGE_CLASSIFICATION, get_default_model=VitImageClassifier.get_default_model, get_pretrained_model=VitImageClassifier.get_pretrained_model, pdf_extractor_methods={'default': default_document_config, 'default_full': default_full_config}, pdf_col_name_substitutor=substitute_recognized_text_cols, output_level=L.DOCUMENT, node=NLP_FEATURE_NODES.nodes[ A.VIT_IMAGE_CLASSIFICATION], description='TODO', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.VIT_IMAGE_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.VIT_IMAGE_CLASSIFICATION], ), A.SWIN_IMAGE_CLASSIFICATION: partial(NluComponent, 
name=A.SWIN_IMAGE_CLASSIFICATION, type=T.IMAGE_CLASSIFICATION, get_default_model=SwinImageClassifier.get_default_model, get_pretrained_model=SwinImageClassifier.get_pretrained_model, pdf_extractor_methods={'default': default_document_config, 'default_full': default_full_config}, pdf_col_name_substitutor=substitute_recognized_text_cols, output_level=L.DOCUMENT, node=NLP_FEATURE_NODES.nodes[ A.SWIN_IMAGE_CLASSIFICATION], description='TODO', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.SWIN_IMAGE_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.SWIN_IMAGE_CLASSIFICATION], ), A.IMAGE_ASSEMBLER: partial(NluComponent, name=A.IMAGE_ASSEMBLER, type=T.HELPER_ANNO, get_default_model=SparkNlpImageAssembler.get_default_model, pdf_extractor_methods={'default': default_image_assembler_config, 'default_full': default_full_config}, pdf_col_name_substitutor=substitute_img_assembler_cols, output_level=L.DOCUMENT, node=NLP_FEATURE_NODES.nodes[A.IMAGE_ASSEMBLER], description='todo', provider=ComponentBackends.open_source, license=Licenses.open_source, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=A.IMAGE_ASSEMBLER, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.IMAGE_ASSEMBLER], applicable_file_types=['JPEG', 'PNG', 'BMP', 'WBMP', 'GIF', 'JPG', 'TIFF'] ), ######### HEALTHCARE ############## H_A.ASSERTION_DL: partial(NluComponent, name=H_A.ASSERTION_DL, type=T.CHUNK_CLASSIFIER, get_default_model=AssertionDL.get_default_model, get_pretrained_model=AssertionDL.get_pretrained_model, get_trainable_model=AssertionDL.get_default_trainable_model, pdf_extractor_methods={'default': default_assertion_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_assertion_cols, output_level=L.CHUNK, node=NLP_HC_FEATURE_NODES.nodes[H_A.ASSERTION_DL], description='Deep Learning based Assertion 
model_anno_obj that maps NER-Chunks into a pre-defined terminology.', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.ASSERTION_DL, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[H_A.ASSERTION_DL], has_storage_ref=True, is_storage_ref_consumer=True, trainable_mirror_anno=H_A.TRAINABLE_ASSERTION_DL ), H_A.TRAINABLE_ASSERTION_DL: partial(NluComponent, name=H_A.TRAINABLE_ASSERTION_DL, type=T.CHUNK_CLASSIFIER, get_default_model=AssertionDL.get_default_model, get_pretrained_model=AssertionDL.get_pretrained_model, get_trainable_model=AssertionDL.get_default_trainable_model, pdf_extractor_methods={'default': default_assertion_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_assertion_cols, output_level=L.CHUNK, node=NLP_HC_FEATURE_NODES.nodes[H_A.TRAINABLE_ASSERTION_DL], description='Trainable Deep Learning based Assertion model_anno_obj that maps NER-Chunks into a pre-defined terminology.', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.TRAINABLE_ASSERTION_DL, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[ H_A.TRAINABLE_ASSERTION_DL], has_storage_ref=True, is_storage_ref_consumer=True, trainable=True, trained_mirror_anno=H_A.ASSERTION_DL), # H_A.ASSERTION_FILTERER: partial(NluComponent, # TODO not integrated # name=H_A.ASSERTION_FILTERER, # type=T.CHUNK_FILTERER, # get_default_model=AssertionDL.get_default_model, # get_pretrained_model=AssertionDL.get_pretrained_model, # get_trainable_model=AssertionDL.get_default_trainable_model, # pdf_extractor_methods={'default': default_assertion_config, 'default_full': default_full_config, }, # pdf_col_name_substitutor=substitute_assertion_cols, # pipe_prediction_output_level=L.CHUNK, # node=NLP_HC_FEATURE_NODES.ASSERTION_DL, # description='Trainable Deep Learning based Assertion 
model_anno_obj that maps NER-Chunks into a pre-defined terminology.', # provider=ComponentBackends.hc, # license=Licenses.hc, # computation_context=ComputeContexts.spark, # output_context=ComputeContexts.spark, # jsl_anno_class_id_id=H_A.ASSERTION_FILTERER, # jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[H_A.ASSERTION_FILTERER], # # has_storage_ref=True, # is_is_storage_ref_consumer=True, # trainable=True, # trained_mirror_anno=H_A.ASSERTION_FILTERER), AssertionLogReg H_A.ASSERTION_LOG_REG: partial(NluComponent, name=H_A.ASSERTION_LOG_REG, type=T.CHUNK_CLASSIFIER, get_default_model=AssertionLogReg.get_default_model, get_pretrained_model=AssertionLogReg.get_pretrained_model, get_trainable_model=AssertionLogReg.get_default_trainable_model, pdf_extractor_methods={'default': default_assertion_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_assertion_cols, output_level=L.CHUNK, node=NLP_HC_FEATURE_NODES.nodes[H_A.ASSERTION_LOG_REG], description='Classical ML based Assertion model_anno_obj that maps NER-Chunks into a pre-defined terminology.', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.ASSERTION_LOG_REG, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[H_A.ASSERTION_LOG_REG], trained_mirror_anno=H_A.TRAINABLE_ASSERTION_LOG_REG), H_A.TRAINABLE_ASSERTION_LOG_REG: partial(NluComponent, name=H_A.TRAINABLE_ASSERTION_LOG_REG, type=T.CHUNK_CLASSIFIER, get_default_model=AssertionLogReg.get_default_model, get_pretrained_model=AssertionLogReg.get_pretrained_model, get_trainable_model=AssertionLogReg.get_default_trainable_model, pdf_extractor_methods={'default': default_assertion_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_assertion_cols, output_level=L.CHUNK, node=NLP_HC_FEATURE_NODES.nodes[H_A.TRAINABLE_ASSERTION_LOG_REG], description='Classical ML based Assertion model_anno_obj that maps 
NER-Chunks into a pre-defined terminology.', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.TRAINABLE_ASSERTION_LOG_REG, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[ H_A.TRAINABLE_ASSERTION_LOG_REG], trained_mirror_anno=H_A.ASSERTION_LOG_REG), H_A.CHUNK_MERGE: partial(NluComponent, name=H_A.CONTEXTUAL_PARSER, type=T.CHUNK_CLASSIFIER, get_default_model=ContextualParser.get_default_model, get_trainable_model=ContextualParser.get_trainable_model, # TODO method extractr method pdf_extractor_methods={'default': default_chunk_config, 'default_full': default_full_config, }, # TODO substitor pdf_col_name_substitutor=substitute_chunk_cols, output_level=L.CHUNK, node=NLP_HC_FEATURE_NODES.nodes[H_A.CHUNK_MERGE], description='Rule based entity extractor.', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.CHUNK_MERGE, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[H_A.CHUNK_MERGE], ), H_A.CONTEXTUAL_PARSER: partial(NluComponent, name=H_A.CONTEXTUAL_PARSER, type=T.CHUNK_CLASSIFIER, get_default_model=ContextualParser.get_default_model, get_trainable_model=ContextualParser.get_trainable_model, # TODO extractr method pdf_extractor_methods={'default': default_full_config, 'default_full': default_full_config, }, # TODO substitor pdf_col_name_substitutor=substitute_context_parser_cols, output_level=L.CHUNK, node=NLP_HC_FEATURE_NODES.nodes[H_A.CONTEXTUAL_PARSER], description='Rule based entity extractor.', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.CONTEXTUAL_PARSER, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[H_A.CONTEXTUAL_PARSER], ), H_A.DE_IDENTIFICATION: partial(NluComponent, name=H_A.DE_IDENTIFICATION, type=T.CHUNK_CLASSIFIER, 
get_default_model=Deidentifier.get_default_model, get_pretrained_model=Deidentifier.get_pretrained_model, pdf_extractor_methods={'default': default_de_identification_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_de_identification_cols, output_level=L.DOCUMENT, node=NLP_HC_FEATURE_NODES.nodes[H_A.DE_IDENTIFICATION], description='De-Identify named entity according to various Healthcare Data Protection standards', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.DE_IDENTIFICATION, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[H_A.DE_IDENTIFICATION], trainable_mirror_anno=H_A.TRAINABLE_DE_IDENTIFICATION ), H_A.TRAINABLE_DE_IDENTIFICATION: partial(NluComponent, # TODO WIP name=H_A.TRAINABLE_DE_IDENTIFICATION, type=T.CHUNK_CLASSIFIER, get_default_model=Deidentifier.get_default_model, get_pretrained_model=Deidentifier.get_pretrained_model, get_trainable_model=Deidentifier.get_trainable_model, pdf_extractor_methods={'default': default_de_identification_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_de_identification_cols, output_level=L.DOCUMENT, node=NLP_HC_FEATURE_NODES.nodes[H_A.TRAINABLE_DE_IDENTIFICATION], description='De-Identify named entity according to various Healthcare Data Protection standards', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.TRAINABLE_DE_IDENTIFICATION, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[ H_A.TRAINABLE_DE_IDENTIFICATION], trainable=True, trained_mirror_anno=H_A.DE_IDENTIFICATION ), H_A.DRUG_NORMALIZER: partial(NluComponent, name=H_A.DRUG_NORMALIZER, type=T.CHUNK_CLASSIFIER, get_default_model=DrugNorm.get_default_model, pdf_extractor_methods={'default': default_only_result_config, 'default_full': default_full_config, }, 
pdf_col_name_substitutor=substitute_drug_normalizer_cols, output_level=L.DOCUMENT, node=NLP_HC_FEATURE_NODES.nodes[H_A.DRUG_NORMALIZER], description='Normalizes raw clinical and crawled text which contains drug names into cleaned and standardized representation', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.DRUG_NORMALIZER, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[H_A.DRUG_NORMALIZER], ), # H_A.FEATURES_ASSEMBLER: partial(NluComponent, # TODO partially integrated. featire mpde ,ossomg # name=H_A.FEATURES_ASSEMBLER, # type=T.HELPER_ANNO, # get_default_model=SparkNLPFeatureAssembler.get_default_model, # pdf_extractor_methods={'default': default_feature_assembler_config, 'default_full': default_full_config, }, # # pdf_col_name_substitutor=substitute_drug_normalizer_cols, # TODO no substition # pipe_prediction_output_level=L.DOCUMENT, # TODO double check output level? # node=NLP_HC_FEATURE_NODES.FEATURES_ASSEMBLER, # description='Aggregated features from various annotators into one column for training generic classifiers', # provider=ComponentBackends.hc, # license=Licenses.hc, # computation_context=ComputeContexts.spark, # output_context=ComputeContexts.spark, # jsl_anno_class_id_id=H_A.FEATURES_ASSEMBLER, # jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[H_A.FEATURES_ASSEMBLER], # H_A.GENERIC_CLASSIFIER: partial(NluComponent, name=H_A.GENERIC_CLASSIFIER, type=T.DOCUMENT_CLASSIFIER, get_default_model=GenericClassifier.get_default_model, get_trainable_model=GenericClassifier.get_default_model, get_pretrained_model=GenericClassifier.get_default_model, pdf_extractor_methods={'default': default_generic_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_generic_classifier_parser_cols, output_level=L.DOCUMENT, node=NLP_HC_FEATURE_NODES.nodes[H_A.GENERIC_CLASSIFIER], description='Generic Deep Learning based 
tensorflow classifier', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.GENERIC_CLASSIFIER, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[H_A.GENERIC_CLASSIFIER], trainable_mirror_anno=H_A.TRAINABLE_GENERIC_CLASSIFIER ), H_A.TRAINABLE_GENERIC_CLASSIFIER: partial(NluComponent, name=H_A.TRAINABLE_GENERIC_CLASSIFIER, type=T.DOCUMENT_CLASSIFIER, get_default_model=GenericClassifier.get_default_model, get_trainable_model=GenericClassifier.get_default_model, get_pretrained_model=GenericClassifier.get_default_model, pdf_extractor_methods={'default': default_generic_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_generic_classifier_parser_cols, output_level=L.DOCUMENT, node=NLP_HC_FEATURE_NODES.nodes[H_A.GENERIC_CLASSIFIER], description='Generic Deep Learning based tensorflow classifier', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.TRAINABLE_GENERIC_CLASSIFIER, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[ H_A.TRAINABLE_GENERIC_CLASSIFIER], trained_mirror_anno=H_A.GENERIC_CLASSIFIER ), H_A.MEDICAL_NER: partial(NluComponent, name=H_A.MEDICAL_NER, type=T.CHUNK_CLASSIFIER, get_default_model=NERDLHealthcare.get_default_model, get_trainable_model=NERDLHealthcare.get_default_trainable_model, get_pretrained_model=NERDLHealthcare.get_pretrained_model, pdf_extractor_methods={'default': default_ner_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_ner_dl_cols, output_level=L.TOKEN, node=NLP_HC_FEATURE_NODES.nodes[H_A.MEDICAL_NER], description='Deep Learning based Medical Named Entity Recognizer (NER)', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.MEDICAL_NER, 
jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[H_A.MEDICAL_NER], trainable_mirror_anno=H_A.TRAINABLE_MEDICAL_NER, has_storage_ref=True, is_storage_ref_consumer=True ), H_A.TRAINABLE_MEDICAL_NER: partial(NluComponent, name=H_A.TRAINABLE_MEDICAL_NER, type=T.CHUNK_CLASSIFIER, get_default_model=NERDLHealthcare.get_default_model, get_trainable_model=NERDLHealthcare.get_default_model, get_pretrained_model=NERDLHealthcare.get_default_model, pdf_extractor_methods={'default': default_ner_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_ner_dl_cols, output_level=L.TOKEN, node=NLP_HC_FEATURE_NODES.nodes[H_A.TRAINABLE_MEDICAL_NER], description='Trainable Deep Learning based Medical Named Entity Recognizer (NER)', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.TRAINABLE_MEDICAL_NER, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[H_A.TRAINABLE_MEDICAL_NER], trained_mirror_anno=H_A.TRAINABLE_MEDICAL_NER, has_storage_ref=True, is_storage_ref_consumer=True ), H_A.NER_CONVERTER_INTERNAL: partial(NluComponent, name=H_A.NER_CONVERTER_INTERNAL, type=T.HELPER_ANNO, get_default_model=NerToChunkConverterLicensed.get_default_model, pdf_extractor_methods={'default': default_NER_converter_licensed_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_ner_internal_converter_cols, output_level=L.CHUNK, node=NLP_HC_FEATURE_NODES.nodes[H_A.NER_CONVERTER_INTERNAL], description='Convert NER-IOB tokens into concatenated strings (aka chunks)', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.NER_CONVERTER_INTERNAL, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[ H_A.NER_CONVERTER_INTERNAL], ), A.PARTIAL_NerConverterInternalModel: partial(NluComponent, name=A.PARTIAL_NerConverterInternalModel, 
jsl_anno_class_id=A.PARTIAL_NerConverterInternalModel, jsl_anno_py_class=ACR.JSL_anno2_py_class[ A.PARTIAL_NerConverterInternalModel], node=NLP_HC_FEATURE_NODES.nodes[H_A.NER_CONVERTER_INTERNAL], type=T.HELPER_ANNO, pdf_extractor_methods={ 'default': default_NER_converter_licensed_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_ner_internal_converter_cols, output_level=L.CHUNK, description='Not fully integrated', provider=ComponentBackends.open_source, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), H_A.RELATION_EXTRACTION: partial(NluComponent, name=H_A.RELATION_EXTRACTION, type=T.RELATION_CLASSIFIER, get_default_model=RelationExtraction.get_default_model, get_pretrained_model=RelationExtraction.get_pretrained_model, get_trainable_model=RelationExtraction.get_default_trainable_model, pdf_extractor_methods={ 'default': default_relation_extraction_positional_config, 'positional': default_relation_extraction_positional_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_relation_cols, output_level=L.RELATION, node=NLP_HC_FEATURE_NODES.nodes[H_A.RELATION_EXTRACTION], description='Classical ML model_anno_obj for predicting relation ship between entity pairs', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.RELATION_EXTRACTION, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[H_A.RELATION_EXTRACTION], trainable_mirror_anno=H_A.TRAINABLE_RELATION_EXTRACTION, has_storage_ref=True, is_storage_ref_consumer=True ), A.PARTIAL_PosologyREModel: partial(NluComponent, name=A.PARTIAL_ChunkMergeApproach, jsl_anno_class_id=A.PARTIAL_PosologyREModel, jsl_anno_py_class=ACR.JSL_anno2_py_class[A.PARTIAL_PosologyREModel], node=NLP_FEATURE_NODES.nodes[A.PARTIALLY_IMPLEMENTED], type=T.RELATION_CLASSIFIER, pdf_extractor_methods={ 'default': 
default_relation_extraction_positional_config, # 'positional': default_relation_extraction_positional_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_relation_cols, output_level=L.RELATION, description='Not fully integrated', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, ), H_A.TRAINABLE_RELATION_EXTRACTION: partial(NluComponent, name=H_A.TRAINABLE_RELATION_EXTRACTION, type=T.RELATION_CLASSIFIER, get_default_model=RelationExtraction.get_default_model, get_pretrained_model=RelationExtraction.get_pretrained_model, get_trainable_model=RelationExtraction.get_default_trainable_model, pdf_extractor_methods={ 'default': default_relation_extraction_positional_config, 'positional': default_relation_extraction_positional_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_relation_cols, output_level=L.RELATION, node=NLP_HC_FEATURE_NODES.nodes[H_A.TRAINABLE_RELATION_EXTRACTION], description='Trainable Classical ML model_anno_obj for predicting relation ship between entity pairs', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.TRAINABLE_RELATION_EXTRACTION, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[ H_A.TRAINABLE_RELATION_EXTRACTION], trained_mirror_anno=H_A.RELATION_EXTRACTION, trainable=True, has_storage_ref=True, is_storage_ref_consumer=True ), H_A.ZERO_SHOT_RELATION_EXTRACTION: partial(NluComponent, name=H_A.ZERO_SHOT_RELATION_EXTRACTION, type=T.RELATION_CLASSIFIER, get_default_model=ZeroShotRelationExtractor.get_default_model, get_pretrained_model=ZeroShotRelationExtractor.get_pretrained_model, pdf_extractor_methods={ 'default': default_relation_extraction_positional_config, 'positional': default_relation_extraction_positional_config, 'default_full': default_full_config, }, 
pdf_col_name_substitutor=substitute_relation_cols, output_level=L.RELATION, node=NLP_HC_FEATURE_NODES.nodes[H_A.ZERO_SHOT_RELATION_EXTRACTION], description='Zero-shot relation extraction model_anno_obj that leverages BertForSequenceClassificaiton to return, based on a predefined set of relation', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.ZERO_SHOT_RELATION_EXTRACTION, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[ H_A.ZERO_SHOT_RELATION_EXTRACTION], trained_mirror_anno=H_A.RELATION_EXTRACTION, ), H_A.ZERO_SHOT_NER: partial(NluComponent, name=H_A.ZERO_SHOT_NER, type=T.CHUNK_CLASSIFIER, get_default_model=ZeroShotNer.get_default_model, get_pretrained_model=ZeroShotNer.get_pretrained_model, pdf_extractor_methods={'default': default_ner_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_ner_dl_cols, output_level=L.TOKEN, node=NLP_HC_FEATURE_NODES.nodes[H_A.ZERO_SHOT_NER], description='Deep Learning based Zero SHot Named Entity Recognizer (NER)', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.ZERO_SHOT_NER, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[H_A.ZERO_SHOT_NER], ), H_A.RELATION_EXTRACTION_DL: partial(NluComponent, name=H_A.RELATION_EXTRACTION_DL, type=T.RELATION_CLASSIFIER, get_default_model=RelationExtractionDL.get_default_model, get_pretrained_model=RelationExtractionDL.get_pretrained_model, # get_trainable_model=RelationExtractionDL.get_default_trainable_model, pdf_extractor_methods={ 'default': default_relation_extraction_positional_config, 'positional': default_relation_extraction_positional_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_relation_cols, output_level=L.RELATION, node=NLP_HC_FEATURE_NODES.nodes[H_A.RELATION_EXTRACTION_DL], description='Deep Learning 
based model_anno_obj for predicting relation ship between entity pairs', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.RELATION_EXTRACTION_DL, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[ H_A.RELATION_EXTRACTION_DL], # trainable_mirror_anno=H_A.TRAINABLE_RELATION_EXTRACTION_DL ), H_A.CHUNK_MAPPER_MODEL: partial(NluComponent, name=H_A.CHUNK_MAPPER_MODEL, type=T.CHUNK_MAPPER, get_default_model=ChunkMapper.get_default_model, get_pretrained_model=ChunkMapper.get_pretrained_model, # TODO EXTRACTORS!/subs pdf_extractor_methods={'default': default_chunk_mapper_config, # 'positional': default_relation_extraction_positional_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_chunk_mapper_cols, output_level=L.CHUNK, node=NLP_HC_FEATURE_NODES.nodes[H_A.CHUNK_MAPPER_MODEL], description='Map entities into relation and metadata', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.CHUNK_MAPPER_MODEL, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[ H_A.CHUNK_MAPPER_MODEL], # trainable_mirror_anno=H_A.TRAINABLE_RELATION_EXTRACTION_DL ), # H_A.TRAINABLE_RELATION_EXTRACTION_DL: partial(NluComponent, # DOES NOT EXIST! 
# name=H_A.TRAINABLE_RELATION_EXTRACTION_DL, # type=T.RELATION_CLASSIFIER, # get_default_model=RelationExtractionDL.get_default_model, # get_pretrained_model=RelationExtractionDL.get_pretrained_model, # pdf_extractor_methods={ 'default': default_relation_extraction_config, 'positional': default_relation_extraction_positional_config, 'default_full' : default_full_config, }, # pdf_col_name_substitutor=substitute_relation_cols, # pipe_prediction_output_level=L.RELATION, # node=NLP_HC_FEATURE_NODES.TRAINABLE_RELATION_EXTRACTION_DL, # description='Trainable Deep Learning based model_anno_obj for predicting relation ship between entity pairs', # provider=ComponentBackends.hc, # license=Licenses.hc, # computation_context=ComputeContexts.spark, # output_context=ComputeContexts.spark, # jsl_anno_class_id_id=H_A.TRAINABLE_RELATION_EXTRACTION_DL, # jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[H_A.TRAINABLE_RELATION_EXTRACTION_DL], # # trained_mirror_anno=H_A.RELATION_EXTRACTION_DL, # trainable=True # ), H_A.SENTENCE_ENTITY_RESOLVER: partial(NluComponent, name=H_A.SENTENCE_ENTITY_RESOLVER, type=T.CHUNK_CLASSIFIER, get_pretrained_model=SentenceResolver.get_pretrained_model, get_trainable_model=SentenceResolver.get_default_trainable_model, pdf_extractor_methods={'default': resolver_conifg_with_metadata, 'default_full': full_resolver_config, }, pdf_col_name_substitutor=substitute_sentence_resolution_cols, output_level=L.CHUNK, node=NLP_HC_FEATURE_NODES.nodes[H_A.SENTENCE_ENTITY_RESOLVER], description='Deep Learning based entity resolver which extracts resolved entities directly from Sentence Embedding. 
No NER model_anno_obj required.', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.SENTENCE_ENTITY_RESOLVER, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[ H_A.SENTENCE_ENTITY_RESOLVER], trained_mirror_anno=H_A.TRAINABLE_SENTENCE_ENTITY_RESOLVER, is_storage_ref_consumer=True, has_storage_ref=True ), H_A.TRAINABLE_SENTENCE_ENTITY_RESOLVER: partial(NluComponent, name=H_A.TRAINABLE_SENTENCE_ENTITY_RESOLVER, type=T.CHUNK_CLASSIFIER, get_pretrained_model=SentenceResolver.get_pretrained_model, get_trainable_model=SentenceResolver.get_default_trainable_model, pdf_extractor_methods={ 'default': default_chunk_resolution_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_sentence_resolution_cols, output_level=L.RELATION, node=NLP_HC_FEATURE_NODES.nodes[ H_A.TRAINABLE_SENTENCE_ENTITY_RESOLVER], description='Trainable Deep Learning based entity resolver which extracts resolved entities directly from Sentence Embedding. 
No NER model_anno_obj required.', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.TRAINABLE_SENTENCE_ENTITY_RESOLVER, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[ H_A.TRAINABLE_SENTENCE_ENTITY_RESOLVER], trained_mirror_anno=H_A.SENTENCE_ENTITY_RESOLVER, is_storage_ref_consumer=True, trainable=True, has_storage_ref=True ), H_A.MEDICAL_BERT_FOR_TOKEN_CLASSIFICATION: partial(NluComponent, name=H_A.MEDICAL_BERT_FOR_TOKEN_CLASSIFICATION, type=T.TRANSFORMER_TOKEN_CLASSIFIER, get_default_model=TokenBertHealthcare.get_default_model, get_pretrained_model=TokenBertHealthcare.get_pretrained_model, pdf_extractor_methods={ 'default': default_token_classifier_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_transformer_token_classifier_cols, output_level=L.TOKEN, # Handled like NER model_anno_obj node=NLP_HC_FEATURE_NODES.nodes[ H_A.MEDICAL_BERT_FOR_TOKEN_CLASSIFICATION], description='MedicalBertForTokenClassification can load Bert Models with a token classification head on top (a linear layer on top of the hidden-states output) e.g. 
for Named-Entity-Recognition (NER) tasks.', provider=ComponentBackends.open_source, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.MEDICAL_BERT_FOR_TOKEN_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[ H_A.MEDICAL_BERT_FOR_TOKEN_CLASSIFICATION], ), H_A.MEDICAL_BERT_FOR_SEQUENCE_CLASSIFICATION: partial(NluComponent, name=H_A.MEDICAL_BERT_FOR_SEQUENCE_CLASSIFICATION, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=SeqBertMedicalClassifier.get_default_model, get_pretrained_model=SeqBertMedicalClassifier.get_pretrained_model, pdf_extractor_methods={ 'default': default_classifier_dl_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_seq_bert_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, # Handled like NER model_anno_obj node=NLP_HC_FEATURE_NODES.nodes[ H_A.MEDICAL_BERT_FOR_SEQUENCE_CLASSIFICATION], description='Custom Architecture John Snow labs developed, called MedicalBertForSequenceClassification. It can load BERT Models with sequence classification/regression head on top (a linear layer on top of the pooled output) e.g. 
for multi-class document classification tasks.', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.MEDICAL_BERT_FOR_SEQUENCE_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[ H_A.MEDICAL_BERT_FOR_SEQUENCE_CLASSIFICATION], ), H_A.MEDICAL_DISTILBERT_FOR_SEQUENCE_CLASSIFICATION: partial(NluComponent, name=H_A.MEDICAL_DISTILBERT_FOR_SEQUENCE_CLASSIFICATION, type=T.TRANSFORMER_SEQUENCE_CLASSIFIER, get_default_model=SeqDilstilBertMedicalClassifier.get_default_model, get_pretrained_model=SeqDilstilBertMedicalClassifier.get_pretrained_model, pdf_extractor_methods={ 'default': default_classifier_dl_config, 'default_full': default_full_config, }, pdf_col_name_substitutor=substitute_seq_bert_classifier_cols, output_level=L.INPUT_DEPENDENT_DOCUMENT_CLASSIFIER, # Handled like NER model_anno_obj node=NLP_HC_FEATURE_NODES.nodes[ H_A.MEDICAL_DISTILBERT_FOR_SEQUENCE_CLASSIFICATION], description='Custom Architecture John Snow labs developed, called MedicalDistilBertForSequenceClassification. It can load DistilBERT Models with sequence classification/regression head on top (a linear layer on top of the pooled output) e.g. for multi-class document classification tasks.', provider=ComponentBackends.hc, license=Licenses.hc, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=H_A.MEDICAL_DISTILBERT_FOR_SEQUENCE_CLASSIFICATION, jsl_anno_py_class=ACR.JSL_anno_HC_ref_2_py_class[ H_A.MEDICAL_DISTILBERT_FOR_SEQUENCE_CLASSIFICATION], ), ######### OCR ############## O_A.IMAGE2TEXT: partial(NluComponent, name=O_A.IMAGE2TEXT, type=T.TEXT_RECOGNIZER, get_default_model=Img2Text.get_default_model, pdf_extractor_methods={'default': default_text_recognizer_config}, pdf_col_name_substitutor=substitute_recognized_text_cols, # TODO substitor output_level=L.DOCUMENT, # TODO new output level IMG? Or treat as DOC? 
node=OCR_FEATURE_NODES.nodes[O_A.IMAGE2TEXT], description='Recognize text from image files', provider=ComponentBackends.ocr, license=Licenses.ocr, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=O_A.IMAGE2TEXT, jsl_anno_py_class=ACR.JSL_anno_OCR_ref_2_py_class[O_A.IMAGE2TEXT], applicable_file_types=['JPEG', 'PNG', 'BMP', 'WBMP', 'GIF', 'JPG', '.TIFF'] ), O_A.DOC2TEXT: partial(NluComponent, name=O_A.DOC2TEXT, type=T.TEXT_RECOGNIZER, get_default_model=Doc2Text.get_default_model, pdf_extractor_methods={'default': default_text_recognizer_config}, pdf_col_name_substitutor=substitute_recognized_text_cols, # TODO substitor output_level=L.DOCUMENT, # TODO new output level IMG? Or treat as DOC? node=OCR_FEATURE_NODES.nodes[O_A.DOC2TEXT], description='Recognize text from DOC/DOCX files', provider=ComponentBackends.ocr, license=Licenses.ocr, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=O_A.DOC2TEXT, jsl_anno_py_class=ACR.JSL_anno_OCR_ref_2_py_class[O_A.DOC2TEXT], applicable_file_types=['DOC', 'DOCX'] ), O_A.PDF2TEXT: partial(NluComponent, name=O_A.PDF2TEXT, type=T.TEXT_RECOGNIZER, get_default_model=Pdf2Text.get_default_model, pdf_extractor_methods={'default': default_text_recognizer_config}, pdf_col_name_substitutor=substitute_recognized_text_cols, # TODO substitor output_level=L.DOCUMENT, # TODO new output level IMG? Or treat as DOC? 
node=OCR_FEATURE_NODES.nodes[O_A.PDF2TEXT], description='Recognize text from PDF files', provider=ComponentBackends.ocr, license=Licenses.ocr, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=O_A.PDF2TEXT, jsl_anno_py_class=ACR.JSL_anno_OCR_ref_2_py_class[O_A.PDF2TEXT], applicable_file_types=['PDF'] ), O_A.BINARY2IMAGE: partial(NluComponent, name=O_A.BINARY2IMAGE, type=T.HELPER_ANNO, get_default_model=Binary2Image.get_default_model, pdf_extractor_methods={'default': default_binary_to_image_config}, pdf_col_name_substitutor=substitute_recognized_text_cols, # TODO substitor output_level=L.DOCUMENT, # TODO new output level IMG? Or treat as DOC? node=OCR_FEATURE_NODES.nodes[O_A.BINARY2IMAGE], description='Convert binary image data to OCR image Spark struct representation', provider=ComponentBackends.ocr, license=Licenses.ocr, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=O_A.BINARY2IMAGE, jsl_anno_py_class=ACR.JSL_anno_OCR_ref_2_py_class[O_A.BINARY2IMAGE], applicable_file_types=['JPEG', 'PNG', 'BMP', 'WBMP', 'GIF', 'JPG', 'TIFF'] ), O_A.PDF2TEXT_TABLE: partial(NluComponent, name=O_A.PDF2TEXT_TABLE, type=T.TABLE_RECOGNIZER, get_default_model=PDF2TextTable.get_default_model, pdf_extractor_methods={'default': default_binary_to_image_config}, # TODO EXtractor pdf_col_name_substitutor=substitute_recognized_text_cols, # TODO substitor output_level=L.DOCUMENT, node=OCR_FEATURE_NODES.nodes[O_A.PDF2TEXT_TABLE], description='Extract Tables from PDFs with have highlightable text', provider=ComponentBackends.ocr, license=Licenses.ocr, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=O_A.PDF2TEXT_TABLE, jsl_anno_py_class=ACR.JSL_anno_OCR_ref_2_py_class[O_A.PDF2TEXT_TABLE], applicable_file_types=['PDF'] ), O_A.PPT2TEXT_TABLE: partial(NluComponent, name=O_A.PPT2TEXT_TABLE, type=T.TABLE_RECOGNIZER, 
get_default_model=PPT2TextTable.get_default_model, pdf_extractor_methods={'default': default_binary_to_image_config}, # TODO EXtractor pdf_col_name_substitutor=substitute_recognized_text_cols, # TODO substitor output_level=L.DOCUMENT, node=OCR_FEATURE_NODES.nodes[O_A.PPT2TEXT_TABLE], description='Extract Tables from PPT and PPTX files', provider=ComponentBackends.ocr, license=Licenses.ocr, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=O_A.PPT2TEXT_TABLE, jsl_anno_py_class=ACR.JSL_anno_OCR_ref_2_py_class[O_A.PPT2TEXT_TABLE], applicable_file_types=['PPT', 'PPTX'] ), O_A.DOC2TEXT_TABLE: partial(NluComponent, name=O_A.DOC2TEXT_TABLE, type=T.TABLE_RECOGNIZER, get_default_model=Doc2TextTable.get_default_model, pdf_extractor_methods={'default': default_binary_to_image_config}, # TODO EXtractor pdf_col_name_substitutor=substitute_recognized_text_cols, # TODO substitor output_level=L.DOCUMENT, node=OCR_FEATURE_NODES.nodes[O_A.DOC2TEXT_TABLE], description='Extract Tables from PPT and PPTX files', provider=ComponentBackends.ocr, license=Licenses.ocr, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=O_A.DOC2TEXT_TABLE, jsl_anno_py_class=ACR.JSL_anno_OCR_ref_2_py_class[O_A.DOC2TEXT_TABLE], applicable_file_types=['DOCX', 'DOC'] ), # O_A.VISUAL_DOCUMENT_CLASSIFIER: partial(NluComponent, # name=O_A.VISUAL_DOCUMENT_CLASSIFIER, # type=T.PDF_BUILDER, # get_default_model=VisualDocClassifier.get_default_model, # get_pretrained_model=VisualDocClassifier.get_pretrained_model, # # pdf_extractor_methods={'default': default_visual_classifier_config}, # # TODO EXtractor # pdf_col_name_substitutor=substitute_recognized_text_cols, # # TODO substitor # output_level=L.DOCUMENT, # node=OCR_FEATURE_NODES.nodes[O_A.VISUAL_DOCUMENT_CLASSIFIER], # description='Convert text to PDF file', # provider=ComponentBackends.ocr, # license=Licenses.ocr, # computation_context=ComputeContexts.spark, # 
output_context=ComputeContexts.spark, # jsl_anno_class_id=O_A.VISUAL_DOCUMENT_CLASSIFIER, # jsl_anno_py_class=ACR.JSL_anno_OCR_ref_2_py_class[ # O_A.VISUAL_DOCUMENT_CLASSIFIER], # applicable_file_types=['JPG', 'JPEG'] # ), # O_A.IMAGE2HOCR: partial(NluComponent, name=O_A.IMAGE2HOCR, type=T.OCR_UTIL, get_default_model=Image2Hocr.get_default_model, # TODO EXtractor0 pdf_extractor_methods={'default': default_binary_to_image_config}, # TODO substitor pdf_col_name_substitutor=substitute_recognized_text_cols, output_level=L.DOCUMENT, node=OCR_FEATURE_NODES.nodes[O_A.IMAGE2HOCR], description='Convert text to PDF file', provider=ComponentBackends.ocr, license=Licenses.ocr, computation_context=ComputeContexts.spark, output_context=ComputeContexts.spark, jsl_anno_class_id=O_A.IMAGE2HOCR, jsl_anno_py_class=ACR.JSL_anno_OCR_ref_2_py_class[ O_A.IMAGE2HOCR], applicable_file_types=['DOCX', 'DOC'] ), }
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/universe/component_universes.py
component_universes.py
from nlu.universe.atoms import JslFeature


### ____ Annotator Feature Representations ____


class NLP_FEATURES(JslFeature):
    """Feature identifiers for annotations produced/consumed by open-source NLP annotators.

    A feature stemming from a finisher-style annotator generates NO JSL-annotation
    schema in the result dataframe — just one plain string per row.
    """
    UNKOWN = JslFeature("unkown")
    DOCUMENT = JslFeature("document")
    DOCUMENT_FROM_CHUNK = JslFeature("document_from_chunk")
    DOCUMENT_DE_IDENTIFIED = JslFeature("document_de_identified")
    DOCUMENT_RE_IDENTIFIED = JslFeature("document_re_identified")
    DOCUMENT_NORMALIZED = JslFeature("document_normalized")
    DOCUMENT_TRANSLATED = JslFeature("document_translated")
    RAW_AUDIO = JslFeature("raw_audio")
    AUDIO = JslFeature("audio")
    RECOGNIZED_SPEECH_TEXT = JslFeature("recognized_speech_text")
    ASSEMBLED_TABULAR_DATA = JslFeature("assembled_tabular_data")
    TABULAR_ANSWER = JslFeature("tabular_answer")
    RAW_QUESTION = JslFeature("question")
    # RAW_TABULAR_DATA = JslFeature("tabular_data")  # Json or csv
    RAW_QUESTION_CONTEXT = JslFeature("context")
    DOCUMENT_QUESTION = JslFeature("document_question")
    DOCUMENT_QUESTION_CONTEXT = JslFeature("document_question_context")
    CLASSIFIED_SPAN = JslFeature("classified_span")
    # GPT, T5, X2IMG (PDF2IMG, IMG2IMG, etc..)
    DOCUMENT_GENERATED = JslFeature("document_generated")
    SENTENCE = JslFeature("sentence")
    QUESTION_SENTENCES = JslFeature("question_sentences")
    TOKEN = JslFeature("token")
    COREF_TOKEN = JslFeature("coref_token")
    TOKEN_CHUNKED = JslFeature("token_chunked")
    # NOTE(review): same underlying value as TOKEN_CHUNKED — looks like a
    # copy-paste slip, but downstream code may match on the string; confirm
    # before changing.
    TOKEN_SPELL_CHECKED = JslFeature("token_chunked")
    TOKEN_LEMATIZED = JslFeature("token_lemmatized")
    TOKEN_STEMMED = JslFeature("token_stemmed")
    # NOTE(review): the next two share TOKEN_STEMMED's value — presumably
    # intended to be "token_normalized"/"token_stop_word_removed"; verify
    # against consumers before changing.
    TOKEN_NORMALIZED = JslFeature("token_stemmed")
    TOKEN_STOP_WORD_REMOVED = JslFeature("token_stemmed")
    WORDPIECE = JslFeature("wordpiece")
    ANY = JslFeature("any")
    ANY_FINISHED = JslFeature("any_finished")
    ANY_EMBEDDINGS = JslFeature("any_embeddings")
    # NOTE(review): FINISHED_EMBEDDINGS aliases WORD_EMBEDDINGS' value.
    FINISHED_EMBEDDINGS = JslFeature("word_embeddings")
    WORD_EMBEDDINGS = JslFeature("word_embeddings")
    CHUNK_EMBEDDINGS = JslFeature("chunk_embeddings")
    SENTENCE_EMBEDDINGS = JslFeature("sentence_embeddings")
    CATEGORY = JslFeature("category")
    DATE = JslFeature("date")
    MULTI_DOCUMENT_CLASSIFICATION = JslFeature('multi_document_classification')
    DOCUMENT_CLASSIFICATION = JslFeature('document_classification')
    TOKEN_CLASSIFICATION = JslFeature('token_classification')
    SEQUENCE_CLASSIFICATION = JslFeature('sequence_classification')
    SENTIMENT = JslFeature("sentiment")
    POS = JslFeature("pos")
    CHUNK = JslFeature("chunk")
    NAMED_ENTITY_IOB = JslFeature("named_entity_iob")
    NAMED_ENTITY_CONVERTED = JslFeature("named_entity_converted")
    NAMED_ENTITY_CONVERTED_AND_CONVERTED_TO_DOC = JslFeature("NAMED_ENTITY_CONVERTED_AND_CONVERTED_TO_DOC")
    NEGEX = JslFeature("negex")
    UNLABLED_DEPENDENCY = JslFeature("unlabeled_dependency")
    LABELED_DEPENDENCY = JslFeature("labeled_dependency")
    LANGUAGE = JslFeature("language")
    NODE = JslFeature("node")
    DUMMY = JslFeature("dummy")
    # NOTE(review): IMAGE maps to "spark_nlp_image" while SPARK_NLP_IMAGE maps
    # to "image" — the two look swapped; confirm against consumers.
    IMAGE = JslFeature("spark_nlp_image")
    CLASSIFIED_IMAGE = JslFeature('classified_image')
    SPARK_NLP_IMAGE = JslFeature('image')
    SPARK_NLP_FILE_PATH = JslFeature('path')


class OCR_FEATURES(JslFeature):
    """Feature identifiers for annotations produced/consumed by Spark OCR annotators.

    All OCR_* features are structs generated by the OCR lib. The BINARY_* file
    features deliberately share the "content" column name.
    """
    BINARY_IMG = JslFeature("content")           # img
    BINARY_PDF = JslFeature("content")           # pdf bin_pdf
    BINARY_PPT = JslFeature("bin_ppt")           # Powerpoint bin_ppt
    BINARY_PDF_PAGE = JslFeature("bin_pdf_page")  # just a page
    BINARY_DOCX = JslFeature("content")          # pdf2text - bin_docx
    BINARY_DOCX_PAGE = JslFeature("bin_docx_page")  # just a page
    BINARY_TOKEN = JslFeature("bin_token")
    BINARY_DICOM = JslFeature("bin_dicom")       # DICOM image
    DICOM_METADATA = JslFeature("dicom_metadata")  # DICOM metadata (json formatted)
    FILE_PATH = JslFeature("path")  # TODO this is external???
    TEXT = JslFeature("text")  # TODO should be same class as the Spark NLP ones; external?
    TEXT_ENTITY = JslFeature('text_entity')  # chunk/entity
    TEXT_DOCUMENT = JslFeature("text_document")  # TODO should be same class as the Spark NLP ones
    TEXT_DOCUMENT_TOKENIZED = JslFeature("text_tokenized")  # TODO should be same class as the Spark NLP ones
    HOCR = JslFeature("hocr")
    FALL_BACK = JslFeature("fall_back")
    OCR_IMAGE = JslFeature("ocr_image")          # OCR struct image representation
    OCR_PAGE_MATRIX = JslFeature("ocr_page_matrix")  # OCR struct page-matrix representation
    OCR_POSITIONS = JslFeature("ocr_positions")  # OCR struct POSITION representation  # TODO POSITIONS==COORDINATES?
    OCR_REGION = JslFeature("ocr_region")        # OCR array of POSITION struct
    OCR_TEXT = JslFeature("ocr_text")            # raw text extracted by an OCR anno like PDFtoImage
    OCR_TABLE = JslFeature("ocr_table")          # OCR extracted table  # TODO array of COORDINATES/POSITION?
    OCR_TABLE_CELLS = JslFeature("ocr_table_cells")  # OCR extracted table cells
    # NOTE(review): shares OCR_TABLE's value; unclear what MAPPING is — verify.
    OCR_MAPPING = JslFeature("ocr_table")
    PAGE_NUM = JslFeature("page_num")  # TODO plain int or a struct?
    JSON_FOUNDATION_ONE_REPORT = JslFeature("json_foundation_one_report")
    PREDICTION_TEXT_TABLE = JslFeature("prediction_text_lable")  # TODO plain int or a struct?
    PREDICTION_CONFIDENCE = JslFeature("prediction_confidence")  # TODO plain int or a struct?
    VISUAL_CLASSIFIER_CONFIDENCE = JslFeature("visual_classifier_confidence")
    VISUAL_CLASSIFIER_PREDICTION = JslFeature("visual_classifier_prediction")


class NLP_HC_FEATURES(JslFeature):
    """Feature identifiers (annotator types) for Healthcare NLP annotators."""
    ASSERTION = JslFeature('assertion')
    RESOLVED_ENTITY = JslFeature('resolved_entity')
    FEATURE_VECTOR = JslFeature('feature_vector')
    MAPPED_CHUNK = JslFeature('mapped_chunk')
    DISAMBIGUATION = JslFeature('disambiguation')
    RELATION = JslFeature('relation')
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/universe/feature_universes.py
feature_universes.py
from nlu.universe.atoms import LicenseType, NlpLevel


class NLP_LEVELS(NlpLevel):
    """Canonical NLP output levels.

    Naming convention:
      * ``XXX_SUPER`` — an N-to-M mapping with M <= N,
      * ``XXX_SUB``   — an N-to-M mapping with M >= N,
      * no prefix     — an N-to-N mapping is expected.
    """
    DOCUMENT = NlpLevel('document')
    CHUNK = NlpLevel('chunk')
    SENTENCE = NlpLevel('sentence')
    TOKEN = NlpLevel('token')
    CO_REFERENCE = NlpLevel('coreference')
    RELATION = NlpLevel('relation')
    MULTI_TOKEN_CLASSIFIER = NlpLevel('multi_token_classifier')
    AUDIO_SERIES = NlpLevel('audio_series')
    IMAGE = NlpLevel("image")
    INPUT_DEPENDENT_DOCUMENT_CLASSIFIER = NlpLevel('input_dependent_document_classifier')
    INPUT_DEPENDENT_DOCUMENT_EMBEDDING = NlpLevel('input_dependent_document_embedding')
    # Not used for now
    # NGRAM_CHUNK = NlpLevel('NGRAM_CHUNK')
    # SUB_TOKEN = NlpLevel('sub_token')
    # SUPER_TOKEN = NlpLevel('super_token')
    # SUPER_CHUNK = NlpLevel('super_chunk')
    # SUB_CHUNK = NlpLevel('sub_chunk')
    # POS_CHUNK = NlpLevel('POS_CHUNK')
    # KEYWORD_CHUNK = NlpLevel('KEYWORD_CHUNK')
    # NER_CHUNK = NlpLevel("ner_chunk")


class OCR_OUTPUT_LEVELS:
    """Output granularities for OCR results.

    Pages behave like tokens; the whole book/file is the full document.
    """
    # One output row per PAGE of each input document; 2 PDFs x 5 pages -> 10 rows (1-to-many).
    PAGES = 'pages'
    # One output row per input document; 2 PDFs x 5 pages -> 2 rows (1-to-1).
    FILE = 'file'
    # One output row per detected object; 2 PDFs x 5 cats -> 10 rows. ---> REGION or not?
    OBJECT = 'object'
    # One output row per OCR'd character; 2 PDFs x 100 chars -> 100 rows.
    CHARACTER = 'character'
    # One pandas DataFrame per table.
    TABLE = 'table'
class AnnoTypes:
    """Coarse functional categories for annotators.

    Values are plain identifier strings; DOCUMENT_XX categories can be
    substituted for SENTENCE-level equivalents.
    """
    CHUNK_MAPPER = 'chunk_mapper'
    TOKENIZER = 'tokenizer'
    TOKEN_CLASSIFIER = 'token_classifier'
    QUESTION_SPAN_CLASSIFIER = 'span_classifier'
    QUESTION_TABLE_ANSWERER = 'question_table_answerer'
    # Transformer classifiers can be token level but also NER level.
    TRANSFORMER_TOKEN_CLASSIFIER = 'transformer_token_classifier'
    TRANSFORMER_SEQUENCE_CLASSIFIER = 'transformer_sequence_classifier'
    # ASSERTION / NER / CONTEXT_PARSER GENERATE these but DO NOT TAKE THEM IN!
    # Split into NER-CHUNK classifier, etc.?
    CHUNK_CLASSIFIER = 'chunk_classifier'
    DOCUMENT_CLASSIFIER = 'document_classifier'
    RELATION_CLASSIFIER = 'relation_classifier'  # pairs of chunks
    TOKEN_EMBEDDING = 'token_embedding'
    CHUNK_EMBEDDING = 'chunk_embedding'
    DOCUMENT_EMBEDDING = 'document_embedding'
    SENTENCE_DETECTOR = 'sentence_detector'
    SENTENCE_EMBEDDING = 'sentence_embedding'
    SPELL_CHECKER = 'spell_checker'
    HELPER_ANNO = 'helper_anno'
    SPEECH_RECOGNIZER = 'speech_recognizer'
    TEXT_NORMALIZER = 'text_normalizer'
    TOKEN_NORMALIZER = 'token_normalizer'
    # TODO chunk sub-classes? I.e. POS-CHUNKS, NER-CHUNKS, KEYWORD-CHUNKS,
    # RESOLUTION-CHUNKS, etc.?
    # NOTE(review): shares TOKEN_NORMALIZER's value — looks like a copy-paste
    # slip (expected something like 'pos_regex_chunker'); confirm consumers
    # before changing.
    pos_regex_chunker = 'token_normalizer'
    CHUNK_FILTERER = 'chunk_filterer'
    TEXT_RECOGNIZER = 'text_recognizer'
    TABLE_RECOGNIZER = 'table_recognizer'
    # NOTE(review): shares TABLE_RECOGNIZER's value — presumably intended to be
    # 'pdf_builder'; confirm consumers before changing.
    PDF_BUILDER = 'table_recognizer'
    OCR_UTIL = 'ocr_util'
    IMAGE_CLASSIFICATION = 'image_classifier'
    PARTIALLY_READY = 'partially_ready'
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/universe/logic_universes.py
logic_universes.py
from typing import Dict

from nlu.universe.atoms import JslAnnoId, JslAnnoPyClass
from nlu.universe.feature_node_ids import OCR_NODE_IDS, NLP_NODE_IDS, NLP_HC_NODE_IDS


class AnnoClassRef:
    """Reference of every annotator class name in OS/HC/OCR.

    Maps each ``JslAnnoId`` to the corresponding Python/Java class name in
    Spark NLP, Spark NLP for Healthcare, and Spark OCR.

    Fix vs. previous revision: the OS dict literal contained ~17 duplicate
    key/value pairs (identical key AND value, so later entries silently
    overwrote earlier identical ones); the duplicates have been removed with
    no behavior change.
    """
    A_O = OCR_NODE_IDS
    A_H = None  # NLP_HC_ANNO
    A_N = NLP_NODE_IDS
    HC_A_N = NLP_HC_NODE_IDS

    # Map AnnoID to PyClass (open-source Spark NLP annotators).
    JSL_anno2_py_class: Dict[JslAnnoId, JslAnnoPyClass] = {
        A_N.HUBERT_FOR_CTC: 'HubertForCTC',
        A_N.CAMEMBERT_FOR_QUESTION_ANSWERING: 'CamemBertForQuestionAnswering',
        A_N.SWIN_IMAGE_CLASSIFICATION: 'SwinForImageClassification',
        A_N.CAMEMBERT_FOR_SEQUENCE_CLASSIFICATION: 'CamemBertForSequenceClassification',
        A_N.TAPAS_FOR_QA: 'TapasForQuestionAnswering',
        A_N.TABLE_ASSEMBLER: 'TableAssembler',
        A_N.CAMEMBERT_FOR_TOKEN_CLASSIFICATION: 'CamemBertForTokenClassification',
        A_N.COREF_SPAN_BERT: 'SpanBertCorefModel',
        A_N.PARTIALLY_IMPLEMENTED: 'PartiallyIntegrated',
        A_N.BIG_TEXT_MATCHER: 'BigTextMatcher',
        A_N.CHUNK2DOC: 'Chunk2Doc',
        A_N.CHUNK_EMBEDDINGS_CONVERTER: 'ChunkEmbeddings',
        # NOTE(review): CHUNK_TOKENIZER maps to plain 'Tokenizer' — possibly
        # intended to be 'ChunkTokenizer'; confirm before changing.
        A_N.CHUNK_TOKENIZER: 'Tokenizer',
        A_N.CHUNKER: 'Chunker',
        A_N.CLASSIFIER_DL: 'ClassifierDLModel',
        A_N.CONTEXT_SPELL_CHECKER: 'ContextSpellCheckerModel',
        A_N.DATE_MATCHER: 'DateMatcher',
        A_N.UNTYPED_DEPENDENCY_PARSER: 'DependencyParserModel',
        A_N.TYPED_DEPENDENCY_PARSER: 'TypedDependencyParserModel',
        A_N.DOC2CHUNK: 'Doc2Chunk',
        A_N.DOC2VEC: 'Doc2VecModel',
        A_N.TRAIANBLE_DOC2VEC: 'Doc2VecApproach',
        A_N.MULTI_DOCUMENT_ASSEMBLER: 'MultiDocumentAssembler',
        A_N.ALBERT_FOR_QUESTION_ANSWERING: 'AlbertForQuestionAnswering',
        A_N.BERT_FOR_QUESTION_ANSWERING: 'BertForQuestionAnswering',
        A_N.DE_BERTA_FOR_QUESTION_ANSWERING: 'DeBertaForQuestionAnswering',
        A_N.DISTIL_BERT_FOR_QUESTION_ANSWERING: 'DistilBertForQuestionAnswering',
        A_N.LONGFORMER_FOR_QUESTION_ANSWERING: 'LongformerForQuestionAnswering',
        A_N.ROBERTA_FOR_QUESTION_ANSWERING: 'RoBertaForQuestionAnswering',
        A_N.XLM_ROBERTA_FOR_QUESTION_ANSWERING: 'XlmRoBertaForQuestionAnswering',
        A_N.AUDIO_ASSEMBLER: 'AudioAssembler',
        A_N.WAV2VEC_FOR_CTC: 'Wav2Vec2ForCTC',
        A_N.DOCUMENT_ASSEMBLER: 'DocumentAssembler',
        A_N.IMAGE_ASSEMBLER: 'ImageAssembler',
        A_N.DOCUMENT_NORMALIZER: 'DocumentNormalizer',
        A_N.EMBEDDINGS_FINISHER: 'EmbeddingsFinisher',
        A_N.ENTITY_RULER: 'EntityRulerModel',
        A_N.FINISHER: 'Finisher',
        A_N.GRAPH_EXTRACTION: 'GraphExtraction',
        A_N.GRAPH_FINISHER: 'GraphFinisher',
        A_N.LANGUAGE_DETECTOR_DL: 'LanguageDetectorDL',
        A_N.LEMMATIZER: 'LemmatizerModel',
        A_N.MULTI_CLASSIFIER_DL: 'MultiClassifierDLModel',
        A_N.MULTI_DATE_MATCHER: 'MultiDateMatcher',
        A_N.N_GRAMM_GENERATOR: 'NGramGenerator',
        A_N.NER_CONVERTER: 'NerConverter',
        A_N.NER_CRF: 'NerCrfModel',
        A_N.NER_DL: 'NerDLModel',
        A_N.NER_OVERWRITER: 'NerOverwriter',
        A_N.NORMALIZER: 'NormalizerModel',
        A_N.NORVIG_SPELL_CHECKER: 'NorvigSweetingModel',
        A_N.POS: 'PerceptronModel',
        A_N.RECURISVE_TOKENIZER: 'RecursiveTokenizerModel',
        A_N.REGEX_MATCHER: 'RegexMatcherModel',
        A_N.REGEX_TOKENIZER: 'RegexTokenizer',
        A_N.SENTENCE_DETECTOR: 'SentenceDetector',
        A_N.SENTENCE_DETECTOR_DL: 'SentenceDetectorDLModel',
        A_N.SENTENCE_EMBEDDINGS_CONVERTER: 'SentenceEmbeddings',
        A_N.STEMMER: 'Stemmer',
        A_N.STOP_WORDS_CLEANER: 'StopWordsCleaner',
        A_N.SYMMETRIC_DELETE_SPELLCHECKER: 'SymmetricDeleteModel',
        A_N.TEXT_MATCHER: 'TextMatcherModel',
        A_N.TOKEN2CHUNK: 'Token2Chunk',
        A_N.TOKEN_ASSEMBLER: 'TokenAssembler',
        A_N.TOKENIZER: 'TokenizerModel',
        A_N.SENTIMENT_DL: 'SentimentDLModel',
        A_N.SENTIMENT_DETECTOR: 'SentimentDetectorModel',
        A_N.VIVEKN_SENTIMENT: 'ViveknSentimentModel',
        A_N.WORD_EMBEDDINGS: 'WordEmbeddingsModel',
        A_N.WORD_SEGMENTER: 'WordSegmenterModel',
        A_N.YAKE_KEYWORD_EXTRACTION: 'YakeKeywordExtraction',
        A_N.ALBERT_EMBEDDINGS: 'AlbertEmbeddings',
        A_N.ALBERT_FOR_TOKEN_CLASSIFICATION: 'AlbertForTokenClassification',
        A_N.BERT_EMBEDDINGS: 'BertEmbeddings',
        A_N.BERT_FOR_TOKEN_CLASSIFICATION: 'BertForTokenClassification',
        A_N.BERT_SENTENCE_EMBEDDINGS: 'BertSentenceEmbeddings',
        A_N.DISTIL_BERT_EMBEDDINGS: 'DistilBertEmbeddings',
        A_N.DISTIL_BERT_FOR_SEQUENCE_CLASSIFICATION: 'DistilBertForSequenceClassification',
        A_N.DISTIL_BERT_FOR_ZERO_SHOT_CLASSIFICATION: 'DistilBertForZeroShotClassification',
        A_N.BERT_FOR_ZERO_SHOT_CLASSIFICATION: 'BertForZeroShotClassification',
        A_N.ROBERTA_FOR_ZERO_SHOT_CLASSIFICATION: 'RoBertaForZeroShotClassification',
        A_N.BERT_FOR_SEQUENCE_CLASSIFICATION: 'BertForSequenceClassification',
        A_N.ELMO_EMBEDDINGS: 'ElmoEmbeddings',
        A_N.LONGFORMER_EMBEDDINGS: 'LongformerEmbeddings',
        A_N.LONGFORMER_FOR_TOKEN_CLASSIFICATION: 'LongformerForTokenClassification',
        A_N.MARIAN_TRANSFORMER: 'MarianTransformer',
        A_N.ROBERTA_EMBEDDINGS: 'RoBertaEmbeddings',
        A_N.ROBERTA_FOR_TOKEN_CLASSIFICATION: 'RoBertaForTokenClassification',
        A_N.ROBERTA_SENTENCE_EMBEDDINGS: 'RoBertaSentenceEmbeddings',
        A_N.T5_TRANSFORMER: 'T5Transformer',
        A_N.UNIVERSAL_SENTENCE_ENCODER: 'UniversalSentenceEncoder',
        A_N.XLM_ROBERTA_EMBEDDINGS: 'XlmRoBertaEmbeddings',
        A_N.XLM_ROBERTA_FOR_TOKEN_CLASSIFICATION: 'XlmRoBertaForTokenClassification',
        A_N.XLM_ROBERTA_SENTENCE_EMBEDDINGS: 'XlmRoBertaSentenceEmbeddings',
        A_N.XLNET_EMBEDDINGS: 'XlnetEmbeddings',
        A_N.XLNET_FOR_TOKEN_CLASSIFICATION: 'XlnetForTokenClassification',
        A_N.XLM_ROBERTA_FOR_SEQUENCE_CLASSIFICATION: 'XlmRoBertaForSequenceClassification',
        A_N.ROBERTA_FOR_SEQUENCE_CLASSIFICATION: 'RoBertaForSequenceClassification',
        A_N.LONGFORMER_FOR_SEQUENCE_CLASSIFICATION: 'LongformerForSequenceClassification',
        A_N.ALBERT_FOR_SEQUENCE_CLASSIFICATION: 'AlbertForSequenceClassification',
        A_N.XLNET_FOR_SEQUENCE_CLASSIFICATION: 'XlnetForSequenceClassification',
        A_N.GPT2: 'GPT2Transformer',
        A_N.DEBERTA_WORD_EMBEDDINGS: 'DeBertaEmbeddings',
        A_N.DEBERTA_FOR_TOKEN_CLASSIFICATION: 'DeBertaForTokenClassification',
        A_N.CAMEMBERT_EMBEDDINGS: 'CamemBertEmbeddings',
        A_N.TRAINABLE_VIVEKN_SENTIMENT: 'ViveknSentimentApproach',
        A_N.TRAINABLE_SENTIMENT: 'SentimentDetector',
        A_N.TRAINABLE_SENTIMENT_DL: 'SentimentDLApproach',
        A_N.TRAINABLE_CLASSIFIER_DL: 'ClassifierDLApproach',
        A_N.TRAINABLE_MULTI_CLASSIFIER_DL: 'MultiClassifierDLApproach',
        A_N.TRAINABLE_NER_DL: 'NerDLApproach',
        A_N.TRAINABLE_NER_CRF: 'NerCrfApproach',
        A_N.TRAINABLE_POS: 'PerceptronApproach',
        A_N.TRAINABLE_DEP_PARSE_TYPED: 'TypedDependencyParserApproach',
        A_N.TRAINABLE_DEP_PARSE_UN_TYPED: 'DependencyParserApproach',
        A_N.TRAINABLE_DOC2VEC: 'Doc2VecApproach',
        A_N.TRAINABLE_ENTITY_RULER: 'EntityRulerApproach',
        A_N.TRAINABLE_LEMMATIZER: 'Lemmatizer',
        A_N.TRAINABLE_NORMALIZER: 'Normalizer',
        A_N.TRAINABLE_NORVIG_SPELL_CHECKER: 'NorvigSweetingApproach',
        A_N.TRAINABLE_RECURISVE_TOKENIZER: 'RecursiveTokenizer',
        A_N.TRAINABLE_REGEX_MATCHER: 'RegexMatcher',
        A_N.TRAINABLE_SENTENCE_DETECTOR_DL: 'SentenceDetectorDLApproach',
        A_N.TRAINABLE_WORD_EMBEDDINGS: 'WordEmbeddings',
        A_N.TRAINABLE_SYMMETRIC_DELETE_SPELLCHECKER: 'SymmetricDeleteApproach',
        A_N.TRAINABLE_TEXT_MATCHER: 'TextMatcher',
        A_N.TRAINABLE_TOKENIZER: 'Tokenizer',
        A_N.TRAINABLE_WORD_SEGMENTER: 'WordSegmenterApproach',
        A_N.DISTIL_BERT_FOR_TOKEN_CLASSIFICATION: 'DistilBertForTokenClassification',
        A_N.WORD_2_VEC: 'Word2VecModel',
        A_N.DEBERTA_FOR_SEQUENCE_CLASSIFICATION: 'DeBertaForSequenceClassification',
        A_N.BERT_SENTENCE_CHUNK_EMBEDDINGS: 'BertSentenceChunkEmbeddings',
        # Partially-integrated annotators.
        A_N.PARTIAL_AssertionFilterer: 'AssertionFilterer',
        A_N.PARTIAL_ChunkConverter: 'ChunkConverter',
        A_N.PARTIAL_ChunkKeyPhraseExtraction: 'ChunkKeyPhraseExtraction',
        A_N.PARTIAL_ChunkSentenceSplitter: 'ChunkSentenceSplitter',
        A_N.PARTIAL_ChunkFiltererApproach: 'ChunkFiltererApproach',
        A_N.PARTIAL_ChunkFilterer: 'ChunkFilterer',
        A_N.PARTIAL_ChunkMapperApproach: 'ChunkMapperApproach',
        A_N.PARTIAL_ChunkMapperFilterer: 'ChunkMapperFilterer',
        A_N.PARTIAL_DocumentLogRegClassifierApproach: 'DocumentLogRegClassifierApproach',
        A_N.PARTIAL_DocumentLogRegClassifierModel: 'DocumentLogRegClassifierModel',
        A_N.PARTIAL_ContextualParserApproach: 'ContextualParserApproach',
        A_N.PARTIAL_ReIdentification: 'ReIdentification',
        A_N.PARTIAL_NerDisambiguator: 'NerDisambiguator',
        A_N.PARTIAL_NerDisambiguatorModel: 'NerDisambiguatorModel',
        A_N.PARTIAL_AverageEmbeddings: 'AverageEmbeddings',
        A_N.PARTIAL_EntityChunkEmbeddings: 'EntityChunkEmbeddings',
        A_N.PARTIAL_ChunkMergeApproach: 'ChunkMergeApproach',
        A_N.PARTIAL_IOBTagger: 'IOBTagger',
        A_N.PARTIAL_NerChunker: 'NerChunker',
        A_N.PARTIAL_NerConverterInternalModel: 'NerConverterInternalModel',
        A_N.PARTIAL_DateNormalizer: 'DateNormalizer',
        A_N.PARTIAL_PosologyREModel: 'PosologyREModel',
        A_N.PARTIAL_RENerChunksFilter: 'RENerChunksFilter',
        A_N.PARTIAL_ResolverMerger: 'ResolverMerger',
        A_N.PARTIAL_AnnotationMerger: 'AnnotationMerger',
        A_N.PARTIAL_Router: 'Router',
        A_N.PARTIAL_Word2VecApproach: 'Word2VecApproach',
        A_N.PARTIAL_WordEmbeddings: 'WordEmbeddings',
        A_N.PARTIAL_EntityRulerApproach: 'EntityRulerApproach',
        A_N.PARTIAL_EntityRulerModel: 'EntityRulerModel',
        A_N.PARTIAL_TextMatcherModel: 'TextMatcherModel',
        A_N.PARTIAL_BigTextMatcher: 'BigTextMatcher',
        A_N.PARTIAL_BigTextMatcherModel: 'BigTextMatcherModel',
        A_N.PARTIAL_DateMatcher: 'DateMatcher',
        A_N.PARTIAL_MultiDateMatcher: 'MultiDateMatcher',
        A_N.PARTIAL_RegexMatcher: 'RegexMatcher',
        A_N.PARTIAL_TextMatcher: 'TextMatcher',
        A_N.PARTIAL_NerApproach: 'NerApproach',
        A_N.PARTIAL_NerCrfApproach: 'NerCrfApproach',
        A_N.PARTIAL_NerOverwriter: 'NerOverwriter',
        A_N.PARTIAL_DependencyParserApproach: 'DependencyParserApproach',
        A_N.PARTIAL_TypedDependencyParserApproach: 'TypedDependencyParserApproach',
        A_N.PARTIAL_SentenceDetectorDLApproach: 'SentenceDetectorDLApproach',
        A_N.PARTIAL_SentimentDetector: 'SentimentDetector',
        A_N.PARTIAL_ViveknSentimentApproach: 'ViveknSentimentApproach',
        A_N.PARTIAL_ContextSpellCheckerApproach: 'ContextSpellCheckerApproach',
        A_N.PARTIAL_NorvigSweetingApproach: 'NorvigSweetingApproach',
        A_N.PARTIAL_SymmetricDeleteApproach: 'SymmetricDeleteApproach',
        A_N.PARTIAL_ChunkTokenizer: 'ChunkTokenizer',
        A_N.PARTIAL_ChunkTokenizerModel: 'ChunkTokenizerModel',
        A_N.PARTIAL_RecursiveTokenizer: 'RecursiveTokenizer',
        A_N.PARTIAL_RecursiveTokenizerModel: 'RecursiveTokenizerModel',
        A_N.PARTIAL_Token2Chunk: 'Token2Chunk',
        A_N.PARTIAL_WordSegmenterApproach: 'WordSegmenterApproach',
        A_N.PARTIAL_GraphExtraction: 'GraphExtraction',
        A_N.PARTIAL_Lemmatizer: 'Lemmatizer',
        A_N.PARTIAL_Normalizer: 'Normalizer',
        A_N.VIT_IMAGE_CLASSIFICATION: 'ViTForImageClassification',
    }

    # Map AnnoID to PyClass (Healthcare annotators).
    JSL_anno_HC_ref_2_py_class: Dict[JslAnnoId, JslAnnoPyClass] = {
        HC_A_N.ZERO_SHOT_NER: 'ZeroShotNerModel',
        HC_A_N.CHUNK_MAPPER_MODEL: 'ChunkMapperModel',
        HC_A_N.ASSERTION_DL: 'AssertionDLModel',
        HC_A_N.TRAINABLE_ASSERTION_DL: 'AssertionDLApproach',
        HC_A_N.ASSERTION_FILTERER: 'AssertionFilterer',
        HC_A_N.ASSERTION_LOG_REG: 'AssertionLogRegModel',
        HC_A_N.TRAINABLE_ASSERTION_LOG_REG: 'AssertionLogRegApproach',
        HC_A_N.CHUNK2TOKEN: 'Chunk2Token',
        HC_A_N.CHUNK_ENTITY_RESOLVER: '',  # DEPRECATED
        HC_A_N.TRAINABLE_CHUNK_ENTITY_RESOLVER: '',  # DEPRECATED
        HC_A_N.CHUNK_FILTERER: 'ChunkFilterer',
        HC_A_N.TRAINABLE_CHUNK_FILTERER: 'ChunkFiltererApproach',
        HC_A_N.CHUNK_KEY_PHRASE_EXTRACTION: 'ChunkKeyPhraseExtraction',
        HC_A_N.CHUNK_MERGE: 'ChunkMergeModel',
        HC_A_N.TRAINABLE_CHUNK_MERGE: 'ChunkMergeApproach',
        HC_A_N.CONTEXTUAL_PARSER: 'ContextualParserModel',
        HC_A_N.TRAIANBLE_CONTEXTUAL_PARSER: 'ContextualParserApproach',
        HC_A_N.DE_IDENTIFICATION: 'DeIdentificationModel',
        HC_A_N.TRAINABLE_DE_IDENTIFICATION: 'DeIdentification',
        HC_A_N.DOCUMENT_LOG_REG_CLASSIFIER: 'DocumentLogRegClassifierModel',
        HC_A_N.TRAINABLE_DOCUMENT_LOG_REG_CLASSIFIER: 'DocumentLogRegClassifierApproach',
        HC_A_N.DRUG_NORMALIZER: 'DrugNormalizer',
        # HC_A_N.FEATURES_ASSEMBLER : '',  # TODO spark calss>?
        HC_A_N.GENERIC_CLASSIFIER: 'GenericClassifierModel',
        HC_A_N.TRAINABLE_GENERIC_CLASSIFIER: 'GenericClassifierApproach',
        HC_A_N.IOB_TAGGER: 'IOBTagger',
        HC_A_N.MEDICAL_NER: 'MedicalNerModel',
        HC_A_N.TRAINABLE_MEDICAL_NER: 'MedicalNerApproach',
        HC_A_N.NER_CHUNKER: 'NerChunker',
        HC_A_N.NER_CONVERTER_INTERNAL: 'NerConverterInternal',
        HC_A_N.NER_DISAMBIGUATOR: 'NerDisambiguatorModel',
        HC_A_N.TRAINABLE_NER_DISAMBIGUATOR: 'NerDisambiguatorModel',
        # NOTE(review): mapping a RE-ner-chunks filter to 'NerDisambiguator'
        # looks wrong (expected 'RENerChunksFilter'); confirm before changing.
        HC_A_N.RELATION_NER_CHUNKS_FILTERER: 'NerDisambiguator',
        HC_A_N.RE_IDENTIFICATION: 'ReIdentification',
        HC_A_N.RELATION_EXTRACTION: 'RelationExtractionModel',
        HC_A_N.TRAINABLE_RELATION_EXTRACTION: 'RelationExtractionApproach',
        HC_A_N.RELATION_EXTRACTION_DL: 'RelationExtractionDLModel',
        # HC_A_N.TRAINABLE_RELATION_EXTRACTION_DL : '',
        HC_A_N.SENTENCE_ENTITY_RESOLVER: 'SentenceEntityResolverModel',
        HC_A_N.TRAINABLE_SENTENCE_ENTITY_RESOLVER: 'SentenceEntityResolverApproach',
        HC_A_N.MEDICAL_BERT_FOR_TOKEN_CLASSIFICATION: 'MedicalBertForTokenClassifier',
        HC_A_N.MEDICAL_BERT_FOR_SEQUENCE_CLASSIFICATION: 'MedicalBertForSequenceClassification',
        HC_A_N.MEDICAL_DISTILBERT_FOR_SEQUENCE_CLASSIFICATION: 'MedicalDistilBertForSequenceClassification',
        HC_A_N.ENTITY_CHUNK_EMBEDDING: 'EntityChunkEmbeddings',
        HC_A_N.ZERO_SHOT_RELATION_EXTRACTION: 'ZeroShotRelationExtractionModel',
    }

    # Map AnnoID to PyClass (OCR annotators).
    JSL_anno_OCR_ref_2_py_class: Dict[JslAnnoId, JslAnnoPyClass] = {
        OCR_NODE_IDS.IMAGE2TEXT: 'ImageToText',
        OCR_NODE_IDS.PDF2TEXT: 'PdfToText',
        OCR_NODE_IDS.DOC2TEXT: 'DocToText',
        OCR_NODE_IDS.BINARY2IMAGE: 'BinaryToImage',
        OCR_NODE_IDS.PDF2TEXT_TABLE: 'PdfToTextTable',
        OCR_NODE_IDS.PPT2TEXT_TABLE: 'PptToTextTable',
        OCR_NODE_IDS.DOC2TEXT_TABLE: 'DocToTextTable',
        OCR_NODE_IDS.TEXT2PDF: 'TextToPdf',
        OCR_NODE_IDS.VISUAL_DOCUMENT_CLASSIFIER: 'VisualDocumentClassifier',
        OCR_NODE_IDS.IMAGE2HOCR: 'ImageToHocr',
    }

    @staticmethod
    def get_os_pyclass_2_anno_id_dict():
        """Return the flipped OS mapping: PyClass -> AnnoID.

        Duplicate values in the forward dict collapse to a single key here.
        """
        JSL_py_class_2_anno_id: Dict[JslAnnoPyClass, JslAnnoId] = {
            AnnoClassRef.JSL_anno2_py_class[k]: k for k in AnnoClassRef.JSL_anno2_py_class}
        return JSL_py_class_2_anno_id

    @staticmethod
    def get_hc_pyclass_2_anno_id_dict():
        """Return the flipped HC mapping: PyClass -> AnnoID.

        Duplicate values in the forward dict (e.g. the two deprecated ''
        entries) collapse to a single key here — pre-existing behavior.
        """
        # Annotation fixed: the flipped dict maps PyClass -> AnnoID.
        JSL_HC_py_class_2_anno_id: Dict[JslAnnoPyClass, JslAnnoId] = {
            AnnoClassRef.JSL_anno_HC_ref_2_py_class[k]: k for k in AnnoClassRef.JSL_anno_HC_ref_2_py_class}
        return JSL_HC_py_class_2_anno_id

    @staticmethod
    def get_ocr_pyclass_2_anno_id_dict():
        """Return the flipped OCR mapping: PyClass -> AnnoID."""
        # Annotation fixed: the flipped dict maps PyClass -> AnnoID.
        JSL_OCR_py_class_2_anno_id: Dict[JslAnnoPyClass, JslAnnoId] = {
            AnnoClassRef.JSL_anno_OCR_ref_2_py_class[k]: k for k in AnnoClassRef.JSL_anno_OCR_ref_2_py_class}
        return JSL_OCR_py_class_2_anno_id


# Flipped lookups attached to the class at import time: PyClass -> AnnoID.
# (Annotations fixed on the HC/OCR statements — they previously declared the
# un-flipped Dict[JslAnnoId, JslAnnoPyClass] direction.)
AnnoClassRef.JSL_OS_py_class_2_anno_id: Dict[JslAnnoPyClass, JslAnnoId] = {
    AnnoClassRef.JSL_anno2_py_class[k]: k for k in AnnoClassRef.JSL_anno2_py_class}
AnnoClassRef.JSL_HC_py_class_2_anno_id: Dict[JslAnnoPyClass, JslAnnoId] = {
    AnnoClassRef.JSL_anno_HC_ref_2_py_class[k]: k for k in AnnoClassRef.JSL_anno_HC_ref_2_py_class}
AnnoClassRef.JSL_OCR_py_class_2_anno_id: Dict[JslAnnoPyClass, JslAnnoId] = {
    AnnoClassRef.JSL_anno_OCR_ref_2_py_class[k]: k for k in
    AnnoClassRef.JSL_anno_OCR_ref_2_py_class}
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/universe/annotator_class_universe.py
annotator_class_universe.py
from nlu.universe.atoms import JslAnnoId


class NLP_NODE_IDS:
    """All available Feature nodes in NLP.

    Used to cast the pipeline dependency resolution algorithm into an abstract graph.
    Each class attribute is a unique ``JslAnnoId`` handle for one open-source
    Spark NLP annotator, or a NLU-internal placeholder (the ``PARTIAL_*`` entries
    mark annotators that are only partially integrated).

    NOTE: misspelled attribute names (``TRAIANBLE_DOC2VEC``, ``RECURISVE_TOKENIZER``)
    and misspelled id strings (``'entitiy_ruler'``, ``'camenbert_embeddings'``) are
    intentionally preserved: the names are public API and the strings are runtime
    identifiers that other parts of the library key on. Exact-duplicate assignments
    present in an earlier revision were removed (they were dead stores).
    """
    # Visual Document Understanding
    BIG_TEXT_MATCHER = JslAnnoId('big_text_matcher')
    CHUNK2DOC = JslAnnoId('chunk2doc')
    CHUNK_EMBEDDINGS_CONVERTER = JslAnnoId('chunk_embeddings_converter')
    CHUNK_TOKENIZER = JslAnnoId('chunk_tokenizer')
    CHUNKER = JslAnnoId('chunker')
    CLASSIFIER_DL = JslAnnoId('classifier_dl')
    CONTEXT_SPELL_CHECKER = JslAnnoId('context_spell_checker')
    DATE_MATCHER = JslAnnoId('date_matcher')
    UNTYPED_DEPENDENCY_PARSER = JslAnnoId('untyped_dependency_parser')
    TYPED_DEPENDENCY_PARSER = JslAnnoId('typed_dependency_parser')
    DOC2CHUNK = JslAnnoId('doc2chunk')
    DOC2VEC = JslAnnoId('doc2vec')
    # Misspelled name kept for backward compatibility; same id as TRAINABLE_DOC2VEC below.
    TRAIANBLE_DOC2VEC = JslAnnoId('trainable_doc2vec')
    DOCUMENT_ASSEMBLER = JslAnnoId('document_assembler')
    AUDIO_ASSEMBLER = JslAnnoId('audio_assembler')
    WAV2VEC_FOR_CTC = JslAnnoId('wav2vec_for_ctc')
    HUBERT_FOR_CTC = JslAnnoId('hubert_for_ctc')
    TABLE_ASSEMBLER = JslAnnoId('table_assembler')
    TAPAS_FOR_QA = JslAnnoId('tapas')
    MULTI_DOCUMENT_ASSEMBLER = JslAnnoId('multi_document_assembler')
    IMAGE_ASSEMBLER = JslAnnoId("image_assembler")
    ALBERT_FOR_QUESTION_ANSWERING = JslAnnoId('albert_for_question_answering')
    CAMEMBERT_FOR_QUESTION_ANSWERING = JslAnnoId('camembert_for_question_answering')
    BERT_FOR_QUESTION_ANSWERING = JslAnnoId('bert_for_question_answering')
    DE_BERTA_FOR_QUESTION_ANSWERING = JslAnnoId('de_berta_for_question_answering')
    DISTIL_BERT_FOR_QUESTION_ANSWERING = JslAnnoId('distil_bert_for_question_answering')
    LONGFORMER_FOR_QUESTION_ANSWERING = JslAnnoId('longformer_for_question_answering')
    ROBERTA_FOR_QUESTION_ANSWERING = JslAnnoId('roberta_for_question_answering')
    XLM_ROBERTA_FOR_QUESTION_ANSWERING = JslAnnoId('xlm_roberta_for_question_answering')
    SPAN_BERT_COREF_MODEL = JslAnnoId('span_bert_coref_model')
    DOCUMENT_NORMALIZER = JslAnnoId('document_normalizer')
    EMBEDDINGS_FINISHER = JslAnnoId('embeddings_finisher')
    # id string 'entitiy_ruler' is misspelled but is a runtime identifier — do not change.
    ENTITY_RULER = JslAnnoId('entitiy_ruler')
    # id string is upper-case unlike its siblings — runtime identifier, kept as-is.
    FINISHER = JslAnnoId('FINISHER')
    GRAPH_EXTRACTION = JslAnnoId('graph_extraction')
    GRAPH_FINISHER = JslAnnoId('graph_finisher')
    LANGUAGE_DETECTOR_DL = JslAnnoId('language_detector_dl')
    LEMMATIZER = JslAnnoId('lemmatizer')
    MULTI_CLASSIFIER_DL = JslAnnoId('multi_classifier_dl')
    MULTI_DATE_MATCHER = JslAnnoId('multi_date_matcher')
    N_GRAMM_GENERATOR = JslAnnoId('n_gramm_generator')
    NER_CONVERTER = JslAnnoId('ner_converter')
    NER_CRF = JslAnnoId('ner_crf')
    NER_DL = JslAnnoId('ner_dl')
    NER_OVERWRITER = JslAnnoId('ner_overwriter')
    NORMALIZER = JslAnnoId('normalizer')
    NORVIG_SPELL_CHECKER = JslAnnoId('norvig_spell_checker')
    POS = JslAnnoId('pos')
    # Misspelled name kept for backward compatibility.
    RECURISVE_TOKENIZER = JslAnnoId('recursive_tokenizer')
    REGEX_MATCHER = JslAnnoId('regex_matcher')
    REGEX_TOKENIZER = JslAnnoId('regex_tokenizer')
    SENTENCE_DETECTOR = JslAnnoId('sentence_detector')
    SENTENCE_DETECTOR_DL = JslAnnoId('sentence_detector_dl')
    SENTENCE_EMBEDDINGS_CONVERTER = JslAnnoId('sentence_embeddings_converter')
    STEMMER = JslAnnoId('stemmer')
    STOP_WORDS_CLEANER = JslAnnoId('stop_words_cleaner')
    SYMMETRIC_DELETE_SPELLCHECKER = JslAnnoId('symmetric_delete_spellchecker')
    TEXT_MATCHER = JslAnnoId('text_matcher')
    TOKEN2CHUNK = JslAnnoId('token2chunk')
    TOKEN_ASSEMBLER = JslAnnoId('token_assembler')
    TOKENIZER = JslAnnoId('tokenizer')
    SENTIMENT_DL = JslAnnoId('sentiment_dl')
    SENTIMENT_DETECTOR = JslAnnoId('sentiment_detector')
    VIVEKN_SENTIMENT = JslAnnoId('vivekn_sentiment')
    WORD_EMBEDDINGS = JslAnnoId('word_embeddings')
    WORD_SEGMENTER = JslAnnoId('word_segmenter')
    YAKE_KEYWORD_EXTRACTION = JslAnnoId('yake_keyword_extraction')
    ALBERT_EMBEDDINGS = JslAnnoId('albert_embeddings')
    ALBERT_FOR_TOKEN_CLASSIFICATION = JslAnnoId('albert_for_token_classification')
    BERT_EMBEDDINGS = JslAnnoId('bert_embeddings')
    # id strings below spell 'camenbert' — runtime identifiers, kept as-is.
    CAMEMBERT_EMBEDDINGS = JslAnnoId('camenbert_embeddings')
    BERT_FOR_TOKEN_CLASSIFICATION = JslAnnoId('bert_for_token_classification')
    CAMEMBERT_FOR_TOKEN_CLASSIFICATION = JslAnnoId('camenbert_for_token_classification')
    CAMEMBERT_FOR_SEQUENCE_CLASSIFICATION = JslAnnoId('camenbert_for_sequence_classification')
    BERT_SENTENCE_EMBEDDINGS = JslAnnoId('bert_sentence_embeddings')
    DISTIL_BERT_EMBEDDINGS = JslAnnoId('distil_bert_embeddings')
    DISTIL_BERT_FOR_TOKEN_CLASSIFICATION = JslAnnoId('distil_bert_for_token_classification')
    DISTIL_BERT_FOR_SEQUENCE_CLASSIFICATION = JslAnnoId('distil_bert_for_sequence_classification')
    BERT_FOR_SEQUENCE_CLASSIFICATION = JslAnnoId('bert_for_sequence_classification')
    ELMO_EMBEDDINGS = JslAnnoId('elmo_embeddings')
    LONGFORMER_EMBEDDINGS = JslAnnoId('longformer_embeddings')
    LONGFORMER_FOR_TOKEN_CLASSIFICATION = JslAnnoId('longformer_for_token_classification')
    MARIAN_TRANSFORMER = JslAnnoId('marian_transformer')
    ROBERTA_EMBEDDINGS = JslAnnoId('roberta_embeddings')
    ROBERTA_FOR_TOKEN_CLASSIFICATION = JslAnnoId('roberta_for_token_classification')
    ROBERTA_SENTENCE_EMBEDDINGS = JslAnnoId('roberta_sentence_embeddings')
    T5_TRANSFORMER = JslAnnoId('t5_transformer')
    VIT_IMAGE_CLASSIFICATION = JslAnnoId("vit_image_classification")
    SWIN_IMAGE_CLASSIFICATION = JslAnnoId("swin_image_classification")
    BERT_FOR_ZERO_SHOT_CLASSIFICATION = JslAnnoId('bert_zero_shot')
    DISTIL_BERT_FOR_ZERO_SHOT_CLASSIFICATION = JslAnnoId('distil_bert_zero_shot')
    ROBERTA_FOR_ZERO_SHOT_CLASSIFICATION = JslAnnoId('roberta_zero_shot')
    UNIVERSAL_SENTENCE_ENCODER = JslAnnoId('universal_sentence_encoder')
    XLM_ROBERTA_EMBEDDINGS = JslAnnoId('xlm_roberta_embeddings')
    XLM_ROBERTA_FOR_TOKEN_CLASSIFICATION = JslAnnoId('xlm_roberta_for_token_classification')
    XLM_ROBERTA_SENTENCE_EMBEDDINGS = JslAnnoId('xlm_roberta_sentence_embeddings')
    XLNET_EMBEDDINGS = JslAnnoId('xlnet_embeddings')
    XLNET_FOR_TOKEN_CLASSIFICATION = JslAnnoId('xlnet_for_token_classification')
    XLM_ROBERTA_FOR_SEQUENCE_CLASSIFICATION = JslAnnoId('xlm_roberta_for_sequence_classification')
    ROBERTA_FOR_SEQUENCE_CLASSIFICATION = JslAnnoId('roberta_for_sequence_classification')
    LONGFORMER_FOR_SEQUENCE_CLASSIFICATION = JslAnnoId('longformer_for_sequence_classification')
    ALBERT_FOR_SEQUENCE_CLASSIFICATION = JslAnnoId('albert_for_sequence_classification')
    XLNET_FOR_SEQUENCE_CLASSIFICATION = JslAnnoId('xlnet_for_sequence_classification')
    GPT2 = JslAnnoId('gpt2')
    WORD_2_VEC = JslAnnoId('word_2_vec')
    DEBERTA_WORD_EMBEDDINGS = JslAnnoId('deberta')
    DEBERTA_FOR_SEQUENCE_CLASSIFICATION = JslAnnoId('deberta_for_sequence_classification')
    DEBERTA_FOR_TOKEN_CLASSIFICATION = JslAnnoId('deberta_for_token_classification')
    COREF_SPAN_BERT = JslAnnoId('coref_spanbert')

    # Trainable (approach) counterparts of the annotators above.
    TRAINABLE_CONTEXT_SPELL_CHECKER = JslAnnoId('trainable_context_spell_checker')
    TRAINABLE_VIVEKN_SENTIMENT = JslAnnoId('trainable_vivekn_sentiment')
    TRAINABLE_SENTIMENT_DL = JslAnnoId('trainable_sentiment_dl')
    TRAINABLE_CLASSIFIER_DL = JslAnnoId('trainable_classifier_dl')
    TRAINABLE_MULTI_CLASSIFIER_DL = JslAnnoId('trainable_multi_classifier_dl')
    TRAINABLE_NER_DL = JslAnnoId('trainable_ner_dl')
    TRAINABLE_NER_CRF = JslAnnoId('trainable_ner_crf')
    TRAINABLE_POS = JslAnnoId('trainable_pos')
    TRAINABLE_DEP_PARSE_TYPED = JslAnnoId('trainable_dependency_parser')
    TRAINABLE_DEP_PARSE_UN_TYPED = JslAnnoId('trainable_dependency_parser_untyped')
    TRAINABLE_DOC2VEC = JslAnnoId('trainable_doc2vec')
    TRAINABLE_ENTITY_RULER = JslAnnoId('trainable_entity_ruler')
    TRAINABLE_LEMMATIZER = JslAnnoId('trainable_lemmatizer')
    TRAINABLE_NORMALIZER = JslAnnoId('trainable_normalizer')
    TRAINABLE_NORVIG_SPELL_CHECKER = JslAnnoId('trainable_norvig_spell')
    TRAINABLE_RECURISVE_TOKENIZER = JslAnnoId('trainable_recursive_tokenizer')
    # NOTE(review): id string says 'trainable_regex_tokenizer' although the name says
    # MATCHER — runtime identifier, kept as-is; verify against resolver tables.
    TRAINABLE_REGEX_MATCHER = JslAnnoId('trainable_regex_tokenizer')
    TRAINABLE_SENTENCE_DETECTOR_DL = JslAnnoId('trainable_sentence_detector_dl')
    TRAINABLE_SENTIMENT = JslAnnoId('trainable_sentiment')
    TRAINABLE_WORD_EMBEDDINGS = JslAnnoId('trainable_word_embeddings')
    TRAINABLE_SYMMETRIC_DELETE_SPELLCHECKER = JslAnnoId('trainable_symmetric_spell_checker')
    TRAINABLE_TEXT_MATCHER = JslAnnoId('trainable_text_matcher')
    TRAINABLE_TOKENIZER = JslAnnoId('trainable_tokenizer')
    TRAINABLE_WORD_SEGMENTER = JslAnnoId('trainable_word_segmenter')
    BERT_SENTENCE_CHUNK_EMBEDDINGS = JslAnnoId('bert_sentence_chunk_embeddings')

    # Placeholders for annotators that are only partially integrated into NLU.
    # Duplicate re-assignments from an earlier revision were removed (dead stores).
    PARTIALLY_IMPLEMENTED = JslAnnoId('partially_implemented')
    PARTIAL_AssertionFilterer = JslAnnoId('PARTIAL_AssertionFilterer')
    PARTIAL_ChunkConverter = JslAnnoId('PARTIAL_ChunkConverter')
    PARTIAL_ChunkKeyPhraseExtraction = JslAnnoId('PARTIAL_ChunkKeyPhraseExtraction')
    PARTIAL_ChunkSentenceSplitter = JslAnnoId('PARTIAL_ChunkSentenceSplitter')
    PARTIAL_ChunkFiltererApproach = JslAnnoId('PARTIAL_ChunkFiltererApproach')
    PARTIAL_ChunkFilterer = JslAnnoId('PARTIAL_ChunkFilterer')
    PARTIAL_ChunkMapperApproach = JslAnnoId('PARTIAL_ChunkMapperApproach')
    PARTIAL_ChunkMapperFilterer = JslAnnoId('PARTIAL_ChunkMapperFilterer')
    PARTIAL_DocumentLogRegClassifierApproach = JslAnnoId('PARTIAL_DocumentLogRegClassifierApproach')
    PARTIAL_DocumentLogRegClassifierModel = JslAnnoId('PARTIAL_DocumentLogRegClassifierModel')
    PARTIAL_ContextualParserApproach = JslAnnoId('PARTIAL_ContextualParserApproach')
    PARTIAL_ReIdentification = JslAnnoId('PARTIAL_ReIdentification')
    PARTIAL_NerDisambiguator = JslAnnoId('PARTIAL_NerDisambiguator')
    PARTIAL_NerDisambiguatorModel = JslAnnoId('PARTIAL_NerDisambiguatorModel')
    PARTIAL_AverageEmbeddings = JslAnnoId('PARTIAL_AverageEmbeddings')
    PARTIAL_EntityChunkEmbeddings = JslAnnoId('PARTIAL_EntityChunkEmbeddings')
    PARTIAL_ChunkMergeApproach = JslAnnoId('PARTIAL_ChunkMergeApproach')
    PARTIAL_IOBTagger = JslAnnoId('PARTIAL_IOBTagger')
    PARTIAL_NerChunker = JslAnnoId('PARTIAL_NerChunker')
    PARTIAL_NerConverterInternalModel = JslAnnoId('PARTIAL_NerConverterInternalModel')
    PARTIAL_DateNormalizer = JslAnnoId('PARTIAL_DateNormalizer')
    PARTIAL_PosologyREModel = JslAnnoId('PARTIAL_PosologyREModel')
    PARTIAL_RENerChunksFilter = JslAnnoId('PARTIAL_RENerChunksFilter')
    PARTIAL_ResolverMerger = JslAnnoId('PARTIAL_ResolverMerger')
    PARTIAL_AnnotationMerger = JslAnnoId('PARTIAL_AnnotationMerger')
    PARTIAL_Router = JslAnnoId('PARTIAL_Router')
    PARTIAL_Word2VecApproach = JslAnnoId('PARTIAL_Word2VecApproach')
    PARTIAL_WordEmbeddings = JslAnnoId('PARTIAL_WordEmbeddings')
    PARTIAL_EntityRulerApproach = JslAnnoId('PARTIAL_EntityRulerApproach')
    PARTIAL_EntityRulerModel = JslAnnoId('PARTIAL_EntityRulerModel')
    PARTIAL_TextMatcherModel = JslAnnoId('PARTIAL_TextMatcherModel')
    PARTIAL_BigTextMatcher = JslAnnoId('PARTIAL_BigTextMatcher')
    PARTIAL_BigTextMatcherModel = JslAnnoId('PARTIAL_BigTextMatcherModel')
    PARTIAL_DateMatcher = JslAnnoId('PARTIAL_DateMatcher')
    PARTIAL_MultiDateMatcher = JslAnnoId('PARTIAL_MultiDateMatcher')
    PARTIAL_RegexMatcher = JslAnnoId('PARTIAL_RegexMatcher')
    PARTIAL_TextMatcher = JslAnnoId('PARTIAL_TextMatcher')
    PARTIAL_NerApproach = JslAnnoId('PARTIAL_NerApproach')
    PARTIAL_NerCrfApproach = JslAnnoId('PARTIAL_NerCrfApproach')
    PARTIAL_NerOverwriter = JslAnnoId('PARTIAL_NerOverwriter')
    PARTIAL_DependencyParserApproach = JslAnnoId('PARTIAL_DependencyParserApproach')
    PARTIAL_TypedDependencyParserApproach = JslAnnoId('PARTIAL_TypedDependencyParserApproach')
    PARTIAL_SentenceDetectorDLApproach = JslAnnoId('PARTIAL_SentenceDetectorDLApproach')
    PARTIAL_SentimentDetector = JslAnnoId('PARTIAL_SentimentDetector')
    PARTIAL_ViveknSentimentApproach = JslAnnoId('PARTIAL_ViveknSentimentApproach')
    PARTIAL_ContextSpellCheckerApproach = JslAnnoId('PARTIAL_ContextSpellCheckerApproach')
    PARTIAL_NorvigSweetingApproach = JslAnnoId('PARTIAL_NorvigSweetingApproach')
    PARTIAL_SymmetricDeleteApproach = JslAnnoId('PARTIAL_SymmetricDeleteApproach')
    PARTIAL_ChunkTokenizer = JslAnnoId('PARTIAL_ChunkTokenizer')
    PARTIAL_ChunkTokenizerModel = JslAnnoId('PARTIAL_ChunkTokenizerModel')
    PARTIAL_RecursiveTokenizer = JslAnnoId('PARTIAL_RecursiveTokenizer')
    PARTIAL_RecursiveTokenizerModel = JslAnnoId('PARTIAL_RecursiveTokenizerModel')
    PARTIAL_Token2Chunk = JslAnnoId('PARTIAL_Token2Chunk')
    PARTIAL_WordSegmenterApproach = JslAnnoId('PARTIAL_WordSegmenterApproach')
    PARTIAL_GraphExtraction = JslAnnoId('PARTIAL_GraphExtraction')
    PARTIAL_Lemmatizer = JslAnnoId('PARTIAL_Lemmatizer')
    PARTIAL_Normalizer = JslAnnoId('PARTIAL_Normalizer')
    PARTIAL_SpanBertCorefModel = JslAnnoId('PARTIAL_SpanBertCorefModel')


class NLP_HC_NODE_IDS:  # or Mode Node?
    """All available Feature nodes in the Healthcare Library.

    Defines high-level identifiers used to cast the pipeline dependency
    resolution algorithm into an abstract graph.

    NOTE: several id strings contain the misspelling 'traianble' — they are
    runtime identifiers and are intentionally preserved.
    """
    ASSERTION_DL = JslAnnoId('assertion_dl')
    TRAINABLE_ASSERTION_DL = JslAnnoId('trainable_assertion_dl')
    ASSERTION_FILTERER = JslAnnoId('assertion_filterer')  # TODO traianble?
    ASSERTION_LOG_REG = JslAnnoId('assertion_log_reg')
    TRAINABLE_ASSERTION_LOG_REG = JslAnnoId('trainable_assertion_log_reg')
    CHUNK2TOKEN = JslAnnoId('chunk2token')
    CHUNK_ENTITY_RESOLVER = JslAnnoId('chunk_entity_resolver')
    TRAINABLE_CHUNK_ENTITY_RESOLVER = JslAnnoId('traianble_chunk_entity_resolver')
    CHUNK_FILTERER = JslAnnoId('chunk_filterer')
    TRAINABLE_CHUNK_FILTERER = JslAnnoId('trainable_chunk_filterer')  # Todo not integrated
    CHUNK_KEY_PHRASE_EXTRACTION = JslAnnoId('chunk_key_phrase_extraction')
    CHUNK_MERGE = JslAnnoId('chunk_merge')
    TRAINABLE_CHUNK_MERGE = JslAnnoId('trainable_chunk_merge')  # Todo not integrated
    CONTEXTUAL_PARSER = JslAnnoId('contextual_parser')
    TRAIANBLE_CONTEXTUAL_PARSER = JslAnnoId('trainable_contextual_parser')  # Todo not integrated
    DE_IDENTIFICATION = JslAnnoId('de_identification')
    TRAINABLE_DE_IDENTIFICATION = JslAnnoId('trainable_de_identification')
    DOCUMENT_LOG_REG_CLASSIFIER = JslAnnoId('document_log_reg_classifier')
    TRAINABLE_DOCUMENT_LOG_REG_CLASSIFIER = JslAnnoId('traianble_document_log_reg_classifier')
    DRUG_NORMALIZER = JslAnnoId('drug_normalizer')
    FEATURES_ASSEMBLER = JslAnnoId('features_assembler')
    GENERIC_CLASSIFIER = JslAnnoId('generic_classifier')
    TRAINABLE_GENERIC_CLASSIFIER = JslAnnoId('traianble_generic_classifier')
    IOB_TAGGER = JslAnnoId('iob_tagger')
    MEDICAL_NER = JslAnnoId('medical_ner')
    TRAINABLE_MEDICAL_NER = JslAnnoId('trainable_medical_ner')
    NER_CHUNKER = JslAnnoId('ner_chunker')
    NER_CONVERTER_INTERNAL = JslAnnoId('ner_converter_internal')
    NER_DISAMBIGUATOR = JslAnnoId('ner_disambiguator')
    TRAINABLE_NER_DISAMBIGUATOR = JslAnnoId('trainable_ner_disambiguator')
    RELATION_NER_CHUNKS_FILTERER = JslAnnoId('relation_ner_chunks_filterer')
    RE_IDENTIFICATION = JslAnnoId('re_identification')
    RELATION_EXTRACTION = JslAnnoId('relation_extraction')
    TRAINABLE_RELATION_EXTRACTION = JslAnnoId('trainable_relation_extraction')
    ZERO_SHOT_RELATION_EXTRACTION = JslAnnoId('zero_shot_relation_extraction')
    ZERO_SHOT_NER = JslAnnoId('zero_shot_ner')
    RELATION_EXTRACTION_DL = JslAnnoId('relation_extraction_dl')
    CHUNK_MAPPER_MODEL = JslAnnoId('chunk_mapper_model')
    # TRAINABLE_RELATION_EXTRACTION_DL = JslAnnoId('trainable_relation_extraction_dl')
    SENTENCE_ENTITY_RESOLVER = JslAnnoId('sentence_entity_resolver')
    TRAINABLE_SENTENCE_ENTITY_RESOLVER = JslAnnoId('trainable_sentence_entity_resolver')
    MEDICAL_BERT_FOR_TOKEN_CLASSIFICATION = JslAnnoId('medical_bert_for_token_classification')
    MEDICAL_BERT_FOR_SEQUENCE_CLASSIFICATION = JslAnnoId('medical_bert_for_sequence_classification')
    MEDICAL_DISTILBERT_FOR_SEQUENCE_CLASSIFICATION = JslAnnoId('medical_distilbert_for_sequence_classification')
    ENTITY_CHUNK_EMBEDDING = JslAnnoId('entity_chunk_embedding')


class OCR_NODE_IDS:
    """All available Feature nodes in OCR.

    Used to cast the pipeline dependency resolution algorithm into an abstract graph.
    """
    # Visual Document Understanding
    VISUAL_DOCUMENT_CLASSIFIER = JslAnnoId('visual_document_classifier')
    VISUAL_DOCUMENT_NER = JslAnnoId('visual_document_NER')
    # Object Detection
    IMAGE_HANDWRITTEN_DETECTOR = JslAnnoId('image_handwritten_detector')
    # TABLE Processors/Recognition
    IMAGE_TABLE_DETECTOR = JslAnnoId('image_table_detector')
    IMAGE_TABLE_CELL_DETECTOR = JslAnnoId('image_table_cell_detector')
    IMAGE_TABLE_CELL2TEXT_TABLE = JslAnnoId('image_table_cell2text_table')
    # PDF Processing
    PDF2TEXT = JslAnnoId('pdf2text')
    PDF2IMAGE = JslAnnoId('pdf2image')
    IMAGE2PDF = JslAnnoId('image2pdf')
    TEXT2PDF = JslAnnoId('text2pdf')
    PDF_ASSEMBLER = JslAnnoId('pdf_assembler')
    PDF_DRAW_REGIONS = JslAnnoId('pdf_draw_regions')
    PDF2TEXT_TABLE = JslAnnoId('pdf2table')
    # DOCX Processing
    DOC2TEXT = JslAnnoId('doc2text')
    DOC2TEXT_TABLE = JslAnnoId('doc2text_table')
    DOC2PDF = JslAnnoId('doc2pdf')
    PPT2TEXT_TABLE = JslAnnoId('ppt2text_table')
    PPT2PDF = JslAnnoId('ppt2pdf')
    # DICOM Processing
    DICOM2IMAGE = JslAnnoId('dicom2image')
    IMAGE2DICOM = JslAnnoId('IMAGE2DICOM')
    # Image Pre-Processing
    BINARY2IMAGE = JslAnnoId('binary2image')
    GPU_IMAGE_TRANSFORMER = JslAnnoId('GPU_IMAGE_TRANSFORMER')
    IMAGE_BINARIZER = JslAnnoId('image_binarizer')
    IMAGE_ADAPTIVE_BINARIZER = JslAnnoId('image_adaptive_binarizer')
    IMAGE_ADAPTIVE_THRESHOLDING = JslAnnoId('image_adaptive_thresholding')
    IMAGE_SCALER = JslAnnoId('image_scaler')
    IMAGE_ADAPTIVE_SCALER = JslAnnoId('image_adaptive_scaler')
    IMAGE_SKEW_CORRECTOR = JslAnnoId('image_skew_corrector')
    IMAGE_NOISE_SCORER = JslAnnoId('image_noise_scorer')
    IMAGE_REMOVE_OBJECTS = JslAnnoId('image_remove_objects')
    IMAGE_MORPHOLOGY_OPERATION = JslAnnoId('image_morphology_operation')
    IMAGE_CROPPER = JslAnnoId('image_cropper')
    IMAGE2REGION = JslAnnoId('image2region')
    # Misspelled name kept for backward compatibility.
    IMAGE_LAYOUT_ANALZYER = JslAnnoId('image_layout_analyzer')
    IMAGE_SPLIT_REGIONS = JslAnnoId('image_split_regions')
    IMAGE_DRAW_REGIONS = JslAnnoId('image_draw_regions')
    # Character Recognition
    IMAGE2TEXT = JslAnnoId('image2text')
    IMAGE2TEXTPDF = JslAnnoId('image2textpdf')
    IMAGE2HOCR = JslAnnoId('image2hocr')
    IMAGE_BRANDS2TEXT = JslAnnoId('image_brands2text')
    # Other
    POSITION_FINDER = JslAnnoId('position_finder')
    UPDATE_TEXT_POSITION = JslAnnoId('update_text_position')
    FOUNDATION_ONE_REPORT_PARSER = JslAnnoId('foundation_one_report_parser')
    HOCR_DOCUMENT_ASSEMBLER = JslAnnoId('hocr_document_assembler')
    HOCR_TOKENIZER = JslAnnoId('hocr_tokenizer')
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/universe/feature_node_ids.py
feature_node_ids.py
# component_resolution.py — maps NLU references / missing pipeline features to
# concrete NluComponent instances. Code is unchanged; comments/docstrings only.
from typing import Dict, List, Union, Optional, Callable

from pyspark.ml import PipelineModel, Pipeline
from sparknlp.pretrained import PretrainedPipeline, LightPipeline

from nlu.pipe.nlu_component import NluComponent
from nlu.pipe.utils.component_utils import ComponentUtils
from nlu.pipe.utils.pipe_utils import PipeUtils
from nlu.pipe.utils.resolution.storage_ref_resolution_utils import *
from nlu.spellbook import Spellbook
from nlu.universe.atoms import LicenseType
from nlu.universe.component_universes import ComponentUniverse, anno_class_to_empty_component
from nlu.universe.feature_resolutions import FeatureResolutions
from nlu.universe.feature_universes import NLP_HC_FEATURES, OCR_FEATURES
from nlu.universe.universes import Licenses, license_to_bucket, ModelBuckets

# NOTE(review): `logging`, `NLP_FEATURES`, `resolve_storage_ref`,
# `nlu_ref_to_nlp_metadata` and `nlu` are not imported explicitly in this view —
# presumably supplied by the star-import above; verify.
logger = logging.getLogger('nlu')


def init_component(component):
    """Call *component* if it is a partial/constructor, otherwise return it unchanged."""
    # Init partial constructor
    if isinstance(component, Callable):
        component = component()
    return component


def is_produced_by_multi_output_component(missing_feature_type: Union[NLP_FEATURES, OCR_FEATURES, NLP_HC_FEATURES]):
    """For these components we resolve to None, because they are already being
    satisfied by another component that outputs multiple features including the
    ones covered here."""
    return missing_feature_type == NLP_FEATURES.DOCUMENT_QUESTION_CONTEXT


def resolve_feature(missing_feature_type: Union[NLP_FEATURES, OCR_FEATURES, NLP_HC_FEATURES], language='en',
                    is_licensed=False, is_trainable_pipe=False) -> NluComponent:
    '''
    This function returns a default component_to_resolve for a missing component_to_resolve type and core part to the
    pipeline feature resolution. It is used to auto complete pipelines, which are missing required components.
    :param missing_feature_type: String which is either just the component_to_resolve type or
        componenttype@spark_nlp_reference which stems from a models storageref and refers to some pretrained
        embeddings or model_anno_obj
    :param language: language code used to fetch pretrained models (default 'en')
    :param is_licensed: whether licensed (Healthcare/OCR) resolutions may be used
    :param is_trainable_pipe: whether the enclosing pipe is trainable (enables HC train resolutions)
    :return: a NLU component_to_resolve which is a either the default if there is no '@' in the
        @param missing_component_type or a default component_to_resolve for that particular type
    '''
    logger.info(f'Getting default for missing_feature_type={missing_feature_type}')
    if is_produced_by_multi_output_component(missing_feature_type):
        # Special Edge Case for QA annotators
        return None
    if '@' not in missing_feature_type:
        # Resolve feature which has no storage ref or if storage ref is irrelevant at this point
        if is_licensed and is_trainable_pipe and missing_feature_type in FeatureResolutions.default_HC_train_resolutions.keys():
            # NOTE(review): membership is tested on default_HC_train_resolutions but the
            # value is read from default_HC_resolutions — a key present only in the train
            # table would raise KeyError here. Confirm whether this is intentional.
            feature_resolution = FeatureResolutions.default_HC_resolutions[missing_feature_type]
            license_type = Licenses.hc
            model_bucket = ModelBuckets.hc
        elif is_licensed and missing_feature_type in FeatureResolutions.default_HC_resolutions.keys():
            feature_resolution = FeatureResolutions.default_HC_resolutions[missing_feature_type]
            license_type = Licenses.hc
            model_bucket = ModelBuckets.hc
        elif is_licensed and missing_feature_type in FeatureResolutions.default_OCR_resolutions.keys():
            feature_resolution = FeatureResolutions.default_OCR_resolutions[missing_feature_type]
            license_type = Licenses.ocr
            # model_bucket = 'clinical/models' # no bucket based models supported
            model_bucket = ModelBuckets.ocr
        elif missing_feature_type in FeatureResolutions.default_OS_resolutions.keys():
            feature_resolution = FeatureResolutions.default_OS_resolutions[missing_feature_type]
            license_type = Licenses.open_source
            model_bucket = ModelBuckets.open_source
        else:
            raise ValueError(f"Could not resolve feature={missing_feature_type}")
        nlu_component = init_component(feature_resolution.nlu_component)  # Call the partial and init the nlu component
        # Either call get_pretrained(nlp_ref, lang,bucket) or get_default_model() to instantiate Annotator object
        if feature_resolution.get_pretrained:
            return nlu_component.set_metadata(
                nlu_component.get_pretrained_model(feature_resolution.nlp_ref, feature_resolution.language,
                                                   model_bucket),
                feature_resolution.nlu_ref, feature_resolution.nlp_ref, language, False, license_type)
        else:
            return nlu_component.set_metadata(nlu_component.get_default_model(),
                                              feature_resolution.nlu_ref, feature_resolution.nlp_ref,
                                              language, False, license_type)
    else:
        # if there is an @ in the name, we must get some specific
        # pretrained model_anno_obj from the sparknlp reference that should follow after the @
        missing_feature_type, storage_ref = missing_feature_type.split('@')
        if storage_ref == '':
            # Storage ref empty for trainable resolution.
            # Use default embed defined in feature resolution
            if is_licensed and is_trainable_pipe and missing_feature_type in FeatureResolutions.default_HC_train_resolutions.keys():
                # NOTE(review): same train-table/resolution-table mismatch as above.
                feature_resolution = FeatureResolutions.default_HC_resolutions[missing_feature_type]
                license_type = Licenses.hc
                model_bucket = ModelBuckets.hc
            elif missing_feature_type in FeatureResolutions.default_OS_resolutions.keys():
                feature_resolution = FeatureResolutions.default_OS_resolutions[missing_feature_type]
                license_type = Licenses.open_source
                model_bucket = ModelBuckets.open_source
            else:
                raise ValueError(
                    f"Could not resolve empty storage ref with default feature for missing feature = {missing_feature_type}")
            nlu_component = init_component(
                feature_resolution.nlu_component)  # Call the partial and init the nlu component
            return nlu_component.set_metadata(
                nlu_component.get_pretrained_model(feature_resolution.nlp_ref, feature_resolution.language,
                                                   model_bucket),
                feature_resolution.nlu_ref, feature_resolution.nlp_ref, language, False, license_type)
        # Actually resolve storage ref
        nlu_ref, nlp_ref, is_licensed, language = resolve_storage_ref(language, storage_ref, missing_feature_type)
        license_type = Licenses.hc if is_licensed else Licenses.open_source
        nlu_component = get_trained_component_for_nlp_model_ref(language, nlu_ref, nlp_ref, license_type)
        return nlu_component


def nlu_ref_to_component(nlu_ref, detect_lang=False, authenticated=False) -> Union[NluComponent, List[NluComponent]]:
    '''
    This method implements the main namespace for all component_to_resolve names. It parses the input request and
    passes the data to a resolver method which searches the namespace for a Component for the input request.
    It returns a list of NLU.component_to_resolve objects or just one NLU.component_to_resolve object alone if just
    one component_to_resolve was specified.
    It maps a correctly namespaced name to a corresponding component_to_resolve for pipeline.
    If no lang is provided, default language eng is assumed.
    General format <lang>.<class>.<dataset>.<embeddings>
    For embedding format : <lang>.<class>.<variant>
    This method will parse <language>.<NLU_action>
    Additional data about dataset and variant will be resolved by corresponding action classes.
    If train prefix is part of the nlu_ref, the trainable namespace will be searched.
    If 'translate_to' or 'marian' is inside the nlu_ref, 'xx' will be prefixed to the ref and set as lang if it is
    not already, since all translate models are xx lang.
    :param nlu_ref: User request (should be a NLU reference)
    :param detect_lang: Whether to automatically detect language
    :param authenticated: unused here; kept for interface compatibility
    :return: Pipeline or component_to_resolve for the NLU reference.
    '''
    infos = nlu_ref.split('.')
    if len(infos) == 0:
        # NOTE(review): str.split never returns an empty list, so this branch is unreachable.
        raise ValueError(f"EXCEPTION: Could not create a component_to_resolve for nlu reference={nlu_ref}", )
    if 'train' in infos:
        if nlu_ref in Spellbook.trainable_models.keys():
            # NOTE(review): this inner check can never be true inside the enclosing
            # membership test — the detailed "supported values" error is dead code.
            # Unknown train refs currently fall through to regular resolution below.
            if nlu_ref not in Spellbook.trainable_models:
                s = "\n"
                raise ValueError(f'Could not find trainable model_anno_obj for nlu_ref={nlu_ref}.'
                                 f'Supported values = {s.join(nlu.Spellbook.trainable_models.keys())}')
            return get_trainable_component_for_nlu_ref(nlu_ref)
    # Regular (non-trainable) resolution: parse the ref into Spark NLP metadata.
    lang, nlu_ref, nlp_ref, license_type, is_pipe, model_params = nlu_ref_to_nlp_metadata(nlu_ref)
    if is_pipe:
        resolved_component = get_trained_component_list_for_nlp_pipe_ref(lang, nlp_ref, nlu_ref,
                                                                         license_type=license_type)
    else:
        resolved_component = get_trained_component_for_nlp_model_ref(lang, nlu_ref, nlp_ref, license_type,
                                                                     model_params)
    if resolved_component is None:
        raise ValueError(f"EXCEPTION: Could not create a component_to_resolve for nlu reference={nlu_ref}", )
    return resolved_component


def get_trainable_component_for_nlu_ref(nlu_ref) -> NluComponent:
    """Resolve a trainable NLU reference to an un-fitted (trainable) NluComponent.

    Looks the ref up in the trainable spellbook, instantiates the matching
    component class and attaches a trainable model. Raises ValueError when the
    ref or its anno id is unknown.
    """
    if nlu_ref in Spellbook.traianble_nlu_ref_to_jsl_anno_id:
        anno_id = Spellbook.traianble_nlu_ref_to_jsl_anno_id[nlu_ref]
    else:
        raise ValueError(f'Could not find trainable Model for nlu_spell ={nlu_ref}')
    if anno_id in ComponentUniverse.components:
        component = ComponentUniverse.components[anno_id]()
        return component.set_metadata(component.get_trainable_model(), nlu_ref, '', 'xx', False)
    else:
        raise ValueError(f'Could not find trainable Model for anno_id ={anno_id}')


def get_trained_component_list_for_nlp_pipe_ref(language, nlp_ref, nlu_ref, path=None,
                                                license_type: LicenseType = Licenses.open_source,
                                                ) -> List[NluComponent]:
    """
    creates a list of components from a Spark NLP Pipeline reference
    1. download pipeline
    2. unpack pipeline to annotators and create list of nlu components
    3. return list of nlu components
    :param license_type: Type of license for the component
    :param nlu_ref: Nlu ref that points to this pipe
    :param language: language of the pipeline
    :param nlp_ref: Reference to a spark nlp pretrained pipeline
    :param path: Load component_list from HDD
    :return: Each element of the Spark NLP pipeline wrapped as a NLU component_to_resolve inside a list
    """
    logger.info(f'Building pretrained pipe for nlu_ref={nlu_ref} nlp_ref={nlp_ref}')
    if 'language' in nlp_ref:
        # special edge case for lang detectors
        language = 'xx'
    if path is None:
        if license_type != Licenses.open_source:
            pipe = PretrainedPipeline(nlp_ref, lang=language, remote_loc='clinical/models')
        else:
            pipe = PretrainedPipeline(nlp_ref, lang=language)
        iterable_stages = pipe.light_model.pipeline_model.stages
    else:
        pipe = LightPipeline(PipelineModel.load(path=path))
        iterable_stages = pipe.pipeline_model.stages
    # NOTE(review): the 5th positional argument here lands in the
    # `is_pre_configured` parameter of get_component_list_for_iterable_stages,
    # so a license-type object is used as a truthy flag — confirm intent.
    constructed_components = get_component_list_for_iterable_stages(iterable_stages, language, nlp_ref, nlu_ref,
                                                                    license_type)
    return ComponentUtils.set_storage_ref_attribute_of_embedding_converters(
        PipeUtils.set_column_values_on_components_from_pretrained_pipe(constructed_components, nlp_ref, language,
                                                                       path))


def get_nlu_pipe_for_nlp_pipe(pipe: Union[Pipeline, LightPipeline, PipelineModel, List], is_pre_configured=True):
    """Get a list of NLU components wrapping each Annotator in pipe.
    Pipe should be of class Pipeline, LightPipeline, or PipelineModel
    :param pipe: for which to extract list of nlu components which embellish each annotator
    :param is_pre_configured: when True, also copy input/output column settings from the annotators
    :return: list of nlu components, one per annotator in pipe
    """
    # Dispatch on the concrete pipe-like type to obtain its iterable of stages.
    if isinstance(pipe, List):
        pipe = get_component_list_for_iterable_stages(pipe, is_pre_configured=is_pre_configured)
    elif isinstance(pipe, Pipeline):
        pipe = get_component_list_for_iterable_stages(pipe.getStages(), is_pre_configured=is_pre_configured)
    elif isinstance(pipe, LightPipeline):
        pipe = get_component_list_for_iterable_stages(pipe.pipeline_model.stages, is_pre_configured=is_pre_configured)
    elif isinstance(pipe, PipelineModel):
        pipe = get_component_list_for_iterable_stages(pipe.stages, is_pre_configured=is_pre_configured)
    elif isinstance(pipe, PretrainedPipeline):
        pipe = get_component_list_for_iterable_stages(pipe.model.stages, is_pre_configured=is_pre_configured)
    else:
        raise ValueError(
            f'Invalid Pipe-Like class {type(pipe)} supported types: Pipeline,LightPipeline,PipelineModel,List')
    if is_pre_configured:
        return set_cols_on_nlu_components(pipe)
    else:
        return pipe


def set_cols_on_nlu_components(iterable_components):
    """Copy each wrapped model's input/output column settings onto its NLU component."""
    for c in iterable_components:
        # Multi-input annotators expose getInputCols(); single-input ones getInputCol().
        c.spark_input_column_names = c.model.getInputCols() if hasattr(c.model, 'getInputCols') else [
            c.model.getInputCol()]
        if hasattr(c.model, 'getOutputCol'):
            c.spark_output_column_names = [c.model.getOutputCol()]
        elif hasattr(c.model, 'getOutputCols'):
            # NOTE(review): getOutputCols() presumably already returns a list, so this
            # wraps a list in a list (unlike the input-cols handling above) — confirm.
            c.spark_output_column_names = [c.model.getOutputCols()]
    return iterable_components


def get_component_list_for_iterable_stages(iterable_stages, language=None, nlp_ref=None, nlu_ref=None,
                                           is_pre_configured=True
                                           ):
    """Wrap every Spark NLP stage in *iterable_stages* as a NluComponent.

    :param iterable_stages: Spark NLP annotator objects (pipeline stages)
    :param language: language passed into each component's metadata
    :param nlp_ref: Spark NLP reference the stages came from
    :param nlu_ref: NLU reference the stages came from
    :param is_pre_configured: forwarded to NluComponent.set_metadata
    :raises Exception: when no stage could be wrapped or a wrap produced None
    :return: list of NluComponent, one per stage
    """
    constructed_components = []
    for jsl_anno_object in iterable_stages:
        anno_class_name = type(jsl_anno_object).__name__
        logger.info(f"Building NLU component for class_name = {anno_class_name} ")
        component = anno_class_to_empty_component(anno_class_name)
        component.set_metadata(jsl_anno_object, nlu_ref, nlp_ref, language, is_pre_configured)
        constructed_components.append(component)
    # NOTE(review): the error message references only the last anno_class_name seen;
    # an empty iterable would also make that name undefined here.
    if None in constructed_components or len(constructed_components) == 0:
        raise Exception(f"Failure inferring type anno_class={anno_class_name} ")
    return constructed_components


def get_trained_component_for_nlp_model_ref(lang: str, nlu_ref: Optional[str] = '', nlp_ref: str = '',
                                            license_type: LicenseType = Licenses.open_source,
                                            model_configs: Optional[Dict[str, any]] = None) -> NluComponent:
    """Build one trained NluComponent from a Spark NLP model reference.

    Downloads the pretrained model (or a default model when the component has no
    pretrained loader) and optionally applies *model_configs* as setter calls.
    :raises ValueError: wrapping any failure during construction/configuration.
    """
    anno_class = Spellbook.nlp_ref_to_anno_class[nlp_ref]
    component = anno_class_to_empty_component(anno_class)
    model_bucket = license_to_bucket(license_type)
    try:
        if component.get_pretrained_model:
            component = component.set_metadata(component.get_pretrained_model(nlp_ref, lang, model_bucket),
                                               nlu_ref, nlp_ref, lang, False, license_type)
        else:
            component = component.set_metadata(component.get_default_model(), nlu_ref, nlp_ref, lang, False,
                                               license_type)
        if model_configs:
            for method_name, parameter in model_configs.items():
                # Dynamically call method from provided name and value, to set parameters like T5 task.
                # NOTE(review): eval() on interpolated strings executes arbitrary code if
                # model_configs ever carries untrusted input — consider getattr(...)(parameter).
                code = f'component.model.{method_name}({parameter})'
                eval(code)
    except Exception as e:
        raise ValueError(f'Failure making component, nlp_ref={nlp_ref}, nlu_ref={nlu_ref}, lang={lang}, \n err={e}')
    return component
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/component_resolution.py
component_resolution.py
import logging from typing import Union import sparknlp from pyspark.sql.types import StructType, StructField, StringType from sparknlp.base import * from sparknlp.base import LightPipeline from nlu.pipe.col_substitution.col_name_substitution_utils import ColSubstitutionUtils from nlu.pipe.extractors.extractor_configs_HC import default_full_config from nlu.pipe.extractors.extractor_methods.base_extractor_methods import * from nlu.pipe.extractors.extractor_methods.ocr_extractors import extract_tables from nlu.pipe.nlu_component import NluComponent from nlu.pipe.utils.component_utils import ComponentUtils from nlu.pipe.utils.data_conversion_utils import DataConversionUtils from nlu.pipe.utils.output_level_resolution_utils import OutputLevelUtils from nlu.pipe.utils.resolution.storage_ref_utils import StorageRefUtils from nlu.universe.universes import Licenses from nlu.utils.environment.env_utils import is_running_in_databricks, try_import_streamlit logger = logging.getLogger('nlu') class NLUPipeline(dict): # we inherhit from dict so the component_list is indexable and we have a nice shortcut for accessing the spark nlp model_anno_obj def __init__(self): """ Initializes a pretrained pipeline, should only be created after a Spark Context has been created """ self.spark = sparknlp.start() self.provider = 'sparknlp' self.pipe_ready = False # ready when we have created a spark df self.failed_pyarrow_conversion = False self.anno2final_cols = [] # Maps Anno to output pandas col self.contains_ocr_components = False self.contains_audio_components = False self.has_nlp_components = False self.nlu_ref = '' self.raw_text_column = 'text' self.raw_text_matrix_slice = 1 # place holder for getting text from matrix self.spark_nlp_pipe = None self.has_trainable_components = False self.needs_fitting = True self.is_fitted = False self.output_positions = False # Wether to putput positions of Features in the final output. E.x. positions of tokens, entities, dependencies etc.. 
inside of the input document. self.prediction_output_level = '' # either document, chunk, sentence, token self.component_output_level = '' # document or sentence, depending on how input dependent Sentence/Doc classifier are fed self.output_different_levels = True self.light_pipe_configured = False self.components = [] # orderd list of nlu_component objects self.output_datatype = 'pandas' # What data type should be returned after predict either spark, pandas, modin, numpy, string or array self.lang = 'en' self.vanilla_transformer_pipe = None self.estimator_pipe = None self.light_transformer_pipe = None self.has_licensed_components = False self.has_span_classifiers = False self.prefer_light = False self.has_table_qa_models = False def add(self, component: NluComponent, nlu_reference=None, pretrained_pipe_component=False, name_to_add='', idx=None): ''' :param component: :return: None ''' if idx: self.components.insert(idx, component) else: self.components.append(component) # ensure that input/output cols are properly set # Spark NLP model_anno_obj reference shortcut name = component.name # .replace(' ', '').replace('train.', '') if StorageRefUtils.has_storage_ref(component) and component.is_trained: # Converters have empty storage ref intially storage_ref = StorageRefUtils.extract_storage_ref(component) if storage_ref != '': name = name + '@' + StorageRefUtils.extract_storage_ref(component) logger.info(f"Adding {name} to internal component_list") # Configure output column names of classifiers from category to something more meaningful # if self.isInstanceOfNlpClassifer(component_to_resolve.model_anno_obj): self.configure_outputs(component_to_resolve, nlu_ref) if name_to_add == '': # Add Component as self.index and in attributes if component.is_storage_ref_producer and component.nlu_ref not in self.keys() and not pretrained_pipe_component: self[name] = component.model elif name not in self.keys(): self[name] = component.model else: nlu_identifier = 
ComponentUtils.get_nlu_ref_identifier(component) self[name + "@" + nlu_identifier] = component.model else: self[name_to_add] = component.model def get_sample_spark_dataframe(self): data = {"text": ['This day sucks', 'I love this day', 'I dont like Sami']} text_df = pd.DataFrame(data) return sparknlp.start().createDataFrame(data=text_df) def verify_all_labels_exist(self, dataset): return 'y' in dataset.columns # or 'label' in dataset.columns or 'labels' in dataset.columns def fit(self, dataset=None, dataset_path=None, label_seperator=','): ''' if dataset is string with '/' in it, its dataset path! Converts the input Pandas Dataframe into a Spark Dataframe and trains a model_anno_obj on it. :param dataset: Pandas dataset to train on, should have a y column for label and 'text' column for text features :param dataset_path: Path to a CONLL2013 format dataset. It will be read for NER and POS training. :param label_seperator: If multi_classifier is trained, this seperator is used to split the elements into an Array column for Pyspark :return: A nlu pipeline with models fitted. 
        '''
        from nlu.pipe.pipe_logic import PipeUtils
        # Assemble the Spark estimator pipeline from the current component models.
        stages = []
        for component in self.components:
            stages.append(component.model)
        self.spark_estimator_pipe = Pipeline(stages=stages)
        if dataset_path != None and 'ner' in self.nlu_ref:
            # NER training path: read a CoNLL-format file and rename its label column to y.
            from sparknlp.training import CoNLL
            s_df = CoNLL().readDataset(self.spark, path=dataset_path, )
            self.vanilla_transformer_pipe = self.spark_estimator_pipe.fit(s_df.withColumnRenamed('label', 'y'))
            self.light_transformer_pipe = LightPipeline(self.vanilla_transformer_pipe)
        elif dataset_path != None and 'pos' in self.nlu_ref:
            # POS training path: read a delimiter-separated POS dataset from disk.
            from sparknlp.training import POS
            s_df = POS().readDataset(self.spark, path=dataset_path, delimiter=label_seperator, outputPosCol="y",
                                     outputDocumentCol="document", outputTextCol="text")
            self.vanilla_transformer_pipe = self.spark_estimator_pipe.fit(s_df)
            self.light_transformer_pipe = LightPipeline(self.vanilla_transformer_pipe)
        elif isinstance(dataset, pd.DataFrame) and 'multi' in self.nlu_ref:
            # Multi-label classifier training: split the y column into an array column
            # using label_seperator.
            schema = StructType([
                StructField("y", StringType(), True),
                StructField("text", StringType(), True)
            ])
            from pyspark.sql import functions as F
            df = self.spark.createDataFrame(data=dataset).withColumn('y', F.split('y', label_seperator))
            # df = self.spark.createDataFrame(data=dataset, schema=schema).withColumn('y',F.split('y',label_seperator))
            # df = self.spark.createDataFrame(dataset)
            self.vanilla_transformer_pipe = self.spark_estimator_pipe.fit(df)
            self.light_transformer_pipe = LightPipeline(self.vanilla_transformer_pipe)
        elif isinstance(dataset, pd.DataFrame):
            # Generic training on a Pandas DataFrame with a 'y' label column.
            if not self.verify_all_labels_exist(dataset):
                raise ValueError(
                    f"Could not detect label in provided columns={dataset.columns}\nMake sure a column named label, labels or y exists in your dataset.")
            dataset.y = dataset.y.apply(str)
            if self.has_licensed_components:
                # Configure Feature Assembler: feed every non-text / non-label column into it.
                for c in self.components:
                    if c.name == 'feature_assembler':
                        # NOTE(review): the comprehension variable below shadows the loop
                        # variable `c`; harmless in Py3 (comprehension scope) but confusing.
                        vector_assembler_input_cols = [c for c in dataset.columns if
                                                       c != 'text' and c != 'y' and c != 'label' and c != 'labels']
                        c.model.setInputCols(vector_assembler_input_cols)
                        # os_components.model_anno_obj.spark_input_column_names = vector_assembler_input_cols
                # Configure Chunk resolver Sentence-to-Document substitution in all cols. When training
                # a Chunk resolver, we must substitute all SENTENCE cols with DOC. We MAY NOT FEED
                # SENTENCE to CHUNK RESOLVE or we get errors.
                self.components = PipeUtils.configure_component_output_levels_to_document(self)
                # Substitute @ notation to ___ because it breaks the Pyspark SQL Parser...
                # NOTE(review): the loops below remove/append while iterating the same list --
                # works only because at most one match occurs per list; confirm and consider
                # iterating over a copy.
                for c in self.components:
                    for inp in c.spark_input_column_names:
                        if 'chunk_embedding' in inp:
                            c.spark_input_column_names.remove(inp)
                            c.spark_input_column_names.append(inp.replace('@', "___"))
                            c.model.setInputCols(c.spark_input_column_names)
                        if 'sentence_embedding' in inp:
                            c.spark_input_column_names.remove(inp)
                            c.spark_input_column_names.append(inp.replace('@', "___"))
                            c.model.setInputCols(c.spark_input_column_names)
                    for out in c.spark_output_column_names:
                        if 'chunk_embedding' in out:
                            c.spark_output_column_names.remove(out)
                            c.spark_output_column_names.append(out.replace('@', "___"))
                            c.model.setOutputCol(c.spark_output_column_names[0])
                        if 'sentence_embedding' in out:
                            c.spark_output_column_names.remove(out)
                            c.spark_output_column_names.append(out.replace('@', "___"))
                            c.model.setOutputCol(c.spark_output_column_names[0])
                # Rebuild the estimator pipeline since component models were reconfigured above.
                stages = []
                for component in self.components:
                    stages.append(component.model)
                ## TODO set storage ref on fitted model_anno_obj
                self.spark_estimator_pipe = Pipeline(stages=stages)
            self.vanilla_transformer_pipe = self.spark_estimator_pipe.fit(
                DataConversionUtils.pdf_to_sdf(dataset, self.spark)[0])
            self.light_transformer_pipe = LightPipeline(self.vanilla_transformer_pipe)
        elif isinstance(dataset, pyspark.sql.DataFrame):
            # Training directly on a Spark DataFrame.
            if not self.verify_all_labels_exist(dataset):
                raise ValueError(
                    f"Could not detect label in provided columns={dataset.columns}\nMake sure a column named label, labels or y exists in your dataset.")
            # NOTE(review): the pdf branch above unpacks pdf_to_sdf(...)[0]; here sdf_to_sdf's
            # return value is passed to fit() directly -- confirm sdf_to_sdf returns a
            # DataFrame and not a tuple.
            self.vanilla_transformer_pipe = self.spark_estimator_pipe.fit(
                DataConversionUtils.sdf_to_sdf(dataset, self.spark))
            self.light_transformer_pipe = LightPipeline(self.vanilla_transformer_pipe)
        else:
            # fit on empty dataframe since no data provided
            if not self.is_fitted:
                logger.info(
                    'Fitting on empty Dataframe, could not infer correct training method. This is intended for non-trainable pipelines.')
                self.vanilla_transformer_pipe = self.spark_estimator_pipe.fit(self.get_sample_spark_dataframe())
                self.light_transformer_pipe = LightPipeline(self.vanilla_transformer_pipe)
        self.has_trainable_components = False
        self.is_fitted = True
        self.light_pipe_configured = True
        # Swap untrained estimator components for their fitted counterparts.
        self.components = PipeUtils.replace_untrained_component_with_trained(self, self.vanilla_transformer_pipe)
        return self

    def get_extraction_configs(self, full_meta, positions, get_embeddings, processed):
        """Search first OC namespace and if not found the HC Namespace for each Annotator Class
        in pipeline and get corresponding config.

        Returns a dictionary of methods, where keys are column names and values are methods that
        are applied to extract and represent the data in these columns in a more pythonic and
        panda-esque way.

        :param full_meta: whether to use the 'default_full' extractor configs (keep all metadata)
        :param positions: whether begin/end positions should be extracted
        :param get_embeddings: whether embedding columns should get extractor configs at all
        :param processed: the transformed dataframe; used to drop configs for missing columns
        """
        c_level_mapping = OutputLevelUtils.get_output_level_mapping_by_component(self)
        # todo doc level annos and same level annos can be popped always.
anno_2_ex_config = {} for c in self.components: if c.license == Licenses.ocr: # OCR can output more than 1 col extractors = {col: c.pdf_extractor_methods['default'](output_col_prefix=col) for col in c.spark_output_column_names} anno_2_ex_config.update(extractors) continue if 'embedding' in c.type and not get_embeddings: continue for col in c.spark_output_column_names: if 'default' in c.pdf_extractor_methods.keys() and not full_meta: anno_2_ex_config[col] = c.pdf_extractor_methods['default'](output_col_prefix=col) elif 'default_full' in c.pdf_extractor_methods.keys() and full_meta: anno_2_ex_config[col] = c.pdf_extractor_methods['default_full'](output_col_prefix=col) else: # Fallback if no output defined anno_2_ex_config[col] = default_full_config(output_col_prefix=col) # Tune the Extractor configs based on prediction parameters if c_level_mapping[c] == 'document' and not anno_2_ex_config[col].pop_never: # Disable popping for doc level outputs, output will not be [element] but instead element in each row. anno_2_ex_config[col].pop_meta_list = True anno_2_ex_config[col].pop_result_list = True if positions: anno_2_ex_config[col].get_positions = True else: anno_2_ex_config[col].get_begin = False anno_2_ex_config[col].get_end = False anno_2_ex_config[col].get_positions = False if c.loaded_from_pretrained_pipe: # Use original col name of pretrained pipes as prefix anno_2_ex_config[col].output_col_prefix = col # drop all entries which are missing in DF # Finisher Annotator could have dropped them bad_c = [] for c in anno_2_ex_config.keys(): if c not in processed.columns: bad_c.append(c) for c in bad_c: del anno_2_ex_config[c] return anno_2_ex_config def unpack_and_apply_extractors(self, pdf: Union[pyspark.sql.DataFrame, pd.DataFrame], keep_stranger_features=True, stranger_features=[], anno_2_ex_config={}, light_pipe_enabled=True, get_embeddings=False ) -> pd.DataFrame: """1. Unpack SDF to PDF with Spark NLP Annotator Dictionaries 2. 
Get the extractor configs for the corresponding Annotator classes 3. Apply The extractor configs with the extractor methods to each column and merge back with zip/explode Uses optimized PyArrow conversion to avoid representing data multiple times between the JVM and PVM Can process Spark DF output from Vanilla pipes and Pandas Converts outputs of Lightpipeline """ # Light pipe, does not fetch emebddings if light_pipe_enabled and not get_embeddings and not isinstance(pdf, pyspark.sql.dataframe.DataFrame) or self.prefer_light: return apply_extractors_and_merge(extract_light_pipe_rows(pdf), anno_2_ex_config, keep_stranger_features, stranger_features) # Vanilla Spark Pipe return apply_extractors_and_merge(pdf.toPandas().applymap(extract_pyspark_rows), anno_2_ex_config, keep_stranger_features, stranger_features) def pythonify_spark_dataframe(self, processed, keep_stranger_features=True, stranger_features=[], drop_irrelevant_cols=True, output_metadata=False, positions=False, output_level='', get_embeddings=True): """ This functions takes in a spark dataframe with Spark NLP annotations in it and transforms it into a Pandas Dataframe with common feature types for further NLP/NLU downstream tasks. It will recycle Indexes from Pandas DataFrames and Series if they exist, otherwise a custom id column will be created which is used as index later on It does this by performing the following consecutive steps : 1. Select columns to explode 2. Select columns to keep 3. Rename columns 4. Create Pandas Dataframe object :param processed: Spark dataframe which an NLU pipeline has transformed :param output_level: The output level at which returned pandas Dataframe should be :param keep_stranger_features : Whether to keep additional features from the input DF when generating the output DF or if they should be discarded for the final output DF :param stranger_features: A list of features which are not known to NLU and inside the input DF. 
Basically all columns, which are not named 'text' in the input. If keep_stranger_features== True, then these features will be exploded, if pipe_prediction_output_level == document, otherwise they will not be exploded :param output_metadata: Whether to keep or drop additional metadata or predictions, like prediction confidence :return: Pandas dataframe which easy accessible features """ from nlu.pipe.pipe_logic import PipeUtils if PipeUtils.has_table_extractor(self): # If pipe has table extractors, we return list of tables or table itself if only one detected processed = extract_tables(processed) if len(processed) == 1: return processed[0] return processed stranger_features += ['origin_index'] if output_level == '': # Infer output level if none defined self.prediction_output_level = OutputLevelUtils.infer_prediction_output_level(self) logger.info(f'Inferred and set output level of pipeline to {self.prediction_output_level}') else: self.prediction_output_level = output_level # Get mapping from component to feature extractor method configs anno_2_ex_config = self.get_extraction_configs(output_metadata, positions, get_embeddings, processed) # Processed becomes pandas after applying extractors processed = self.unpack_and_apply_extractors(processed, keep_stranger_features, stranger_features, anno_2_ex_config, self.light_pipe_configured, get_embeddings) # Get mapping between column_name and pipe_prediction_output_level same_level = OutputLevelUtils.get_columns_at_same_level_of_pipe(self, processed, anno_2_ex_config, get_embeddings) logger.info(f"Extracting for same_level_cols = {same_level}\n") processed = zip_and_explode(processed, same_level) processed = self.convert_embeddings_to_np(processed) processed = ColSubstitutionUtils.substitute_col_names(processed, anno_2_ex_config, self, stranger_features, get_embeddings) processed = processed.loc[:, ~processed.columns.duplicated()] if drop_irrelevant_cols and not output_metadata: processed = 
processed[self.drop_irrelevant_cols(list(processed.columns))] # Sort cols alphabetically processed = processed.reindex(sorted(processed.columns), axis=1) return processed def convert_embeddings_to_np(self, pdf): ''' convert all the columns in a pandas df to numpy :param pdf: Pandas Dataframe whose embedding column will be converted to numpy array objects :return: ''' for col in pdf.columns: if 'embed' in col: pdf[col] = pdf[col].apply(lambda x: np.array(x)) return pdf def finalize_return_datatype(self, df): ''' Take in a Spark dataframe with only relevant columns remaining. Depending on what value is set in self.output_datatype, this method will cast the final SDF into Pandas/Spark/Numpy/Modin/List objects :param df: :return: The predicted Data as datatype dependign on self.output_datatype ''' if self.output_datatype == 'spark': return df elif self.output_datatype == 'pandas': return df elif self.output_datatype == 'modin': import modin.pandas as mpd return mpd.DataFrame(df) elif self.output_datatype == 'pandas_series': return df elif self.output_datatype == 'modin_series': import modin.pandas as mpd return mpd.DataFrame(df) elif self.output_datatype == 'numpy': return df.to_numpy() return df def drop_irrelevant_cols(self, cols, keep_origin_index=False): ''' Takes in a list of column names removes the elements which are irrelevant to the current output level. This will be run before returning the final df Drop column candidates are document, sentence, token, chunk. 
columns which are NOT AT THE SAME output level will be dropped :param cols: list of column names in the df :return: list of columns with the irrelevant names removed ''' if 'doc2chunk' in cols: cols.remove('doc2chunk') if 'doc2chunk' in cols: cols.remove('doc2chunk') if self.prediction_output_level == 'token': if 'document' in cols: cols.remove('document') if 'chunk' in cols: cols.remove('chunk') if 'sentence' in cols: cols.remove('sentence') if self.prediction_output_level == 'sentence': if 'token' in cols: cols.remove('token') if 'chunk' in cols: cols.remove('chunk') if 'document' in cols: cols.remove('document') if self.prediction_output_level == 'chunk': # if 'document' in cols: cols.remove('document') if 'token' in cols: cols.remove('token') if 'sentence' in cols: cols.remove('sentence') if self.prediction_output_level == 'document': if 'token' in cols: cols.remove('token') if 'chunk' in cols: cols.remove('chunk') if 'sentence' in cols: cols.remove('sentence') if self.prediction_output_level == 'relation': if 'token' in cols: cols.remove('token') if 'chunk' in cols: cols.remove('chunk') if 'sentence' in cols: cols.remove('sentence') if keep_origin_index == False and 'origin_index' in cols: cols.remove('origin_index') return cols def save(self, path, component='entire_pipeline', overwrite=False): from nlu.utils.environment.env_utils import is_running_in_databricks if is_running_in_databricks(): if path.startswith('/dbfs/') or path.startswith('dbfs/'): nlu_path = path if path.startswith('/dbfs/'): nlp_path = path.replace('/dbfs', '') else: nlp_path = path.replace('dbfs', '') else: nlu_path = 'dbfs/' + path if path.startswith('/'): nlp_path = path else: nlp_path = '/' + path if not self.is_fitted and self.has_trainable_components: self.fit() self.is_fitted = True if component == 'entire_pipeline': self.vanilla_transformer_pipe.save(nlp_path) if overwrite and not is_running_in_databricks(): import shutil shutil.rmtree(path, ignore_errors=True) if not 
self.is_fitted: self.fit() self.is_fitted = True if component == 'entire_pipeline': if isinstance(self.vanilla_transformer_pipe, LightPipeline): self.vanilla_transformer_pipe.pipeline_model.save(path) else: self.vanilla_transformer_pipe.save(path) else: if component in self.keys(): self[component].save(path) print(f'Stored model_anno_obj in {path}') def predict(self, data: object, output_level: object = '', positions: object = False, keep_stranger_features: object = True, metadata: object = False, multithread: object = True, drop_irrelevant_cols: object = True, return_spark_df: object = False, get_embeddings: object = True ) -> object: ''' Annotates a Pandas Dataframe/Pandas Series/Numpy Array/Spark DataFrame/Python List strings /Python String :param data: Data to predict on :param output_level: output level, either document/sentence/chunk/token :param positions: whether to output indexes that map predictions back to position in origin string :param keep_stranger_features: whether to keep columns in the dataframe that are not generated by pandas. I.e. when you s a dataframe with 10 columns and only one of them is named text, the returned dataframe will only contain the text column when set to false :param metadata: whether to keep additional metadata in final df or not like confidences of every possible class for predictions. :param multithread: Whether to use multithreading based light pipeline. In some cases, this may cause errors. :param drop_irrelevant_cols: Whether to drop cols of different output levels, i.e. 
when predicting token level and drop_irrelevant_cols = True then chunk, sentence and Doc will be dropped :param return_spark_df: Prediction results will be returned right after transforming with the Spark NLP pipeline This will run fully distributed in on the Spark Master, but not prettify the output dataframe :param get_embeddings: Whether to return embeddings or not :return: ''' from nlu.pipe.utils.predict_helper import __predict__ return __predict__(self, data, output_level, positions, keep_stranger_features, metadata, multithread, drop_irrelevant_cols, return_spark_df, get_embeddings) def print_info(self, minimal=True): ''' Print out information about every component_to_resolve currently loaded in the component_list and their configurable parameters. If minimal is false, all Spark NLP Model parameters will be printed, including output/label/input cols and other attributes a NLU user should not touch. Useful for debugging. :return: None ''' print('The following parameters are configurable for this NLU pipeline (You can copy paste the examples) :') # list of tuples, where first element is component_to_resolve name and second element is list of param tuples, all ready formatted for printing all_outputs = [] iterable = None for i, component_key in enumerate(self.keys()): s = ">>> component_list['" + component_key + "'] has settable params:" p_map = self[component_key].extractParamMap() component_outputs = [] max_len = 0 for key in p_map.keys(): if minimal: if "outputCol" in key.name or "labelCol" in key.name or "inputCol" in key.name or "labelCol" in key.name or 'lazyAnnotator' in key.name or 'storageref' in key.name: continue # print("component_list['"+ component_key +"'].set"+ str( key.name[0].capitalize())+ key.name[1:]+"("+str(p_map[key])+")" + " | Info: " + str(key.doc)+ " currently Configured as : "+str(p_map[key]) ) # print("Param Info: " + str(key.doc)+ " currently Configured as : "+str(p_map[key]) ) if type(p_map[key]) == str: s1 = "component_list['" + 
component_key + "'].set" + str(key.name[0].capitalize()) + key.name[ 1:] + "('" + str( p_map[key]) + "') " else: s1 = "component_list['" + component_key + "'].set" + str(key.name[0].capitalize()) + key.name[ 1:] + "(" + str( p_map[key]) + ") " s2 = " | Info: " + str(key.doc) + " | Currently set to : " + str(p_map[key]) if len(s1) > max_len: max_len = len(s1) component_outputs.append((s1, s2)) all_outputs.append((s, component_outputs)) # make strings aligned form = "{:<" + str(max_len) + "}" for o in all_outputs: print(o[0]) # component_to_resolve name for o_parm in o[1]: if len(o_parm[0]) < max_len: print(form.format(o_parm[0]) + o_parm[1]) else: print(o_parm[0] + o_parm[1]) def print_exception_err(self, err): '''Print information about exception during converting or transforming dataframe''' import sys logger.exception('Exception occured') e = sys.exc_info() print("No accepted Data type or usable columns found or applying the NLU models failed. ") print( "Make sure that the first column you pass to .predict() is the one that nlu should predict on OR rename the column you want to predict on to 'text' ") print( "On try to reset restart Jupyter session and run the setup script again, you might have used too much memory") print('Full Stacktrace was', e) print('Additional info:') exc_type, exc_obj, exc_tb = sys.exc_info() import os fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] print(exc_type, fname, exc_tb.tb_lineno) err = sys.exc_info()[1] print(str(err)) print( 'Stuck? Contact us on Slack! 
https://join.slack.com/t/spark-nlp/shared_invite/zt-lutct9gm-kuUazcyFKhuGY3_0AMkxqA')

    # NOTE(review): viz_colors={} is a mutable default argument, shared across calls — confirm no caller mutates it.
    def viz(self, text_to_viz: str, viz_type='', labels_to_viz=None, viz_colors={},
            return_html=False,
            write_to_streamlit=False,
            streamlit_key='NLU_streamlit',
            ner_col=None,
            pos_col=None,
            dep_untyped_col=None,
            dep_typed_col=None,
            resolution_col=None,
            relation_col=None,
            assertion_col=None,
            ):
        """Visualize predictions of a Pipeline, using Spark-NLP-Display
        text_to_viz : String to viz
        viz_type : Viz type, one of [ner,dep,resolution,relation,assert]. If none defined, nlu will infer and apply all applicable viz
        labels_to_viz : Defines a subset of NER labels to viz i.e. ['PER'] , by default=[] which will display all labels. Applicable only for NER viz
        viz_colors : Applicable for [ner, resolution, assert ] key = label, value=hex color, i.e. viz_colors={'TREATMENT':'#008080', 'problem':'#800080'}
        Any of the col parameters can be used to point to a specific model in the pipeline, if there are
        multiple candidates of the same type for visualization. I.e. multiple NER models.
        By default, the last model in pipe of applicable viz type will be used.
        """
        # Lazily install the optional spark-nlp-display dependency before visualizing.
        from nlu.utils.environment.env_utils import install_and_import_package
        install_and_import_package('spark-nlp-display', import_name='sparknlp_display')
        # Make sure the underlying Spark pipeline exists before transforming data.
        if self.vanilla_transformer_pipe is None:
            self.fit()
        is_databricks_env = is_running_in_databricks()
        # Databricks cannot render inline, so HTML is returned instead of displayed.
        if return_html:
            is_databricks_env = True
        # self.configure_light_pipe_usage(1, force=True)
        from nlu.pipe.viz.vis_utils import VizUtils
        # If no viz type was requested, infer one from the components present in this pipe.
        if viz_type == '':
            viz_type = VizUtils.infer_viz_type(self)
        # anno_res = self.vanilla_transformer_pipe.fullAnnotate(text_to_viz)[0]
        # anno_res = self.spark.createDataFrame(pd.DataFrame({'text':text_to_viz}))
        data, stranger_features, output_datatype = DataConversionUtils.to_spark_df(text_to_viz, self.spark,
                                                                                   self.raw_text_column)
        anno_res = self.vanilla_transformer_pipe.transform(data)
        anno_res = anno_res.collect()[0]
        # Dispatch to the open-source or the licensed (healthcare) visualizer.
        if self.has_licensed_components == False:
            HTML = VizUtils.viz_OS(anno_res, self, viz_type, viz_colors, labels_to_viz, is_databricks_env,
                                   write_to_streamlit, streamlit_key,
                                   ner_col,
                                   pos_col,
                                   dep_untyped_col,
                                   dep_typed_col,
                                   )
        else:
            HTML = VizUtils.viz_HC(anno_res, self, viz_type, viz_colors, labels_to_viz, is_databricks_env,
                                   write_to_streamlit,
                                   ner_col,
                                   pos_col,
                                   dep_untyped_col,
                                   dep_typed_col,
                                   resolution_col,
                                   relation_col,
                                   assertion_col,
                                   )
        if return_html or is_databricks_env:
            return HTML

    def viz_streamlit(self,
                      # Base Params
                      text: Union[str, List[str], pd.DataFrame, pd.Series] = "Angela Merkel from Germany and Donald Trump from America dont share many opinions",
                      model_selection: List[str] = [],
                      # SIMILARITY PARAMS
                      similarity_texts: Tuple[str, str] = ('I love NLU <3', 'I love Streamlit <3'),
                      # UI PARAMS
                      title: str = 'NLU ❤️ Streamlit - Prototype your NLP startup in 0 lines of code🚀',
                      sub_title: str = 'Play with over 1000+ scalable enterprise NLP models',
                      side_info: str = None,
                      visualizers: List[str] = (
                              "dependency_tree", "ner", "similarity", "token_features", 'classification', 'manifold'),
                      show_models_info: bool = True,
                      show_model_select: bool = True,
                      show_viz_selection: bool = False,
                      show_logo: bool = True,
                      set_wide_layout_CSS: bool = True,
                      show_code_snippets: bool = False,
                      model_select_position: str = 'side',  # main or side
                      display_infos: bool = True,
                      key: str = "NLU_streamlit",
                      display_footer: bool = True,
                      num_similarity_cols: int = 2,
                      ) -> None:
        """Display Viz in streamlit"""
        # All viz_streamlit* methods below are thin delegators: they verify streamlit is
        # importable and forward every parameter unchanged to StreamlitVizBlockHandler.
        try_import_streamlit()
        from nlu.pipe.viz.streamlit_viz.streamlit_dashboard_OS import StreamlitVizBlockHandler
        StreamlitVizBlockHandler.viz_streamlit_dashboard(self, text, model_selection, similarity_texts,
                                                         title, sub_title, side_info, visualizers,
                                                         show_models_info, show_model_select, show_viz_selection,
                                                         show_logo, set_wide_layout_CSS, show_code_snippets,
                                                         model_select_position, display_infos, key, display_footer,
                                                         num_similarity_cols
                                                         )

    def viz_streamlit_token(
            self,
            text: str = 'NLU and Streamlit go together like peanutbutter and jelly',
            title: Optional[str] = "Token features",
            sub_title: Optional[str] = 'Pick from `over 1000+ models` on the left and `view the generated features`',
            show_feature_select: bool = True,
            features: Optional[List[str]] = None,
            metadata: bool = True,
            output_level: str = 'token',
            positions: bool = False,
            set_wide_layout_CSS: bool = True,
            generate_code_sample: bool = False,
            key="NLU_streamlit",
            show_model_select=True,
            model_select_position: str = 'side',  # main or side
            show_infos: bool = True,
            show_logo: bool = True,
            show_text_input: bool = True,
    ):
        """Render a Streamlit block showing per-token features predicted by this pipe."""
        try_import_streamlit()
        from nlu.pipe.viz.streamlit_viz.streamlit_dashboard_OS import StreamlitVizBlockHandler
        StreamlitVizBlockHandler.visualize_tokens_information(self, text, title, sub_title, show_feature_select,
                                                              features,
                                                              metadata, output_level, positions, set_wide_layout_CSS,
                                                              generate_code_sample, key, show_model_select,
                                                              model_select_position,
                                                              show_infos, show_logo, show_text_input)

    def viz_streamlit_classes(
            self,  # nlu component_list
            text: Union[str, list, pd.DataFrame, pd.Series, List[str]] = (
                    'I love NLU and Streamlit and sunny days!', 'I hate rainy daiys', 'CALL NOW AND WIN 1000$M'),
            output_level: Optional[str] = 'document',
            title: Optional[str] = "Text Classification",
            sub_title: Optional[str] = 'View predicted `classes` and `confidences` for `hundreds of text classifiers` in `over 200 languages`',
            metadata: bool = False,
            positions: bool = False,
            set_wide_layout_CSS: bool = True,
            generate_code_sample: bool = False,
            key: str = "NLU_streamlit",
            show_model_selector: bool = True,
            model_select_position: str = 'side',
            show_infos: bool = True,
            show_logo: bool = True,
    ) -> None:
        """Render a Streamlit block showing predicted classes and confidences."""
        try_import_streamlit()
        from nlu.pipe.viz.streamlit_viz.streamlit_dashboard_OS import StreamlitVizBlockHandler
        StreamlitVizBlockHandler.visualize_classes(self, text, output_level, title, sub_title, metadata, positions,
                                                   set_wide_layout_CSS, generate_code_sample, key, show_model_selector,
                                                   model_select_position, show_infos, show_logo)

    def viz_streamlit_dep_tree(
            self,  # nlu component_list
            text: str = 'Billy likes to swim',
            title: Optional[str] = "Dependency Parse & Part-of-speech tags",
            sub_title: Optional[str] = 'POS tags define a `grammatical label` for `each token` and the `Dependency Tree` classifies `Relations between the tokens` ',
            set_wide_layout_CSS: bool = True,
            generate_code_sample: bool = False,
            key="NLU_streamlit",
            show_infos: bool = True,
            show_logo: bool = True,
            show_text_input: bool = True,
    ) -> None:
        """Render a Streamlit block with the dependency tree and POS tags for `text`."""
        try_import_streamlit()
        from nlu.pipe.viz.streamlit_viz.streamlit_dashboard_OS import StreamlitVizBlockHandler
        StreamlitVizBlockHandler.visualize_dep_tree(self, text, title, sub_title, set_wide_layout_CSS,
                                                    generate_code_sample, key, show_infos, show_logo, show_text_input,
                                                    )

    # NOTE(review): colors={} is a mutable default argument — confirm no caller mutates it.
    def viz_streamlit_ner(
            self,  # Nlu component_list
            text: str = 'Donald Trump from America and Angela Merkel from Germany do not share many views.',
            ner_tags: Optional[List[str]] = None,
            show_label_select: bool = True,
            show_table: bool = False,
            title: Optional[str] = "Named Entities",
            sub_title: Optional[str] = "Recognize various `Named Entities (NER)` in text entered and filter them. You can select from over `100 languages` in the dropdown.",
            colors: Dict[str, str] = {},
            show_color_selector: bool = False,
            set_wide_layout_CSS: bool = True,
            generate_code_sample: bool = False,
            key="NLU_streamlit",
            model_select_position: str = 'side',  # main or side
            show_model_select=True,
            show_infos: bool = True,
            show_logo: bool = True,
            show_text_input: bool = True,
    ):
        """Render a Streamlit block highlighting recognized named entities in `text`."""
        try_import_streamlit()
        from nlu.pipe.viz.streamlit_viz.streamlit_dashboard_OS import StreamlitVizBlockHandler
        StreamlitVizBlockHandler.visualize_ner(self, text, ner_tags, show_label_select, show_table, title, sub_title,
                                               colors,
                                               show_color_selector, set_wide_layout_CSS, generate_code_sample, key,
                                               model_select_position,
                                               show_model_select, show_infos, show_logo, show_text_input)

    # NOTE(review): dist_metrics=('cosine') is a plain string, not a 1-tuple (missing comma) — confirm intended.
    def viz_streamlit_word_similarity(
            self,  # nlu component_list
            texts: Union[Tuple[str, str], List[str]] = (
                    "Donald Trump likes to party!", "Angela Merkel likes to party!"),
            threshold: float = 0.5,
            title: Optional[str] = "Vectors & Scalar Similarity & Vector Similarity & Embedding Visualizations ",
            sub_tile: Optional[str] = "Visualize a `word-wise similarity matrix` and calculate `similarity scores` for `2 texts` and every `word embedding` loaded",
            write_raw_pandas: bool = False,
            display_embed_information: bool = True,
            similarity_matrix=True,
            show_algo_select: bool = True,
            dist_metrics: List[str] = ('cosine'),
            set_wide_layout_CSS: bool = True,
            generate_code_sample: bool = False,
            key: str = "NLU_streamlit",
            num_cols: int = 2,
            display_scalar_similarities: bool = False,
            display_similarity_summary: bool = False,
            model_select_position: str = 'side',
            show_infos: bool = True,
            show_logo: bool = True,
    ):
        """Render a Streamlit block comparing word embeddings of two texts via similarity metrics."""
        try_import_streamlit()
        from nlu.pipe.viz.streamlit_viz.streamlit_dashboard_OS import StreamlitVizBlockHandler
        StreamlitVizBlockHandler.display_word_similarity(self, texts, threshold, title, sub_tile, write_raw_pandas,
                                                         display_embed_information, similarity_matrix,
                                                         show_algo_select,
                                                         dist_metrics, set_wide_layout_CSS, generate_code_sample,
                                                         key, num_cols, display_scalar_similarities,
                                                         display_similarity_summary, model_select_position, show_infos,
                                                         show_logo,
                                                         )

    def viz_streamlit_word_embed_manifold(self,
                                          default_texts: List[str] = (
                                                  "Donald Trump likes to party!", "Angela Merkel likes to party!",
                                                  'Peter HATES TO PARTTY!!!! :('),
                                          title: Optional[str] = "Lower dimensional Manifold visualization for word embeddings",
                                          sub_title: Optional[str] = "Apply any of the 11 `Manifold` or `Matrix Decomposition` algorithms to reduce the dimensionality of `Word Embeddings` to `1-D`, `2-D` and `3-D` ",
                                          write_raw_pandas: bool = False,
                                          default_algos_to_apply: List[str] = ('TSNE', 'PCA',),
                                          target_dimensions: List[int] = (1, 2, 3),
                                          show_algo_select: bool = True,
                                          show_embed_select: bool = True,
                                          show_color_select: bool = True,
                                          MAX_DISPLAY_NUM: int = 100,
                                          display_embed_information: bool = True,
                                          set_wide_layout_CSS: bool = True,
                                          num_cols: int = 3,
                                          model_select_position: str = 'side',  # side or main
                                          key: str = "NLU_streamlit",
                                          additional_classifiers_for_coloring: List[str] = ['pos', 'sentiment.imdb'],
                                          generate_code_sample: bool = False,
                                          show_infos: bool = True,
                                          show_logo: bool = True,
                                          n_jobs: Optional[int] = 3,  # False
                                          ):
        """Render a Streamlit block projecting word embeddings to 1/2/3-D via manifold algorithms."""
        try_import_streamlit()
        from nlu.pipe.viz.streamlit_viz.streamlit_dashboard_OS import StreamlitVizBlockHandler
        StreamlitVizBlockHandler.viz_streamlit_word_embed_manifold(self, default_texts, title, sub_title,
                                                                   write_raw_pandas, default_algos_to_apply,
                                                                   target_dimensions, show_algo_select,
                                                                   show_embed_select, show_color_select,
                                                                   MAX_DISPLAY_NUM, display_embed_information,
                                                                   set_wide_layout_CSS, num_cols,
                                                                   model_select_position, key,
                                                                   additional_classifiers_for_coloring,
                                                                   generate_code_sample, show_infos, show_logo,
                                                                   n_jobs,
                                                                   )

    def viz_streamlit_sentence_embed_manifold(self,
                                              default_texts: List[str] = (
                                                      "Donald Trump likes to party!", "Angela Merkel likes to party!",
                                                      'Peter HATES TO PARTTY!!!! :('),
                                              title: Optional[str] = "Lower dimensional Manifold visualization for sentence embeddings",
                                              sub_title: Optional[str] = "Apply any of the 11 `Manifold` or `Matrix Decomposition` algorithms to reduce the dimensionality of `Word Embeddings` to `1-D`, `2-D` and `3-D` ",
                                              write_raw_pandas: bool = False,
                                              default_algos_to_apply: List[str] = ('TSNE', 'PCA',),
                                              target_dimensions: List[int] = (1, 2, 3),
                                              show_algo_select: bool = True,
                                              show_embed_select: bool = True,
                                              show_color_select: bool = True,
                                              MAX_DISPLAY_NUM: int = 100,
                                              display_embed_information: bool = True,
                                              set_wide_layout_CSS: bool = True,
                                              num_cols: int = 3,
                                              model_select_position: str = 'side',  # side or main
                                              key: str = "NLU_streamlit",
                                              additional_classifiers_for_coloring: List[str] = ['sentiment.imdb'],
                                              generate_code_sample: bool = False,
                                              show_infos: bool = True,
                                              show_logo: bool = True,
                                              n_jobs: Optional[int] = 3,  # False
                                              ):
        """Render a Streamlit block projecting sentence embeddings to 1/2/3-D via manifold algorithms."""
        try_import_streamlit()
        from nlu.pipe.viz.streamlit_viz.streamlit_dashboard_OS import StreamlitVizBlockHandler
        StreamlitVizBlockHandler.viz_streamlit_sentence_embed_manifold(self, default_texts, title, sub_title,
                                                                       write_raw_pandas, default_algos_to_apply,
                                                                       target_dimensions, show_algo_select,
                                                                       show_embed_select, show_color_select,
                                                                       MAX_DISPLAY_NUM, display_embed_information,
                                                                       set_wide_layout_CSS, num_cols,
                                                                       model_select_position, key,
                                                                       additional_classifiers_for_coloring,
                                                                       generate_code_sample, show_infos, show_logo,
                                                                       n_jobs,
                                                                       )

    def viz_streamlit_entity_embed_manifold(self,
                                            default_texts: List[str] = ("Donald Trump likes to visit New York",
                                                                        "Angela Merkel likes to visit Berlin!",
                                                                        'Peter hates visiting Paris'),
                                            title: Optional[str] = "Lower dimensional Manifold visualization for Entity embeddings",
                                            sub_title: Optional[str] = "Apply any of the 11 `Manifold` or `Matrix Decomposition` algorithms to reduce the dimensionality of `Entity Embeddings` to `1-D`, `2-D` and `3-D` ",
                                            default_algos_to_apply: List[str] = ("TSNE", "PCA"),
                                            target_dimensions: List[int] = (1, 2, 3),
                                            show_algo_select: bool = True,
                                            set_wide_layout_CSS: bool = True,
                                            num_cols: int = 3,
                                            model_select_position: str = 'side',  # side or main
                                            key: str = "NLU_streamlit",
                                            show_infos: bool = True,
                                            show_logo: bool = True,
                                            n_jobs: Optional[int] = 3,  # False
                                            ):
        """Render a Streamlit block projecting entity embeddings to 1/2/3-D via manifold algorithms."""
        try_import_streamlit()
        from nlu.pipe.viz.streamlit_viz.streamlit_dashboard_OS import StreamlitVizBlockHandler
        StreamlitVizBlockHandler.viz_streamlit_entity_embed_manifold(self, default_texts, title, sub_title,
                                                                     default_algos_to_apply, target_dimensions,
                                                                     show_algo_select, set_wide_layout_CSS, num_cols,
                                                                     model_select_position, key, show_infos, show_logo,
                                                                     n_jobs)

    def __configure_light_pipe_usage__(self, data_instances, use_multi=True, force=False):
        """Decide whether to wrap the fitted Spark pipeline in a LightPipeline.

        Light pipelines are used for small inputs; for > 50 rows (or use_multi=False)
        the regular Spark pipeline is kept.  `force=True` rebuilds the light pipe
        even if one already exists.
        """
        logger.info("Configuring Light Pipeline Usage")
        if data_instances > 50 or use_multi is False:
            logger.info("Disabling light pipeline")
            if not self.is_fitted:
                self.fit()
        else:
            if not self.light_transformer_pipe or force:
                if not self.is_fitted:
                    self.fit()
                self.light_pipe_configured = True
                logger.info("Enabling light pipeline")
                self.light_transformer_pipe = LightPipeline(self.vanilla_transformer_pipe, parse_embeddings=True)
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/pipeline.py
pipeline.py
from typing import Dict, Callable, Optional, List, Any, Union

from sparknlp.common import AnnotatorApproach, AnnotatorModel
from sparknlp.internal import AnnotatorTransformer

from nlu.universe.atoms import NlpLevel, LicenseType, JslAnnoId, JslAnnoPyClass, LanguageIso, \
    JslFeature
from nlu.universe.feature_node_universes import NlpFeatureNode
from nlu.universe.universes import ComponentBackends


def debug_print_pipe_cols(pipe):
    # Debug helper: print the input-columns -> name -> output-columns wiring of every component.
    for c in pipe.components:
        print(f'{c.spark_input_column_names}->{c.name}->{c.spark_output_column_names}')


class NluComponent:
    """Contains various metadata about the loaded component_to_resolve"""

    def __init__(self,
                 name: str,  # Name for this anno
                 type: str,  # this tells us which kind of component_to_resolve this is
                 # Extractor method applicable to Pandas DF for getting pretty outputs
                 pdf_extractor_methods: Dict[str, Callable[[], any]],
                 pdf_col_name_substitutor: Callable[[], any],  # substitution method for renaming final cols to somthing redable
                 # sdf_extractor_methods : Dict[str,Callable[[],any]] # Extractor method applicable to Spark DF for getting pretty outputs # TODO
                 # sdf_col_name_substitutor : Optional[Callable[[],any]] # substitution method for renaming final cols to somthing redable # TODO
                 output_level: NlpLevel,  # Output level of the component_to_resolve for data transformation logic or call it putput mapping??
                 node: NlpFeatureNode,  # Graph node
                 description: str,  # general annotator/model_anno_obj/component_to_resolve/pipeline info
                 provider: ComponentBackends,  # Who provides the implementation of this annotator, Spark-NLP for base. Would be
                 license: LicenseType,  # open source or private
                 computation_context: str,  # Will this component_to_resolve do its computation in Spark land (like all of Spark NLP annotators do) or does it require some other computation engine or library like Tensorflow, Numpy, HuggingFace, etc..
                 output_context: str,  # Will this components final result
                 jsl_anno_class_id: JslAnnoId,  # JSL Annotator Class this belongs to
                 jsl_anno_py_class: JslAnnoPyClass,  # JSL Annotator Class this belongs to
                 get_default_model: Optional[Callable[[], AnnotatorTransformer]] = None,  # Returns Concrete JSL Annotator object.
                 # Returns Concrete JSL Annotator object. May by None lang,name, bucket
                 get_pretrained_model: Optional[Callable[[str, str, str], AnnotatorTransformer]] = None,
                 # Returns Concrete JSL Annotator object. May by None
                 get_trainable_model: Optional[Callable[[], AnnotatorTransformer]] = None,
                 trainable: bool = False,
                 language: [LanguageIso] = None,
                 # constructor_args: ComponentConstructorArgs = None # Args used to originally create this component_to_resolve
                 nlu_ref: str = None,
                 nlp_ref: str = None,
                 in_types: List[JslFeature] = None,
                 out_types: List[JslFeature] = None,
                 in_types_default: List[JslFeature] = None,
                 out_types_default: List[JslFeature] = None,
                 spark_input_column_names: List[str] = None,
                 spark_output_column_names: List[str] = None,
                 # Any anno class. Needs to be Any, so we can cover unimported HC models
                 model: Union[AnnotatorApproach, AnnotatorModel] = None,
                 storage_ref: Optional[str] = None,
                 storage_ref_nlu_ref_resolution: Optional[str] = None,  # nlu_ref corresponding to storage_ref
                 loaded_from_pretrained_pipe: bool = False,  # If this component_to_resolve was derived from a pre-built SparkNLP pipeline or from NLU
                 has_storage_ref: bool = False,
                 is_storage_ref_consumer: bool = False,  # # Whether this anno takes in some features that are storage ref based
                 is_storage_ref_producer: bool = False,  # Whether this anno generates some features that are storage ref based
                 # Reference to trainable version of this anno, if this is a non-trainable anno otherwise None
                 trainable_mirror_anno: Optional[JslAnnoId] = None,
                 # Reference to trained version of this anno, if this is a trainable anno otherwise None
                 trained_mirror_anno: Optional[JslAnnoId] = None,
                 applicable_file_types: List[str] = None,  # Used for OCR annotators to deduct applicable file types
                 is_trained: bool = True,  # Set to true for trainable annotators
                 ):
        # Plain field-by-field storage of the constructor arguments; no validation is performed here.
        self.name = name
        self.type = type
        self.pdf_extractor_methods = pdf_extractor_methods
        self.pdf_col_name_substitutor = pdf_col_name_substitutor
        self.output_level = output_level
        self.node = node
        self.description = description
        self.provider = provider
        self.license = license
        self.computation_context = computation_context
        self.output_context = output_context
        self.jsl_anno_class_id = jsl_anno_class_id
        self.jsl_anno_py_class = jsl_anno_py_class
        self.get_default_model = get_default_model
        self.get_pretrained_model = get_pretrained_model
        self.get_trainable_model = get_trainable_model
        self.trainable = trainable
        self.language = language
        self.nlu_ref = nlu_ref
        self.nlp_ref = nlp_ref
        self.in_types = in_types
        self.out_types = out_types
        self.in_types_default = in_types_default
        self.out_types_default = out_types_default
        self.spark_input_column_names = spark_input_column_names
        self.spark_output_column_names = spark_output_column_names
        self.model = model
        self.storage_ref = storage_ref
        self.storage_ref_nlu_ref_resolution = storage_ref_nlu_ref_resolution
        self.loaded_from_pretrained_pipe = loaded_from_pretrained_pipe
        self.has_storage_ref = has_storage_ref
        self.is_storage_ref_consumer = is_storage_ref_consumer
        self.is_storage_ref_producer = is_storage_ref_producer
        self.trainable_mirror_anno = trainable_mirror_anno
        self.trained_mirror_anno = trained_mirror_anno
        self.applicable_file_types = applicable_file_types
        self.is_trained = is_trained

    def set_metadata(self, jsl_anno_object: Union[AnnotatorApproach, AnnotatorModel],
                     nlu_ref: str,
                     nlp_ref: str,
                     language: LanguageIso,
                     loaded_from_pretrained_pipe: bool,
                     license_type: Optional[LicenseType] = None,
                     storage_ref: Optional[str] = None):
        """Write metadata to nlu component_to_resolve after constructing it """
        self.model = jsl_anno_object
        # converted pipes from users may have no refs attached, so we use uuid for now (todo generate nicer names based on jsl_anno_id and if its unique in pipe or not and using storage_ref)
        self.nlu_ref = nlu_ref if nlu_ref else str(jsl_anno_object)
        self.nlp_ref = nlp_ref if nlp_ref else str(jsl_anno_object)
        self.language = language if language else 'en'
        self.loaded_from_pretrained_pipe = loaded_from_pretrained_pipe
        # Copy the graph node's feature types into per-instance column bookkeeping.
        self.in_types = self.node.ins.copy()
        self.out_types = self.node.outs.copy()
        self.in_types_default = self.node.ins.copy()
        self.out_types_default = self.node.outs.copy()
        self.spark_input_column_names = self.in_types.copy()
        self.spark_output_column_names = self.out_types.copy()
        if storage_ref:
            self.storage_ref = storage_ref
        if license_type:
            self.license = license_type
        if nlp_ref == 'glove_840B_300' or nlp_ref == 'glove_6B_300':
            # NOTE(review): sets `self.lang`, while every other method uses `self.language` —
            # possibly a typo for `self.language = 'xx'`; confirm before changing.
            self.lang = 'xx'
        if hasattr(self.model, 'setIncludeConfidence'):
            self.model.setIncludeConfidence(True)
        # if self.has_storage_ref and 'converter' in self.name:
        from nlu.universe.feature_node_ids import NLP_NODE_IDS
        if self.name in [NLP_NODE_IDS.SENTENCE_EMBEDDINGS_CONVERTER, NLP_NODE_IDS.CHUNK_EMBEDDINGS_CONVERTER]:
            # Embedding converters initially have
            self.storage_ref = ''
        if self.trainable:
            self.is_trained = False
        return self

    def set_input(self, input_cols: Union[str, List[str]]):
        # Forward to whichever setter the wrapped Spark annotator exposes, then mirror the
        # value into spark_input_column_names (always stored as a list).
        if hasattr(self.model, 'setInputCol'):
            self.model.setInputCol(input_cols)
        elif hasattr(self.model, 'setInputCols'):
            self.model.setInputCols(input_cols)
        else:
            raise Exception(f'Cannot set input cols on {self.model}')
        if isinstance(input_cols, str):
            self.spark_input_column_names = [input_cols]
        elif isinstance(input_cols, list):
            self.spark_input_column_names = input_cols
        return self

    def set_output(self, output_cols: Union[str, List[str]]):
        # Same pattern as set_input, for output columns.
        if hasattr(self.model, 'setOutputCol'):
            self.model.setOutputCol(output_cols)
        elif hasattr(self.model, 'setOutputCols'):
            self.model.setOutputCols(output_cols)
        else:
            raise Exception(f'Cannot set output cols on {self.model}')
        if isinstance(output_cols, str):
            self.spark_output_column_names = [output_cols]
        elif isinstance(output_cols, list):
            self.spark_output_column_names = output_cols
        return self

    def __str__(self):
        return f'Component(ID={self.name}, NLU_REF={self.nlu_ref} NLP_REF={self.nlp_ref})'

    def __hash__(self):
        # NOTE(review): __hash__ is defined without __eq__, so equality stays identity-based.
        return hash((self.name, self.nlu_ref, self.nlp_ref, self.jsl_anno_class_id, self.language))
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/nlu_component.py
nlu_component.py
import logging
from dataclasses import dataclass

from nlu import Licenses
from nlu.pipe.component_resolution import resolve_feature
from nlu.pipe.nlu_component import NluComponent
from nlu.pipe.pipeline import NLUPipeline
from nlu.pipe.utils.component_utils import ComponentUtils
from nlu.pipe.utils.pipe_utils import PipeUtils
from nlu.pipe.utils.resolution.storage_ref_utils import StorageRefUtils
from nlu.universe.component_universes import jsl_id_to_empty_component
from nlu.universe.feature_node_ids import NLP_NODE_IDS, NLP_HC_NODE_IDS
from nlu.universe.feature_universes import NLP_FEATURES
from nlu.universe.logic_universes import AnnoTypes

logger = logging.getLogger('nlu')


@dataclass
class StorageRefConversionResolutionData:
    """Hold information that can be used to resolve to a NLU component_to_resolve, which satisfies the storage ref demands."""
    storage_ref: str  # storage ref a resolver component_to_resolve should have
    component_candidate: NluComponent  # from which NLU component_to_resolve should the converter feed
    type: str  # what kind of conversion, either word2chunk or word2sentence


class PipelineCompleter:
    '''
    Pass a list of NLU components to the pipeline (or a NLU pipeline)
    For every component_to_resolve, it checks if all requirements are met.
    It checks and fixes the following issues for a list of components:
    1. Missing Features / component_to_resolve requirements
    2. Bad order of components (which will cause missing features exception)
    3. Check Feature names in the output
    4. Check weather pipeline needs to be fitted
    '''

    @staticmethod
    def check_if_storage_ref_is_satisfied_or_get_conversion_candidate(component_to_check: NluComponent,
                                                                      pipe: NLUPipeline,
                                                                      storage_ref_to_find: str):
        """Check if any other component_to_resolve in the pipeline has same storage ref as the input component_to_resolve.
        Returns 1. If there is a candidate, but it has different level, it will be returned as candidate
        If first condition is not satisfied, consults the namespace.storage_ref_2_nlp_ref
        """
        # If there is just 1 component_to_resolve, there is nothing to check
        if len(pipe.components) == 1:
            return False, None
        conversion_candidate = None
        conversion_type = "no_conversion"
        logger.info(f'checking for storage={storage_ref_to_find} is available in component_list..')
        for c in pipe.components:
            if component_to_check.name != c.name:
                if StorageRefUtils.has_storage_ref(c):
                    if StorageRefUtils.extract_storage_ref(c) == storage_ref_to_find:
                        # Both components have Different Names AND their Storage Ref Matches up AND they both take in tokens -> Match
                        if NLP_FEATURES.TOKEN in component_to_check.in_types and c.type == AnnoTypes.TOKEN_EMBEDDING:
                            logger.info(f'Word Embedding Match found = {c.name}')
                            return False, None

                        # Since document and be substituted for sentence
                        # and vice versa if either of them matches up we have a match
                        if NLP_FEATURES.SENTENCE_EMBEDDINGS in component_to_check.in_types and \
                                c.type == AnnoTypes.DOCUMENT_EMBEDDING:
                            logger.info(f'Sentence Embedding Match found = {c.name}')
                            return False, None

                        # component_to_check requires Sentence_embedding
                        # but the Matching Storage_ref component_to_resolve takes in Token
                        # -> Convert the Output of the Match to SentenceLevel
                        # and feed the component_to_check to the new component_to_resolve
                        if NLP_FEATURES.SENTENCE_EMBEDDINGS in component_to_check.in_types \
                                and c.type == AnnoTypes.TOKEN_EMBEDDING:
                            logger.info(f'Sentence Embedding Conversion Candidate found={c.name}')
                            conversion_type = 'word2sentence'
                            conversion_candidate = c

                        # analogous case as above for chunk
                        if NLP_FEATURES.CHUNK_EMBEDDINGS in component_to_check.in_types and c.type == AnnoTypes.TOKEN_EMBEDDING:
                            logger.info(f'Sentence Embedding Conversion Candidate found={c.name}')
                            conversion_type = 'word2chunk'
                            conversion_candidate = c

        logger.info(f'No matching storage ref found')
        return True, StorageRefConversionResolutionData(storage_ref_to_find, conversion_candidate, conversion_type)

    @staticmethod
    def extract_required_features_refless_from_pipe(pipe: NLUPipeline):
        """Extract provided features from component_list, which have no storage ref"""
        provided_features_no_ref = []
        for c in pipe.components:
            # Components from pre-built pipelines have their dependencies satisfied already.
            if c.loaded_from_pretrained_pipe:
                continue
            for feat in c.in_types:
                if 'embed' not in feat:
                    provided_features_no_ref.append(feat)
        return ComponentUtils.clean_irrelevant_features(provided_features_no_ref)

    @staticmethod
    def extract_provided_features_refless_from_pipe(pipe: NLUPipeline):
        """Extract provided features from component_list, which have no storage ref"""
        provided_features_no_ref = []
        for c in pipe.components:
            for feat in c.out_types:
                if 'embed' not in feat:
                    provided_features_no_ref.append(feat)
        return ComponentUtils.clean_irrelevant_features(provided_features_no_ref)

    @staticmethod
    def extract_provided_features_ref_from_pipe(pipe: NLUPipeline):
        """Extract provided features from component_list, which have storage ref. """
        provided_features_ref = []
        for c in pipe.components:
            for feat in c.out_types:
                if 'embed' in feat:
                    # Normalize to the 'feature@storage_ref' form if not already annotated.
                    if '@' not in feat:
                        provided_features_ref.append(feat + "@" + StorageRefUtils.extract_storage_ref(c))
                    else:
                        provided_features_ref.append(feat)
        return ComponentUtils.clean_irrelevant_features(provided_features_ref)

    @staticmethod
    def extract_required_features_ref_from_pipe(pipe: NLUPipeline):
        """Extract provided features from component_list, which have storage ref"""
        provided_features_ref = []
        for c in pipe.components:
            if c.loaded_from_pretrained_pipe:
                continue
            for feat in c.in_types:
                if 'embed' in feat:
                    # if StorageRefUtils.extract_storage_ref(os_components) !='':  # special edge case, some components might not have a storage ref set
                    if '@' not in feat:
                        provided_features_ref.append(feat + "@" + StorageRefUtils.extract_storage_ref(c))
                    else:
                        provided_features_ref.append(feat)
        return ComponentUtils.clean_irrelevant_features(provided_features_ref)

    @staticmethod
    def extract_sentence_embedding_conversion_candidates(pipe: NLUPipeline):
        """Extract information about embedding conversion candidates"""
        conversion_candidates_data = []
        for c in pipe.components:
            if ComponentUtils.component_has_embeddings_requirement(c) and not PipeUtils.is_trainable_pipe(pipe):
                storage_ref = StorageRefUtils.extract_storage_ref(c)
                conversion_applicable, conversion_data = \
                    PipelineCompleter.check_if_storage_ref_is_satisfied_or_get_conversion_candidate(
                        c, pipe, storage_ref)
                if conversion_applicable:
                    conversion_candidates_data.append(conversion_data)
        return conversion_candidates_data

    @staticmethod
    def get_missing_required_features(pipe: NLUPipeline):
        """For every component_to_resolve in the pipeline"""
        # Gather what the pipe currently provides/requires, split into storage-ref-free
        # features and storage-ref-annotated embedding features.
        provided_features_no_ref = ComponentUtils.clean_irrelevant_features(
            PipelineCompleter.extract_provided_features_refless_from_pipe(pipe))
        required_features_no_ref = ComponentUtils.clean_irrelevant_features(
            PipelineCompleter.extract_required_features_refless_from_pipe(pipe))
        provided_features_ref = ComponentUtils.clean_irrelevant_features(
            PipelineCompleter.extract_provided_features_ref_from_pipe(pipe))
        required_features_ref = ComponentUtils.clean_irrelevant_features(
            PipelineCompleter.extract_required_features_ref_from_pipe(pipe))
        is_trainable = PipeUtils.is_trainable_pipe(pipe)
        conversion_candidates = PipelineCompleter.extract_sentence_embedding_conversion_candidates(pipe)
        pipe.has_trainable_components = is_trainable
        required_features_ref, conversion_candidates = PipeUtils.remove_convertable_storage_refs(
            required_features_ref, conversion_candidates, provided_features_ref)
        provided_features_ref, required_features_ref = PipeUtils.update_converter_storage_refs_and_cols(
            pipe, provided_features_ref, required_features_ref)

        if is_trainable:
            trainable_index, embed_type = PipeUtils.find_trainable_embed_consumer(pipe)
            required_features_ref = []
            if embed_type is not None:
                # After resolve for a word embedding ,we must fix all NONES and set their storage refs !
                # embed consuming trainable annotators get their storage ref set here
                if len(provided_features_ref) == 0:
                    required_features_no_ref.append(embed_type)
                    if embed_type == NLP_FEATURES.CHUNK_EMBEDDINGS:
                        required_features_no_ref.append(NLP_FEATURES.WORD_EMBEDDINGS)
                if len(provided_features_ref) >= 1 and embed_type == NLP_FEATURES.CHUNK_EMBEDDINGS:
                    # This case is for when 1 Embed is preloaded and we still need to load the converter
                    if any(NLP_FEATURES.WORD_EMBEDDINGS in c for c in provided_features_ref):
                        required_features_no_ref.append(embed_type)
                if len(provided_features_ref) >= 1:
                    # TODO Appraoches / Trainable models have no setStorageRef, we must set it after fitting
                    pipe.components[trainable_index].storage_ref = provided_features_ref[0].split('@')[-1]

        missing_features_no_ref = set(required_features_no_ref) - set(
            provided_features_no_ref)  # - set(['text','label'])
        missing_features_ref = set(required_features_ref) - set(provided_features_ref)
        PipelineCompleter.log_resolution_status(provided_features_no_ref,
                                                required_features_no_ref,
                                                provided_features_ref,
                                                required_features_ref,
                                                is_trainable,
                                                conversion_candidates,
                                                missing_features_no_ref,
                                                missing_features_ref,
                                                )
        return missing_features_no_ref, missing_features_ref, conversion_candidates

    @staticmethod
    def add_sentence_embedding_converter(resolution_data: StorageRefConversionResolutionData) -> NluComponent:
        """ Return a Word to Sentence Embedding converter for a given Component. The input cols with match the Sentence Embedder ones
            The converter is a NLU Component Embelishement of the Spark NLP Sentence Embeddings Annotator
        """
        logger.info(f'Adding Sentence embedding conversion for Embedding Provider={resolution_data}')
        word_embedding_provider = resolution_data.component_candidate
        c = jsl_id_to_empty_component(NLP_NODE_IDS.SENTENCE_EMBEDDINGS_CONVERTER)
        storage_ref = StorageRefUtils.extract_storage_ref(word_embedding_provider)
        c.set_metadata(c.get_default_model(), 'sentence_embedding_converter',
                       NLP_NODE_IDS.SENTENCE_EMBEDDINGS_CONVERTER, 'xx', False, Licenses.open_source, storage_ref)
        c.model.setStorageRef(storage_ref)
        # set output cols
        embed_AT_out = NLP_FEATURES.SENTENCE_EMBEDDINGS + '@' + storage_ref
        c.model.setOutputCol(embed_AT_out)
        c.spark_output_column_names = [embed_AT_out]
        c.spark_input_column_names = [NLP_FEATURES.DOCUMENT, NLP_FEATURES.WORD_EMBEDDINGS + '@' + storage_ref]
        c.model.setInputCols(c.spark_input_column_names)
        return c

    @staticmethod
    def add_chunk_embedding_converter(resolution_data: StorageRefConversionResolutionData) -> NluComponent:
        """ Return a Word to CHUNK Embedding converter for a given Component. The input cols with match the Sentence Embedder ones
            The converter is a NLU Component Embelishement of the Spark NLP Sentence Embeddings Annotator
            The CHUNK embedder requires entities and also embeddings to generate data from.
            Since there could be multiple entities generators, we neeed to pass the correct one
        """
        # TODO REFACTOR
        logger.info(f'Adding Chunk embedding conversion  Provider={resolution_data} and NER Converter provider = ')
        word_embedding_provider = resolution_data.component_candidate
        entities_col = 'entities'
        # NOTE(review): this method reads/writes via `c.info.*` and `word_embedding_provider.info.*`,
        # unlike add_sentence_embedding_converter which sets the same attributes directly on the
        # component — confirm which access pattern is current; one of them looks stale.
        embed_provider_col = word_embedding_provider.info.spark_output_column_names[0]
        c = jsl_id_to_empty_component(NLP_NODE_IDS.CHUNK_EMBEDDINGS_CONVERTER)
        c.set_metadata(c.get_default_model(), NLP_NODE_IDS.CHUNK_EMBEDDINGS_CONVERTER,
                       NLP_NODE_IDS.CHUNK_EMBEDDINGS_CONVERTER, 'xx', False, Licenses.open_source)
        # c = nlu.embeddings_chunker.EmbeddingsChunker(annotator_class='chunk_embedder')
        storage_ref = StorageRefUtils.extract_storage_ref(word_embedding_provider)
        c.model.setStorageRef(storage_ref)
        c.info.storage_ref = storage_ref
        c.model.setInputCols(entities_col, embed_provider_col)
        c.model.setOutputCol('chunk_embeddings@' + storage_ref)
        c.info.spark_input_column_names = [entities_col, embed_provider_col]
        c.info.input_column_names = [entities_col, embed_provider_col]
        c.info.spark_output_column_names = ['chunk_embeddings@' + storage_ref]
        c.info.output_column_names = ['chunk_embeddings@' + storage_ref]
        return c

    @staticmethod
    def check_if_all_conversions_satisfied(components_for_embedding_conversion):
        """Check if all dependencies are satisfied."""
        # Any remaining concrete conversion candidate means a conversion is still pending.
        for conversion in components_for_embedding_conversion:
            if conversion.component_candidate is not None:
                return False
        return True

    @staticmethod
    def check_dependencies_satisfied(missing_components, missing_storage_refs, components_for_embedding_conversion):
        """Check if all dependencies are satisfied."""
        return len(missing_components) == 0 and len(
            missing_storage_refs) == 0 and PipelineCompleter.check_if_all_conversions_satisfied(
            components_for_embedding_conversion)

    @staticmethod
    def has_licensed_components(pipe: NLUPipeline) -> bool:
        """Check if any licensed components in component_list"""
        for c in pipe.components:
            if c.license == Licenses.hc or c.license == Licenses.ocr:
                return True
        return False

    @staticmethod
    def check_same_as_last_iteration(last_missing_components, last_missing_storage_refs,
                                     last_components_for_embedding_conversion,
                                     missing_components, missing_storage_refs, components_for_embedding_conversion):
        # True when the resolution loop made no progress between two iterations.
        return last_missing_components == missing_components and last_missing_storage_refs == missing_storage_refs and last_components_for_embedding_conversion == components_for_embedding_conversion

    @staticmethod
    def except_infinity_loop(reason):
        # Raised when dependency resolution stops converging.
        raise Exception(f"Sorry, nlu has problems building this spell, please report this issue. Problem={reason}")

    @staticmethod
    def satisfy_dependencies(pipe: NLUPipeline):
        """Feature Dependency Resolution Algorithm.
        For a given pipeline with N components, builds a DAG in reverse and satisfy each of their dependencies and child dependencies
         with a BFS approach and returns the resulting pipeline
        :param pipe: Nlu Pipe containing components for which dependencies should be satisfied
        :return: Nlu pipe with dependencies satisfied
        """
        all_features_provided = False
        is_licensed = PipelineCompleter.has_licensed_components(pipe)
        pipe.has_licensed_components = is_licensed
        is_trainable = PipeUtils.is_trainable_pipe(pipe)
        loop_count = 0
        max_loop_count = 5
        while all_features_provided == False:
            # After new components have been added, check again for the new components if requirements are met
            components_to_add = []
            missing_components, missing_storage_refs, components_for_embedding_conversion = \
                PipelineCompleter.get_missing_required_features(pipe)
            if PipelineCompleter.check_dependencies_satisfied(missing_components, missing_storage_refs,
                                                              components_for_embedding_conversion):
                # Now all features are provided
                break

            # Update last iteration variables
            # NOTE(review): last_* are assigned from the *current* iteration's values, so the
            # comparison at the bottom of this loop always matches and loop_count increments on
            # every pass — effectively a max-iteration cap rather than a stagnation check; confirm intent.
            last_missing_components, last_missing_storage_refs, last_components_for_embedding_conversion = \
                missing_components, missing_storage_refs, components_for_embedding_conversion

            # Create missing base storage ref producers, i.e. embeddings
            for missing_component in missing_storage_refs:
                component = resolve_feature(missing_component, language=pipe.lang, is_licensed=is_licensed,
                                            is_trainable_pipe=is_trainable)
                if component is None:
                    continue
                if 'chunk_emb' in missing_component:
                    components_to_add.append(ComponentUtils.config_chunk_embed_converter(component))
                else:
                    components_to_add.append(component)

            # Create missing base components, storage refs are fetched in previous loop
            for missing_component in missing_components:
                components_to_add.append(
                    resolve_feature(missing_component, language=pipe.lang, is_licensed=is_licensed,
                                    is_trainable_pipe=is_trainable))

            # Create embedding converters
            for resolution_info in components_for_embedding_conversion:
                converter = None
                if 'word2chunk' == resolution_info.type:
                    converter = PipelineCompleter.add_chunk_embedding_converter(resolution_info)
                elif 'word2sentence' == resolution_info.type:
                    converter = PipelineCompleter.add_sentence_embedding_converter(resolution_info)
                if converter is not None:
                    components_to_add.append(converter)

            logger.info(f'Resolved for missing components the following NLU components : {components_to_add}')

            # Add missing components
            for new_component in components_to_add:
                if new_component:
                    logger.info(f'adding {new_component.name}')
                    pipe.add(new_component)

            # For some models we update storage ref to the resovling models storageref.
            # We need to update them so dependencies can properly be deducted as satisfied
            pipe = PipeUtils.update_bad_storage_refs(pipe)

            # Check if we are in an infinity loop
            if PipelineCompleter.check_same_as_last_iteration(last_missing_components, last_missing_storage_refs,
                                                              last_components_for_embedding_conversion,
                                                              missing_components, missing_storage_refs,
                                                              components_for_embedding_conversion):
                loop_count += 1
            else:
                loop_count = 0
            if loop_count > max_loop_count:
                PipelineCompleter.except_infinity_loop('Failure resolving feature dependencies')

        logger.info(f"{'!*'*20} ALL DEPENDENCIES SATISFIED {'!*'*20}")
        return pipe

    @staticmethod
    def check_and_fix_component_output_column_name_satisfaction(pipe: NLUPipeline):
        '''
        This function verifies that every input and output column name of a component_to_resolve is satisfied.
        If some output names are missing, it will be added by this method.
        Usually classifiers need to change their input column name, so that it matches one of the previous embeddings because they have dynamic output names
        This function performs the following steps :
        1. For each component_to_resolve we verify that all input column names are satisfied  by checking all other components output names
        2. When a input column is missing we do the following :
        2.1 Figure out the type of the missing input column. The name of the missing column should be equal to the type
        2.2 Check if there is already a component_to_resolve in the component_list, which provides this input (It should)
        2.3. When A providing component_to_resolve is found, check if storage ref matches up.
2.4 If True for all, update provider component_to_resolve output name, or update the original component_to_resolve input name :return: NLU pipeline where the output and input column names of the models have been adjusted to each other ''' logger.info("Fixing input and output column names") for component_to_check in pipe.components: if component_to_check.loaded_from_pretrained_pipe: continue input_columns = set(component_to_check.spark_input_column_names) # a component_to_resolve either has '' storage ref or at most 1 logger.info( f'Checking for component_to_resolve {component_to_check.name} wether inputs {input_columns} is satisfied by another component_to_resolve in the component_list ', ) for other_component in pipe.components: if component_to_check.name == other_component.name: continue output_columns = set(other_component.spark_output_column_names) input_columns -= output_columns # remove provided columns input_columns = ComponentUtils.clean_irrelevant_features(input_columns) # Resolve basic mismatches, usually storage refs if len(input_columns) != 0 and not pipe.has_trainable_components or ComponentUtils.is_embedding_consumer( component_to_check): # fix missing column name # We must not only check if input satisfied, but if storage refs match! 
and Match Storage_refs accordingly logger.info(f"Fixing bad input col for C={component_to_check} untrainable component_list") resolved_storage_ref_cols = [] for missing_column in input_columns: for other_component in pipe.components: if component_to_check.name == other_component.name: continue if other_component.type == missing_column: # We update the output name for the component_to_resolve which consumes our feature if StorageRefUtils.has_storage_ref( other_component) and ComponentUtils.is_embedding_provider(component_to_check): if ComponentUtils.are_producer_consumer_matches(component_to_check, other_component): resolved_storage_ref_cols.append( (other_component.spark_output_column_names[0], missing_column)) component_to_check.spark_output_column_names = [missing_column] logger.info( f'Resolved requirement for missing_column={missing_column} with inputs from provider={other_component.name} by col={missing_column} ') other_component.model.setOutputCol(missing_column) for resolution, unsatisfied in resolved_storage_ref_cols: component_to_check.spark_input_column_names.remove(unsatisfied) component_to_check.spark_input_column_names.append(resolution) # Resolve training missmatches elif len(input_columns) != 0 and pipe.has_trainable_components: # fix missing column name logger.info(f"Fixing bad input col for C={component_to_check} trainable component_list") # for trainable components, we change their input columns and leave other components outputs unchanged for missing_column in input_columns: for other_component in pipe.components: if component_to_check.name == other_component.name: continue if other_component.type == missing_column: # We update the input col name for the componenet that has missing cols component_to_check.spark_input_column_names.remove(missing_column) component_to_check.spark_input_column_names.append( other_component.spark_output_column_names[0]) component_to_check.model.setInputCols( component_to_check.spark_input_column_names) logger.info( 
f'Setting input col columns for component_to_resolve {component_to_check.name} to {other_component.spark_output_column_names[0]} ') return pipe @staticmethod def check_and_fix_nlu_pipeline(pipe: NLUPipeline): """Check if the NLU pipeline is ready to transform data and return it. If all dependencies not satisfied, returns a new NLU pipeline where dependencies and sub-dependencies are satisfied. Checks and resolves in the following order : 1. Get a reference list of input features missing for the current component_list 2. Resolve the list of missing features by adding new Annotators to component_list 3. Add NER Converter if required (When there is a NER model_anno_obj) 4. Fix order and output column names 5. :param pipe: :return: """ # main entry point for Model stacking withouth pretrained pipelines # requirements and provided features will be lists of lists # 0. Clean old @AT storage ref from all columns # logger.info('Cleaning old AT refs') # pipe = PipeUtils.clean_AT_storage_refs(pipe) # 1. Resolve dependencies, builds a DAG in reverse and satisfies dependencies with a Breadth-First-Search approach # 0. Write additional metadata to the pipe pre pipe construction pipe = PipeUtils.add_metadata_to_pipe(pipe) logger.info('Satisfying dependencies') pipe = PipelineCompleter.satisfy_dependencies(pipe) # 2. Enforce naming schema <col_name>@<storage_ref> for storage_ref consumers and producers and <entity@nlu_ref> and <ner@nlu_ref> for NER and NER-Converters # and add NER-IOB to NER-Pretty converters for every NER model_anno_obj that is not already feeding a NER converter pipe = PipeUtils.enforce_AT_schema_on_pipeline_and_add_NER_converter(pipe) # 2.1 If Sentence Resolvers are in pipeline, all Sentence-Embeddings must feed from Chunk2Doc which stems from the entities column to resolve pipe = PipelineCompleter.enforce_chunk2doc_on_sentence_embeddings(pipe) # 3. 
Validate naming of output columns is correct and no error will be thrown in spark logger.info('Fixing column names') pipe = PipelineCompleter.check_and_fix_component_output_column_name_satisfaction(pipe) # 4. Set on every NLP Annotator the output columns pipe = PipeUtils.enforce_NLU_columns_to_NLP_columns(pipe) # 5. fix order logger.info('Optimizing component_list component_to_resolve order') pipe = PipelineCompleter.check_and_fix_component_order(pipe) # 6. Rename overlapping/duplicate leaf columns in the DAG logger.info('Renaming duplicates cols') pipe = PipeUtils.rename_duplicate_cols(pipe) # 7. enfore again because trainable pipes might mutate component_list cols pipe = PipeUtils.enforce_NLU_columns_to_NLP_columns(pipe) # 8. Write additional metadata to the pipe post pipe construction pipe = PipeUtils.add_metadata_to_pipe(pipe) # 9. For Table-QA based Pipes, we Inject a SetenceDetector for tokenizing the questions pipe = PipeUtils.add_sentence_detector_to_pipe_if_required(pipe) logger.info('Done with component_list optimizing') return pipe @staticmethod def check_and_fix_component_order(pipe: NLUPipeline): ''' This method takes care that the order of components is the correct in such a way,that the pipeline can be iteratively processed by spark NLP. Column Names will not be touched. DAG Task Sort basically. ''' logger.info("Starting to optimize component_to_resolve order ") correct_order_component_pipeline = [] provided_features = [] all_components_ordered = False unsorted_components = pipe.components update_last_type = False last_type_sorted = None trainable_updated = False pipe.components = sorted(pipe.components, key=lambda x: x.type) if not pipe.contains_ocr_components: # if OCR we must take text sorting into account. 
Non-OCR pipes get text provided externalyl provided_features.append('text') if pipe.contains_audio_components: provided_features.append(NLP_FEATURES.RAW_AUDIO) loop_count = 0 max_loop_count = 10 * len(pipe.components) last_correct_order_component_pipeline = [] last_provided_features = [] while not all_components_ordered: if update_last_type: last_type_sorted = None else: update_last_type = True for component in unsorted_components: logger.info(f"Optimizing order for component_to_resolve {component.name}") input_columns = ComponentUtils.remove_storage_ref_from_features( ComponentUtils.clean_irrelevant_features(component.spark_input_column_names.copy(), False, False)) if last_type_sorted is None or component.type == last_type_sorted: if set(input_columns).issubset(provided_features): correct_order_component_pipeline.append(component) # Leave pretrained component_list components untouched if component.loaded_from_pretrained_pipe: unsorted_components.remove(component) if component in unsorted_components: unsorted_components.remove(component) # TODO remove storage ref from provided features ? provided_features += ComponentUtils.remove_storage_ref_from_features( ComponentUtils.clean_irrelevant_features(component.spark_output_column_names.copy(), False, False)) last_type_sorted = component.type update_last_type = False break if len(unsorted_components) == 0: all_components_ordered = True if not all_components_ordered and len( unsorted_components) <= 2 and pipe.has_trainable_components and not trainable_updated and \ unsorted_components[0].trainable and 'sentence_embeddings@' in unsorted_components[ 0].spark_input_column_names: # special case, if trainable then we feed embed consumers on the first sentence embed provider # 1. Find first sent embed provider # 2. 
substitute any 'sent_embed@' consumer inputs for the provider col for f in provided_features: if 'sentence_embeddings' in f and not trainable_updated: unsorted_components[0].spark_input_column_names.remove('sentence_embeddings@') if 'sentence_embeddings@' in unsorted_components[0].spark_input_column_names: unsorted_components[0].spark_input_column_names.remove('sentence_embeddings@') unsorted_components[0].spark_input_column_names.append(f) if f not in unsorted_components[0].spark_input_column_names: unsorted_components[ 0].spark_input_column_names.append(f) trainable_updated = True if not all_components_ordered and len( unsorted_components) <= 2 and pipe.has_trainable_components and not trainable_updated and \ unsorted_components[0].trainable and 'word_embeddings@' in unsorted_components[ 0].spark_input_column_names: # special case, if trainable then we feed embed consumers on the first sentence embed provider # 1. Find first sent embed provider # 2. substitute any 'sent_embed@' consumer inputs for the provider col for f in provided_features: if 'word_embeddings' in f and not trainable_updated: unsorted_components[0].spark_input_column_names.remove('word_embeddings@') if 'word_embeddings@' in unsorted_components[0].spark_input_column_names: unsorted_components[ 0].spark_input_column_names.remove( 'word_embeddings@') unsorted_components[0].spark_input_column_names.append(f) if f not in unsorted_components[0].spark_input_column_names: unsorted_components[ 0].spark_input_column_names.append(f) trainable_updated = True # detect endless loop if last_correct_order_component_pipeline == correct_order_component_pipeline and last_provided_features == provided_features: loop_count += 1 else: loop_count = 0 if loop_count > max_loop_count: PipelineCompleter.except_infinity_loop('Failure sorting dependencies') last_provided_features = provided_features.copy() # correct_order_component_pipeline = last_correct_order_component_pipeline.copy() last_correct_order_component_pipeline 
= correct_order_component_pipeline.copy() pipe.components = correct_order_component_pipeline return pipe @staticmethod def is_storage_ref_match(embedding_consumer, embedding_provider, pipe: NLUPipeline): """Check for 2 components, if one provides the embeddings for the other. Makes sure that pipe_prediction_output_level matches up (chunk/sent/tok/embeds)""" consumer_AT_ref = ComponentUtils.extract_storage_ref_AT_notation_for_embeds(embedding_consumer, 'input') provider_AT_rev = ComponentUtils.extract_storage_ref_AT_notation_for_embeds(embedding_provider, 'output') consum_level = ComponentUtils.extract_embed_level_identity(embedding_consumer, 'input') provide_level = ComponentUtils.extract_embed_level_identity(embedding_provider, 'output') consumer_ref = StorageRefUtils.extract_storage_ref(embedding_consumer) provider_rev = StorageRefUtils.extract_storage_ref(embedding_provider) # input/output levels must match if consum_level != provide_level: return False # If storage ref dont match up, we must consult the storage_ref_2_embed mapping if it still maybe is a match, otherwise it is not. if consumer_ref == provider_rev: return True # Embed Components have have been resolved via@ have a nlu_resolution_ref_source will match up with the consumer ref if correct embedding. if hasattr(embedding_provider.info, 'nlu_ref'): if consumer_ref == StorageRefUtils.extract_storage_ref(embedding_provider.info.nlu_ref): return True # If it is either sentence_embedding_converter or chunk_embedding_converter then we gotta check what the storage ref of the inpot of those is. 
# If storage ref matches up, the providers output will match the consumer # if embedding_provider if embedding_provider.info.name in ["chunk_embedding_converter", 'sentence_embedding_converter']: # TODO FOR RESOLUTION nlu_ref, conv_prov_storage_ref = PipelineCompleter.get_converters_provider_info(embedding_provider, pipe) return False @staticmethod def is_matching_level(embedding_consumer, embedding_provider): """Check for embedding consumer if input level matches up outputlevel of consumer """ @staticmethod def get_converters_provider_info(embedding_provider, pipe: NLUPipeline): """For a component_to_resolve and a component_list, find storage_ref and """ @staticmethod def enforce_chunk2doc_on_sentence_embeddings(pipe: NLUPipeline): """ #If Sentence Resolvers are in pipeline, all Sentence-Embeddings must feed from Chunk2Doc which stems from the entities column to resolve We need to update input/output types of sentence Resolver, to the component_to_resolve so sorting does not get confused """ if not pipe.has_licensed_components: return pipe resolvers = [] ner_converters = [] sentence_embeddings = [] # Find Resolver for i, c in enumerate(pipe.components): if c.loaded_from_pretrained_pipe: continue # if isinstance(c.model_anno_obj, SentenceEntityResolverModel): resolvers.append(c) # if isinstance(c.model_anno_obj, (NerConverter, NerConverterInternal)): ner_converters.append(c) # if 'sentence_embeddings' == c.info.type: sentence_embeddings.append(c) if c.name == NLP_HC_NODE_IDS.SENTENCE_ENTITY_RESOLVER: resolvers.append(c) if c.name in [NLP_NODE_IDS.NER_CONVERTER, NLP_HC_NODE_IDS.NER_CONVERTER_INTERNAL]: ner_converters.append(c) if c.type == AnnoTypes.DOCUMENT_EMBEDDING or c.type == AnnoTypes.SENTENCE_EMBEDDING: sentence_embeddings.append(c) # No resolvers, nothing to update if len(resolvers) == 0: return pipe # Update Resolver # TODO this does not work in multi resolver scenarios reliably if NLP_FEATURES.DOCUMENT in sentence_embeddings[0].in_types: 
sentence_embeddings[0].in_types.remove(NLP_FEATURES.DOCUMENT) if NLP_FEATURES.SENTENCE in sentence_embeddings[0].in_types: sentence_embeddings[0].in_types.remove(NLP_FEATURES.SENTENCE) if NLP_FEATURES.DOCUMENT in sentence_embeddings[0].spark_input_column_names: sentence_embeddings[0].spark_input_column_names.remove(NLP_FEATURES.DOCUMENT) if NLP_FEATURES.SENTENCE in sentence_embeddings[0].spark_input_column_names: sentence_embeddings[0].spark_input_column_names.remove(NLP_FEATURES.SENTENCE) sentence_embeddings[0].in_types.append(NLP_FEATURES.DOCUMENT_FROM_CHUNK) sentence_embeddings[0].spark_input_column_names.append(NLP_FEATURES.DOCUMENT_FROM_CHUNK) # sentence_embeddings[0].info.inputs = ['chunk2doc'] # sentence_embeddings[0].info.spark_input_column_names = ['chunk2doc'] # sentence_embeddings[0].model_anno_obj.setInputCols('chunk2doc') # shouldb e handled by enforcing # chunk2doc.model_anno_obj.setOutputCol("chunk2doc") # chunk2doc.info.inputs = ner_converters[0].spark_output_column_names # TODO this will not be resolved by the resolution Algo!! 
chunk2doc = resolve_feature(NLP_FEATURES.DOCUMENT_FROM_CHUNK, 'xx') chunk2doc.model.setInputCols(ner_converters[0].spark_output_column_names) chunk2doc.spark_input_column_names = ner_converters[0].spark_output_column_names pipe.components.append(chunk2doc) # this will add a entity converter and a NER model_anno_obj if none provided pipe = PipelineCompleter.satisfy_dependencies(pipe) return pipe @staticmethod def log_resolution_status(provided_features_no_ref, required_features_no_ref, provided_features_ref, required_features_ref, is_trainable, conversion_candidates, missing_features_no_ref, missing_features_ref, ): logger.info(f"========================================================================") logger.info(f"Resolution Status provided_features_no_ref = {set(provided_features_no_ref)}") logger.info(f"Resolution Status required_features_no_ref = {set(required_features_no_ref)}") logger.info(f"Resolution Status provided_features_ref = {set(provided_features_ref)}") logger.info(f"Resolution Status required_features_ref = {set(required_features_ref)}") logger.info(f"Resolution Status is_trainable = {is_trainable}") logger.info(f"Resolution Status conversion_candidates = {conversion_candidates}") logger.info(f"Resolution Status missing_features_no_ref = {set(missing_features_no_ref)}") logger.info(f"Resolution Status conversion_candidates = {set(missing_features_ref)}") logger.info(f"========================================================================")
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/pipe_logic.py
pipe_logic.py
from nlu.pipe.extractors.extractor_methods.base_extractor_methods import *
from nlu.pipe.extractors.extractor_methods.helper_extractor_methods import *

"""
This file contains methods to get pre-defined configurations for every annotator.
Extractor_resolver.py should be used to resolve SparkNLP Annotator classes to methods in this file,
which return the corresponding configs that need to be passed to the master_extractor() call.

This file is where all the in extractor_base_data_classes.py Dataclasses are combined with the
extractors defined in extractor_methods.py.
"""


def default_get_nothing(output_col_prefix):
    """Config that extracts nothing. Useful for annotators with irrelevant data."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        name='nothing_extractor',
        description='Extracts nothing. Useful for annotators with irrelevant data'
    )


def default_only_result_config(output_col_prefix):
    """Config that extracts only the annotation's result field."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        name='Default result extractor',
        description='Just gets the result field'
    )


def default_full_config(output_col_prefix='DEFAULT'):
    """Config that keeps all annotation data and every metadata field."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_positions=True,
        get_begin=True,
        get_end=True,
        get_embeds=True,
        get_result=True,
        get_meta=True,
        get_full_meta=True,
        get_annotator_type=True,
        name='default_full',
        description='Default full configuration, keeps all data and gets all metadata fields',
    )


def default_NER_converter_licensed_config(output_col_prefix='entities'):
    """Extracts NER tokens without positions: the converted IOB tags, confidences and classified tokens."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_meta=True,
        meta_white_list=['entity', 'confidence', 'sentence', 'chunk'],
        name='default_ner',
        description='Converts IOB-NER representation into entity representation and generates confidences for the entire entity chunk',
    )


def default_chunk_mapper_config(output_col_prefix='mapped_entity'):
    """Extracts ChunkMapper results plus relation metadata (relation, all_relations, chunk, entity, sentence)."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_meta=True,
        meta_white_list=['relation', 'all_relations', 'chunk', 'entity', 'sentence'],
        # MAYBE DROP 'chunk', 'entity'default_chunk_mapper_config, sentence
        name='default_ner',  # NOTE(review): copy-pasted label; kept for backward compatibility
        meta_data_extractor=SparkNLPExtractor(extract_chunk_mapper_relation_data,
                                              'Get ChunkMapper Relation Metadata',
                                              'Get ChunkMapper Relation Metadata'),
        description='Extract Chunk Mapper with relation Data',
    )


def default_chunk_resolution_config(output_col_prefix='resolved_entities'):
    """Extracts entity-resolution results with confidence and resolved_text metadata only."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_meta=True,
        meta_white_list=['confidence', 'resolved_text'],  # sentence, chunk
        name='default_ner',  # NOTE(review): copy-pasted label; kept for backward compatibility
    )


def full_resolver_config(output_col_prefix='DEFAULT'):
    """Full resolver outputs; any _k_ field in the metadata dict is split on ':::'."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_positions=True,
        get_begin=True,
        get_end=True,
        get_embeds=True,
        get_result=True,
        get_meta=True,
        get_full_meta=True,
        get_annotator_type=True,
        name='default_full',
        description='Full resolver outputs, with any _k_ field in the metadata dict splitted :::',
        meta_data_extractor=SparkNLPExtractor(extract_resolver_all_k_subfields_splitted,
                                              'Splits all _k_ fields on :::d returns all other fields as corrosponding to pop config',
                                              'split all _k_ fields')
    )


# NOTE(review): function name contains a typo ('conifg'); it is public API, so it is kept as-is.
def resolver_conifg_with_metadata(output_col_prefix='DEFAULT'):
    """Resolver outputs with full metadata; any _k_ field in the metadata dict is split on ':::'."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_meta=True,
        get_result=True,
        get_full_meta=True,
        name='with metadata',
        description='Full resolver outputs, with any _k_ field in the metadata dict splitted :::',
        meta_data_extractor=SparkNLPExtractor(extract_resolver_all_k_subfields_splitted,
                                              'Splits all _k_ fields on :::d returns all other fields as corrosponding to pop config',
                                              'split all _k_ fields')
    )


def default_relation_extraction_positional_config(output_col_prefix='extracted_relations'):
    """Extracts relation-extraction results with all metadata, including entity positions."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_meta=True,
        get_full_meta=True,
        name='full_relation_extraction',
        description='Get relation extraction result and all metadata, with positions of entities',
    )


def default_relation_extraction_config(output_col_prefix='extracted_relations'):
    """Extracts relation-extraction results with metadata, excluding entity position fields."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        meta_white_list=[],
        get_meta=True,
        meta_black_list=['entity1_begin', 'entity2_begin', 'entity1_end', 'entity2_end', ],
        name='default_relation_extraction',
        description='Get relation extraction result and all metadata, positions of entities excluded',
    )


def default_de_identification_config(output_col_prefix='de_identified'):
    """Extracts only the de-identified result text."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        # NOTE(review): name/description look copy-pasted from relation extraction; kept for compatibility.
        name='positional_relation_extraction',
        description='Get relation extraction result and all metadata, which will include positions of entities chunks',
    )


def default_assertion_config(output_col_prefix='assertion'):
    """Extracts the assertion result and its confidence."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        name='default_assertion_extraction',
        get_meta=True,
        meta_white_list=['confidence'],
        description='Gets the assertion result and confidence',
    )


# FIX: default_ner_config was defined twice; the first definition (returning
# default_only_result_config) was dead code, silently shadowed by this one.
# The dead duplicate has been removed; runtime behavior is unchanged.
def default_ner_config(output_col_prefix='med_ner'):
    """Extracts nothing for the raw medical-NER column (the NER converter output is used instead)."""
    return default_get_nothing(output_col_prefix)


def default_feature_assembler_config(output_col_prefix='feature_assembler'):
    """Extracts nothing for the feature assembler (intermediate representation only)."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=False,
        name='features_assembled',
        get_meta=False,
        description='Gets nothing',
    )


def default_generic_classifier_config(output_col_prefix='generic_classifier'):
    """Extracts the generic-classifier result and its confidence."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        name='generic_classifier',
        get_meta=True,
        meta_white_list=['confidence'],
        description='Gets the result and confidence',
    )
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/extractors/extractor_configs_HC.py
extractor_configs_HC.py
# extraction_resolver_OS.py
# Resolves open-source Spark NLP annotator classes to the extractor-config
# factory functions defined in extractor_configs_OS.py. For every annotator
# class, 'default' is the config normally applied and 'default_full' keeps
# every field plus all metadata; some annotators expose extra levels (e.g.
# 'meta'). An empty-string value marks an annotator whose default config is
# still TODO.
from sparknlp.annotator import *
from sparknlp.base import *
from nlu.pipe.extractors.extractor_configs_OS import *

OS_anno2config = {
    NerConverter: {'default': default_ner_converter_config, 'default_full': default_full_config, },
    MultiClassifierDLModel: {'default': default_multi_classifier_dl_config, 'default_full': default_full_config, },
    PerceptronModel: {'default': default_POS_config, 'default_full': default_full_config, },
    ClassifierDLModel: {'default': default_classifier_dl_config, 'default_full': default_full_config, },
    BertEmbeddings: {'default': default_word_embedding_config, 'default_full': default_full_config, },
    BertForSequenceClassification: {'default': default_classifier_dl_config, 'default_full': default_full_config, },
    DistilBertForSequenceClassification: {'default': default_classifier_dl_config, 'default_full': default_full_config, },
    BertForTokenClassification: {'default': default_token_classifier_config, 'default_full': default_full_config, },
    DistilBertForTokenClassification: {'default': default_token_classifier_config, 'default_full': default_full_config, },
    RoBertaForTokenClassification: {'default': default_token_classifier_config, 'default_full': default_full_config, },
    XlmRoBertaForTokenClassification: {'default': default_token_classifier_config, 'default_full': default_full_config, },
    XlnetForTokenClassification: {'default': default_token_classifier_config, 'default_full': default_full_config, },
    AlbertForTokenClassification: {'default': default_token_classifier_config, 'default_full': default_full_config, },
    LongformerForTokenClassification: {'default': default_token_classifier_config, 'default_full': default_full_config, },
    LongformerEmbeddings: {'default': default_word_embedding_config, 'default_full': default_full_config, },
    AlbertEmbeddings: {'default': default_word_embedding_config, 'default_full': default_full_config, },
    XlnetEmbeddings: {'default': default_word_embedding_config, 'default_full': default_full_config, },
    RoBertaEmbeddings: {'default': default_word_embedding_config, 'default_full': default_full_config, },
    XlmRoBertaEmbeddings: {'default': default_word_embedding_config, 'default_full': default_full_config, },
    DistilBertEmbeddings: {'default': default_word_embedding_config, 'default_full': default_full_config, },
    WordEmbeddingsModel: {'default': default_word_embedding_config, 'default_full': default_full_config, },
    ElmoEmbeddings: {'default': default_word_embedding_config, 'default_full': default_full_config, },
    BertSentenceEmbeddings: {'default': default_sentence_embedding_config, 'default_full': default_full_config, },
    XlmRoBertaSentenceEmbeddings: {'default': default_sentence_embedding_config, 'default_full': default_full_config, },
    Doc2VecModel: {'default': default_sentence_embedding_config, 'default_full': default_full_config, },
    Doc2VecApproach: {'default': default_sentence_embedding_config, 'default_full': default_full_config, },
    UniversalSentenceEncoder: {'default': default_sentence_embedding_config, 'default_full': default_full_config, },
    SentenceEmbeddings: {'default': default_sentence_embedding_config, 'default_full': default_full_config, },
    Tokenizer: {'default': default_tokenizer_config, 'default_full': default_full_config, },
    TokenizerModel: {'default': default_tokenizer_config, 'default_full': default_full_config, },
    RegexTokenizer: {'default': default_tokenizer_config, 'default_full': default_full_config, },
    DocumentAssembler: {'default': default_document_config, 'default_full': default_full_config, },
    SentenceDetectorDLModel: {'default': default_sentence_detector_DL_config, 'default_full': default_full_config, },
    SentenceDetector: {'default': default_sentence_detector_config, 'default_full': default_full_config, },
    ContextSpellCheckerModel: {'default': default_spell_context_config, 'default_full': default_full_config, },
    SymmetricDeleteModel: {'default': default_spell_symmetric_config, 'default_full': default_full_config, },
    NorvigSweetingModel: {'default': default_spell_norvig_config, 'default_full': default_full_config, },
    LemmatizerModel: {'default': default_lemma_config, 'default_full': default_full_config, },
    Normalizer: {'default': default_norm_config, 'default_full': default_full_config, },
    NormalizerModel: {'default': default_norm_config, 'default_full': default_full_config, },
    DocumentNormalizer: {'default': default_norm_document_config, 'default_full': default_full_config, },
    Stemmer: {'default': default_stemm_config, 'default_full': default_full_config, },
    NerDLModel: {'default': default_NER_config, 'meta': meta_NER_config, 'default_full': default_full_config, },
    NerCrfModel: {'default': '',  # TODO
                  'default_full': default_full_config, },
    LanguageDetectorDL: {'default': default_lang_classifier_config, 'default_full': default_full_config, },
    DependencyParserModel: {'default': default_dep_untyped_config, 'default_full': default_full_config, },
    TypedDependencyParserModel: {'default': default_dep_typed_config, 'default_full': default_full_config, },
    SentimentDLModel: {'default': default_sentiment_dl_config, 'default_full': default_full_config, },
    SentimentDetectorModel: {'default': default_sentiment_config, 'default_full': default_full_config, },
    ViveknSentimentModel: {'default': default_sentiment_vivk_config, 'default_full': default_full_config, },
    Chunker: {'default': default_chunk_config, 'default_full': default_full_config, },
    NGramGenerator: {'default': default_ngram_config, 'default_full': default_full_config, },
    ChunkEmbeddings: {'default': default_chunk_embedding_config, 'default_full': default_full_config, },
    StopWordsCleaner: {'default': default_stopwords_config, 'default_full': default_full_config, },
    TextMatcherModel: {'default': '',  # TODO
                       'default_full': default_full_config, },
    TextMatcher: {'default': '',  # TODO
                  'default_full': default_full_config, },
    RegexMatcherModel: {'default': '',  # TODO
                        'default_full': default_full_config, },
    RegexMatcher: {'default': '',  # TODO
                   'default_full': default_full_config, },
    DateMatcher: {'default': '',  # TODO
                  'default_full': default_full_config, },
    MultiDateMatcher: {'default': '',  # TODO
                       'default_full': default_full_config, },
    Doc2Chunk: {'default': default_doc2chunk_config, 'default_full': default_full_config, },
    Chunk2Doc: {'default': '',  # TODO
                'default_full': default_full_config, },
    T5Transformer: {'default': default_T5_config, 'default_full': default_full_config, },
    MarianTransformer: {'default': default_marian_config, 'default_full': default_full_config, },
    YakeKeywordExtraction: {'default': default_yake_config, 'default_full': default_full_config, },
    WordSegmenterModel: {'default': default_word_segmenter_config, 'default_full': default_full_config, },
    # approaches
    ViveknSentimentApproach: {'default': '', 'default_full': default_full_config, },
    SentimentDLApproach: {'default': default_sentiment_dl_config, 'default_full': default_full_config, },
    ClassifierDLApproach: {'default': default_classifier_dl_config, 'default_full': default_full_config, },
    MultiClassifierDLApproach: {'default': default_multi_classifier_dl_config, 'default_full': default_full_config, },
    NerDLApproach: {'default': default_NER_config, 'default_full': default_full_config, },
    PerceptronApproach: {'default': default_POS_config, 'default_full': default_full_config, },
    # PretrainedPipeline : {
    #     'default' : '',
    # }
}
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/extractors/extraction_resolver_OS.py
extraction_resolver_OS.py
from nlu.pipe.extractors.extractor_base_data_classes import SparkNLPExtractor, SparkNLPExtractorConfig
from nlu.pipe.extractors.extractor_methods.helper_extractor_methods import *

"""
This file contains methods to get pre-defined configurations for every annotator.
Extractor_resolver.py should be used to resolve SparkNLP Annotator classes to methods in this file,
which return the corresponding configs that need to be passed to the master_extractor() call.

This file is where the dataclasses from extractor_base_data_classes.py are combined with the
extractors defined in extractor_methods.py.
"""


def default_full_config(output_col_prefix='DEFAULT'):
    """Config that keeps every field (positions, embeddings, result) and all metadata."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_positions=True,
        get_begin=True,
        get_end=True,
        get_embeds=True,
        get_result=True,
        get_meta=True,
        get_full_meta=True,
        get_annotator_type=False,
        name='default_full',
        description='Default full configuration, keeps all data and gets all metadata fields',
    )


def default_document_config(output_col_prefix='document'):
    # Document assembler output: just the result text, unpacked from its one-element list.
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        pop_result_list=True,
        get_result=True,
    )


def default_image_assembler_config(output_col_prefix='image_assembler'):
    # Image assembler: no result payload, only the origin (source path/URI) of the image.
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        pop_result_list=True,
        get_result=False,
        get_origin=True
    )


def default_tapas_config(output_col_prefix='tapas_qa'):
    """Extracts TAPAS QA answers, excluding positions."""
    # NOTE(review): the description string below looks copy-pasted from the
    # NER config — it is runtime metadata, so confirm before changing it.
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_meta=True,
        pop_result_list=False,
        pop_never=True,
        meta_white_list=['question', 'aggregation', 'cell_positions', 'cell_scores'],
        name='tapas_qa',
        description='NER with IOB tags and confidences for them',
    )


def default_NER_config(output_col_prefix='NER'):
    """Extracts NER tokens without positions, just the IOB tags, confidences and classified tokens."""
    # NOTE(review): get_result/get_meta are commented out, so as written this
    # config extracts neither results nor metadata, and meta_white_list only
    # takes effect when get_meta is True — confirm this is intentional.
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        # get_result = True,
        # get_meta = True,
        meta_white_list=['confidence'],
        name='default_ner',
        description='NER with IOB tags and confidences for them',
    )


def meta_NER_config(output_col_prefix='NER'):
    """Extracts NER tokens without positions, just the IOB tags, confidences and classified tokens."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_meta=True,
        meta_white_list=['confidence'],
        name='default_ner',
        description='NER with IOB tags and confidences for them',
    )


def default_language_classifier_config(output_col_prefix='language'):
    # Keeps only the maximum language confidence instead of one value per language.
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_meta=True,
        get_full_meta=True,
        pop_result_list=True,
        name='Only keep maximum language confidence',
        description='Instead of returning the confidence for every language the Classifier was traiend on, only the maximum confidence will be returned',
        meta_data_extractor=SparkNLPExtractor(meta_extract_language_classifier_max_confidence,
                                              'Extract the maximum confidence from all classified languages and drop the others. TODO top k results',
                                              'Keep only top language confidence')
    )


def default_partial_implement_config(output_col_prefix):
    # Placeholder for annotators whose extraction is only partially implemented.
    return default_only_result_config(output_col_prefix)


def default_only_result_config(output_col_prefix):
    """Config that extracts only the annotation result field."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        name='Default result extractor',
        description='Just gets the result field'
    )


def default_only_result_popped_config(output_col_prefix):
    """Like default_only_result_config, but unpacks the one-element result list."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        name='Default result extractor',
        description='Just gets the result field',
        pop_result_list=True,
    )


def default_only_embedding_config(output_col_prefix):
    """Config that extracts only the embeddings field."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_embeds=True,
        name='Default Embed extractor',
        description='Just get the Embed field'
    )


def default_only_result_and_positions_config(output_col_prefix):
    """Config that extracts the result field plus begin/end positions."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_positions=True,
        name='Positional result only default',
        description='Get the result field and the positions'
    )


def default_sentiment_dl_config(output_col_prefix='sentiment_dl'):
    # Keeps only the confidence of the more likely sentiment class.
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_full_meta=True,
        name='Only keep maximum sentiment confidence ',
        description='Instead of r eturning the confidence for Postive and Negative, only the confidence of the more likely class will be returned in the confidence column',
        meta_data_extractor=SparkNLPExtractor(extract_maximum_confidence,
                                              'Instead of returining positive/negative confidence, only the maximum confidence will be returned withouth sentence number reference.',
                                              'Maximum binary confidence')
    )


def default_lang_classifier_config(output_col_prefix='sentiment_dl'):
    # NOTE(review): the default prefix 'sentiment_dl' looks copy-pasted from
    # the sentiment config — verify against callers before renaming it.
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_full_meta=True,
        pop_meta_list=True,
        pop_result_list=True,
        name='default_lang_classifier_config',
        description='Instead of returning the confidence for every language, just returns the confidence of the most likely language',
        meta_data_extractor=SparkNLPExtractor(extract_maximum_confidence,
                                              'Instead of returining positive/negative confidence, only the maximum confidence will be returned withouth sentence number reference.',
                                              'Maximum binary confidence')
    )


def default_sentiment_config(output_col_prefix='sentiment'):
    # Keeps only the confidence of the more likely sentiment class.
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_full_meta=True,
        # pop_result_list = True,
        name='Only keep maximum sentiment confidence ',
        description='Instead of returning the confidence for Postive and Negative, only the confidence of the more likely class will be returned in the confidence column',
        meta_data_extractor=SparkNLPExtractor(extract_maximum_confidence,
                                              'Instead of returining positive/negative confidence, only the maximum confidence will be returned withouth sentence number reference.',
                                              'Maximum binary confidence')
    )


def default_sentiment_vivk_config(output_col_prefix='vivk_sentiment'):
    """Vivekn sentiment: prediction confidence plus the resulting label."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_full_meta=True,
        # pop_result_list = True,
        name='Default sentiment vivk',
        description='Get prediction confidence and the resulting label'
    )


def default_multi_classifier_dl_config(output_col_prefix='classifier_dl'):
    """Multi-label classifier: all predicted confidences and labels."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_full_meta=True,
        name='default_classifier_dl',
        description='Get all predicted confidences and labels',
        pop_never=True,
        meta_data_extractor=SparkNLPExtractor(extract_maximum_confidence,
                                              'Instead returning confidence for each class, only return max confidence',
                                              'Max confidence')
    )


def default_classifier_dl_config(output_col_prefix='classifier_dl'):
    """Single-label classifier: max confidence (all confidences when get_full_meta=True)."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_full_meta=True,
        name='default_classifier_dl',
        description='Get all predicted confidences and labels',
        meta_data_extractor=SparkNLPExtractor(extract_maximum_confidence,
                                              'Instead returning confidence for each class, only return max confidence unless get_full_meta=True',
                                              'Max confidence')
    )


def default_seq_classifier_config(output_col_prefix='classified_sequence '):
    """Sequence classifier: max predicted confidence plus its label."""
    # NOTE(review): the default prefix ends with a trailing space — confirm
    # downstream column naming before fixing it.
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_full_meta=True,
        name='default_seq_classifier',
        description='Get max predicted confidence and label',
        meta_data_extractor=SparkNLPExtractor(extract_maximum_confidence,
                                              'Instead returning confidence for each class, only return max confidence',
                                              'Max confidence')
    )


def default_span_classifier_config(output_col_prefix='answer'):
    """Span (question-answering) classifier: predicted span and its score."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_meta=True,
        name='default_span_classifier_config',
        description='Get predicted span and score for it ',
        meta_white_list=['score'],
        pop_meta_list=True,
        pop_result_list=True,
    )


def default_full_span_classifier_config(output_col_prefix='answer'):
    """Span classifier including begin/end of the span and the scores for them."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        get_meta=True,
        name='default_full_span_classifier_config',
        description='Get predicted span and score for it, including beging and end of span and positions for them ',
        pop_meta_list=True,
        pop_result_list=True,
        meta_white_list=['score', 'start_score', 'end_score', 'start', 'end']
    )


# Result-only configs for annotators whose output is fully described by the result field.
def default_tokenizer_config(output_col_prefix='token'):
    return default_only_result_config(output_col_prefix)


def default_POS_config(output_col_prefix='POS_tag'):
    return default_only_result_config(output_col_prefix)


def default_sentence_detector_DL_config(output_col_prefix='sentence'):
    return default_only_result_config(output_col_prefix)


def default_chunker_config(output_col_prefix='matched_chunk'):
    return default_only_result_config(output_col_prefix)


def default_T5_config(output_col_prefix='T5'):
    return default_only_result_config(output_col_prefix)


def default_marian_config(output_col_prefix='translated'):
    return default_only_result_config(output_col_prefix)


def default_gpt2_config(output_col_prefix='generated'):
    return default_only_result_config(output_col_prefix)


# EMBEDS
def default_sentence_embedding_config(output_col_prefix='sentence_embedding'):
    return default_only_embedding_config(output_col_prefix)


def default_chunk_embedding_config(output_col_prefix='chunk_embedding'):
    return default_only_embedding_config(output_col_prefix)


def default_word_embedding_config(output_col_prefix='word_embedding'):
    return default_only_embedding_config(output_col_prefix)


def default_token_classifier_config(output_col_prefix='token_classifier'):
    return default_only_result_config(output_col_prefix)


# TOKEN CLEANERS
def default_stopwords_config(output_col_prefix='stopwords_removed'):
    return default_only_result_config(output_col_prefix)


def default_lemma_config(output_col_prefix='lemma'):
    return default_only_result_config(output_col_prefix)


def default_stemm_config(output_col_prefix='stemm'):
    return default_only_result_config(output_col_prefix)


def default_norm_config(output_col_prefix='norm'):
    return default_only_result_config(output_col_prefix)


def default_norm_document_config(output_col_prefix='norm_document'):
    return default_only_result_config(output_col_prefix)


def default_sentence_detector_config(output_col_prefix='sentence'):
    return default_only_result_config(output_col_prefix)


def default_dep_untyped_config(output_col_prefix='dependency_untyped'):
    return default_only_result_config(output_col_prefix)


def default_dep_typed_config(output_col_prefix='dependency_typed'):
    return default_only_result_config(output_col_prefix)


def default_spell_norvig_config(output_col_prefix='spell_checked'):
    return default_only_result_config(output_col_prefix)


def default_spell_context_config(output_col_prefix='spell_checked'):
    return default_only_result_config(output_col_prefix)


def default_spell_symmetric_config(output_col_prefix='spell_checked'):
    return default_only_result_config(output_col_prefix)


def default_ngram_config(output_col_prefix='ngram'):
    return default_only_result_config(output_col_prefix)


def default_word_segmenter_config(output_col_prefix='words_segmented'):
    return default_only_result_config(output_col_prefix)


def default_chunk_config(output_col_prefix='matched_chunk'):
    return default_only_result_config(output_col_prefix)


def default_yake_config(output_col_prefix='keywords'):
    """Extracts YAKE keywords with their confidences."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        name='default_yake',
        get_meta=True,
        meta_white_list=['score'],
        description='Get all keywords and their confidences',
        pop_never=True
    )


def default_ner_converter_config(output_col_prefix='ner_chunk'):
    """Extracts the entity labels, which are derived from the IOB tags."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        name='default_ner',
        get_meta=True,
        meta_white_list=['entity', 'confidence', 'sentence', 'chunk', ],
        description='Converts IOB-NER representation into entity representation and generates confidences for the entire entity chunk',
    )


def default_doc2chunk_config(output_col_prefix='doc2chunk'):
    # Pure type conversion (document -> chunk); nothing needs to be extracted.
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=False,
        name='doc2chunk',
        get_meta=False,
        description='Converts Doc type col to chunk aka entity type',
    )


def default_coref_spanbert_config(output_col_prefix='coreferences'):
    """Extracts SpanBERT coreference resolutions with all metadata."""
    return SparkNLPExtractorConfig(
        output_col_prefix=output_col_prefix,
        get_result=True,
        name='default_coref_bert',
        get_meta=True,
        get_full_meta=True,
        meta_white_list=['score'],
        description='Gets all Coreferences',
        # # meta_data_extractor = SparkNLPExtractor(None,extract_coreference_data,
        #                                           'Extract coreference data',
        #                                           'Extract coreference data')
    )
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/extractors/extractor_configs_OS.py
extractor_configs_OS.py
# extraction_resolver_HC.py
# Resolves licensed (healthcare) Spark NLP annotator classes to the
# extractor-config factory functions defined in extractor_configs_HC.py and
# extractor_configs_OS.py. 'default' is the config normally applied,
# 'default_full' keeps every field plus all metadata, and relation-extraction
# annotators also expose a 'positional' level that keeps entity positions.
# An empty-string value marks an annotator whose default config is still TODO.
from nlu.pipe.extractors.extractor_configs_OS import *
from nlu.pipe.extractors.extractor_configs_HC import *
from sparknlp_jsl.annotator import *
from sparknlp_jsl.base import *

HC_anno2config = {
    MedicalNerModel: {'default': default_ner_config,
                      # 'meta': meta_NER_config,
                      'default_full': default_full_config, },
    NerConverterInternal: {'default': default_NER_converter_licensed_config, 'default_full': default_full_config, },
    AssertionDLModel: {'default': default_assertion_config, 'default_full': default_full_config, },
    AssertionLogRegModel: {'default': default_assertion_config, 'default_full': default_full_config, },
    SentenceEntityResolverModel: {'default': default_chunk_resolution_config, 'default_full': default_full_config, },
    SentenceEntityResolverApproach: {'default': default_chunk_resolution_config, 'default_full': default_full_config, },
    # ChunkEntityResolverModel : {
    #     'default': default_chunk_resolution_config,
    #     'default_full' : default_full_config,
    # },
    # ChunkEntityResolverApproach : {
    #     'default': default_chunk_resolution_config,
    #     'default_full' : default_full_config,
    # },
    DeIdentificationModel: {'default': default_de_identification_config, 'default_full': default_full_config, },
    RelationExtractionModel: {'default': default_relation_extraction_config,
                              'positional': default_relation_extraction_positional_config,
                              'default_full': default_full_config, },
    RelationExtractionDLModel: {'default': default_relation_extraction_config,
                                'positional': default_relation_extraction_positional_config,
                                'default_full': default_full_config, },
    Chunk2Token: {'default': '',  # TODO
                  'default_full': default_full_config, },
    ContextualParserModel: {'default': default_full_config,  # TODO
                            'default_full': default_full_config, },
    ContextualParserApproach: {'default': default_full_config,  # TODO
                               'default_full': default_full_config, },
    DrugNormalizer: {'default': default_only_result_config, 'default_full': default_full_config, },
    GenericClassifierModel: {'default': default_generic_classifier_config, 'default_full': default_full_config, },
    GenericClassifierApproach: {'default': default_generic_classifier_config, 'default_full': default_full_config, },
    FeaturesAssembler: {'default': default_feature_assembler_config, 'default_full': default_full_config, },
    ChunkMergeModel: {'default': '',  # TODO
                      'default_full': default_full_config, },
    NerDisambiguatorModel: {'default': '',  # TODO
                            'default_full': default_full_config, },
    RENerChunksFilter: {'default': '',  # TODO
                        'default_full': default_full_config, },
    NerOverwriter: {'default': '',  # TODO
                    'default_full': default_full_config, },
    PosologyREModel: {
        # 'default': '',  # TODO
        'default': default_relation_extraction_positional_config,
        'default_full': default_full_config, }
}
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/extractors/extraction_resolver_HC.py
extraction_resolver_HC.py
"""Dataclasses which represent extractors and their configuration."""
from dataclasses import dataclass, field
from typing import Any, Callable, Dict, List, Optional


@dataclass
class SparkNLPExtractor:
    """Wrap an **extractor_method** together with a name and a description.

    The extractor method is called on every annotator output row: it receives
    the list of metadata dictionaries (List[Dict[str, Any]]) for one row of
    Spark NLP pipeline transformation results and must return a List[Any].
    Instances are used as fields of a SparkNLPExtractorConfig.

    Parameters
    ----------
    extractor_method : Callable[[List[Dict[str, Any]]], List[Any]]
        Applied to every output row's metadata; defaults to the identity.
    description : str
        Human-readable description of what the extractor does.
    name : str
        Short display name.
    extractor_with_result_method : Optional[Callable]
        Optional variant that additionally receives the result annotations.
    """
    extractor_method: Callable[[List[Dict[str, Any]]], List[Any]] = field(repr=False, default=lambda x: x)
    description: str = field(default='')
    name: str = field(default='')
    extractor_with_result_method: \
        Optional[Callable[[List[Dict[str, Any]], List[Dict[str, Any]]], List[Any]]] = field(repr=False, default=None)


# TODO extra config for getting "to which sentence did chunk/x/y/z belong to?"
@dataclass
class SparkNLPExtractorConfig:
    """Universal configuration for what data to extract from a Spark NLP annotator.

    These configs can be passed to any extractor NLU defines for Spark NLP.
    Setting a boolean config to False makes the extractor NOT return that
    field from the annotator outputs. Metadata extractor methods are applied
    after white/black list filtering of the metadata fields.

    Fields
    ------
    output_col_prefix : str
        Prefix used for naming the output columns of this annotator.
    get_positions / get_begin / get_end : bool
        Extract annotation begin/end positions; get_positions implies both.
    get_embeds : bool
        Extract annotation embeddings.
    get_origin : bool
        Extract the origin field (e.g. source file of an image).
    get_result : bool
        Extract annotation results.
    get_meta : bool
        Master switch for metadata: if False, get_full_meta, the white/black
        lists and the metadata extractors are all ignored.
    get_sentence_origin : bool
        Extract which sentence a prediction came from. Irrelevant (keep False)
        when the output level is Document.
    get_full_meta : bool
        Get all keys/values of the metadata map; overrides white/black lists.
    get_annotator_type : bool
        Extract the annotator type.
    pop_result_list / pop_begin_list / pop_end_list / pop_embeds_list / pop_meta_list : bool
        Unpack the corresponding one-element list into its single element in
        the final pandas representation. Only set True for annotators that
        return exactly one element (e.g. document classifiers).
    pop_never : bool
        Never pop any list, regardless of the pop_* flags.
    meta_black_list : List[str]
        Metadata keys to drop. Ignored when meta_white_list is non-empty.
    meta_white_list : List[str]
        Metadata keys to keep; when non-empty, meta_black_list is ignored.
    meta_data_extractor / begin_extractor / end_extractor / result_extractor / embedding_extractor : SparkNLPExtractor
        Extractors applied to the corresponding fields.
    description / name : str
        Human-readable documentation of this config.
    """
    output_col_prefix: str
    get_positions: bool = field(default=False)
    get_begin: bool = field(default=False)
    get_end: bool = field(default=False)
    get_embeds: bool = field(default=False)
    get_origin: bool = field(default=False)
    get_result: bool = field(default=False)
    get_meta: bool = field(default=False)
    # Should extract from which sentence a prediction was generated from.
    # If output level is Document, this field is irrelevant and should be False.
    get_sentence_origin: bool = field(default=False)
    get_full_meta: bool = field(default=False)
    get_annotator_type: bool = field(default=False)
    pop_result_list: bool = field(default=False)  # TODO implement in ex
    pop_begin_list: bool = field(default=False)  # TODO implement in ex
    pop_end_list: bool = field(default=False)  # TODO implement in ex
    pop_embeds_list: bool = field(default=False)  # TODO implement in ex
    pop_meta_list: bool = field(default=False)  # TODO implement in ex
    pop_never: bool = field(default=False)  # never ever pop
    # BUGFIX: these were `field(default=list)`, which makes the default the
    # `list` *type object* rather than an empty list — any membership test on
    # the default value would raise TypeError. default_factory is the correct
    # way to default a dataclass field to a fresh mutable value.
    meta_black_list: List[str] = field(default_factory=list)
    meta_white_list: List[str] = field(default_factory=list)
    meta_data_extractor: SparkNLPExtractor = field(default_factory=SparkNLPExtractor)
    begin_extractor: SparkNLPExtractor = field(default_factory=SparkNLPExtractor)
    end_extractor: SparkNLPExtractor = field(default_factory=SparkNLPExtractor)
    result_extractor: SparkNLPExtractor = field(default_factory=SparkNLPExtractor)
    embedding_extractor: SparkNLPExtractor = field(default_factory=SparkNLPExtractor)
    description: str = field(default='')
    name: str = field(default='')


@dataclass
class SparkOCRExtractorConfig(SparkNLPExtractorConfig):
    """Extends the NLP config with fields specific to Spark OCR annotator outputs."""
    get_text: bool = field(default=False)
    # Image struct fields
    get_image: bool = field(default=False)
    get_image_origin: bool = field(default=False)
    get_image_height: bool = field(default=False)
    get_image_width: bool = field(default=False)
    get_image_n_channels: bool = field(default=False)
    get_image_mode: bool = field(default=False)
    get_image_resolution: bool = field(default=False)
    get_image_data: bool = field(default=False)
    # General OCR fields
    # get_path :bool = field(default = False)  # origin is path
    get_modification_time: bool = field(default=False)
    get_length: bool = field(default=False)
    get_page_num: bool = field(default=False)
    get_confidence: bool = field(default=False)
    get_exception: bool = field(default=False)
    # Position struct fields
    get_img_positions: bool = field(default=False)
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/extractors/extractor_base_data_classes.py
extractor_base_data_classes.py
from functools import reduce

import numpy as np
import pandas as pd
import pyspark
from pyspark.sql import Row as PysparkRow
from sparknlp.annotation import Annotation

from nlu.pipe.extractors.extractor_base_data_classes import *


def extract_light_pipe_rows(df):
    """Convert Light Pipeline Annotations into the same representation the other
    extractors in this module produce: lists of plain dicts.

    Cells that are not lists and list elements that are not Annotations are
    passed through unchanged.
    """

    def anno_to_dict(anno):
        if not isinstance(anno, Annotation):
            return anno
        return dict(annotatorType=anno.annotator_type,
                    begin=anno.begin,
                    end=anno.end,
                    result=anno.result,
                    metadata=anno.metadata,
                    # Fix: guard on the embeddings attribute itself. The original
                    # checked `isinstance(anno, List)`, which is always False in
                    # this branch (anno is an Annotation), so embeddings were
                    # silently always [].
                    embeddings=anno.embeddings if isinstance(anno.embeddings, List) else [])

    convert_cell = lambda row: list(map(anno_to_dict, row)) if isinstance(row, List) else row
    return df.applymap(convert_cell)


def extract_pyspark_rows(r: pd.Series, ) -> pd.Series:
    """Convert pyspark.sql.Row[Annotation] to List[Dict[str, str]] objects.

    Except for key='metadata', every value in each dict is a str; 'metadata'
    maps to Dict[str, str]. Strings, empty lists and lists that do not contain
    PysparkRow elements are returned unchanged.
    First method that runs after the toPandas() call.
    """
    if isinstance(r, str):
        return r
    if isinstance(r, list):
        if len(r) == 0:
            return r
        if isinstance(r[0], PysparkRow):
            return [element.asDict() for element in r]
    return r


def extract_pyarrow_rows(r: pd.Series, ) -> pd.Series:
    """Convert pyarrow-materialized annotation arrays to List[Dict[str, str]] objects.

    Mirrors extract_pyspark_rows for the Arrow-based toPandas() path: checks for
    np.ndarray cells whose first element looks like an annotation dict.
    """
    if isinstance(r, str):
        return r
    if isinstance(r, np.ndarray):
        if len(r) == 0:
            return r
        if isinstance(r[0], dict) and 'annotatorType' in r[0].keys():
            # NOTE(review): only the FIRST element's metadata is converted to a
            # plain dict; later elements keep their original metadata type.
            # Presumably downstream only reads r[0] here — verify against callers.
            r[0]['metadata'] = dict(r[0]['metadata'])
            return r
    return r


def extract_base_sparkocr_features(row: pd.Series, configs: SparkOCRExtractorConfig) -> dict:
    """Extract OCR annotator outputs.

    For now only text-recognizer and visual-classifier outputs are fetched;
    every other OCR field is still TODO and yields an empty dict.
    """
    if configs.name == 'default text recognizer config':
        # Primitive (string) row: the recognized text itself
        if configs.get_text:
            return {'text': row}
    if 'visual_classifier' in configs.name:
        # A visual classifier cell is either the label (str) or the confidence
        if isinstance(row, str):
            return {'visual_classifier_label': row}
        return {'visual_classifier_confidence': row}
    # TODO(WIP): unpack the remaining OCR struct fields (image origin/height/
    # width/nChannels/mode/resolution/data, path, modification_time, length,
    # page_num, confidence, exception, img positions) once implemented.
    return {}


def _collect_anno_field(row, key, pop):
    """Pull `key` out of every annotation dict in `row`.

    When `pop` is True only the first annotation's value is returned (for
    annotators that always emit exactly one result, like many classifiers).
    """
    return next(iter(row))[key] if pop else [anno[key] for anno in row]


def extract_base_sparknlp_features(row: pd.Series, configs: SparkNLPExtractorConfig) -> dict:
    """Extract the base features shared by all Spark NLP annotators:
    begin/end/result/annotator-type/embeddings (metadata is handled separately
    by extract_sparknlp_metadata).

    `row` is a list of annotation dicts produced by extract_pyspark_rows(), e.g.
        [{'annotatorType': 'token', 'begin': 0, 'end': 4,
          'metadata': {'sentence': '0'}, 'embeddings': [], 'result': 'Hello'}]

    Setting the corresponding `pop_*` config flag returns only the first element
    of that field instead of the whole list.

    Returns a dict mapping '<output_col_prefix>_<field>' to the extracted values.
    """
    prefix = configs.output_col_prefix
    beginnings = {prefix + '_beginnings': _collect_anno_field(row, 'begin', configs.pop_begin_list)} \
        if configs.get_begin or configs.get_positions else {}
    endings = {prefix + '_endings': _collect_anno_field(row, 'end', configs.pop_end_list)} \
        if configs.get_end or configs.get_positions else {}
    results = {prefix + '_results': _collect_anno_field(row, 'result', configs.pop_result_list)} \
        if configs.get_result else {}
    annotator_types = {prefix + '_types': [anno['annotatorType'] for anno in row]} \
        if configs.get_annotator_type else {}
    embeddings = {prefix + '_embeddings': _collect_anno_field(row, 'embeddings', configs.pop_embeds_list)} \
        if configs.get_embeds else {}
    origins = {prefix + '_origin': next(iter(row))['origin']} if configs.get_origin else {}
    # Merge dicts NLP output
    return {**beginnings, **endings, **results, **annotator_types, **embeddings, **origins}


def extract_sparknlp_metadata(row: pd.Series, configs: SparkNLPExtractorConfig) -> dict:
    """Extract the metadata dicts of every annotation in `row`.

    Key selection (first match wins):
      * configs.get_full_meta        -> every key of the first annotation's metadata
      * configs.meta_white_list != [] -> only keys on the white list (black list ignored)
      * configs.meta_black_list != [] -> every key NOT on the black list
      * otherwise                    -> nothing
    Keys are assumed identical across all annotations in `row`.

    With configs.pop_meta_list only the first annotation's values are returned
    (scalars); otherwise each key maps to the list of its values over all
    annotations.

    Returns {'meta_<output_col_prefix>_<key>': value(s)}; merged into a
    pd.Series by extract_master during pythonify.
    """
    if len(row) == 0:
        return {}
    meta_dicts = [anno['metadata'] for anno in row]
    # keys are taken from the first annotation; all rows should share them
    first_keys = meta_dicts[0].keys()
    if configs.get_full_meta:
        keys_in_metadata = list(first_keys)
    elif len(configs.meta_white_list) != 0:
        keys_in_metadata = [k for k in first_keys if k in configs.meta_white_list]
    elif len(configs.meta_black_list) != 0:
        keys_in_metadata = [k for k in first_keys if k not in configs.meta_black_list]
    else:
        keys_in_metadata = []
    prefix = 'meta_' + configs.output_col_prefix + '_'
    if configs.pop_meta_list:
        return {prefix + k: meta_dicts[0][k] for k in keys_in_metadata}
    # One list of values per key, collected across all annotations.
    # (Replaces the original reduce()-based accumulation, which rebuilt the
    # list on every step and was O(n^2).)
    return {prefix + k: [m[k] for m in meta_dicts] for k in keys_in_metadata}


def extract_master(row: pd.Series, configs: SparkNLPExtractorConfig) -> pd.Series:
    """Reusable base extractor applied to one annotator output cell.

    `row` is a list of Spark NLP annotations as dicts (see extract_pyspark_rows).
    Extracts the base fields, optionally the metadata, applies the configured
    custom extractor method and returns everything merged into one pd.Series.
    """
    if isinstance(row, pyspark.sql.Row) and len(row) == 0:
        return pd.Series({})
    if isinstance(configs, SparkOCRExtractorConfig):
        base_annos = extract_base_sparkocr_features(row, configs)
    else:
        base_annos = extract_base_sparknlp_features(row, configs)
    # Get Metadata
    all_metas = extract_sparknlp_metadata(row, configs) if configs.get_meta or configs.get_full_meta else {}
    # Apply custom extractor methods
    if configs.meta_data_extractor.name != '':
        if configs.meta_data_extractor.extractor_with_result_method:
            all_metas = configs.meta_data_extractor.extractor_with_result_method(all_metas, base_annos, configs)
        else:
            all_metas = configs.meta_data_extractor.extractor_method(all_metas, configs)
    # Apply Finishers on metadata/additional fields
    return pd.Series({**base_annos, **all_metas})


def apply_extractors_and_merge(df, anno_2_ex_config, keep_stranger_features, stranger_features):
    """Apply extract_master to every annotator output column and merge the
    results into one DataFrame (still a 1-to-1 row mapping).

    :param df: DataFrame holding the raw annotator output columns
    :param anno_2_ex_config: maps column names to extractor configs; columns
        that are neither in this map nor stranger features are dropped here
    :param keep_stranger_features: whether to carry the stranger columns through
    :param stranger_features: names of the pass-through columns
    """
    # TODO handle MULTI-COL-OUTPUT. If an annotator has multiple output columns
    # we either need multiple keys in anno_2_ex_config or something besides
    # anno_2_ex_config.keys(), since it only covers one of the extracted columns.
    extracted = [df[c].apply(extract_master, configs=anno_2_ex_config[c]) for c in anno_2_ex_config.keys()]
    # Fix: the original ternary bound to the WHOLE concatenation list
    # (`a + b if keep_stranger_features else []`), so keep_stranger_features=False
    # discarded the extracted columns too and made pd.concat([]) raise.
    # Only the stranger columns are optional.
    if keep_stranger_features:
        extracted += [df[c] for c in stranger_features]
    return pd.concat(extracted, axis=1)


def pad_same_level_cols(row):
    """Pad every list in `row` (a pd.Series) with np.nan up to the length of the
    longest list, wrapping scalar values into single-element lists first.

    All columns that are exploded together must have the same number of
    elements, otherwise pd.explode misaligns; this equalizes the lengths.
    """
    max_len = 0
    lens = {}
    for c in row.index:
        if isinstance(row[c], list):
            lens[c] = len(row[c])
            if lens[c] > max_len:
                max_len = lens[c]
        else:
            lens[c] = 1
            row[c] = [row[c]]
    for c, length in lens.items():
        if length < max_len:
            row[c] += [np.nan] * (max_len - length)
    return row


def zip_and_explode(df: pd.DataFrame, cols_to_explode: List[str]) -> pd.DataFrame:
    """Return a new DataFrame where each array element of a row in the columns
    of cols_to_explode gets its own row.

    Rows are padded first so that every exploded column of a row has the same
    length (the Spark API pads mismatched lengths with NaN itself; pandas does
    not, so we do it manually). Columns not in cols_to_explode keep their
    values, ending up as lists repeated per exploded row.

    Note: cols_to_explode is filtered IN PLACE, dropping names that are missing
    from df (possible when no data was extracted for them).
    """
    missing = [c for c in cols_to_explode if c not in df.columns]
    for miss in missing:
        cols_to_explode.remove(miss)
    # Drop duplicate cols
    df = df.loc[:, ~df.columns.duplicated()]
    if len(cols_to_explode) > 0:
        df[cols_to_explode] = df[cols_to_explode].apply(pad_same_level_cols, axis=1)
        return pd.concat([df.explode(c)[c] for c in cols_to_explode]
                         + [df.drop(cols_to_explode, axis=1)], axis=1)
    # No padding needed
    return pd.concat([df.drop(cols_to_explode, axis=1)], axis=1)
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/extractors/extractor_methods/base_extractor_methods.py
base_extractor_methods.py
import numpy as np


def meta_extract_language_classifier_max_confidence(row, configs):
    """Extract the language classification with the highest confidence and drop
    the others.

    `row` maps metadata keys to single-element lists of confidence strings, e.g.
    {'meta_lang_sentence': ['0'], 'meta_lang_en': ['0.9'], ...}. Any key that
    contains 'sentence' is scored -1 so it can never win the argmax.

    Returns {'<winning key>_confidence': <scalar confidence string>}.
    """
    # TODO: also support top-K extraction, conditional sentence extraction and more docs
    score = lambda item: -1 if 'sentence' in item[0] else float(item[1][0])
    scores = list(map(score, row.items()))
    winner = list(row.keys())[np.argmax(scores)]
    return {winner + '_confidence': row[winner][0]}  # drop [0] for a list return


def zipp(l):
    """Transpose an iterable of iterables.

    Star-unpacking is not supported inline in comprehensions/generator
    expressions, so callers use this tiny wrapper instead of zip(*...).
    """
    return zip(*l)


def extract_maximum_confidence(row, configs):
    """Extract the maximum confidence from any classifier with N classes.

    A classifier with N classes has N confidences in its metadata by default,
    which is usually too much data. Every metadata key that does NOT contain
    the substring 'sentence' is treated as a confidence key; the 'sentence'
    key maps to sentence IDs.

    Parameters
    ----------
    row : dict
        e.g. {'meta_category_sentence': ['0'], 'meta_category_joy': ['0.99'], ...}
    configs : SparkNLPExtractorConfig
        configs.pop_meta_list -> return a scalar instead of a list (use when
        the pipeline output level equals the classifier output level);
        configs.get_sentence_origin -> keep the sentence-ID column.

    Returns
    -------
    dict
        {'meta_<prefix>_confidence': ...} plus, if get_sentence_origin,
        {'meta_<prefix>_sentence': ...}
    """
    prefix = 'meta_' + configs.output_col_prefix
    sentence = {prefix + '_sentence': row[prefix + '_sentence']} if configs.get_sentence_origin else {}
    # every key that lacks the substring 'sentence' is considered a confidence key
    confidence_keys = [k for k in row.keys() if 'sentence' not in k]
    if configs.pop_meta_list:
        confidence = max(float(row[k][0]) for k in confidence_keys)
    elif len(confidence_keys) == 1:
        confidence = max(float(row[k]) for k in confidence_keys)
    else:
        # row-wise max over the per-class confidence lists
        confidence = [max(z) for z in zipp(list(map(float, row[k])) for k in confidence_keys)]
    return {prefix + '_confidence': confidence, **sentence}


def unpack_HPO_codes(row, k):
    """Split HPO resolver output into per-terminology code lists.

    Every resolution in row[k] holds ':::'-separated candidates, and every
    candidate holds 0..5 '||'-separated alternative-terminology codes. For each
    candidate, every terminology that did not occur gets a None appended, so
    the five output lists stay aligned with the candidates.

    Returns the tuple (UMLS, ORPHA, MSH, SNOMED, OMIM) of code lists.
    """
    codes = {'UMLS': [], 'ORPHA': [], 'MSH': [], 'SNOMED': [], 'OMIM': []}
    for resolution in row[k]:
        for candidate in resolution.split(':::'):
            matched = set()
            for alt in candidate.split('||'):
                # first matching terminology wins (mirrors the original elif chain)
                for terminology in codes:
                    if terminology in alt:
                        codes[terminology].append(alt)
                        matched.add(terminology)
                        break
            # append None for every terminology missing from this candidate
            for terminology in codes:
                if terminology not in matched:
                    codes[terminology].append(None)
    return codes['UMLS'], codes['ORPHA'], codes['MSH'], codes['SNOMED'], codes['OMIM']


def extract_resolver_all_k_subfields_splitted(row, configs):
    """Extract all metadata fields for sentence-resolver annotators, splitting
    every '_k_' field on ':::'.

    Relevant for all_k_results, all_k_resolutions, all_k_distances and
    all_k_cosine_distances. configs.pop_meta_list should be True when the
    pipeline output level equals the resolver's output level.

    Separator semantics:
      ':::'  separates the K candidate resolutions; len(split(':::')) equals the
             number of entities, so it always aligns
      '||'   separates sub-fields inside one candidate:
             - ICD/HCC: billable / status / code (always aligns)
             - HPO: 0..5 alternative terminologies (UMLS/ORPHA/MSH/SNOMED/OMIM,
               does not always align -> handled by unpack_HPO_codes)
    Fields matching neither pattern are kept as-is.
    """
    # TODO: delete the AUX label col for HPO CODES
    HPO_CODES = ['UMLS', 'ORPHA', 'MSH', 'SNOMED', 'OMIM']
    prefix = 'meta_' + configs.output_col_prefix + '_'
    res = {}
    for k in row.keys():
        if '_k_' in k and '||' in row[k][0]:
            if any(code in row[k][0] for code in HPO_CODES):
                # Case: HPO, alternative terminologies
                res[prefix + 'k_UMLS_codes'], \
                    res[prefix + 'k_ORPHA_codes'], \
                    res[prefix + 'k_MESH_codes'], \
                    res[prefix + 'k_SNOMED_codes'], \
                    res[prefix + 'k_OMIM_codes'] = unpack_HPO_codes(row, k)
            else:
                # Case: billable/HCC code handling
                split_candidate = lambda s: [x.split('||') for x in s.split(':::')]
                # Triple assignment so we unpack properly
                res[prefix + 'billable'], \
                    res[prefix + 'hcc_status'], \
                    res[prefix + 'hcc_code'] = zip(*map(lambda x: zip(*x), map(split_candidate, row[k])))
                # Cast tuples to lists, otherwise pd.explode chokes downstream
                to_lists = lambda z: [list(r) for r in z]
                res[prefix + 'billable'] = to_lists(res[prefix + 'billable'])
                res[prefix + 'hcc_status'] = to_lists(res[prefix + 'hcc_status'])
                res[prefix + 'hcc_code'] = to_lists(res[prefix + 'hcc_code'])
        elif '_k_' in k and ':::' in row[k][0]:
            # Case: general code handling
            res[prefix + k.replace('results', 'codes')] = [x.split(':::') for x in row[k]]
        else:
            # Any other metadata field is kept as-is (per the contract above:
            # "Extract all metadata fields")
            res[k] = row[k]
    return res


def extract_chunk_mapper_relation_data(row, configs):
    """Split every 'chunk_all_relations' field on ':::' so each entry becomes a
    list of relations. Mutates `row` in place and returns it."""
    for k in row.keys():
        if 'chunk_all_relations' in k:
            row[k] = [s.split(':::') for s in row[k]]
    return row


def extract_coreference_data(row_metadata, row_results, configs):
    """Extract coreference-resolution data (mapping of each head token to its
    co-references, with sentence/begin/end alignment).

    Example target shape:
        | ORIGIN_REFERENCE | CO_REFERENCES        |
        | Peter            | he, him, that dude   |
        | Maria            | her, she, the lady   |

    Not implemented yet.
    """
    # Fix: the original did `raise NotImplemented('Not implemented')`;
    # NotImplemented is a constant (not callable, not an exception), so calling
    # it raised a TypeError instead of signaling "not implemented".
    raise NotImplementedError('Not implemented')
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/extractors/extractor_methods/helper_extractor_methods.py
helper_extractor_methods.py
# substitution_map_OS: maps every open-source Spark NLP annotator class to the
# column-name substitution function used by pythonify to rename its raw output
# columns into short, meaningful names. Each value is a dict keyed by config
# name ('default' for pretrained models, 'default_full' for trainable
# approaches). The placeholder values 'TODO' and '' mark annotators whose
# substitution is not implemented yet.
from sparknlp.annotator import *
from sparknlp.base import *
from nlu.pipe.extractors.extractor_configs_OS import *
from nlu.pipe.col_substitution.col_substitution_OS import *

OS_anno2substitution_fn = {
    NerConverter: {
        'default': substitute_ner_converter_cols,
    },
    PerceptronModel: {
        'default': substitute_pos_cols,
    },
    # word-level embedding annotators all share one substitution
    BertEmbeddings: {
        'default': substitute_word_embed_cols,
    },
    AlbertEmbeddings: {
        'default': substitute_word_embed_cols,
    },
    XlnetEmbeddings: {
        'default': substitute_word_embed_cols,
    },
    DistilBertEmbeddings: {
        'default': substitute_word_embed_cols,
    },
    RoBertaEmbeddings: {
        'default': substitute_word_embed_cols,
    },
    XlmRoBertaEmbeddings: {
        'default': substitute_word_embed_cols,
    },
    WordEmbeddingsModel: {
        'default': substitute_word_embed_cols,
    },
    ElmoEmbeddings: {
        'default': substitute_word_embed_cols,
    },
    # sentence-level embedding annotators
    BertSentenceEmbeddings: {
        'default': substitute_sent_embed_cols,
    },
    Doc2VecModel: {
        'default': substitute_sent_embed_cols,
    },
    XlmRoBertaSentenceEmbeddings: {
        'default': substitute_sent_embed_cols,
    },
    UniversalSentenceEncoder: {
        'default': substitute_sent_embed_cols,
    },
    SentenceEmbeddings: {
        'default': substitute_sent_embed_cols,
    },
    Tokenizer: {
        'default': substitute_tokenizer_cols,
    },
    TokenizerModel: {
        'default': substitute_tokenizer_cols,
    },
    RegexTokenizer: {
        'default': substitute_tokenizer_cols,
    },
    DocumentAssembler: {
        'default': substitute_doc_assembler_cols,
    },
    SentenceDetectorDLModel: {
        'default': substitute_sentence_detector_dl_cols,
    },
    SentenceDetector: {
        'default': substitute_sentence_detector_pragmatic_cols,
    },
    ContextSpellCheckerModel: {
        'default': substitute_spell_context_cols,
    },
    SymmetricDeleteModel: {
        'default': substitute_spell_symm_cols,
    },
    NorvigSweetingModel: {
        'default': substitute_spell_norvig_cols,
    },
    LemmatizerModel: {
        'default': substitute_lem_cols,
    },
    Normalizer: {
        'default': substitute_norm_cols,
    },
    NormalizerModel: {
        'default': substitute_norm_cols,
    },
    DocumentNormalizer: {
        'default': substitute_doc_norm_cols,
    },
    Stemmer: {
        'default': substitute_stem_cols,
    },
    NerDLModel: {
        'default': substitute_ner_dl_cols,
    },
    NerCrfModel: {
        'default': 'TODO',
    },
    LanguageDetectorDL: {
        'default': 'TODO',
    },
    DependencyParserModel: {
        'default': substitute_un_labled_dependency_cols,
    },
    TypedDependencyParserModel: {
        'default': substitute_labled_dependency_cols,
    },
    SentimentDLModel: {
        'default': substitute_sentiment_dl_cols,
    },
    SentimentDetectorModel: {
        'default': substitute_sentiment_dl_cols,
    },
    ViveknSentimentModel: {
        'default': substitute_sentiment_vivk_cols,
    },
    MultiClassifierDLModel: {
        'default': substitute_multi_classifier_dl_cols,
    },
    ClassifierDLModel: {
        'default': substitute_classifier_dl_cols,
    },
    Chunker: {
        'default': substitute_chunk_cols,
    },
    NGramGenerator: {
        'default': substitute_ngram_cols,
    },
    ChunkEmbeddings: {
        'default': substitute_chunk_embed_cols,
    },
    StopWordsCleaner: {
        'default': substitute_stopwords_cols,
    },
    # transformer-based token classifiers all share one substitution
    BertForTokenClassification: {
        'default': substitute_transformer_token_classifier_cols,
    },
    DistilBertForTokenClassification: {
        'default': substitute_transformer_token_classifier_cols,
    },
    XlnetForTokenClassification: {
        'default': substitute_transformer_token_classifier_cols,
    },
    XlmRoBertaForTokenClassification: {
        'default': substitute_transformer_token_classifier_cols,
    },
    RoBertaForTokenClassification: {
        'default': substitute_transformer_token_classifier_cols,
    },
    LongformerForTokenClassification: {
        'default': substitute_transformer_token_classifier_cols,
    },
    AlbertForTokenClassification: {
        'default': substitute_transformer_token_classifier_cols,
    },
    BertForSequenceClassification: {
        'default': substitute_seq_bert_classifier_cols
    },
    DistilBertForSequenceClassification: {
        'default': substitute_seq_bert_classifier_cols
    },
    TextMatcherModel: {
        'default': substitute_text_match_cols,
    },
    RegexMatcherModel: {
        'default': substitute_regex_match_cols,
    },
    DateMatcher: {
        'default': substitute_date_match_cols,
    },
    MultiDateMatcher: {
        'default': '',  # TODO
    },
    Doc2Chunk: {
        'default': substitute_doc2chunk_cols,
    },
    # Chunk2Doc: {
    #     'default': substitute_doc2chunk_cols,  # TODO better?
    # },
    T5Transformer: {
        'default': substitute_T5_cols,
    },
    MarianTransformer: {
        'default': substitute_marian_cols,
    },
    YakeKeywordExtraction: {
        'default': substitute_YAKE_cols,
    },
    WordSegmenterModel: {
        'default': substitute_word_seg_cols,
    },
    # approaches (trainable annotators; also expose a 'default_full' config)
    ViveknSentimentApproach: {'default': substitute_sentiment_vivk_approach_cols,
                              'default_full': default_full_config, },
    SentimentDLApproach: {'default': substitute_sentiment_dl_approach_cols,
                          'default_full': default_full_config, },
    ClassifierDLApproach: {'default': substitute_classifier_dl_approach_cols,
                           'default_full': default_full_config, },
    MultiClassifierDLApproach: {'default': substitute_multi_classifier_dl_approach_cols,
                                'default_full': default_full_config, },
    NerDLApproach: {'default': substitute_ner_dl_approach_cols,
                    'default_full': default_full_config, },
    PerceptronApproach: {'default': substitute_pos_approach_cols,
                         'default_full': default_full_config, },
    Doc2VecApproach: {'default': substitute_sent_embed_cols},
}
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/col_substitution/substitution_map_OS.py
substitution_map_OS.py
from typing import List
from sparknlp.annotator import *
import nlu
from nlu.universe.feature_universes import NLP_FEATURES
from nlu.pipe.col_substitution import substitution_map_OS
from nlu.pipe.col_substitution import col_substitution_OS
import logging
from nlu.pipe.extractors.extractor_base_data_classes import SparkOCRExtractorConfig
from nlu.universe.logic_universes import AnnoTypes
from nlu.universe.universes import Licenses

logger = logging.getLogger('nlu')

"""
NAMING SCHEMAS after pythonify procedure :

### NAMING RESULT SCHEMA:
results = { configs.output_col_prefix+'_results' : list(map(unpack_result,row))} if configs.get_result else {}
beginnings = { configs.output_col_prefix+'_beginnings' : list(map(unpack_begin,row))} if configs.get_begin or configs.get_positions else {}
endings = { configs.output_col_prefix+'_endings' : next(map(unpack_end,row))} if configs.get_end or configs.get_positions else {}
embeddings = { configs.output_col_prefix+'_embeddings' : next(map(unpack_embeddings,row))} if configs.get_embeds else {}

### METADATA NAMING SCHEMA
result = dict(zip(list(map(lambda x : 'meta_'+ configs.output_col_prefix + '_' + x, keys_in_metadata)),meta_values_list))
"""
from sparknlp.base import MultiDocumentAssembler


class ColSubstitutionUtils:
    """Utils for substituting col names in Pythonify to short and meaningful names.

    Uses custom rename methods for either PySpark or Pandas.
    """
    from sparknlp.annotator import MarianTransformer
    # nlu_ref path segments that carry no naming information and may be stripped
    # when deducting a short component name.
    cleanable_splits = ['ner_converter', 'spell', 'ner_to_chunk_converter', 'train', 'classify', 'ner', 'med_ner',
                        'dl', 'match', 'clean', 'sentiment', 'embed', 'embed_sentence', 'embed_chunk', 'explain',
                        'pos', 'resolve_chunk', 'resolve', ]

    @staticmethod
    def substitute_col_names(df, anno_2_ex, pipe, stranger_cols=None, get_embeddings=False, drop_debug_cols=True):
        """Substitute the pythonify-generated column names of ``df`` with short, meaningful ones.

        Some truly irrelevant cols might be dropped, regardless of anno Extractor config.

        Steps:
        0. Get list of annotator classes that are duplicates (see :meth:`deduct_component_names`).
        1. Get list of cols derived by each component.
        2. Substitute the cols in ``df`` with per-annotator custom logic.

        :param df: pandas DataFrame produced by the pythonify procedure.
        :param anno_2_ex: mapping of spark output col -> extractor config.
        :param pipe: the NLU pipeline whose components generated ``df``.
        :param stranger_cols: extra column names to keep even when debug cols are dropped.
        :param get_embeddings: when False, embedding components are skipped entirely.
        :param drop_debug_cols: when True, only substituted + stranger cols are returned.
        :return: the renamed (and optionally column-filtered) DataFrame.
        """
        # Avoid the shared-mutable-default pitfall (was: stranger_cols=[]).
        stranger_cols = [] if stranger_cols is None else stranger_cols
        substitution_fn = 'TODO'
        # Mapping of annotator model -> list of its final (renamed) result cols.
        anno2final_cols = {}
        new_cols = {}
        if pipe.has_licensed_components:
            from nlu.pipe.col_substitution import col_substitution_HC
            from nlu.pipe.col_substitution import substitution_map_HC
        deducted_component_names = ColSubstitutionUtils.deduct_component_names(pipe)
        for c in pipe.components:
            if c.license == Licenses.ocr:
                # TODO better substitution — OCR cols are currently kept as-is.
                old2new_anno_cols = {k: k for k in c.spark_output_column_names}
                anno2final_cols[c.model] = list(old2new_anno_cols.values())
                new_cols.update(old2new_anno_cols)  # single merge (removed redundant duplicate dict-merge)
                continue
            if 'embedding' in c.type and not get_embeddings:
                continue
            cols_to_substitute = ColSubstitutionUtils.get_final_output_cols_of_component(c, df, anno_2_ex)

            # Pick the substitution function: OS map first, else the component's own substitutor.
            if type(c.model) in substitution_map_OS.OS_anno2substitution_fn.keys():
                substitution_fn = substitution_map_OS.OS_anno2substitution_fn[type(c.model)]['default']
            else:
                substitution_fn = c.pdf_col_name_substitutor
            if pipe.has_licensed_components and substitution_fn != 'TODO':
                if type(c.model) in substitution_map_HC.HC_anno2substitution_fn.keys():
                    substitution_fn = substitution_map_HC.HC_anno2substitution_fn[type(c.model)]['default']

            if substitution_fn == 'TODO':
                # No substitutor known: keep the raw col names untouched.
                logger.info(f"Could not find substitution function for os_components={c}, leaving col names untouched")
                old2new_anno_cols = dict(zip(cols_to_substitute, cols_to_substitute))
                anno2final_cols[c.model] = list(old2new_anno_cols.values())
                new_cols.update(old2new_anno_cols)
                continue

            # dic, key=old_col, value=new_col. Some cols may be omitted and missing from the dic
            # which are deemed irrelevant. Behaviour can be disabled by setting drop_debug_cols=False.
            old2new_anno_cols = substitution_fn(c, cols_to_substitute, deducted_component_names[c])
            anno2final_cols[c.model] = list(old2new_anno_cols.values())
            new_cols = {**new_cols, **(old2new_anno_cols)}

        pipe.anno2final_cols = anno2final_cols
        # Some cols might not exist because no annotations were generated; drop those mappings.
        cols_to_rename = list(new_cols.keys())
        for k in cols_to_rename:
            if k not in df.columns:
                del new_cols[k]
        if drop_debug_cols:
            return df.rename(columns=new_cols)[set(new_cols.values()).union(set(stranger_cols))]
        return df.rename(columns=new_cols)

    @staticmethod
    def get_final_output_cols_of_component(c, df, anno_2_ex) -> List[str]:
        """Get a list of all columns that have been derived in the pythonify procedure
        from the component ``c`` in DataFrame ``df`` for the ``anno_2_ex`` configs.
        """
        og_output_col = c.spark_output_column_names[0]
        configs = anno_2_ex[og_output_col]
        result_cols = []
        if isinstance(configs, SparkOCRExtractorConfig):
            # TODO better OCR-EX handling --> col-name generator function used everywhere for unified naming
            return ['text']
        if isinstance(c.model, MultiDocumentAssembler):
            return [f'{NLP_FEATURES.DOCUMENT_QUESTION}_results', f'{NLP_FEATURES.DOCUMENT_QUESTION_CONTEXT}_results']
        if configs.get_annotator_type:
            result_cols.append(configs.output_col_prefix + '_types')
        if configs.get_result:
            result_cols.append(configs.output_col_prefix + '_results')
        if configs.get_begin or configs.get_positions:
            result_cols.append(configs.output_col_prefix + '_beginnings')
        if configs.get_end or configs.get_positions:
            result_cols.append(configs.output_col_prefix + '_endings')
        if configs.get_embeds:
            result_cols.append(configs.output_col_prefix + '_embeddings')
        if configs.get_origin:
            result_cols.append(configs.output_col_prefix + '_origin')
        # Find all metadata fields generated by the component.
        for col in df.columns:
            if 'meta_' + configs.output_col_prefix in col:
                base_meta_prefix = 'meta_' + configs.output_col_prefix
                meta_col_name = base_meta_prefix + col.split(base_meta_prefix)[-1]
                if meta_col_name in df.columns:
                    # Special case for overlapping names with '_': only attribute a numbered
                    # meta col to this component when the trailing ids match.
                    if col.split(base_meta_prefix)[-1].split('_')[1].isnumeric() and not \
                            c.spark_output_column_names[0].split('_')[-1].isnumeric():
                        continue
                    if col.split(base_meta_prefix)[-1].split('_')[1].isnumeric() and \
                            c.spark_output_column_names[0].split('_')[-1].isnumeric():
                        id1 = int(col.split(base_meta_prefix)[-1].split('_')[1])
                        # FIX: spark_output_column_names is a list; index element 0 before splitting
                        # (was c.spark_output_column_names.split('_') -> AttributeError).
                        id2 = int(c.spark_output_column_names[0].split('_')[-1])
                        if id1 != id2:
                            continue
                    result_cols.append(meta_col_name)
                elif c.type == AnnoTypes.CHUNK_CLASSIFIER:
                    result_cols.append(col)
                else:
                    logger.info(f"Could not find meta col for os_components={c}, col={col}. Omitting col..")
        return result_cols

    @staticmethod
    def deduct_component_names(pipe):
        """Deduct a meaningful name for Embeddings, classifiers, resolvers, relation extractors, etc.

        Returns a dict that maps every annotator component to a string name.
        If the name is 'UNIQUE' the annotator is unique in the pipeline and the
        unique default naming schema should be used.
        """
        # TODO extract name-deductable as NLU component attribute
        import nlu.pipe.col_substitution.name_deduction.name_deductable_annotators_OS as deductable_OS
        max_depth = 10
        result_names = {}
        for c in pipe.components:
            is_partially_ready = c.type == AnnoTypes.PARTIALLY_READY
            if is_partially_ready or c.loaded_from_pretrained_pipe:
                # Pretrained-pipe / partially-ready components keep their spark output col as name.
                if hasattr(c.model, 'getOutputCol'):
                    result_names[c] = c.model.getOutputCol()
                elif hasattr(c.model, 'getOutputCols'):
                    result_names[c] = c.model.getOutputCols()[0]
                else:
                    result_names[c] = str(c.model)
                continue
            result_names[c] = 'UNIQUE'  # assumed unique, if not updated in following steps
            is_always_name_deductable_component = False
            hc_deducted = False
            if pipe.has_licensed_components:
                import nlu.pipe.col_substitution.name_deduction.name_deductable_annotators_HC as deductable_HC
                if type(c.model) not in deductable_HC.name_deductable_HC and type(
                        c.model) not in deductable_OS.name_deductable_OS:
                    continue
                else:
                    hc_deducted = True
                if type(c.model) in deductable_HC.always_name_deductable_HC:
                    is_always_name_deductable_component = True
            if type(c.model) not in deductable_OS.name_deductable_OS and not hc_deducted and not is_partially_ready:
                continue
            if type(c.model) in deductable_OS.always_name_deductable_OS:
                is_always_name_deductable_component = True
            same_components = []
            for other_c in pipe.components:
                if c is other_c:
                    continue
                if c.type == other_c.type:
                    same_components.append(other_c)
            if len(same_components) or is_always_name_deductable_component:
                # Make sure each name is unique among the components of same type by
                # increasing the nlu_ref depth until the name no longer collides.
                cur_depth = 1
                other_names = [ColSubstitutionUtils.deduct_name_from_nlu_ref_at_depth(other_c)
                               for other_c in same_components]
                c_name = ColSubstitutionUtils.deduct_name_from_nlu_ref_at_depth(c)
                while c_name in other_names and cur_depth < max_depth:
                    cur_depth += 1
                    # NOTE(review): other_names is recomputed at default depth here —
                    # looks redundant but is behavior-preserving; confirm intent.
                    other_names = [ColSubstitutionUtils.deduct_name_from_nlu_ref_at_depth(other_c)
                                   for other_c in same_components]
                    c_name = ColSubstitutionUtils.deduct_name_from_nlu_ref_at_depth(c, cur_depth)
                result_names[c] = c_name
            else:
                result_names[c] = 'UNIQUE'  # no name insertion required
        return result_names

    @staticmethod
    def deduct_name_from_nlu_ref_at_depth(c, depth=1):
        """Derive a short name for component ``c`` from its nlu_ref, using the first
        ``depth`` remaining path segments after stripping name-irrelevant prefixes.
        """
        if isinstance(c.model, MarianTransformer):
            return c.nlu_ref.split('xx.')[-1].replace('marian.', '')
        splits = c.nlu_ref.split('.')
        # Remove all name-irrelevant splits (language refs and generic action words).
        while len(splits) > 1 and (splits[0] in nlu.Spellbook.pretrained_models_references.keys()
                                   or splits[0] in ColSubstitutionUtils.cleanable_splits):
            splits.pop(0)
        if len(splits) == 0:
            if isinstance(c.model, (NerDLModel, NerConverter)):
                return 'ner'
            return c.nlu_ref.replace("@", "_")
        elif splits[0] == 'sentiment' and len(splits) == 1:
            return 'UNIQUE'
        else:
            return '_'.join(splits[:depth])
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/col_substitution/col_name_substitution_utils.py
col_name_substitution_utils.py
import logging

logger = logging.getLogger('nlu')


def substitute_ner_internal_converter_cols(c, cols, nlu_identifier):
    """Rename internal NER-converter output cols.

    Fetched fields are:
    - entities@<storage_ref>_results
    - entities@<storage_ref>_<metadata>
    - entities@<storage_ref>_entity
    - entities@<storage_ref>_confidence
    """
    new_cols = {}
    new_base_name = 'entities' if nlu_identifier == 'UNIQUE' else f'entities_{nlu_identifier}'
    for col in cols:
        if 'results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            new_cols[col] = f'{new_base_name}_embedding'
        elif 'meta' in col:
            if 'confidence' in col:
                new_cols[col] = f"{new_base_name}_confidence"
            elif 'entity' in col:
                new_cols[col] = f"{new_base_name}_class"
            elif 'sentence' in col:
                new_cols[col] = f"{new_base_name}_origin_sentence"
            elif 'chunk' in col:
                new_cols[col] = f"{new_base_name}_origin_chunk"
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_sentence_resolution_cols(c, cols, nlu_identifier=True):
    """Substitute col names for Resolution.

    For Resolution, some name will be inferred, and sentence_resolution_<name>
    becomes the base name schema.

    all_k_results -> Sorted ResolverLabels in the top `alternatives` that match the distance `threshold`
    all_k_resolutions -> Respective ResolverNormalized strings
    all_k_distances -> Respective distance values after aggregation
    all_k_wmd_distances -> Respective WMD distance values
    all_k_tfidf_distances -> Respective TFIDF Cosine distance values
    all_k_jaccard_distances -> Respective Jaccard distance values
    all_k_sorensen_distances -> Respective SorensenDice distance values
    all_k_jaro_distances -> Respective JaroWinkler distance values
    all_k_levenshtein_distances -> Respective Levenshtein distance values
    all_k_confidences -> Respective normalized probabilities based in inverse distance values
    target_text -> The actual searched string
    resolved_text -> The top ResolverNormalized string
    confidence -> Top probability
    distance -> Top distance value
    sentence -> Sentence index
    chunk -> Chunk Index
    token -> Token index
    """
    new_cols = {}
    new_base_name = f'resolution' if nlu_identifier == 'UNIQUE' else f'resolution_{nlu_identifier}'
    for col in cols:
        if '_results' in col and 'all_k' not in col:
            # resolved code
            new_cols[col] = f'{new_base_name}_code' if 'code' not in new_base_name else new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue  # new_cols[col] = f'{new_base_name}_type'
        elif '_embeddings' in col:
            continue  # omit, no data
        elif 'meta' in col:
            if 'all_k_aux_labels' in col:
                new_cols[col] = f'{new_base_name}_k_aux_labels'
            elif 'resolved_text' in col:
                new_cols[col] = f'{new_base_name}_resolved_text'  # the most likely resolution
            elif 'target_text' in col:
                # Can be omitted, origin chunk basically, included in the nerConverterInternal result.
                new_cols[col] = f'{new_base_name}_target_text'
            elif 'token' in col:
                new_cols[col] = f'{new_base_name}_token'
            elif 'all_k_confidences' in col:
                new_cols[col] = f'{new_base_name}_k_confidences'  # confidences of the k resolutions
            elif 'confidence' in col:
                new_cols[col] = f'{new_base_name}_confidence'
            elif 'all_k_results' in col:
                new_cols[col] = f'{new_base_name}_k_results'
            elif 'all_k_distances' in col:
                new_cols[col] = f'{new_base_name}_k_distances'
            elif 'all_k_resolutions' in col:
                new_cols[col] = f'{new_base_name}_k_resolution'
            elif 'all_k_cosine_distances' in col:
                new_cols[col] = f'{new_base_name}_k_cos_distances'
            elif 'all_k_wmd_distances' in col:
                new_cols[col] = f'{new_base_name}_k_wmd_distances'
            elif 'all_k_tfidf_distances' in col:
                new_cols[col] = f'{new_base_name}_k_tfidf_distances'
            elif 'all_k_jaccard_distances' in col:
                new_cols[col] = f'{new_base_name}_k_jaccard_distances'
            elif 'all_k_sorensen_distances' in col:
                new_cols[col] = f'{new_base_name}_k_sorensen_distances'
            elif 'all_k_jaro_distances' in col:
                new_cols[col] = f'{new_base_name}_k_jaro_distances'
            elif 'all_k_levenshtein_distances' in col:
                new_cols[col] = f'{new_base_name}_k_levenshtein_distances'
            elif 'all_k_codes' in col:
                new_cols[col] = f'{new_base_name}_k_codes'
            elif '_k_' in col:
                new_cols[col] = f'{new_base_name}_{col}'
            elif 'billable' in col:
                new_cols[col] = f'{new_base_name}_billable'
            elif 'hcc_status' in col:
                new_cols[col] = f'{new_base_name}_hcc_status'
            elif 'hcc_code' in col:
                new_cols[col] = f'{new_base_name}_hcc_code'
            elif 'distance' in col:
                new_cols[col] = f'{new_base_name}_distance'
            elif 'chunk' in col:
                # Omit, irrelevant. (Removed unreachable dead statement that followed this continue.)
                continue
            elif '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'  # maps to which sentence token comes from
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_assertion_cols(c, cols, nlu_identifier=True):
    """Substitute col names for Assertion.

    For Assertion, some name will be inferred, and assertion_<sub_field> defines
    the base name schema. Assert should always be unique.
    """
    new_cols = {}
    new_base_name = f'assertion'
    for col in cols:
        if '_results' in col:
            new_cols[col] = f'{new_base_name}'
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue  # new_cols[col] = f'{new_base_name}_type'
        elif '_embeddings' in col:
            continue  # omit, no data
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            elif 'chunk' in col:
                new_cols[col] = f'{new_base_name}_origin_chunk'
            elif 'confidence' in col:
                new_cols[col] = f'{new_base_name}_confidence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_de_identification_cols(c, cols, is_unique=True):
    """Substitute col names for de-identification.

    de_identified_<sub_field> defines the base name schema; de_identify should
    always be unique.
    """
    new_cols = {}
    new_base_name = f'de_identified'
    for col in cols:
        if '_results' in col:
            new_cols[col] = f'{new_base_name}'
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue  # new_cols[col] = f'{new_base_name}_type'
        elif '_embeddings' in col:
            continue  # omit, no data
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_relation_cols(c, cols, nlu_identifier=True):
    """Substitute col names for relation extraction.

    metadata = Map(
        "entity1" -> relation.entity1, "entity2" -> relation.entity2,
        "entity1_begin" -> ..., "entity1_end" -> ...,
        "entity2_begin" -> ..., "entity2_end" -> ...,
        "chunk1" -> relation.chunk1, "chunk2" -> relation.chunk2,
        "confidence" -> result._2.toString)
    """
    new_cols = {}
    new_base_name = f'relation' if nlu_identifier == 'UNIQUE' else \
        f'relation_{nlu_identifier}' if 'relation' not in nlu_identifier else nlu_identifier
    for col in cols:
        if '_results' in col:
            new_cols[col] = f'{new_base_name}'
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue  # new_cols[col] = f'{new_base_name}_type'
        elif '_embeddings' in col:
            continue  # omit, no data
        elif 'meta' in col:
            # NOTE: check the composite keys (entity1_begin, chunk1, ...) before their
            # substrings (entity1, ...) — branch order is significant here.
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            elif 'entity1_begin' in col:
                new_cols[col] = f'{new_base_name}_entity1_begin'
            elif 'entity2_begin' in col:
                new_cols[col] = f'{new_base_name}_entity2_begin'
            elif 'entity1_end' in col:
                new_cols[col] = f'{new_base_name}_entity1_end'
            elif 'entity2_end' in col:
                new_cols[col] = f'{new_base_name}_entity2_end'
            elif 'confidence' in col:
                new_cols[col] = f'{new_base_name}_confidence'
            elif 'entity1' in col:
                new_cols[col] = f'{new_base_name}_entity1_class'
            elif 'entity2' in col:
                new_cols[col] = f'{new_base_name}_entity2_class'
            elif 'chunk1' in col:
                new_cols[col] = f'{new_base_name}_entity1'
            elif 'chunk2' in col:
                new_cols[col] = f'{new_base_name}_entity2'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_chunk_mapper_cols(c, cols, nlu_identifier=True):
    """Substitute col names for chunk mapping; mapped_entity_<sub_field> is the base schema."""
    new_cols = {}
    new_base_name = f'mapped_entity' if nlu_identifier == 'UNIQUE' else f'mapped_entity_{nlu_identifier}'
    for col in cols:
        if '_results' in col:
            new_cols[col] = f'{new_base_name}'
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue  # new_cols[col] = f'{new_base_name}_type'
        elif '_embeddings' in col:
            continue  # omit, no data
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence_id'
            elif 'chunk_relation' in col:
                new_cols[col] = f'{new_base_name}_relation_type'
            elif 'chunk_chunk' in col:
                new_cols[col] = f'{new_base_name}_origin_entity_id'
            elif 'all_relations' in col:
                new_cols[col] = f'{new_base_name}_all_relations'
            elif 'entity' in col:
                new_cols[col] = f'{new_base_name}_origin_entity'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_drug_normalizer_cols(c, cols, is_unique=True):
    """Substitute col names for drug normalization. Drug Norm is always unique."""
    new_cols = {}
    new_base_name = 'drug_norm'  # if is_unique else f'document_{nlu_identifier}'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # irrelevant: new_cols[col] = f'{new_base_name}_embedding'
        elif '_types' in col:
            continue  # new_cols[col] = f'{new_base_name}_type'
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_context_parser_cols(c, cols, is_unique=True):
    """Substitute col names for context parsing. Always unique; base name is context_match."""
    new_cols = {}
    new_base_name = 'context_match'  # if is_unique else f'document_{nlu_identifier}'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # irrelevant: new_cols[col] = f'{new_base_name}_embedding'
        elif '_types' in col:
            continue  # new_cols[col] = f'{new_base_name}_type'
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            elif 'field' in col:
                new_cols[col] = f'{new_base_name}_field'
            elif 'normalized' in col:
                new_cols[col] = f'{new_base_name}_normalized'
            elif 'confidenceValue' in col:
                new_cols[col] = f'{new_base_name}_confidence'
            elif 'hits' in col:
                new_cols[col] = f'{new_base_name}_hits'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_generic_classifier_parser_cols(c, cols, is_unique=True, nlu_identifier=''):
    """Substitute col names for the generic classifier."""
    new_cols = {}
    new_base_name = 'generic_classifier' if is_unique else f'generic_classification_{nlu_identifier}'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # irrelevant: new_cols[col] = f'{new_base_name}_embedding'
        elif '_types' in col:
            continue  # new_cols[col] = f'{new_base_name}_type'
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            elif 'confidence' in col:
                new_cols[col] = f'{new_base_name}_confidence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/col_substitution/col_substitution_HC.py
col_substitution_HC.py
"""Column-name substitution helpers (healthcare / HC annotators).

Each ``substitute_*_cols(c, cols, ...)`` function takes the raw Spark NLP
output column names ``cols`` produced for one pipeline component ``c``
(patterns like ``<base>_results``, ``<base>_beginnings``, ``<base>_endings``,
``<base>_embeddings``, ``<base>_types`` and ``meta_<base>_<key>``) and returns
a dict mapping each old column name to a friendlier one derived from a new
base name.  Columns deliberately not exposed are skipped (``continue``);
unrecognised metadata columns are logged and dropped from the mapping.

``c`` is the pipeline component the columns belong to — it is used in log
messages and, for the classifier substitutors, to derive the old base name
from ``c.out_types``.  ``nlu_identifier`` disambiguates the base name when
the component is not unique in the pipeline ('UNIQUE' means it is).

NOTE(review): recovered from a collapsed single-line dump; formatting is
reconstructed and logic preserved except for changes marked ``FIX``.
"""
import logging

logger = logging.getLogger('nlu')


def partially_implemented_substitutor(c, cols, nlu_identifier):
    """Generic fallback substitutor: the base name is ``nlu_identifier`` itself."""
    new_cols = {}
    new_base_name = nlu_identifier
    for col in cols:
        if 'results' in col:
            new_cols[col] = f'{new_base_name}_result'
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif 'meta' in col:
            if 'confidence' in col:
                new_cols[col] = f'{new_base_name}_confidence'
            elif 'entity' in col:
                new_cols[col] = f'{new_base_name}_class'
            elif 'chunk' in col:
                new_cols[col] = f'{new_base_name}_origin_chunk'
            elif 'sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
                continue
        # Keep only the first embeddings column so the target name stays unique.
        if '_embeddings' in col and f'{new_base_name}_embedding' not in new_cols.values():
            new_cols[col] = f'{new_base_name}_embedding'
    return new_cols


def substitute_ner_converter_cols(c, cols, nlu_identifier):
    """Rename NER-converter output cols; base name is ``entities[_<id>]``."""
    new_cols = {}
    new_base_name = 'entities' if nlu_identifier == 'UNIQUE' \
        else f'entities_{nlu_identifier}' if 'entities' not in nlu_identifier else nlu_identifier
    for col in cols:
        if 'results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            new_cols[col] = f'{new_base_name}_embedding'
        elif 'meta' in col:
            if 'confidence' in col:
                new_cols[col] = f'{new_base_name}_confidence'
            elif 'entity' in col:
                new_cols[col] = f'{new_base_name}_class'
            elif 'sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            elif 'chunk' in col:
                new_cols[col] = f'{new_base_name}_origin_chunk'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_ner_dl_cols(c, cols, nlu_identifier):
    """Rename raw NER-DL (IOB) output cols; base name is ``ner_iob[_<id>]``."""
    new_cols = {}
    new_base_name = 'ner_iob' if nlu_identifier == 'UNIQUE' else f'ner_iob_{nlu_identifier}'
    for col in cols:
        if 'results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # always empty and irrelevant
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if 'confidence' in col:
                new_cols[col] = f'{new_base_name}_confidence'
            elif 'word' in col:
                continue  # same as the token col, can be omitted
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_doc_assembler_cols(c, cols, nlu_identifier=True):
    """Rename document-assembler output cols; base name is ``document`` (always unique)."""
    new_cols = {}
    new_base_name = 'document'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # irrelevant
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                continue  # irrelevant: meta sentence index is always 0 here
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_img_assembler_cols(c, cols, nlu_identifier=True):
    """Rename image-assembler output cols; base name is ``image_origin``."""
    new_cols = {}
    # TODO(review): non-unique branch concatenates without '_' separator — confirm intended.
    new_base_name = 'image_origin' if nlu_identifier == 'UNIQUE' else f'image_origin{nlu_identifier}'
    for col in cols:
        if 'origin' in col:
            new_cols[col] = new_base_name
    return new_cols


def audio_assembler_cols(c, cols, nlu_identifier=True):
    """Rename audio-assembler output cols; base name is ``audio_series``."""
    new_cols = {}
    # TODO(review): non-unique branch lacks the identifier suffix — confirm intended.
    new_base_name = 'audio_series' if nlu_identifier == 'UNIQUE' else 'audio_series_'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # never stores embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_wav2vec_cols(c, cols, nlu_identifier=True):
    """Rename Wav2Vec ASR output cols; base name is ``text`` or ``asr_text``."""
    new_cols = {}
    new_base_name = 'text' if nlu_identifier == 'UNIQUE' else 'asr_text'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # never stores embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_sentence_detector_dl_cols(c, cols, nlu_identifier=True):
    """Rename DL sentence-detector output cols; base name is ``sentence[_dl]``."""
    new_cols = {}
    new_base_name = 'sentence' if nlu_identifier == 'UNIQUE' else 'sentence_dl'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # sentences never store embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if 'sentence_sentence' in col:
                continue  # irrelevant field, drop
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_sentence_detector_pragmatic_cols(c, cols, nlu_identifier=True):
    """Rename pragmatic sentence-detector output cols; base name is ``sentence[_pragmatic]``."""
    new_cols = {}
    new_base_name = 'sentence' if nlu_identifier == 'UNIQUE' else 'sentence_pragmatic'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # sentences never store embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if 'sentence_sentence' in col:
                continue  # irrelevant field, drop
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_tokenizer_cols(c, cols, nlu_identifier=True):
    """Rename tokenizer output cols; base name is ``token`` (always unique)."""
    new_cols = {}
    new_base_name = 'token'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # tokens never store embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_transformer_token_classifier_cols(c, cols, nlu_identifier=True):
    """Rename transformer token-classifier cols; base name is ``classified_token[_<id>]``."""
    new_cols = {}
    new_base_name = 'classified_token' if nlu_identifier == 'UNIQUE' else f'classified_token_{nlu_identifier}'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col and 'Some' not in col:
            continue  # tokens never store embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            # 'Some(<class>)' per-class confidence wins over the sentence mapping
            # (preserves the original's last-write-wins order).
            if 'Some' in col:
                # FIX: dropped stray leading apostrophe from the generated name.
                new_cols[col] = f"{new_base_name}_{col.split('Some(')[-1].split(')')[0]}_confidence"
            elif '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                # FIX: was an unconditional else on a second `if`, spuriously
                # logging 'Dropping' for already-matched sentence columns.
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_coref_cols(c, cols, nlu_identifier=True):
    """Rename coreference-resolution cols; base name is ``coref[_<id>]``.

    Metadata keys look like {'head': 'ROOT', 'head.end': '-1', 'sentence': '0',
    'head.sentence': '-1', 'head.begin': '-1'}.
    """
    new_cols = {}
    new_base_name = 'coref' if nlu_identifier == 'UNIQUE' else f'coref_{nlu_identifier}'
    for col in cols:
        if '_result' in col:
            new_cols[col] = new_base_name
        elif '_sentence' in col:
            new_cols[col] = f'{new_base_name}_origin_sentence'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col and 'Some' not in col:
            continue
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if 'head.sentence' in col:
                new_cols[col] = f'{new_base_name}_head_origin_sentence'
            elif 'head.end' in col:
                new_cols[col] = f'{new_base_name}_head_end'
            elif 'head.begin' in col:
                new_cols[col] = f'{new_base_name}_head_begin'
            elif '_head' in col:
                new_cols[col] = f'{new_base_name}_head'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_seq_bert_classifier_cols(c, cols, nlu_identifier=True):
    """Rename transformer sequence-classifier cols; base is ``classified_sequence[_<id>]``."""
    new_cols = {}
    # '_sequence' is redundant in the identifier because the base name carries it.
    if '_sequence' in nlu_identifier:
        nlu_identifier = nlu_identifier.replace('_sequence', '')
    new_base_name = 'classified_sequence' if nlu_identifier == 'UNIQUE' else f'classified_sequence_{nlu_identifier}'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col and 'Some' not in col:
            continue  # never stores embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            # Chain ordered to preserve the original's overwrite precedence:
            # Some(<class>) > _confidence > _sentence.
            if 'Some' in col:
                # FIX: dropped stray leading apostrophe from the generated name.
                new_cols[col] = f"{new_base_name}_{col.split('Some(')[-1].split(')')[0]}_confidence"
            elif '_confidence' in col:
                new_cols[col] = f'{new_base_name}_confidence'
            elif '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                # FIX: was an else on a stacked `if`, spuriously logging
                # 'Dropping' for already-matched columns.
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_span_classifier_cols(c, cols, nlu_identifier=True):
    """Rename span/question-answering classifier cols; base is ``answer`` or ``<id>_answer``."""
    new_cols = {}
    new_base_name = 'answer' if nlu_identifier == 'UNIQUE' else f'{nlu_identifier}_answer'
    for col in cols:
        if 'span_result' in col:
            new_cols[col] = new_base_name
        if 'span_score' in col:
            new_cols[col] = f'{new_base_name}_confidence'
        elif 'span_start_score' in col:
            new_cols[col] = f'{new_base_name}_start_confidence'
        elif 'span_end_score' in col:
            new_cols[col] = f'{new_base_name}_end_confidence'
        elif 'start' in col and 'score' not in col:
            new_cols[col] = f'{new_base_name}_start'
        elif 'end' in col and 'score' not in col:
            new_cols[col] = f'{new_base_name}_end'
        elif 'sentence' in col:
            new_cols[col] = f'{new_base_name}_sentence'
    return new_cols


def substitute_multi_doc_span_assembler_cols(c, cols, nlu_identifier=True):
    """Rename multi-document span-assembler cols into ``question`` / ``context``."""
    new_cols = {}
    for col in cols:
        if 'question' in col and 'context' not in col:
            new_cols[col] = 'question'
        elif 'context' in col:
            new_cols[col] = 'context'
    return new_cols


def substitute_tapas_qa_cols(c, cols, nlu_identifier=True):
    """Rename TAPAS table-QA cols; base name is ``tapas_qa_<id>``."""
    new_cols = {}
    new_base_name = f'tapas_qa_{nlu_identifier}' if 'tapas_qa_' not in nlu_identifier else nlu_identifier
    for col in cols:
        if '_results' in col:
            new_cols[col] = f'{new_base_name}_answer'
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if 'question' in col:
                new_cols[col] = f'{new_base_name}_origin_question'
            elif 'aggregation' in col:
                new_cols[col] = f'{new_base_name}_aggregation'
            elif 'cell_positions' in col:
                new_cols[col] = f'{new_base_name}_cell_positions'
            elif 'cell_scores' in col:
                new_cols[col] = f'{new_base_name}_cell_scores'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_word_embed_cols(c, cols, nlu_identifier=True):
    """Rename word-embedding cols; base name is ``word_embedding_<id>``.

    The embeddings array itself becomes the base column; the origin token
    (``_results``) is omitted because the tokenizer column already has it.
    """
    new_cols = {}
    new_base_name = f'word_embedding_{nlu_identifier}' if 'word_embedding_' not in nlu_identifier else nlu_identifier
    for col in cols:
        if '_results' in col:
            continue  # duplicate of the tokenizer output
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            elif 'OOV' in col:
                new_cols[col] = f'{new_base_name}_is_OOV'
            elif 'isWordStart' in col:
                new_cols[col] = f'{new_base_name}_is_word_start'
            elif 'pieceId' in col:
                new_cols[col] = f'{new_base_name}_piece_id'
            elif '_token' in col:
                continue  # same as _results, maps to origin token
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
        elif '_embeddings' in col:
            new_cols[col] = new_base_name  # the embeddings are the main result
    return new_cols


def substitute_sent_embed_cols(c, cols, nlu_identifier=True):
    """Rename sentence-embedding cols; base name is ``sentence_embedding_<id>``."""
    new_cols = {}
    new_base_name = f'sentence_embedding_{nlu_identifier}' if 'sentence_embedding' not in nlu_identifier else nlu_identifier
    for col in cols:
        if '_results' in col:
            continue  # duplicate of the origin text
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if 'OOV' in col:
                new_cols[col] = f'{new_base_name}_is_OOV'
            elif 'isWordStart' in col:
                new_cols[col] = f'{new_base_name}_is_word_start'
            elif 'pieceId' in col:
                new_cols[col] = f'{new_base_name}_piece_id'
            elif '_token' in col:
                continue  # same as _results, maps to origin token
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
        elif '_embeddings' in col:
            new_cols[col] = new_base_name  # the embeddings are the main result
        elif '_sentence' in col and 'meta' in col:
            # NOTE(review): unreachable — any 'meta' column is consumed by the
            # branch above; kept to mirror the original exactly.
            new_cols[col] = f'{new_base_name}_origin_sentence'
    return new_cols


def substitute_chunk_embed_cols(c, cols, nlu_identifier=True):
    """Rename chunk-embedding cols; base name is ``chunk_embedding_<id>``."""
    new_cols = {}
    new_base_name = f'chunk_embedding_{nlu_identifier}' if 'chunk_embedding_' not in nlu_identifier else nlu_identifier
    for col in cols:
        if '_results' in col:
            continue  # duplicate of the origin chunk
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            elif 'isWordStart' in col:
                new_cols[col] = f'{new_base_name}_is_word_start'
            elif 'pieceId' in col:
                new_cols[col] = f'{new_base_name}_piece_id'
            elif '_token' in col:
                continue  # same as _results, maps to origin token
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
        elif '_embeddings' in col:
            new_cols[col] = new_base_name  # the embeddings are the main result
    return new_cols


def substitute_classifier_dl_cols(c, cols, nlu_identifier=True):
    """Rename ClassifierDL cols; base name is ``nlu_identifier``.

    Uses ``c.out_types[0]`` to strip the old metadata prefix from the column.
    """
    new_cols = {}
    new_base_name = f'{nlu_identifier}'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue
        elif '_embeddings' in col:
            continue
        elif 'meta' in col:
            old_base_name = f'meta_{c.out_types[0]}'
            metadata = col.split(old_base_name)[-1]
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            elif metadata in ['confidence', '_confidence']:
                new_cols[col] = f'{new_base_name}_confidence'  # max confidence over all classes
    return new_cols


def substitute_ngram_cols(c, cols, nlu_identifier=True):
    """Rename n-gram generator cols; base name is ``ngram``."""
    new_cols = {}
    new_base_name = 'ngram'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue
        elif '_embeddings' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_labled_dependency_cols(c, cols, nlu_identifier=True):
    """Rename typed (labeled) dependency-parser cols; base is ``labeled_dependency``."""
    new_cols = {}
    new_base_name = 'labeled_dependency'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue
        elif '_embeddings' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_un_labled_dependency_cols(c, cols, nlu_identifier=True):
    """Rename untyped dependency-parser cols; base is ``unlabeled_dependency``."""
    new_cols = {}
    new_base_name = 'unlabeled_dependency'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue
        elif '_embeddings' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            elif '_head.begin' in col:
                new_cols[col] = f'{new_base_name}_head_begin'
            elif 'head.end' in col:
                new_cols[col] = f'{new_base_name}_head_end'
            elif 'head' in col:
                new_cols[col] = f'{new_base_name}_head'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_pos_cols(c, cols, nlu_identifier=True):
    """Rename part-of-speech tagger cols; base name is ``pos``."""
    new_cols = {}
    new_base_name = 'pos'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue
        elif '_embeddings' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            elif '_word' in col:
                continue  # just the token, can be omitted
            elif 'confidence' in col:
                new_cols[col] = f'{new_base_name}_confidence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_norm_cols(c, cols, nlu_identifier=True):
    """Rename normalizer cols; base name is ``norm``."""
    new_cols = {}
    new_base_name = 'norm'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue
        elif '_embeddings' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_doc_norm_cols(c, cols, nlu_identifier=True):
    """Rename document-normalizer cols; base name is ``doc_norm``."""
    new_cols = {}
    new_base_name = 'doc_norm'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue
        elif '_embeddings' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_spell_context_cols(c, cols, nlu_identifier=True):
    """Rename context spell-checker cols; base is ``spell[_dl]`` (one checker per pipeline)."""
    new_cols = {}
    new_base_name = 'spell' if nlu_identifier == 'UNIQUE' else 'spell_dl'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue
        elif '_embeddings' in col:
            continue
        elif 'meta' in col:
            # FIX: was `if _sentence / if _cost / else log`, spuriously logging
            # 'Dropping' for sentence columns; `_cost` keeps overwrite precedence.
            if '_cost' in col:
                new_cols[col] = f'{new_base_name}_cost'
            elif '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_spell_symm_cols(c, cols, nlu_identifier=True):
    """Rename symmetric-delete spell-checker cols; base is ``spell[_sym]``."""
    new_cols = {}
    new_base_name = 'spell' if nlu_identifier == 'UNIQUE' else 'spell_sym'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue
        elif '_embeddings' in col:
            continue
        elif 'meta' in col:
            # FIX: same stacked-if logging bug as substitute_spell_context_cols.
            if '_confidence' in col:
                new_cols[col] = f'{new_base_name}_confidence'
            elif '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_spell_norvig_cols(c, cols, nlu_identifier=True):
    """Rename Norvig spell-checker cols; base is ``spell[_norvig]``."""
    new_cols = {}
    new_base_name = 'spell' if nlu_identifier == 'UNIQUE' else 'spell_norvig'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue
        elif '_embeddings' in col:
            continue
        elif 'meta' in col:
            # FIX: same stacked-if logging bug as substitute_spell_context_cols.
            if '_confidence' in col:
                new_cols[col] = f'{new_base_name}_confidence'
            elif '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_word_seg_cols(c, cols, nlu_identifier=True):
    """Rename word-segmenter cols; base name is ``words_seg`` (always unique)."""
    new_cols = {}
    new_base_name = 'words_seg'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # tokens never store embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_stem_cols(c, cols, nlu_identifier=True):
    """Rename stemmer cols; base name is ``stem`` (always unique)."""
    new_cols = {}
    new_base_name = 'stem'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # tokens never store embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_lem_cols(c, cols, nlu_identifier=True):
    """Rename lemmatizer cols; base name is ``lem`` (always unique)."""
    new_cols = {}
    new_base_name = 'lem'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # tokens never store embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_stopwords_cols(c, cols, nlu_identifier=True):
    """Rename stop-word cleaner cols; base name is ``stopword_less`` (always unique)."""
    new_cols = {}
    new_base_name = 'stopword_less'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # tokens never store embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_chunk_cols(c, cols, nlu_identifier=True):
    """Rename POS-chunker cols; base name is ``matched_pos`` (always unique)."""
    new_cols = {}
    new_base_name = 'matched_pos'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # chunks never store embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_YAKE_cols(c, cols, nlu_identifier=True):
    """Rename YAKE keyword-extractor cols; base name is ``keywords`` (always unique)."""
    new_cols = {}
    new_base_name = 'keywords'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # keywords never store embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            # FIX: same stacked-if logging bug as the spell substitutors;
            # `_score` keeps the original overwrite precedence.
            if '_score' in col:
                new_cols[col] = f'{new_base_name}_confidence'
            elif '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_marian_cols(c, cols, nlu_identifier=True):
    """Rename Marian translator cols; base name is ``translated[_<id>]``."""
    new_cols = {}
    new_base_name = 'translated' if nlu_identifier == 'UNIQUE' else f'translated_{nlu_identifier}'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # never stores embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_gpt2_cols(c, cols, nlu_identifier=True):
    """Rename GPT-2 generator cols; base name is ``generated[_<id>]``."""
    new_cols = {}
    new_base_name = 'generated' if nlu_identifier == 'UNIQUE' else f'generated_{nlu_identifier}'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # never stores embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_T5_cols(c, cols, nlu_identifier=True):
    """Rename T5 cols; base name is ``t5`` or ``t5_<task>``."""
    new_cols = {}
    new_base_name = 't5' if nlu_identifier == 'UNIQUE' else f't5_{nlu_identifier}'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # never stores embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_sentiment_vivk_cols(c, cols, nlu_identifier=True):
    """Rename Vivekn sentiment cols; base name is ``sentiment[_<id>]``."""
    new_cols = {}
    new_base_name = 'sentiment' if nlu_identifier == 'UNIQUE' else f'sentiment_{nlu_identifier}'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # never stores embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            elif '_confidence' in col:
                new_cols[col] = f'{new_base_name}_confidence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_sentiment_dl_cols(c, cols, nlu_identifier=True):
    """Rename SentimentDL cols; base name is ``sentiment[_<id>]``."""
    new_cols = {}
    new_base_name = 'sentiment' if nlu_identifier == 'UNIQUE' else f'sentiment_{nlu_identifier}'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # never stores embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            elif '_confidence' in col:
                new_cols[col] = f'{new_base_name}_confidence'
            elif '_negative' in col:
                new_cols[col] = f'{new_base_name}_negative'
            elif '_positive' in col:
                new_cols[col] = f'{new_base_name}_positive'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols


def substitute_multi_classifier_dl_cols(c, cols, nlu_identifier=True):
    """Rename MultiClassifierDL cols; base name is ``nlu_identifier``.

    Per-class confidences become ``<base><class>_confidence`` columns.
    """
    new_cols = {}
    new_base_name = f'{nlu_identifier}'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_types' in col:
            continue
        elif '_embeddings' in col:
            continue
        elif 'meta' in col:
            old_base_name = f'meta_{c.out_types[0]}'
            metadata = col.split(old_base_name)[-1]
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            elif metadata == 'confidence':
                new_cols[col] = f'{new_base_name}_confidence'  # max confidence over all classes
            else:
                new_cols[col] = f'{new_base_name}{metadata}_confidence'  # per-class confidence
    return new_cols


def substitute_date_match_cols(c, cols, nlu_identifier=True):
    """Rename date-matcher cols; base name is ``matched_date`` (always unique).

    NOTE(review): the source dump was truncated inside this function's
    ``_sentence`` branch; the tail is reconstructed from the identical sibling
    substitutors (stem/lem/chunk) — confirm against upstream.
    """
    new_cols = {}
    new_base_name = 'matched_date'
    for col in cols:
        if '_results' in col:
            new_cols[col] = new_base_name
        elif '_beginnings' in col:
            new_cols[col] = f'{new_base_name}_begin'
        elif '_endings' in col:
            new_cols[col] = f'{new_base_name}_end'
        elif '_embeddings' in col:
            continue  # never stores embeddings
        elif '_types' in col:
            continue
        elif 'meta' in col:
            if '_sentence' in col:
                new_cols[col] = f'{new_base_name}_origin_sentence'
            else:
                logger.info(f'Dropping unmatched metadata_col={col} for c={c}')
    return new_cols
f'{new_base_name}_origin_sentence' else: logger.info(f'Dropping unmatched metadata_col={col} for c={c}') # new_cols[col]= f"{new_base_name}_confidence" return new_cols def substitute_regex_match_cols(c, cols, nlu_identifier=True): """ stem is always unique """ new_cols = {} new_base_name = 'matched_regex' # if nlu_identifier else f'document_{nlu_identifier}' for col in cols: if '_results' in col: new_cols[col] = new_base_name elif '_beginnings' in col: new_cols[col] = f'{new_base_name}_begin' elif '_endings' in col: new_cols[col] = f'{new_base_name}_end' elif '_embeddings' in col: continue # Token never stores Embeddings new_cols[col] = f'{new_base_name}_embedding' elif '_types' in col: continue # new_cols[col] = f'{new_base_name}_type' elif 'meta' in col: if '_sentence' in col: new_cols[col] = f'{new_base_name}_origin_sentence' else: logger.info(f'Dropping unmatched metadata_col={col} for c={c}') # new_cols[col]= f"{new_base_name}_confidence" return new_cols def substitute_text_match_cols(c, cols, nlu_identifier=True): """ stem is always unique """ new_cols = {} new_base_name = 'matched_text' # if nlu_identifier else f'document_{nlu_identifier}' for col in cols: if '_results' in col: new_cols[col] = new_base_name elif '_beginnings' in col: new_cols[col] = f'{new_base_name}_begin' elif '_endings' in col: new_cols[col] = f'{new_base_name}_end' elif '_embeddings' in col: continue # Token never stores Embeddings new_cols[col] = f'{new_base_name}_embedding' elif '_types' in col: continue # new_cols[col] = f'{new_base_name}_type' elif 'meta' in col: if '_sentence' in col: new_cols[col] = f'{new_base_name}_origin_sentence' else: logger.info(f'Dropping unmatched metadata_col={col} for c={c}') # new_cols[col]= f"{new_base_name}_confidence" return new_cols ## Trainable def substitute_classifier_dl_approach_cols(c, cols, nlu_identifier=True): """ Substitute col name for Word Embeddings. 
For Word_Embeddings, some name will be infered, and word_embedding_<name> will become the base name schema """ new_cols = {} new_base_name = f'trained_classifier' # if nlu_identifier else f'document_{nlu_identifier}' for col in cols: if '_results' in col: new_cols[ col] = new_base_name # can be omitted for chunk_embeddings, maps to the origin chunk, which will be in the tokenizer col anyways elif '_beginnings' in col: new_cols[col] = f'{new_base_name}_begin' elif '_endings' in col: new_cols[col] = f'{new_base_name}_end' elif '_types' in col: continue # elif '_embeddings' in col: continue # elif 'meta' in col: old_base_name = f'meta_{c.out_types[0]}' metadata = col.split(old_base_name)[-1] if '_sentence' in col: new_cols[col] = f'{new_base_name}_origin_sentence' elif metadata == 'confidence': new_cols[col] = f'{new_base_name}_confidence' # max confidence over all classes else: new_cols[col] = f'{new_base_name}{metadata}_confidence' # confidence field # else : logger.info(f'Dropping unmatched metadata_col={col} for os_components={os_components}') return new_cols def substitute_sentiment_vivk_approach_cols(c, cols, nlu_identifier=True): new_cols = {} new_base_name = 'trained_sentiment' for col in cols: if '_results' in col: new_cols[col] = new_base_name elif '_beginnings' in col: new_cols[col] = f'{new_base_name}_begin' elif '_endings' in col: new_cols[col] = f'{new_base_name}_end' elif '_embeddings' in col: continue # Token never stores Embeddings new_cols[col] = f'{new_base_name}_embedding' elif '_types' in col: continue # new_cols[col] = f'{new_base_name}_type' elif 'meta' in col: if '_sentence' in col: new_cols[col] = f'{new_base_name}_origin_sentence' elif '_confidence' in col: new_cols[col] = f'{new_base_name}_confidence' else: logger.info(f'Dropping unmatched metadata_col={col} for c={c}') # new_cols[col]= f"{new_base_name}_confidence" return new_cols def substitute_sentiment_dl_approach_cols(c, cols, nlu_identifier=True): new_cols = {} new_base_name = 
'trained_sentiment' for col in cols: if '_results' in col: new_cols[col] = new_base_name elif '_beginnings' in col: new_cols[col] = f'{new_base_name}_begin' elif '_endings' in col: new_cols[col] = f'{new_base_name}_end' elif '_embeddings' in col: continue # Token never stores Embeddings new_cols[col] = f'{new_base_name}_embedding' elif '_types' in col: continue # new_cols[col] = f'{new_base_name}_type' elif 'meta' in col: if '_sentence' in col: new_cols[col] = f'{new_base_name}_origin_sentence' elif '_confidence' in col: new_cols[col] = f'{new_base_name}_confidence' elif '_negative' in col: new_cols[col] = f'{new_base_name}_negative' elif '_positive' in col: new_cols[col] = f'{new_base_name}_positive' else: logger.info(f'Dropping unmatched metadata_col={col} for c={c}') return new_cols def substitute_multi_classifier_dl_approach_cols(c, cols, nlu_identifier=True): """ Substitute col name for Word Embeddings. For Word_Embeddings, some name will be infered, and word_embedding_<name> will become the base name schema """ new_cols = {} new_base_name = f'trained_multi_classifier' # if nlu_identifier else f'document_{nlu_identifier}' for col in cols: if '_results' in col: new_cols[ col] = new_base_name # can be omitted for chunk_embeddings, maps to the origin chunk, which will be in the tokenizer col anyways elif '_beginnings' in col: new_cols[col] = f'{new_base_name}_begin' elif '_endings' in col: new_cols[col] = f'{new_base_name}_end' elif '_types' in col: continue # elif '_embeddings' in col: continue # elif 'meta' in col: old_base_name = f'meta_{c.out_types[0]}' metadata = col.split(old_base_name)[-1] if '_sentence' in col: new_cols[col] = f'{new_base_name}_origin_sentence' elif metadata == 'confidence': new_cols[col] = f'{new_base_name}_confidence' # max confidence over all classes else: new_cols[col] = f'{new_base_name}{metadata}_confidence' # confidence field # else : logger.info(f'Dropping unmatched metadata_col={col} for os_components={os_components}') return 
new_cols def substitute_ner_dl_approach_cols(c, cols, nlu_identifier): """ Fetched fields are: - entities@<storage_ref>_results - entities@<storage_ref>_<metadata> - entities@<storage_ref>_entity - entities@<storage_ref>_confidence """ new_cols = {} new_base_name = 'trained_ner_iob' for col in cols: if 'results' in col: new_cols[col] = new_base_name elif '_beginnings' in col: new_cols[col] = f'{new_base_name}_begin' elif '_endings' in col: new_cols[col] = f'{new_base_name}_end' elif '_embeddings' in col: continue # always empty and irrelevant new_cols[col] = f'{new_base_name}_embedding' elif '_types' in col: continue # new_cols[col] = f'{new_base_name}_type' elif 'meta' in col: if 'confidence' in col: new_cols[col] = f"{new_base_name}_confidence" elif 'word' in col: continue # is the same as token col, can be omitted else: logger.info(f'Dropping unmatched metadata_col={col} for c={c}') return new_cols def substitute_pos_approach_cols(c, cols, nlu_identifier=True): """ Substitute col name for Labled dependenecy unlabeled_dependency will become the base name schema """ new_cols = {} new_base_name = f'trained_pos' # if nlu_identifier else f'document_{nlu_identifier}' for col in cols: if '_results' in col: new_cols[col] = f'{new_base_name}' elif '_beginnings' in col: new_cols[col] = f'{new_base_name}_begin' elif '_endings' in col: new_cols[col] = f'{new_base_name}_end' elif '_types' in col: continue # elif '_embeddings' in col: continue # elif 'meta' in col: if '_sentence' in col: new_cols[col] = f'{new_base_name}_origin_sentence' elif '_word' in col: continue # can be omitted, is jsut the token elif 'confidence' in col: new_cols[col] = f'{new_base_name}_confidence' else: logger.info(f'Dropping unmatched metadata_col={col} for c={c}') return new_cols def substitute_doc2chunk_cols(c, cols, nlu_identifier=True): """ Substitute col name for Doc2chunk """ new_cols = {} new_base_name = f'doc2chunk' # if nlu_identifier else f'document_{nlu_identifier}' for col in cols: if 
'_results' in col: new_cols[col] = f'{new_base_name}' elif '_beginnings' in col: new_cols[col] = f'{new_base_name}_begin' elif '_endings' in col: new_cols[col] = f'{new_base_name}_end' elif '_types' in col: continue # elif '_embeddings' in col: continue # elif 'meta' in col: logger.info(f'Dropping unmatched metadata_col={col} for c={c}') return new_cols
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/col_substitution/col_substitution_OS.py
col_substitution_OS.py
"""Resolve Annotator Classes in the Pipeline to Extractor Configs and Methods.

Each Spark NLP Annotator Class is mapped to at least one config.
Every Annotator should have 2 configs. Some might offer multiple configs/method
pairs, based on model_anno_obj/NLP reference.
- default/minimalistic -> Just the results of the annotations, no confidences or extra metadata
- with meta            -> A config that leverages white/black list and gets the most relevant metadata
- with positions       -> With Begins/Ends
"""
from sparknlp.annotator import *
from sparknlp.base import *

# Annotator classes whose output column name can be deduced from the model/NLP reference.
name_deductable_OS = [
    NerConverter,
    BertEmbeddings,
    AlbertEmbeddings,
    XlnetEmbeddings,
    WordEmbeddingsModel,
    ElmoEmbeddings,
    BertSentenceEmbeddings,
    UniversalSentenceEncoder,
    SentenceEmbeddings,
    ContextSpellCheckerModel,
    SymmetricDeleteModel,
    NorvigSweetingModel,
    NerDLModel,
    NerCrfModel,
    LanguageDetectorDL,
    SentimentDLModel,
    SentimentDetectorModel,
    ViveknSentimentModel,
    MultiClassifierDLModel,
    ClassifierDLModel,
    ChunkEmbeddings,
    TextMatcherModel,
    RegexMatcherModel,
    DateMatcher,
    MultiDateMatcher,
    T5Transformer,
    MarianTransformer,
    WordSegmenterModel,
    DistilBertEmbeddings,
    RoBertaEmbeddings,
    XlmRoBertaEmbeddings,
    DistilBertForTokenClassification,
    BertForTokenClassification,
    LongformerEmbeddings,
    DistilBertForSequenceClassification,
    BertForSequenceClassification,
    # approaches
    ViveknSentimentApproach,
    SentimentDLApproach,
    ClassifierDLApproach,
    MultiClassifierDLApproach,
    NerDLApproach,
    PerceptronApproach,
    Doc2Chunk,
    Chunk2Doc,
    DeBertaEmbeddings,
    # MultiDocumentAssembler,
    AlbertForQuestionAnswering,
    BertForQuestionAnswering,
    DeBertaForQuestionAnswering,
    DistilBertForQuestionAnswering,
    LongformerForQuestionAnswering,
    RoBertaForQuestionAnswering,
    XlmRoBertaForQuestionAnswering,
    # SpanBertCorefModel, #
]

# Annotator classes whose output name is ALWAYS deduced, regardless of uniqueness.
always_name_deductable_OS = [
    BertEmbeddings,
    AlbertEmbeddings,
    XlnetEmbeddings,
    WordEmbeddingsModel,
    ElmoEmbeddings,
    BertSentenceEmbeddings,
    UniversalSentenceEncoder,
    SentenceEmbeddings,
    MultiClassifierDLModel,
    ClassifierDLModel,
    ChunkEmbeddings,
    TextMatcherModel,
    RegexMatcherModel,
    DateMatcher,
    MultiDateMatcher,
    # T5Transformer,
    # MarianTransformer,
    # WordSegmenterModel,
    DistilBertEmbeddings,
    RoBertaEmbeddings,
    XlmRoBertaEmbeddings,
    Chunk2Doc,
    DeBertaEmbeddings,
    # MultiDocumentAssembler,
    # AlbertForQuestionAnswering,
    # BertForQuestionAnswering,
    # DeBertaForQuestionAnswering,
    # DistilBertForQuestionAnswering,
    # LongformerForQuestionAnswering,
    # RoBertaForQuestionAnswering,
    # XlmRoBertaForQuestionAnswering, #
    # SpanBertCorefModel,
]
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/col_substitution/name_deduction/name_deductable_annotators_OS.py
name_deductable_annotators_OS.py
from sparknlp_jsl.annotator import *
from sparknlp.base import *
from sparknlp_display import *
from nlu import NLP_FEATURES
from nlu.universe.feature_node_ids import NLP_HC_NODE_IDS, NLP_NODE_IDS


class VizUtilsHC():
    """Utils for interfacing with the Spark-NLP-Display lib - licensed Viz"""
    HTML_WRAPPER = """<div class="scroll entities" style="overflow-x: auto; border: 1px solid #e6e9ef; border-radius: 0.25rem; padding: 1rem; margin-bottom: 2.5rem; white-space:pre-wrap">{}</div>"""

    @staticmethod
    def infer_viz_licensed(pipe) -> str:
        """For a given NLUPipeline with licensed components, infers which visualizations are applicable."""
        # We go in reverse, which makes NER always take lowest priority and
        # NER-feeder annotators take higher priority.
        for c in pipe.components[::-1]:
            if isinstance(c.model, TypedDependencyParserModel):
                return 'dep'
            if isinstance(c.model, (SentenceEntityResolverModel,)):
                return 'resolution'
            # FIX: original tuple listed RelationExtractionDLModel twice;
            # RelationExtractionModel was intended (cf. infer_relation_dependencies).
            if isinstance(c.model, (RelationExtractionDLModel, RelationExtractionModel)):
                return 'relation'
            if isinstance(c.model, (AssertionDLModel, AssertionLogRegModel)):
                return 'assert'
            if isinstance(c.model, (NerConverter, NerConverterInternal)):
                return 'ner'

    @staticmethod
    def viz_ner(anno_res, pipe, labels=None, viz_colors=None, is_databricks_env=False, write_to_streamlit=False,
                ner_col=None):
        """Infer columns required for ner viz and then viz it.

        viz_colors : set label colors by specifying hex codes, i.e. viz_colors = {'LOC':'#800080', 'PER':'#77b5fe'}
        labels : only allow these labels to be displayed. (default: [] - all labels will be displayed)
        """
        # Avoid mutable default arguments; semantics are unchanged.
        labels = [] if labels is None else labels
        viz_colors = {} if viz_colors is None else viz_colors
        document_col, entities_col = VizUtilsHC.infer_ner_dependencies(pipe)
        if ner_col:
            entities_col = ner_col
        ner_vis = NerVisualizer()
        if len(viz_colors) > 0:
            ner_vis.set_label_colors(viz_colors)
        if write_to_streamlit:
            import streamlit as st
            HTML = ner_vis.display(anno_res, label_col=entities_col, document_col=document_col, labels=labels,
                                   return_html=True)
            # Split off the CSS so the HTML can be wrapped in the scroll container.
            CSS, HTML = HTML.split('</style>')
            CSS = CSS + '</style>'
            HTML = f'<div> {HTML} '
            st.markdown(CSS, unsafe_allow_html=True)
            # st.markdown(HTML, unsafe_allow_html=True)
            st.markdown(VizUtilsHC.HTML_WRAPPER.format(HTML), unsafe_allow_html=True)
        elif not is_databricks_env:
            ner_vis.display(anno_res, label_col=entities_col, document_col=document_col, labels=labels)
        else:
            return ner_vis.display(anno_res, label_col=entities_col, document_col=document_col, labels=labels,
                                   return_html=True)

    @staticmethod
    def infer_ner_dependencies(pipe):
        """Finds entities and doc cols for ner viz"""
        doc_component = None
        entities_component = None
        for c in pipe.components:
            if NLP_FEATURES.NAMED_ENTITY_CONVERTED in c.out_types:
                entities_component = c
            if isinstance(c.model, DocumentAssembler):
                doc_component = c
        document_col = doc_component.spark_output_column_names[0]
        entities_col = entities_component.spark_output_column_names[0]
        return document_col, entities_col

    @staticmethod
    def viz_dep(anno_res, pipe, is_databricks_env, write_to_streamlit=False, user_dep_untyped_col=None,
                user_dep_typed_col=None, user_pos_col=None):
        """Viz dep result"""
        pos_col, dep_typ_col, dep_untyp_col = VizUtilsHC.infer_dep_dependencies(pipe)
        if user_dep_untyped_col:
            dep_untyp_col = user_dep_untyped_col
        if user_dep_typed_col:
            dep_typ_col = user_dep_typed_col
        if user_pos_col:
            pos_col = user_pos_col
        dependency_vis = DependencyParserVisualizer()
        if write_to_streamlit:
            import streamlit as st
            SVG = dependency_vis.display(anno_res, pos_col=pos_col, dependency_col=dep_untyp_col,
                                         dependency_type_col=dep_typ_col, return_html=True)
            # st.markdown(SVG, unsafe_allow_html=True)
            st.markdown(VizUtilsHC.HTML_WRAPPER.format(SVG), unsafe_allow_html=True)
        elif not is_databricks_env:
            dependency_vis.display(anno_res, pos_col=pos_col, dependency_col=dep_untyp_col,
                                   dependency_type_col=dep_typ_col)
        else:
            return dependency_vis.display(anno_res, pos_col=pos_col, dependency_col=dep_untyp_col,
                                          dependency_type_col=dep_typ_col, return_html=True)

    @staticmethod
    def infer_dep_dependencies(pipe):
        """Finds pos, dep_typed and dep_untyped cols for dep viz"""
        pos_component = None
        dep_untyped_component = None
        dep_typed_component = None
        for c in pipe.components:
            if isinstance(c.model, PerceptronModel):
                pos_component = c
            if isinstance(c.model, TypedDependencyParserModel):
                dep_typed_component = c
            if isinstance(c.model, DependencyParserModel):
                dep_untyped_component = c
        pos_col = pos_component.spark_output_column_names[0]
        dep_typ_col = dep_typed_component.spark_output_column_names[0]
        dep_untyp_col = dep_untyped_component.spark_output_column_names[0]
        return pos_col, dep_typ_col, dep_untyp_col

    @staticmethod
    def viz_resolution(anno_res, pipe, viz_colors=None, is_databricks_env=False, write_to_streamlit=False,
                       user_ner_col=None, user_resolution_col=None):
        """Viz resolution result. Set label colors by specifying hex codes, i.e.
        viz_colors={'TREATMENT':'#800080', 'PROBLEM':'#77b5fe'}
        """
        viz_colors = {} if viz_colors is None else viz_colors
        entities_col, resolution_col, doc_col = VizUtilsHC.infer_resolution_dependencies(pipe)
        er_vis = EntityResolverVisualizer()
        if len(viz_colors) > 0:
            er_vis.set_label_colors(viz_colors)
        if user_ner_col:
            entities_col = user_ner_col
        if user_resolution_col:
            resolution_col = user_resolution_col
        if write_to_streamlit:
            import streamlit as st
            HTML = er_vis.display(anno_res, label_col=entities_col, resolution_col=resolution_col,
                                  document_col=doc_col, return_html=True)
            CSS, HTML = HTML.split('</style>')
            CSS = CSS + '</style>'
            HTML = f'<div> {HTML} '
            st.markdown(CSS, unsafe_allow_html=True)
            # st.markdown(HTML, unsafe_allow_html=True)
            st.markdown(VizUtilsHC.HTML_WRAPPER.format(HTML), unsafe_allow_html=True)
        elif not is_databricks_env:
            er_vis.display(anno_res, label_col=entities_col, resolution_col=resolution_col, document_col=doc_col)
        else:
            return er_vis.display(anno_res, label_col=entities_col, resolution_col=resolution_col,
                                  document_col=doc_col, return_html=True)

    @staticmethod
    def infer_resolution_dependencies(pipe):
        """Finds entities_col, resolution_col and doc_col for resolution viz"""
        entities_component, resolution_component, doc_component = None, None, None
        for c in pipe.components:
            if c.name == NLP_NODE_IDS.DOCUMENT_ASSEMBLER:
                doc_component = c
            if NLP_FEATURES.NAMED_ENTITY_CONVERTED in c.out_types:
                entities_component = c
            if c.name == NLP_HC_NODE_IDS.SENTENCE_ENTITY_RESOLVER:
                resolution_component = c
        entities_col = entities_component.spark_output_column_names[0]
        resolution_col = resolution_component.spark_output_column_names[0]
        doc_col = doc_component.spark_output_column_names[0]
        return entities_col, resolution_col, doc_col

    @staticmethod
    def viz_relation(anno_res, pipe, is_databricks_env, write_to_streamlit=False, user_relation_col=None):
        """Viz relation result."""
        relation_col, document_col = VizUtilsHC.infer_relation_dependencies(pipe)
        if user_relation_col:
            relation_col = user_relation_col
        re_vis = RelationExtractionVisualizer()
        if write_to_streamlit:
            import streamlit as st
            HTML = re_vis.display(anno_res, relation_col=relation_col, document_col=document_col,
                                  show_relations=True, return_html=True)
            # st.markdown(HTML, unsafe_allow_html=True)
            st.markdown(VizUtilsHC.HTML_WRAPPER.format(HTML), unsafe_allow_html=True)
        # FIX: was a plain `if`, which re-rendered the viz a second time in
        # streamlit mode; all sibling viz methods use `elif` here.
        elif not is_databricks_env:
            re_vis.display(anno_res, relation_col=relation_col, document_col=document_col, show_relations=True)
        else:
            return re_vis.display(anno_res, relation_col=relation_col, document_col=document_col,
                                  show_relations=True, return_html=True)

    @staticmethod
    def infer_relation_dependencies(pipe):
        """Finds relation_col and document_col for relation viz"""
        relation_component, doc_component = None, None
        for c in pipe.components:
            if isinstance(c.model, DocumentAssembler):
                doc_component = c
            if isinstance(c.model, (RelationExtractionDLModel, RelationExtractionModel)):
                relation_component = c
        relation_col = relation_component.spark_output_column_names[0]
        document_col = doc_component.spark_output_column_names[0]
        return relation_col, document_col

    @staticmethod
    def viz_assertion(anno_res, pipe, viz_colors=None, is_databricks_env=False, write_to_streamlit=False,
                      user_ner_col=None, user_assertion_col=None):
        """Viz assertion result. Set label colors by specifying hex codes, i.e.
        viz_colors={'TREATMENT':'#008080', 'problem':'#800080'}
        """
        viz_colors = {} if viz_colors is None else viz_colors
        entities_col, assertion_col, doc_col = VizUtilsHC.infer_assertion_dependencies(pipe)
        assertion_vis = AssertionVisualizer()
        if user_ner_col:
            entities_col = user_ner_col
        if user_assertion_col:
            # FIX: original self-assigned `assertion_col = assertion_col`,
            # silently ignoring the user-supplied column.
            assertion_col = user_assertion_col
        if len(viz_colors) > 0:
            assertion_vis.set_label_colors(viz_colors)
        if write_to_streamlit:
            import streamlit as st
            HTML = assertion_vis.display(anno_res, label_col=entities_col, assertion_col=assertion_col,
                                         document_col=doc_col, return_html=True)
            # st.markdown(HTML, unsafe_allow_html=True)
            CSS, HTML = HTML.split('</style>')
            CSS = CSS + '</style>'
            HTML = f'<div> {HTML} '
            st.markdown(CSS, unsafe_allow_html=True)
            # st.markdown(HTML, unsafe_allow_html=True)
            st.markdown(VizUtilsHC.HTML_WRAPPER.format(HTML), unsafe_allow_html=True)
        elif not is_databricks_env:
            assertion_vis.display(anno_res, label_col=entities_col, assertion_col=assertion_col,
                                  document_col=doc_col)
        else:
            return assertion_vis.display(anno_res, label_col=entities_col, assertion_col=assertion_col,
                                         document_col=doc_col, return_html=True)

    @staticmethod
    def infer_assertion_dependencies(pipe):
        """Finds entities_col, assertion_col and doc_col for assertion viz"""
        entities_component, assert_component, doc_component = None, None, None
        for c in pipe.components:
            if c.name == NLP_NODE_IDS.DOCUMENT_ASSEMBLER:
                doc_component = c
            if c.name in [NLP_HC_NODE_IDS.NER_CONVERTER_INTERNAL, NLP_NODE_IDS.NER_CONVERTER,
                          NLP_NODE_IDS.PARTIAL_NerConverterInternalModel]:
                entities_component = c
            if c.name in [NLP_HC_NODE_IDS.ASSERTION_DL, NLP_HC_NODE_IDS.ASSERTION_LOG_REG]:
                assert_component = c
        entities_col = entities_component.spark_output_column_names[0]
        assertion_col = assert_component.spark_output_column_names[0]
        doc_col = doc_component.spark_output_column_names[0]
        return entities_col, assertion_col, doc_col
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/viz/vis_utils_HC.py
vis_utils_HC.py
from sparknlp.annotator import *
from nlu.pipe.viz.vis_utils_OS import VizUtilsOS
import random


class VizUtils():
    """Utils for interfacing with the Spark-NLP-Display lib"""

    @staticmethod
    def get_random():
        """Return a random int, used to derive unique streamlit element keys."""
        return random.randint(0, 1333333333337)

    @staticmethod
    def infer_viz_type(pipe) -> str:
        """For a given NLUPipeline, infers which visualizations are applicable."""
        if pipe.has_licensed_components:
            # Imported lazily: the HC module requires licensed libs.
            from nlu.pipe.viz.vis_utils_HC import VizUtilsHC
            return VizUtilsHC.infer_viz_licensed(pipe)
        else:
            return VizUtilsOS.infer_viz_open_source(pipe)

    @staticmethod
    def viz_OS(anno_res, pipe, viz_type, viz_colors, labels_to_viz, is_databricks_env, write_to_streamlit,
               streamlit_key, ner_col, pos_col, dep_untyped_col, dep_typed_col):
        """Visualize open source component_to_resolve."""
        streamlit_key = VizUtils.get_random() if streamlit_key == "RANDOM" else streamlit_key
        if viz_type == 'ner':
            return VizUtilsOS.viz_ner(anno_res, pipe, labels_to_viz, viz_colors, is_databricks_env,
                                      write_to_streamlit, streamlit_key, ner_col)
        elif viz_type == 'dep':
            return VizUtilsOS.viz_dep(anno_res, pipe, is_databricks_env, write_to_streamlit, streamlit_key,
                                      pos_col, dep_untyped_col, dep_typed_col)
        else:
            # FIX: the open-source path only supports ner/dep; the old message listed
            # licensed-only viz types and misspelled "corresponding".
            raise ValueError(
                "Could not find applicable viz_type. Please make sure you specify either ner or dep "
                "and have loaded corresponding components")

    @staticmethod
    def viz_HC(anno_res, pipe, viz_type, viz_colors, labels_to_viz, is_databricks_env, write_to_streamlit,
               ner_col, pos_col, dep_untyped_col, dep_typed_col, resolution_col, relation_col, assertion_col):
        """Visualize licensed component_to_resolve."""
        from nlu.pipe.viz.vis_utils_HC import VizUtilsHC
        if viz_type == 'ner':
            return VizUtilsHC.viz_ner(anno_res, pipe, labels_to_viz, viz_colors, is_databricks_env,
                                      write_to_streamlit, ner_col)
        elif viz_type == 'dep':
            return VizUtilsHC.viz_dep(anno_res, pipe, is_databricks_env, write_to_streamlit,
                                      dep_untyped_col, dep_typed_col, pos_col)
        elif viz_type == 'resolution':
            return VizUtilsHC.viz_resolution(anno_res, pipe, viz_colors, is_databricks_env, write_to_streamlit,
                                             ner_col, resolution_col)
        elif viz_type == 'relation':
            return VizUtilsHC.viz_relation(anno_res, pipe, is_databricks_env, write_to_streamlit, relation_col)
        elif viz_type == 'assert':
            return VizUtilsHC.viz_assertion(anno_res, pipe, viz_colors, is_databricks_env, write_to_streamlit,
                                            ner_col, assertion_col)
        else:
            # FIX: old message listed "dep" twice and misspelled "corresponding".
            raise ValueError(
                "Could not find applicable viz_type. Please make sure you specify either ner, dep, "
                "resolution, relation or assert and have loaded corresponding components")


"""Define which annotators (model_anno_obj) are vizzable by which visualizer.
There are 5 in total, 2 open source and 5 HC."""
# vizalbe_components_OC = {
#     'ner' : [NerConverter],
#     'dep' : [DependencyParserModel],
# }
# vizalbe_components_HC = {
#     'ner':[NerConverter,NerConverterInternal],
#     'resolution' : [SentenceEntityResolverModel, ChunkEntityResolverModel] ,
#     'relation' : [RelationExtractionModel,RelationExtractionDLModel],
#     'assert' : [AssertionDLModel,AssertionLogRegApproach],
#     'dep' : [DependencyParserModel],
# }
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/viz/vis_utils.py
vis_utils.py
from sparknlp_display import NerVisualizer, DependencyParserVisualizer
from sparknlp.annotator import NerConverter, DependencyParserModel, TypedDependencyParserModel, PerceptronModel
from sparknlp.base import DocumentAssembler
from nlu.universe.feature_node_ids import NLP_NODE_IDS


class VizUtilsOS():
    """Utils for interfacing with the Spark-NLP-Display lib and vizzing Open Source Components - Open source"""
    # HTML shell that makes the rendered viz horizontally scrollable in Streamlit.
    HTML_WRAPPER = """<div class="scroll entities" style="overflow-x: auto; border: 1px solid #e6e9ef; border-radius: 0.25rem; padding: 1rem; margin-bottom: 2.5rem; white-space:pre-wrap">{}</div>"""

    @staticmethod
    def infer_viz_open_source(pipe) -> str:
        """For a given NLUPipeline with only open source components, infer which visualization is applicable.

        Returns 'ner' or 'dep'; implicitly returns None when no vizzable component is found.
        """
        for c in pipe.components:
            if isinstance(c.model, NerConverter):
                return 'ner'
            if isinstance(c.model, DependencyParserModel):
                return 'dep'

    @staticmethod
    def viz_ner(anno_res, pipe, labels=None, viz_colors=None, is_databricks_env=False, write_to_streamlit=False,
                streamlit_key='RANDOM', user_ner_col=None):
        """Infer columns required for NER viz and then viz it.

        viz_colors : set label colors by specifying hex codes, i.e. viz_colors = {'LOC':'#800080', 'PER':'#77b5fe'}
        labels : only allow these labels to be displayed (default: None - all labels will be displayed)
        user_ner_col : when set, overrides the inferred entities column.
        """
        # Avoid the shared-mutable-default pitfall; behaves exactly like the old `viz_colors={}` default.
        if viz_colors is None:
            viz_colors = {}
        document_col, entities_col = VizUtilsOS.infer_ner_dependencies(pipe)
        ner_vis = NerVisualizer()
        ner_vis.set_label_colors(viz_colors)
        if user_ner_col:
            entities_col = user_ner_col
        if write_to_streamlit:
            import streamlit as st
            HTML = ner_vis.display(anno_res, label_col=entities_col, document_col=document_col, labels=labels,
                                   return_html=True)
            # Split off the leading <style> section so CSS and markup can be injected separately.
            CSS, HTML = HTML.split('</style>')
            CSS = CSS + '</style>'
            HTML = f'<div> {HTML} '
            st.markdown(CSS, unsafe_allow_html=True)
            st.markdown(VizUtilsOS.HTML_WRAPPER.format(HTML), unsafe_allow_html=True)
        elif not is_databricks_env:
            ner_vis.display(anno_res, label_col=entities_col, document_col=document_col, labels=labels)
        else:
            # Databricks cannot render inline; hand back raw HTML for displayHTML().
            return ner_vis.display(anno_res, label_col=entities_col, document_col=document_col, labels=labels,
                                   return_html=True)

    @staticmethod
    def infer_ner_dependencies(pipe):
        """Find the entities and document output columns needed for the NER viz."""
        # TODO FIX
        doc_component = None
        entities_component = None
        for c in pipe.components:
            if isinstance(c.model, NerConverter):
                entities_component = c
            if isinstance(c.model, DocumentAssembler):
                doc_component = c
        document_col = doc_component.spark_output_column_names[0]
        entities_col = entities_component.spark_output_column_names[0]
        return document_col, entities_col

    @staticmethod
    def viz_dep(anno_res, pipe, is_databricks_env, write_to_streamlit, streamlit_key='RANDOM',
                user_pos_col=None, user_dep_untyped_col=None, user_dep_typed_col=None):
        """Viz dep result.

        The user_* parameters, when set, override the columns inferred from the pipeline.
        """
        pos_col, dep_typ_col, dep_untyp_col = VizUtilsOS.infer_dep_dependencies(pipe)
        dependency_vis = DependencyParserVisualizer()
        if user_pos_col:
            pos_col = user_pos_col
        if user_dep_typed_col:
            dep_typ_col = user_dep_typed_col
        if user_dep_untyped_col:
            # BUGFIX: was a no-op self-assignment (`dep_untyp_col = dep_untyp_col`),
            # so the user's untyped-dependency column override was silently ignored.
            dep_untyp_col = user_dep_untyped_col
        if write_to_streamlit:
            import streamlit as st
            SVG = dependency_vis.display(anno_res, pos_col=pos_col, dependency_col=dep_untyp_col,
                                         dependency_type_col=dep_typ_col, return_html=True)
            st.markdown(VizUtilsOS.HTML_WRAPPER.format(SVG), unsafe_allow_html=True)
        elif not is_databricks_env:
            dependency_vis.display(anno_res, pos_col=pos_col, dependency_col=dep_untyp_col,
                                   dependency_type_col=dep_typ_col)
        else:
            return dependency_vis.display(anno_res, pos_col=pos_col, dependency_col=dep_untyp_col,
                                          dependency_type_col=dep_typ_col, return_html=True)

    @staticmethod
    def infer_dep_dependencies(pipe):
        """Find the pos, typed-dep and untyped-dep output columns needed for the dep viz."""
        pos_component = None
        dep_untyped_component = None
        dep_typed_component = None
        for c in pipe.components:
            if c.name == NLP_NODE_IDS.POS:
                pos_component = c
            if c.name == NLP_NODE_IDS.TYPED_DEPENDENCY_PARSER:
                dep_typed_component = c
            if c.name == NLP_NODE_IDS.UNTYPED_DEPENDENCY_PARSER:
                dep_untyped_component = c
        pos_col = pos_component.spark_output_column_names[0]
        dep_typ_col = dep_typed_component.spark_output_column_names[0]
        dep_untyp_col = dep_untyped_component.spark_output_column_names[0]
        return pos_col, dep_typ_col, dep_untyp_col
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/viz/vis_utils_OS.py
vis_utils_OS.py
from nlu.discovery import Discoverer
from typing import List, Tuple, Optional, Dict, Union
import streamlit as st
import pandas as pd
from nlu.pipe.viz.streamlit_viz.streamlit_utils_OS import StreamlitUtilsOS
from nlu.pipe.viz.streamlit_viz.styles import _set_block_container_style
from nlu.pipe.viz.streamlit_viz.streamlit_viz_tracker import StreamlitVizTracker
from nlu.pipe.viz.streamlit_viz.viz_building_blocks.dep_tree import DepTreeStreamlitBlock
from nlu.pipe.viz.streamlit_viz.viz_building_blocks.classifier import ClassifierStreamlitBlock
from nlu.pipe.viz.streamlit_viz.viz_building_blocks.token_features import TokenFeaturesStreamlitBlock
from nlu.pipe.viz.streamlit_viz.viz_building_blocks.ner import NERStreamlitBlock
from nlu.pipe.viz.streamlit_viz.viz_building_blocks.word_similarity import WordSimilarityStreamlitBlock
from nlu.pipe.viz.streamlit_viz.viz_building_blocks.word_embedding_manifold import WordEmbeddingManifoldStreamlitBlock
from nlu.pipe.viz.streamlit_viz.viz_building_blocks.sentence_embedding_manifold import \
    SentenceEmbeddingManifoldStreamlitBlock
from nlu.pipe.viz.streamlit_viz.viz_building_blocks.entity_embedding_manifold import \
    EntityEmbeddingManifoldStreamlitBlock


class StreamlitVizBlockHandler():
    """Internal API to access any of the Streamlit building blocks.

    This is part of the Controller of the MVC pattern: every method here either
    renders the full dashboard (viz_streamlit_dashboard) or delegates straight
    to one of the *StreamlitBlock building-block classes.
    """

    @staticmethod
    def viz_streamlit_dashboard(
            pipe,
            # Base Params
            text: Union[str, List[str], pd.DataFrame, pd.Series],
            model_selection: List[str] = [],
            # SIMILARITY PARAMS
            similarity_texts: Tuple[str, str] = ('I love NLU <3', 'I love Streamlit <3'),
            title: str = 'NLU ❤️ Streamlit - Prototype your NLP startup in 0 lines of code',
            sub_title: str = 'Play with over 1000+ scalable enterprise NLP models',
            side_info: str = None,
            # UI PARAMS
            visualizers: List[str] = (
                    "dependency_tree", "ner", "similarity", "token_features", 'classification', 'manifold'),
            show_models_info: bool = True,
            show_model_select: bool = False,
            show_viz_selection: bool = False,
            show_logo: bool = True,
            set_wide_layout_CSS: bool = True,
            show_code_snippets: bool = False,
            model_select_position: str = 'side',  # main or side
            display_infos: bool = True,
            key: str = "NLU_streamlit",
            display_footer: bool = True,
            num_similarity_cols: int = 2,
            # NEW PARAMS
            # MANIfold
            num_manifold_cols: int = 3,
            # NOTE(review): ('TSNE') is a plain string, not a 1-tuple — probably
            # meant ('TSNE',). Currently unused in this method, so left untouched.
            manifold_algos: List[str] = ('TSNE'),
            # SIMY
            similarity_algos: List[str] = ('COSINE'),
    ) -> None:
        """Visualize either individual building blocks for streamlit or a full UI to experiment and explore models with."""
        StreamlitVizTracker.footer_displayed = not display_footer
        if set_wide_layout_CSS:
            _set_block_container_style()
        if title:
            st.title(title)
        if sub_title:
            st.subheader(sub_title)
        if show_logo:
            StreamlitVizTracker.show_logo()
        if side_info:
            st.sidebar.markdown(side_info)
        # Normalize list input to one newline-joined string for the text area.
        if isinstance(text, list):
            text = '\n'.join(text)
        text = st.text_area("Enter text you want to visualize below", text, key=key)
        ner_model_2_viz = pipe.nlu_ref
        if show_model_select:
            show_code_snippets = st.sidebar.checkbox('Generate code snippets', value=show_code_snippets)
            if model_selection == []:
                model_selection = Discoverer.get_components('ner', include_pipes=True)
            model_selection.sort()
            if model_select_position == 'side':
                if pipe.nlu_ref.split(' ')[0] in model_selection:
                    ner_model_2_viz = st.sidebar.selectbox("Select a NER model_anno_obj.", model_selection,
                                                           index=model_selection.index(pipe.nlu_ref.split(' ')[0]))
                else:
                    ner_model_2_viz = st.sidebar.selectbox("Select a NER model_anno_obj.", model_selection,
                                                           index=model_selection.index('en.ner'))
            else:
                if pipe.nlu_ref.split(' ')[0] in model_selection:
                    ner_model_2_viz = st.selectbox("Select a NER model_anno_obj", model_selection,
                                                   index=model_selection.index(pipe.nlu_ref.split(' ')[0]))
                else:
                    # BUGFIX: the options argument (model_selection) was missing,
                    # which makes st.selectbox raise at runtime. Mirrors the
                    # parallel sidebar branch above.
                    ner_model_2_viz = st.selectbox("Select a NER model_anno_obj.", model_selection,
                                                   index=model_selection.index('en.ner'))
        active_visualizers = visualizers
        if show_viz_selection:
            active_visualizers = st.sidebar.multiselect("Visualizers", options=visualizers, default=visualizers,
                                                        key=key)
        # The dep-tree viz always uses the extra 'en.dep.typed' pipeline.
        all_models = ner_model_2_viz + ' en.dep.typed ' if 'dependency_tree' in active_visualizers else ner_model_2_viz
        ner_pipe, tree_pipe = None, None
        for viz in active_visualizers:
            if 'ner' == viz:
                # Reuse the given pipe when it already matches the selected model.
                ner_pipe = pipe if pipe.nlu_ref == ner_model_2_viz else StreamlitUtilsOS.get_pipe(ner_model_2_viz)
                NERStreamlitBlock.visualize_ner(ner_pipe, text, generate_code_sample=show_code_snippets,
                                                key=key + '_ner', show_model_select=False, show_text_input=True,
                                                show_logo=False, show_infos=False)
            if 'dependency_tree' == viz:
                tree_pipe = StreamlitUtilsOS.get_pipe(
                    'en.dep.typed')  # if not ValidateVizPipe.viz_tree_satisfied(component_list) else component_list
                DepTreeStreamlitBlock.visualize_dep_tree(tree_pipe, text, generate_code_sample=show_code_snippets,
                                                         key=key + '_dep', show_infos=False, show_logo=False)
            if 'token_features' == viz:
                ner_pipe = pipe if pipe.nlu_ref == ner_model_2_viz else StreamlitUtilsOS.get_pipe(ner_model_2_viz)
                TokenFeaturesStreamlitBlock.visualize_tokens_information(ner_pipe, text,
                                                                         generate_code_sample=show_code_snippets,
                                                                         key=key + '_tok',
                                                                         model_select_position=model_select_position,
                                                                         show_infos=False, show_logo=False)
            if 'classification' == viz:
                ner_pipe = pipe if pipe.nlu_ref == ner_model_2_viz else StreamlitUtilsOS.get_pipe(ner_model_2_viz)
                ClassifierStreamlitBlock.visualize_classes(ner_pipe, text, generate_code_sample=show_code_snippets,
                                                           key=key + '_class',
                                                           model_select_position=model_select_position,
                                                           show_infos=False, show_logo=False)
            if 'similarity' == viz:
                ner_pipe = pipe if pipe.nlu_ref == ner_model_2_viz else StreamlitUtilsOS.get_pipe(ner_model_2_viz)
                WordSimilarityStreamlitBlock.display_word_similarity(ner_pipe, similarity_texts,
                                                                     generate_code_sample=show_code_snippets,
                                                                     model_select_position=model_select_position,
                                                                     show_infos=False, show_logo=False,
                                                                     num_cols=num_similarity_cols, key=key + '_sim')
            if 'manifold' == viz:
                ner_pipe = pipe if ner_model_2_viz in pipe.nlu_ref.split(' ') else StreamlitUtilsOS.get_pipe(
                    ner_model_2_viz)
                WordEmbeddingManifoldStreamlitBlock.viz_streamlit_word_embed_manifold(
                    ner_pipe, similarity_texts, generate_code_sample=show_code_snippets,
                    model_select_position=model_select_position, show_infos=False, show_logo=False,
                    num_cols=num_manifold_cols, key=key + '_mani')
        models_to_display_info_for = []
        if ner_pipe is not None:
            models_to_display_info_for.append(ner_pipe)
        if tree_pipe is not None:
            models_to_display_info_for.append(tree_pipe)
        if show_models_info:
            StreamlitVizTracker.display_model_info(all_models, models_to_display_info_for)
        if display_infos:
            StreamlitVizTracker.display_footer()

    @staticmethod
    def viz_streamlit_entity_embed_manifold(
            pipe,  # nlu component_list
            default_texts: List[str] = ("Donald Trump likes to visit New York",
                                        "Angela Merkel likes to visit Berlin!",
                                        'Peter hates visiting Paris'),
            title: Optional[str] = "Lower dimensional Manifold visualization for Entity embeddings",
            sub_title: Optional[
                str] = "Apply any of the 11 `Manifold` or `Matrix Decomposition` algorithms to reduce the dimensionality of `Entity Embeddings` to `1-D`, `2-D` and `3-D` ",
            default_algos_to_apply: List[str] = ("TSNE", "PCA"),
            target_dimensions: List[int] = (1, 2, 3),
            show_algo_select: bool = True,
            set_wide_layout_CSS: bool = True,
            num_cols: int = 3,
            model_select_position: str = 'side',  # side or main
            key: str = "NLU_streamlit",
            show_infos: bool = True,
            show_logo: bool = True,
            n_jobs: Optional[int] = 3,  # False
    ):
        """Delegate to the entity-embedding manifold building block; parameters are forwarded positionally, unchanged."""
        EntityEmbeddingManifoldStreamlitBlock.viz_streamlit_entity_embed_manifold(
            pipe,
            default_texts,
            title,
            sub_title,
            default_algos_to_apply,
            target_dimensions,
            show_algo_select,
            set_wide_layout_CSS,
            num_cols,
            model_select_position,
            key,
            show_infos,
            show_logo,
            n_jobs,
        )

    @staticmethod
    def viz_streamlit_sentence_embed_manifold(
            pipe,  # nlu component_list
            default_texts: List[str] = (
                    "Donald Trump likes to party!", "Angela Merkel likes to party!", 'Peter HATES TO PARTTY!!!! :('),
            title: Optional[str] = "Lower dimensional Manifold visualization for word embeddings",
            sub_title: Optional[
                str] = "Apply any of the 11 `Manifold` or `Matrix Decomposition` algorithms to reduce the dimensionality of `Word Embeddings` to `1-D`, `2-D` and `3-D` ",
            write_raw_pandas: bool = False,
            default_algos_to_apply: List[str] = ("TSNE", "PCA"),
            # ,'LLE','Spectral Embedding','MDS','ISOMAP','SVD aka LSA','DictionaryLearning','FactorAnalysis','FastICA','KernelPCA',), # LatentDirichletAllocation 'NMF',
            target_dimensions: List[int] = (1, 2, 3),
            show_algo_select: bool = True,
            show_embed_select: bool = True,
            show_color_select: bool = True,
            MAX_DISPLAY_NUM: int = 100,
            display_embed_information: bool = True,
            set_wide_layout_CSS: bool = True,
            num_cols: int = 3,
            model_select_position: str = 'side',  # side or main
            key: str = "NLU_streamlit",
            additional_classifiers_for_coloring: List[str] = ['pos', 'sentiment'],
            generate_code_sample: bool = False,
            show_infos: bool = True,
            show_logo: bool = True,
            n_jobs: Optional[int] = 3,  # False
    ):
        """Delegate to the sentence-embedding manifold building block; parameters are forwarded positionally, unchanged."""
        SentenceEmbeddingManifoldStreamlitBlock.viz_streamlit_sentence_embed_manifold(
            pipe,
            default_texts,
            title,
            sub_title,
            write_raw_pandas,
            default_algos_to_apply,
            target_dimensions,
            show_algo_select,
            show_embed_select,
            show_color_select,
            MAX_DISPLAY_NUM,
            display_embed_information,
            set_wide_layout_CSS,
            num_cols,
            model_select_position,
            key,
            additional_classifiers_for_coloring,
            generate_code_sample,
            show_infos,
            show_logo,
            n_jobs,
        )

    @staticmethod
    def viz_streamlit_word_embed_manifold(
            pipe,  # nlu component_list
            default_texts: List[str] = (
                    "Donald Trump likes to party!", "Angela Merkel likes to party!", 'Peter HATES TO PARTTY!!!! :('),
            title: Optional[str] = "Lower dimensional Manifold visualization for word embeddings",
            sub_title: Optional[
                str] = "Apply any of the 11 `Manifold` or `Matrix Decomposition` algorithms to reduce the dimensionality of `Word Embeddings` to `1-D`, `2-D` and `3-D` ",
            write_raw_pandas: bool = False,
            default_algos_to_apply: List[str] = ("TSNE", "PCA"),
            # ,'LLE','Spectral Embedding','MDS','ISOMAP','SVD aka LSA','DictionaryLearning','FactorAnalysis','FastICA','KernelPCA',), # LatentDirichletAllocation 'NMF',
            target_dimensions: List[int] = (1, 2, 3),
            show_algo_select: bool = True,
            show_embed_select: bool = True,
            show_color_select: bool = True,
            MAX_DISPLAY_NUM: int = 100,
            display_embed_information: bool = True,
            set_wide_layout_CSS: bool = True,
            num_cols: int = 3,
            model_select_position: str = 'side',  # side or main
            key: str = "NLU_streamlit",
            additional_classifiers_for_coloring: List[str] = ['pos', 'sentiment'],
            generate_code_sample: bool = False,
            show_infos: bool = True,
            show_logo: bool = True,
            n_jobs: Optional[int] = 3,  # False
    ):
        """Delegate to the word-embedding manifold building block; parameters are forwarded positionally, unchanged."""
        WordEmbeddingManifoldStreamlitBlock.viz_streamlit_word_embed_manifold(
            pipe,
            default_texts,
            title,
            sub_title,
            write_raw_pandas,
            default_algos_to_apply,
            target_dimensions,
            show_algo_select,
            show_embed_select,
            show_color_select,
            MAX_DISPLAY_NUM,
            display_embed_information,
            set_wide_layout_CSS,
            num_cols,
            model_select_position,
            key,
            additional_classifiers_for_coloring,
            generate_code_sample,
            show_infos,
            show_logo,
            n_jobs,
        )

    @staticmethod
    def visualize_dep_tree(
            pipe,  # nlu component_list
            text: str = 'Billy likes to swim',
            title: Optional[str] = "Dependency Parse & Part-of-speech tags",
            sub_title: Optional[
                str] = 'POS tags define a `grammatical label` for `each token` and the `Dependency Tree` classifies `Relations between the tokens` ',
            set_wide_layout_CSS: bool = True,
            generate_code_sample: bool = False,
            key="NLU_streamlit",
            show_infos: bool = True,
            show_logo: bool = True,
            show_text_input: bool = True,
    ):
        """Delegate to the dependency-tree building block; parameters are forwarded positionally, unchanged."""
        DepTreeStreamlitBlock.visualize_dep_tree(pipe,
                                                 text,
                                                 title,
                                                 sub_title,
                                                 set_wide_layout_CSS,
                                                 generate_code_sample,
                                                 key,
                                                 show_infos,
                                                 show_logo,
                                                 show_text_input,
                                                 )

    @staticmethod
    def display_word_similarity(
            pipe,  # nlu component_list
            default_texts: Tuple[str, str] = ("Donald Trump likes to party!", "Angela Merkel likes to party!"),
            threshold: float = 0.5,
            title: Optional[str] = "Embeddings Similarity Matrix &  Visualizations  ",
            sub_tile: Optional[
                str] = "Visualize `word-wise similarity matrix` and calculate `similarity scores` for `2 texts` and every `word embedding` loaded",
            write_raw_pandas: bool = False,
            display_embed_information: bool = True,
            similarity_matrix=True,
            show_algo_select: bool = True,
            # NOTE(review): ('cosine') is a plain string, not a 1-tuple — possibly
            # meant ('cosine',). Left as-is because the downstream block's handling
            # of a bare string cannot be confirmed from here.
            dist_metrics: List[str] = ('cosine'),
            set_wide_layout_CSS: bool = True,
            generate_code_sample: bool = False,
            key: str = "NLU_streamlit",
            num_cols: int = 2,
            display_scalar_similarities: bool = False,
            display_similarity_summary: bool = False,
            model_select_position: str = 'side',  # main or side
            show_infos: bool = True,
            show_logo: bool = True,
    ):
        """Delegate to the word-similarity building block; parameters are forwarded positionally, unchanged."""
        WordSimilarityStreamlitBlock.display_word_similarity(pipe,
                                                             default_texts,
                                                             threshold,
                                                             title,
                                                             sub_tile,
                                                             write_raw_pandas,
                                                             display_embed_information,
                                                             similarity_matrix,
                                                             show_algo_select,
                                                             dist_metrics,
                                                             set_wide_layout_CSS,
                                                             generate_code_sample,
                                                             key,
                                                             num_cols,
                                                             display_scalar_similarities,
                                                             display_similarity_summary,
                                                             model_select_position,
                                                             show_infos,
                                                             show_logo,
                                                             )

    @staticmethod
    def visualize_tokens_information(
            pipe,  # nlu component_list
            text: str,
            title: Optional[str] = "Token Features",
            sub_title: Optional[str] = 'Pick from `over 1000+ models` on the left and `view the generated features`',
            show_feature_select: bool = True,
            features: Optional[List[str]] = None,
            full_metadata: bool = True,
            output_level: str = 'token',
            positions: bool = False,
            set_wide_layout_CSS: bool = True,
            generate_code_sample: bool = False,
            key="NLU_streamlit",
            show_model_select=True,
            model_select_position: str = 'side',  # main or side
            show_infos: bool = True,
            show_logo: bool = True,
            show_text_input: bool = True,
    ) -> None:
        """Delegate to the token-features building block; parameters are forwarded positionally, unchanged."""
        TokenFeaturesStreamlitBlock.visualize_tokens_information(
            pipe,
            text,
            title,
            sub_title,
            show_feature_select,
            features,
            full_metadata,
            output_level,
            positions,
            set_wide_layout_CSS,
            generate_code_sample,
            key,
            show_model_select,
            model_select_position,
            show_infos,
            show_logo,
            show_text_input,
        )

    @staticmethod
    def visualize_ner(
            pipe,  # Nlu component_list
            text: str,
            ner_tags: Optional[List[str]] = None,
            show_label_select: bool = True,
            show_table: bool = False,
            title: Optional[str] = "Named Entities",
            sub_title: Optional[
                str] = "Recognize various `Named Entities (NER)` in text entered and filter them. You can select from over `100 languages` in the dropdown.",
            colors: Dict[str, str] = {},
            show_color_selector: bool = False,
            set_wide_layout_CSS: bool = True,
            generate_code_sample: bool = False,
            key="NLU_streamlit",
            model_select_position: str = 'side',
            show_model_select: bool = True,
            show_text_input: bool = True,
            show_infos: bool = True,
            show_logo: bool = True,
    ):
        """Delegate to the NER building block; parameters are forwarded positionally, unchanged."""
        NERStreamlitBlock.visualize_ner(
            pipe,
            text,
            ner_tags,
            show_label_select,
            show_table,
            title,
            sub_title,
            colors,
            show_color_selector,
            set_wide_layout_CSS,
            generate_code_sample,
            key,
            model_select_position,
            show_model_select,
            show_text_input,
            show_infos,
            show_logo,
        )

    @staticmethod
    def visualize_classes(
            pipe,  # nlu component_list
            text: Union[str, list, pd.DataFrame, pd.Series, List[str]] = (
                    'I love NLU and Streamlit and sunny days!', 'I hate rainy daiys', 'CALL NOW AND WIN 1000$M'),
            output_level: Optional[str] = 'document',
            title: Optional[str] = "Text Classification",
            sub_title: Optional[
                str] = 'View predicted `classes` and `confidences` for `hundreds of text classifiers` in `over 200 languages`',
            metadata: bool = False,
            positions: bool = False,
            set_wide_layout_CSS: bool = True,
            generate_code_sample: bool = False,
            key: str = "NLU_streamlit",
            show_model_selector: bool = True,
            model_select_position: str = 'side',
            show_infos: bool = True,
            show_logo: bool = True,
    ) -> None:
        """Delegate to the classifier building block; parameters are forwarded positionally, unchanged."""
        ClassifierStreamlitBlock.visualize_classes(
            pipe,
            text,
            output_level,
            title,
            sub_title,
            metadata,
            positions,
            set_wide_layout_CSS,
            generate_code_sample,
            key,
            show_model_selector,
            model_select_position,
            show_infos,
            show_logo,
        )
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/viz/streamlit_viz/streamlit_dashboard_OS.py
streamlit_dashboard_OS.py
import base64 SVG = """ <svg id="svg" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="400" height="177.77777777777777" viewBox="0, 0, 400,177.77777777777777"><g id="svgg"><path id="path0" d="M70.819 61.991 C 70.379 62.076,70.139 62.863,70.510 63.006 C 70.847 63.135,71.003 62.787,70.746 62.477 C 70.566 62.260,70.579 62.234,70.819 62.326 C 70.982 62.388,71.111 62.351,71.111 62.241 C 71.111 62.133,71.201 62.044,71.311 62.044 C 71.456 62.044,71.457 62.108,71.315 62.280 C 71.172 62.452,71.178 62.574,71.338 62.737 C 71.457 62.859,71.476 62.944,71.378 62.925 C 70.987 62.851,70.859 62.949,71.111 63.131 C 71.336 63.293,71.329 63.307,71.067 63.222 C 70.891 63.166,70.756 63.213,70.756 63.330 C 70.756 63.485,70.702 63.484,70.542 63.324 C 70.285 63.067,69.800 63.038,69.956 63.289 C 70.082 63.494,69.755 63.502,69.256 63.307 C 68.997 63.206,68.969 63.137,69.143 63.030 C 69.280 62.945,69.436 62.987,69.528 63.133 C 69.651 63.327,69.684 63.313,69.686 63.067 C 69.688 62.896,69.784 62.776,69.901 62.800 C 70.017 62.824,70.144 62.697,70.184 62.516 C 70.246 62.228,70.156 62.246,69.436 62.668 C 68.645 63.132,68.089 63.162,68.089 62.740 C 68.089 62.618,68.269 62.571,68.538 62.622 C 68.784 62.669,69.037 62.626,69.098 62.526 C 69.160 62.427,69.125 62.398,69.020 62.463 C 68.915 62.528,68.708 62.460,68.560 62.313 C 68.263 62.016,68.009 61.950,68.171 62.211 C 68.228 62.303,68.113 62.366,67.915 62.351 C 67.717 62.337,67.556 62.275,67.556 62.214 C 67.556 62.153,67.425 62.104,67.265 62.104 C 67.010 62.104,67.014 62.132,67.292 62.335 C 67.466 62.462,67.676 62.524,67.760 62.473 C 67.843 62.421,67.911 62.472,67.911 62.586 C 67.911 62.700,67.791 62.745,67.644 62.686 C 67.498 62.626,67.247 62.525,67.087 62.461 C 66.884 62.379,66.823 62.419,66.887 62.594 C 66.937 62.732,67.088 62.823,67.222 62.797 C 67.360 62.771,67.315 62.868,67.122 63.020 C 66.756 63.305,66.399 63.060,66.618 62.675 C 66.676 62.573,66.656 62.551,66.573 62.626 C 66.346 62.832,66.338 
63.470,66.564 63.335 C 66.669 63.272,66.648 63.353,66.517 63.515 C 66.386 63.676,66.125 63.857,65.937 63.917 C 65.749 63.977,65.632 64.087,65.678 64.161 C 65.724 64.235,65.645 64.296,65.503 64.296 C 65.278 64.296,65.277 64.258,65.495 64.015 C 65.647 63.846,65.710 63.538,65.653 63.244 C 65.553 62.729,65.351 62.618,65.139 62.961 C 65.053 63.099,65.093 63.118,65.261 63.019 C 65.399 62.938,65.359 63.046,65.172 63.258 C 64.829 63.650,64.606 63.780,64.791 63.481 C 64.912 63.286,64.419 62.754,64.121 62.758 C 63.988 62.760,63.977 62.809,64.089 62.892 C 64.327 63.066,64.414 63.524,64.182 63.381 C 64.009 63.274,63.929 63.494,63.982 63.934 C 63.995 64.045,63.807 64.173,63.564 64.220 C 63.321 64.266,63.164 64.236,63.216 64.152 C 63.268 64.068,63.230 64.000,63.132 64.000 C 63.034 64.000,62.916 64.062,62.869 64.137 C 62.744 64.340,61.478 64.281,61.067 64.054 C 60.712 63.858,60.711 63.904,60.757 86.788 L 60.803 109.719 60.401 110.053 L 60.000 110.388 60.422 110.394 C 60.734 110.398,60.798 110.458,60.668 110.622 C 60.456 110.891,60.445 111.467,60.652 111.467 C 60.733 111.467,60.800 111.592,60.800 111.746 C 60.800 112.000,60.775 112.000,60.521 111.746 C 60.130 111.356,60.034 111.396,60.132 111.911 C 60.179 112.156,60.148 112.356,60.064 112.356 C 59.904 112.356,59.850 112.114,59.899 111.616 C 59.915 111.453,59.831 111.260,59.712 111.187 C 59.560 111.093,59.531 111.146,59.615 111.365 C 59.682 111.541,59.604 111.897,59.436 112.182 C 59.175 112.623,59.170 112.728,59.391 113.000 C 59.613 113.273,59.611 113.293,59.378 113.156 C 59.156 113.025,59.149 113.045,59.332 113.271 C 59.453 113.421,59.493 113.609,59.421 113.689 C 59.348 113.769,59.388 113.778,59.510 113.708 C 59.800 113.541,60.391 113.858,60.112 114.030 C 60.001 114.099,59.911 114.072,59.911 113.972 C 59.911 113.871,59.789 113.836,59.639 113.893 C 59.404 113.983,59.410 114.030,59.684 114.232 C 59.984 114.455,59.985 114.483,59.691 114.781 C 59.395 115.081,59.369 115.079,59.110 114.747 C 58.961 114.556,58.850 114.480,58.863 114.578 
C 58.916 114.977,58.829 115.095,58.667 114.844 C 58.546 114.658,58.494 114.646,58.492 114.806 C 58.490 114.931,58.608 115.079,58.755 115.135 C 58.901 115.191,58.968 115.322,58.903 115.427 C 58.830 115.545,58.896 115.574,59.082 115.502 C 59.275 115.428,59.378 115.480,59.378 115.650 C 59.378 115.794,59.318 115.902,59.244 115.890 C 58.712 115.806,58.409 115.927,58.711 116.103 C 58.882 116.203,59.022 116.370,59.022 116.475 C 59.022 116.603,58.941 116.598,58.774 116.459 C 58.414 116.161,57.777 116.069,57.826 116.322 C 57.848 116.438,57.774 116.516,57.661 116.494 C 57.529 116.469,57.484 116.639,57.535 116.963 C 57.587 117.291,57.508 117.578,57.312 117.774 C 57.077 118.009,57.059 118.108,57.235 118.216 C 57.394 118.315,57.403 118.391,57.263 118.477 C 57.155 118.544,56.986 118.549,56.888 118.488 C 56.784 118.424,56.796 118.570,56.916 118.833 C 57.030 119.084,57.196 119.289,57.283 119.289 C 57.371 119.289,57.401 119.357,57.349 119.441 C 57.298 119.525,57.453 119.555,57.694 119.509 C 57.960 119.458,58.133 119.508,58.133 119.634 C 58.133 119.749,58.062 119.799,57.976 119.746 C 57.780 119.625,57.382 120.078,57.542 120.239 C 57.607 120.303,57.466 120.349,57.230 120.342 C 56.993 120.334,56.900 120.287,57.022 120.238 C 57.453 120.064,57.237 119.905,56.571 119.905 C 56.037 119.905,55.898 119.972,55.898 120.231 C 55.898 120.533,55.915 120.535,56.117 120.258 C 56.306 120.000,56.327 120.029,56.269 120.469 C 56.232 120.749,56.197 121.042,56.190 121.122 C 56.183 121.201,56.089 121.211,55.981 121.144 C 55.844 121.059,55.857 120.976,56.025 120.869 C 56.180 120.771,56.192 120.716,56.059 120.714 C 55.945 120.712,55.742 120.669,55.608 120.618 C 55.439 120.553,55.352 120.695,55.322 121.088 C 55.287 121.547,55.324 121.614,55.523 121.450 C 55.762 121.251,56.000 121.340,56.000 121.629 C 56.000 121.828,55.382 121.721,55.078 121.468 C 54.883 121.307,54.832 121.334,54.832 121.597 C 54.832 121.780,54.915 121.886,55.016 121.833 C 55.117 121.780,54.967 121.946,54.682 122.203 C 54.212 122.626,54.128 
122.646,53.778 122.417 C 53.456 122.206,53.393 122.204,53.393 122.407 C 53.393 122.541,53.459 122.610,53.541 122.559 C 53.745 122.433,53.726 122.798,53.507 123.208 C 53.375 123.455,53.177 123.530,52.782 123.484 C 52.437 123.444,52.205 123.511,52.145 123.666 C 51.992 124.064,51.760 123.945,51.843 123.511 C 51.913 123.138,51.907 123.135,51.749 123.467 C 51.508 123.974,51.521 124.089,51.817 124.089 C 51.990 124.089,52.039 124.193,51.963 124.400 C 51.801 124.841,50.994 125.569,50.864 125.392 C 50.634 125.079,49.963 125.145,50.080 125.470 C 50.140 125.639,50.132 125.726,50.062 125.663 C 49.991 125.600,49.896 125.244,49.849 124.872 C 49.742 124.006,49.775 124.018,49.084 124.607 C 48.714 124.924,48.568 125.156,48.692 125.232 C 48.800 125.299,48.889 125.256,48.889 125.136 C 48.889 125.017,48.932 124.962,48.984 125.014 C 49.171 125.201,48.649 126.044,48.347 126.044 C 48.016 126.044,47.963 125.528,48.267 125.273 C 48.368 125.189,48.272 125.163,48.044 125.212 C 47.824 125.261,47.644 125.241,47.644 125.169 C 47.644 125.096,47.521 125.037,47.370 125.037 C 47.215 125.037,47.135 125.140,47.187 125.274 C 47.237 125.404,47.200 125.511,47.106 125.511 C 47.011 125.511,46.933 125.596,46.933 125.699 C 46.933 125.803,47.008 125.842,47.099 125.785 C 47.190 125.729,47.311 125.759,47.368 125.851 C 47.426 125.944,47.397 126.067,47.304 126.124 C 47.211 126.181,47.085 126.145,47.022 126.044 C 46.960 125.943,46.833 125.908,46.740 125.965 C 46.648 126.022,46.617 126.143,46.673 126.232 C 46.729 126.323,46.638 126.344,46.469 126.279 C 46.262 126.199,46.205 126.229,46.292 126.370 C 46.370 126.495,46.310 126.578,46.144 126.578 C 45.991 126.578,45.887 126.488,45.911 126.378 C 45.936 126.268,45.844 126.137,45.709 126.088 C 45.537 126.026,45.507 126.073,45.609 126.244 C 45.734 126.451,45.711 126.452,45.458 126.247 C 45.205 126.041,45.145 126.044,45.058 126.269 C 45.002 126.414,44.758 126.533,44.515 126.533 C 44.095 126.533,44.088 126.517,44.376 126.200 C 44.553 126.004,44.593 125.867,44.472 125.867 C 
44.359 125.867,44.267 125.947,44.267 126.044 C 44.267 126.142,44.153 126.222,44.015 126.222 C 43.876 126.222,43.736 126.303,43.703 126.402 C 43.670 126.500,43.542 126.418,43.417 126.220 C 43.207 125.883,43.226 125.864,43.685 125.952 C 43.956 126.004,44.077 125.999,43.954 125.941 C 43.832 125.884,43.771 125.772,43.820 125.692 C 43.869 125.613,43.809 125.510,43.687 125.463 C 43.376 125.344,42.776 126.159,43.049 126.328 C 43.186 126.412,43.206 126.376,43.111 126.222 C 43.013 126.064,43.037 126.032,43.184 126.124 C 43.435 126.279,43.313 126.451,42.912 126.510 C 42.759 126.533,42.682 126.504,42.740 126.446 C 42.798 126.387,42.713 126.193,42.551 126.014 C 42.389 125.835,42.340 125.689,42.442 125.689 C 42.545 125.689,42.673 125.573,42.727 125.431 C 42.793 125.261,42.741 125.206,42.575 125.270 C 42.438 125.323,42.276 125.519,42.217 125.705 C 42.158 125.892,41.975 126.042,41.810 126.039 C 41.540 126.034,41.541 126.016,41.826 125.850 C 41.999 125.750,42.089 125.583,42.025 125.480 C 41.962 125.378,42.020 125.224,42.155 125.138 C 42.351 125.014,42.341 124.983,42.103 124.981 C 41.604 124.976,41.240 125.182,41.368 125.396 C 41.436 125.508,41.340 125.480,41.156 125.333 C 40.960 125.178,40.876 125.159,40.954 125.289 C 41.028 125.411,41.010 125.511,40.914 125.511 C 40.819 125.511,40.708 125.413,40.668 125.293 C 40.628 125.174,40.549 125.254,40.492 125.471 C 40.435 125.689,40.461 125.823,40.549 125.768 C 40.636 125.714,40.849 125.760,41.021 125.870 C 41.310 126.056,41.308 126.063,40.997 125.965 C 40.724 125.878,40.700 125.905,40.868 126.109 C 41.030 126.303,41.016 126.381,40.805 126.463 C 40.637 126.527,40.533 126.467,40.533 126.306 C 40.533 125.957,40.356 125.976,40.076 126.356 C 39.886 126.612,39.805 126.626,39.612 126.434 C 39.418 126.242,39.429 126.221,39.673 126.313 C 40.037 126.452,40.300 125.784,40.016 125.442 C 39.911 125.315,39.885 125.147,39.957 125.069 C 40.030 124.990,39.906 124.970,39.682 125.026 C 39.342 125.109,39.292 125.069,39.382 124.786 C 39.464 124.529,39.422 
124.473,39.212 124.553 C 39.044 124.618,38.931 124.564,38.928 124.419 C 38.924 124.250,38.850 124.272,38.684 124.492 C 38.552 124.665,38.328 124.761,38.186 124.707 C 37.982 124.628,37.943 124.733,38.004 125.192 C 38.077 125.738,38.065 125.755,37.821 125.436 C 37.677 125.248,37.579 124.866,37.603 124.588 C 37.638 124.173,37.601 124.118,37.399 124.286 C 37.211 124.442,37.200 124.551,37.353 124.735 C 37.464 124.868,37.495 124.978,37.422 124.978 C 37.349 124.978,37.382 125.088,37.494 125.224 C 37.651 125.413,37.637 125.493,37.432 125.571 C 37.286 125.628,37.119 125.596,37.060 125.500 C 37.001 125.405,36.877 125.375,36.783 125.433 C 36.690 125.490,36.635 125.432,36.662 125.302 C 36.689 125.173,36.608 125.088,36.481 125.114 C 36.309 125.149,36.297 125.076,36.431 124.826 C 36.529 124.642,36.553 124.428,36.483 124.350 C 36.413 124.273,36.461 124.271,36.590 124.345 C 36.719 124.419,36.781 124.592,36.729 124.729 C 36.676 124.866,36.711 124.978,36.806 124.978 C 36.900 124.978,36.979 124.878,36.980 124.756 C 36.982 124.633,37.035 124.333,37.100 124.089 C 37.209 123.675,37.195 123.663,36.891 123.911 C 36.582 124.163,36.577 124.153,36.793 123.733 C 36.982 123.365,36.981 123.327,36.786 123.511 C 36.499 123.782,36.089 123.802,36.089 123.545 C 36.089 123.442,36.189 123.414,36.311 123.485 C 36.440 123.559,36.471 123.545,36.385 123.451 C 36.304 123.362,36.164 123.279,36.074 123.267 C 35.984 123.254,35.826 123.234,35.723 123.222 C 35.619 123.210,35.583 123.121,35.643 123.024 C 35.706 122.923,35.634 122.894,35.474 122.955 C 35.262 123.036,35.225 122.983,35.315 122.731 C 35.381 122.549,35.371 122.457,35.294 122.527 C 35.217 122.597,35.004 122.574,34.821 122.476 C 34.556 122.335,34.484 122.363,34.465 122.616 C 34.453 122.791,34.393 122.661,34.332 122.327 C 34.198 121.586,33.694 121.449,33.113 121.995 C 32.893 122.202,32.659 122.318,32.593 122.253 C 32.528 122.187,32.572 122.133,32.692 122.133 C 32.812 122.133,32.863 122.057,32.805 121.963 C 32.747 121.870,32.582 121.838,32.439 121.893 C 
32.295 121.949,32.178 121.896,32.178 121.776 C 32.178 121.648,32.320 121.596,32.522 121.649 C 32.802 121.722,33.067 121.490,33.067 121.171 C 33.067 121.016,32.532 121.163,32.528 121.319 C 32.525 121.424,32.441 121.370,32.341 121.199 C 32.240 121.027,32.202 120.816,32.255 120.730 C 32.308 120.644,32.235 120.477,32.092 120.358 C 31.864 120.169,31.864 120.110,32.094 119.881 C 32.428 119.546,32.427 119.214,32.089 118.933 C 31.905 118.780,31.822 118.774,31.822 118.912 C 31.822 119.021,31.902 119.111,32.000 119.111 C 32.098 119.111,32.177 119.251,32.175 119.422 C 32.173 119.688,32.147 119.695,32.000 119.467 C 31.875 119.273,31.827 119.264,31.825 119.433 C 31.823 119.561,31.741 119.615,31.642 119.554 C 31.542 119.492,31.507 119.368,31.563 119.277 C 31.619 119.186,31.561 119.110,31.433 119.108 C 31.264 119.106,31.273 119.059,31.467 118.933 C 31.695 118.786,31.688 118.761,31.422 118.758 C 31.221 118.757,31.107 118.897,31.099 119.156 C 31.087 119.537,31.079 119.539,30.933 119.200 C 30.849 119.004,30.775 118.936,30.768 119.048 C 30.761 119.159,30.624 119.301,30.464 119.363 C 30.218 119.457,30.208 119.431,30.400 119.200 C 30.526 119.049,30.577 118.873,30.514 118.811 C 30.451 118.748,30.400 118.790,30.400 118.904 C 30.400 119.018,30.289 119.111,30.153 119.111 C 30.017 119.111,29.910 119.011,29.914 118.889 C 29.940 118.190,29.848 118.044,29.378 118.039 C 28.929 118.034,28.915 118.013,29.205 117.793 C 29.508 117.564,29.508 117.548,29.206 117.432 C 28.921 117.323,28.931 117.281,29.314 116.992 C 29.598 116.778,29.670 116.632,29.532 116.546 C 29.420 116.477,29.387 116.325,29.459 116.210 C 29.669 115.874,29.672 115.566,29.468 115.440 C 29.356 115.371,29.330 115.407,29.405 115.528 C 29.626 115.886,29.225 115.750,28.872 115.347 C 28.686 115.135,28.620 115.011,28.726 115.073 C 28.832 115.134,28.919 115.074,28.919 114.939 C 28.919 114.804,28.780 114.704,28.610 114.716 C 28.426 114.730,28.324 114.857,28.357 115.029 C 28.388 115.189,28.337 115.432,28.243 115.571 C 28.100 115.784,28.078 
115.781,28.103 115.556 C 28.163 114.986,28.081 114.651,27.907 114.758 C 27.807 114.820,27.747 114.765,27.773 114.635 C 27.800 114.506,27.716 114.422,27.587 114.449 C 27.457 114.476,27.397 114.423,27.454 114.332 C 27.510 114.241,27.710 114.207,27.898 114.256 C 28.268 114.353,28.900 113.413,28.736 113.010 C 28.689 112.895,28.644 112.940,28.636 113.111 C 28.629 113.282,28.538 113.422,28.434 113.422 C 28.330 113.422,28.295 113.502,28.356 113.600 C 28.416 113.698,28.381 113.778,28.277 113.778 C 28.039 113.778,28.032 113.479,28.267 113.333 C 28.364 113.273,28.444 113.143,28.444 113.046 C 28.444 112.948,28.352 112.925,28.239 112.995 C 28.112 113.073,28.082 113.044,28.161 112.916 C 28.231 112.803,28.403 112.711,28.544 112.711 C 28.685 112.711,28.800 112.634,28.800 112.539 C 28.800 112.444,28.667 112.418,28.504 112.480 C 28.341 112.543,28.251 112.520,28.304 112.430 C 28.357 112.340,28.336 112.107,28.257 111.911 C 28.121 111.574,28.113 111.576,28.101 111.956 C 28.088 112.370,27.821 112.462,27.403 112.197 C 27.251 112.101,27.207 111.904,27.283 111.664 C 27.349 111.455,27.483 111.333,27.580 111.393 C 27.678 111.453,27.707 111.635,27.646 111.796 C 27.559 112.025,27.581 112.042,27.751 111.874 C 27.917 111.709,27.919 111.601,27.758 111.408 C 27.643 111.269,27.599 111.027,27.660 110.868 C 27.728 110.692,27.660 110.538,27.485 110.471 C 27.126 110.333,27.115 109.937,27.467 109.802 C 27.613 109.746,27.740 109.518,27.748 109.294 C 27.761 108.928,27.786 108.940,28.006 109.422 C 28.215 109.878,28.252 106.684,28.259 87.476 C 28.264 72.547,28.326 65.055,28.444 65.173 C 28.559 65.288,28.637 73.171,28.667 87.742 L 28.714 110.133 28.754 87.511 L 28.795 64.889 28.367 64.889 C 28.132 64.889,27.800 64.835,27.628 64.769 C 27.397 64.681,27.343 64.719,27.419 64.917 C 27.479 65.075,27.422 65.185,27.280 65.185 C 27.147 65.185,27.079 65.119,27.129 65.037 C 27.180 64.956,27.092 64.889,26.933 64.889 C 26.775 64.889,26.687 64.956,26.737 65.037 C 26.788 65.119,26.723 65.185,26.595 65.185 C 26.466 
65.185,26.362 65.099,26.364 64.993 C 26.373 64.458,26.294 64.178,26.133 64.178 C 26.036 64.178,25.976 64.278,26.000 64.400 C 26.024 64.522,25.905 64.611,25.735 64.598 C 25.565 64.584,25.472 64.648,25.528 64.738 C 25.584 64.829,25.803 64.851,26.015 64.787 C 26.325 64.694,26.357 64.716,26.183 64.900 C 26.010 65.081,25.868 65.084,25.495 64.914 C 25.091 64.730,25.039 64.738,25.129 64.972 C 25.186 65.122,25.162 65.244,25.076 65.244 C 24.989 65.244,24.878 65.144,24.829 65.022 C 24.757 64.844,24.736 64.844,24.725 65.022 C 24.717 65.144,24.630 65.244,24.530 65.244 C 24.303 65.244,24.512 64.322,24.783 64.126 C 24.898 64.043,24.833 64.006,24.622 64.037 C 24.403 64.069,24.285 63.999,24.316 63.853 C 24.342 63.724,24.278 63.671,24.174 63.736 C 24.054 63.810,24.044 63.931,24.147 64.060 C 24.238 64.174,24.247 64.427,24.168 64.622 C 24.043 64.934,24.023 64.938,24.012 64.660 C 24.002 64.398,23.957 64.382,23.756 64.570 C 23.621 64.695,23.556 64.919,23.611 65.066 C 23.697 65.295,23.669 65.299,23.412 65.090 C 23.248 64.956,22.955 64.888,22.763 64.939 C 22.555 64.993,22.428 64.947,22.451 64.826 C 22.472 64.714,22.392 64.641,22.273 64.665 C 22.148 64.689,22.100 64.595,22.159 64.442 C 22.215 64.297,22.167 64.178,22.053 64.178 C 21.939 64.178,21.893 64.102,21.950 64.009 C 22.007 63.917,21.912 63.863,21.738 63.889 C 21.564 63.916,21.230 63.752,20.994 63.524 C 20.759 63.297,20.653 63.111,20.759 63.111 C 20.865 63.111,21.073 63.232,21.220 63.379 C 21.387 63.545,21.531 63.579,21.599 63.468 C 21.660 63.370,21.675 63.289,21.632 63.289 C 21.589 63.289,21.260 63.166,20.901 63.016 L 20.247 62.743 20.460 63.141 C 20.626 63.450,20.626 63.554,20.459 63.610 C 20.310 63.660,20.342 63.796,20.567 64.060 C 20.790 64.323,20.802 64.372,20.606 64.221 C 20.450 64.101,20.264 64.062,20.191 64.135 C 20.018 64.308,19.032 64.306,19.139 64.133 C 19.184 64.060,19.116 63.999,18.988 63.997 C 18.817 63.995,18.825 63.951,19.018 63.828 C 19.199 63.714,19.389 63.757,19.626 63.965 C 19.887 64.192,19.936 64.201,19.825 
64.000 C 19.468 63.354,19.344 63.274,19.054 63.509 C 18.819 63.700,18.796 63.684,18.900 63.408 C 18.967 63.229,19.022 63.044,19.022 62.997 C 19.022 62.951,19.097 62.958,19.188 63.015 C 19.279 63.071,19.393 63.053,19.442 62.974 C 19.490 62.895,19.344 62.816,19.116 62.797 C 18.888 62.778,18.659 62.876,18.606 63.014 C 18.437 63.453,17.785 63.712,17.863 63.310 C 17.911 63.059,17.846 62.995,17.604 63.059 C 17.426 63.105,17.117 63.056,16.918 62.949 C 16.507 62.729,16.184 63.011,16.493 63.320 C 16.611 63.437,16.708 63.438,16.780 63.321 C 16.909 63.112,17.600 63.257,17.600 63.494 C 17.600 63.586,17.452 63.578,17.259 63.474 C 17.016 63.344,16.898 63.351,16.849 63.496 C 16.800 63.644,16.688 63.649,16.445 63.513 C 16.169 63.359,16.020 63.420,15.613 63.856 L 15.117 64.387 15.108 87.601 C 15.101 103.897,15.155 110.851,15.289 110.933 C 15.393 110.998,15.449 111.164,15.413 111.303 C 15.377 111.442,15.382 111.491,15.425 111.413 C 15.468 111.334,15.620 111.414,15.764 111.591 C 15.909 111.767,15.965 111.791,15.890 111.644 C 15.773 111.416,15.797 111.412,16.051 111.619 C 16.329 111.844,16.427 112.776,16.155 112.608 C 16.087 112.566,15.994 112.632,15.951 112.754 C 15.907 112.877,15.806 112.917,15.726 112.843 C 15.647 112.768,15.736 112.595,15.924 112.457 C 16.112 112.319,16.208 112.149,16.137 112.078 C 15.938 111.879,15.516 112.315,15.419 112.819 C 15.365 113.105,15.423 113.307,15.577 113.366 C 15.711 113.417,15.771 113.540,15.710 113.638 C 15.642 113.747,15.543 113.723,15.451 113.575 C 15.333 113.384,15.250 113.408,15.043 113.690 C 14.811 114.006,14.820 114.105,15.122 114.595 C 15.309 114.898,15.410 115.198,15.346 115.262 C 15.282 115.326,15.383 115.384,15.570 115.392 C 15.758 115.400,15.816 115.445,15.699 115.492 C 15.582 115.540,15.531 115.693,15.584 115.833 C 15.638 115.972,15.753 116.043,15.841 115.988 C 15.992 115.895,16.076 116.271,16.013 116.756 C 15.997 116.878,16.059 116.978,16.149 116.978 C 16.240 116.978,16.356 117.138,16.407 117.333 C 16.458 117.529,16.632 117.689,16.794 
117.689 C 17.233 117.689,16.930 118.042,16.478 118.057 C 16.104 118.069,16.103 118.075,16.444 118.212 C 16.640 118.291,16.860 118.321,16.933 118.278 C 17.007 118.236,17.067 118.295,17.067 118.410 C 17.067 118.534,16.917 118.580,16.701 118.524 C 16.419 118.450,16.379 118.480,16.523 118.655 C 16.627 118.779,16.985 118.868,17.320 118.851 C 17.800 118.827,17.924 118.886,17.905 119.131 C 17.892 119.302,17.818 119.403,17.741 119.355 C 17.575 119.252,17.562 119.575,17.712 120.131 C 17.770 120.350,17.901 120.478,18.003 120.415 C 18.111 120.349,18.135 120.387,18.061 120.506 C 17.867 120.820,17.776 120.758,17.573 120.178 C 17.471 119.884,17.307 119.644,17.208 119.644 C 17.109 119.644,17.076 119.769,17.134 119.920 C 17.193 120.072,17.181 120.160,17.109 120.117 C 17.037 120.073,16.872 120.152,16.742 120.292 C 16.541 120.510,16.580 120.533,17.009 120.450 C 17.285 120.397,17.412 120.400,17.290 120.458 C 17.122 120.537,17.133 120.625,17.337 120.829 C 17.483 120.975,17.550 121.181,17.485 121.286 C 17.415 121.400,17.451 121.425,17.575 121.349 C 17.690 121.278,17.746 121.159,17.700 121.084 C 17.653 121.009,17.715 120.948,17.836 120.948 C 17.958 120.948,18.057 121.089,18.057 121.261 C 18.057 121.433,17.987 121.530,17.900 121.477 C 17.814 121.424,17.771 121.523,17.805 121.698 C 17.903 122.202,18.293 122.205,18.483 121.703 C 18.579 121.451,18.760 121.251,18.884 121.258 C 19.050 121.269,19.054 121.296,18.899 121.359 C 18.782 121.406,18.731 121.560,18.785 121.700 C 18.838 121.841,18.789 121.956,18.675 121.956 C 18.561 121.956,18.518 122.037,18.580 122.136 C 18.641 122.236,18.786 122.260,18.901 122.191 C 19.021 122.119,19.006 122.195,18.867 122.366 C 18.674 122.604,18.671 122.667,18.852 122.667 C 18.978 122.667,19.040 122.709,18.988 122.760 C 18.937 122.811,18.757 122.800,18.588 122.735 C 18.345 122.642,18.330 122.586,18.518 122.467 C 18.700 122.352,18.683 122.316,18.444 122.314 C 18.273 122.312,18.127 122.371,18.120 122.444 C 18.113 122.518,18.072 122.767,18.029 122.998 C 17.970 
123.316,18.017 123.392,18.220 123.314 C 18.388 123.250,18.489 123.314,18.489 123.483 C 18.489 123.632,18.416 123.709,18.328 123.655 C 18.130 123.533,18.296 124.271,18.535 124.576 C 18.629 124.697,18.772 124.756,18.852 124.706 C 18.932 124.657,19.049 124.699,19.111 124.800 C 19.174 124.901,19.319 124.929,19.435 124.863 C 19.550 124.797,19.585 124.808,19.513 124.888 C 19.440 124.968,19.469 125.140,19.577 125.270 C 19.685 125.400,19.730 125.618,19.678 125.755 C 19.625 125.892,19.813 126.225,20.096 126.496 C 20.378 126.766,20.573 127.046,20.529 127.118 C 20.485 127.190,20.612 127.291,20.812 127.344 C 21.084 127.415,21.162 127.363,21.119 127.142 C 21.088 126.978,20.970 126.844,20.857 126.844 C 20.744 126.844,20.592 126.784,20.519 126.711 C 20.445 126.638,20.523 126.578,20.692 126.578 C 20.861 126.578,20.952 126.503,20.896 126.411 C 20.839 126.319,20.624 126.276,20.419 126.316 C 20.057 126.386,20.055 126.376,20.361 126.039 C 20.713 125.649,20.822 125.602,20.756 125.867 C 20.709 126.054,21.264 126.429,22.089 126.767 C 22.358 126.878,22.578 127.050,22.578 127.150 C 22.578 127.265,22.487 127.257,22.334 127.130 C 22.199 127.018,21.946 126.982,21.771 127.049 C 21.562 127.130,21.494 127.105,21.574 126.976 C 21.640 126.868,21.630 126.740,21.552 126.692 C 21.473 126.643,21.394 126.786,21.375 127.010 C 21.347 127.351,21.411 127.403,21.780 127.332 C 22.074 127.276,22.254 127.342,22.328 127.534 C 22.388 127.692,22.514 127.773,22.608 127.715 C 22.840 127.571,23.469 128.166,23.455 128.516 C 23.448 128.701,23.388 128.660,23.283 128.400 C 23.195 128.180,23.093 128.000,23.058 128.000 C 22.946 128.000,23.038 128.697,23.167 128.826 C 23.349 129.008,23.314 129.459,23.127 129.343 C 23.038 129.288,22.984 129.372,23.007 129.530 C 23.055 129.861,23.644 130.262,23.644 129.964 C 23.644 129.862,23.724 129.778,23.822 129.778 C 24.118 129.778,24.022 130.281,23.689 130.475 C 23.518 130.574,23.458 130.664,23.556 130.674 C 23.653 130.684,23.573 130.760,23.378 130.844 C 23.083 130.971,23.125 
130.999,23.622 131.010 C 24.118 131.020,24.186 130.979,24.013 130.771 C 23.871 130.600,23.864 130.484,23.991 130.405 C 24.094 130.342,24.179 130.395,24.181 130.523 C 24.182 130.692,24.230 130.683,24.356 130.489 C 24.596 130.116,24.575 129.911,24.311 130.062 C 24.189 130.133,24.148 130.125,24.221 130.045 C 24.293 129.965,24.253 129.772,24.132 129.616 C 23.945 129.376,23.952 129.365,24.178 129.540 C 24.324 129.654,24.524 129.774,24.622 129.807 C 24.811 129.870,24.809 129.971,24.606 130.650 C 24.514 130.956,24.537 131.041,24.686 130.949 C 24.801 130.878,24.852 130.751,24.800 130.667 C 24.748 130.582,24.778 130.469,24.866 130.414 C 24.954 130.360,25.068 130.474,25.119 130.669 C 25.170 130.863,25.139 131.022,25.050 131.022 C 24.961 131.022,24.889 131.107,24.889 131.210 C 24.889 131.314,24.981 131.342,25.094 131.272 C 25.224 131.191,25.251 131.223,25.167 131.359 C 25.066 131.523,24.976 131.524,24.784 131.365 C 24.646 131.250,24.527 131.186,24.520 131.223 C 24.405 131.782,24.419 131.876,24.607 131.804 C 24.725 131.758,24.970 131.884,25.150 132.083 C 25.401 132.360,25.419 132.447,25.228 132.457 C 25.087 132.464,25.133 132.532,25.333 132.612 C 25.817 132.807,26.023 132.652,25.742 132.306 C 25.594 132.123,25.587 132.066,25.721 132.147 C 25.837 132.216,25.979 132.195,26.038 132.100 C 26.097 132.004,26.273 131.976,26.430 132.036 C 26.625 132.111,26.688 132.066,26.631 131.895 C 26.534 131.608,25.762 131.556,25.542 131.822 C 25.452 131.932,25.419 131.910,25.458 131.764 C 25.492 131.635,25.558 131.548,25.604 131.572 C 25.651 131.596,25.862 131.523,26.075 131.409 C 26.373 131.250,26.480 131.253,26.549 131.424 C 26.620 131.599,26.642 131.598,26.653 131.422 C 26.660 131.300,26.756 131.200,26.866 131.200 C 27.004 131.200,26.997 131.282,26.844 131.467 C 26.668 131.679,26.668 131.733,26.844 131.733 C 27.013 131.733,27.017 131.792,26.862 131.979 C 26.705 132.169,26.719 132.249,26.923 132.327 C 27.069 132.383,27.236 132.352,27.294 132.259 C 27.502 131.922,27.664 132.099,27.628 132.623 C 
27.584 133.250,27.787 133.431,28.221 133.152 C 28.453 133.003,28.461 132.973,28.253 133.036 C 27.960 133.124,27.890 132.584,28.159 132.315 C 28.235 132.239,28.270 132.298,28.237 132.444 C 28.204 132.591,28.264 132.696,28.370 132.677 C 28.476 132.658,28.567 132.738,28.571 132.854 C 28.576 132.971,28.643 133.220,28.720 133.407 C 28.813 133.633,28.796 133.708,28.670 133.630 C 28.381 133.451,28.507 134.540,28.810 134.838 C 29.031 135.055,29.114 135.054,29.412 134.834 C 29.602 134.693,29.666 134.578,29.556 134.578 C 29.445 134.578,29.300 134.666,29.233 134.775 C 29.082 135.018,28.807 134.636,28.803 134.178 C 28.799 133.717,29.693 133.755,29.815 134.220 C 29.865 134.415,29.983 134.527,30.077 134.469 C 30.170 134.411,30.202 134.248,30.147 134.106 C 30.089 133.953,30.125 133.895,30.236 133.964 C 30.340 134.028,30.381 134.192,30.329 134.329 C 30.276 134.466,30.299 134.578,30.378 134.578 C 30.458 134.578,30.508 134.798,30.489 135.067 C 30.471 135.336,30.363 135.536,30.250 135.511 C 30.137 135.487,30.044 135.552,30.044 135.656 C 30.044 135.769,30.198 135.797,30.422 135.726 C 30.700 135.638,30.763 135.666,30.661 135.831 C 30.575 135.971,30.595 136.010,30.715 135.936 C 30.820 135.871,30.936 135.959,30.972 136.131 C 31.024 136.382,31.059 136.361,31.151 136.027 C 31.213 135.797,31.218 135.656,31.161 135.713 C 30.957 135.917,30.607 135.211,30.765 134.915 C 30.851 134.755,30.887 134.591,30.847 134.550 C 30.806 134.510,30.923 134.476,31.107 134.476 C 31.290 134.476,31.402 134.539,31.354 134.616 C 31.307 134.693,31.401 134.756,31.564 134.756 C 31.751 134.756,31.823 134.851,31.760 135.017 C 31.587 135.469,31.995 135.512,32.363 135.080 C 32.557 134.853,32.698 134.747,32.676 134.844 C 32.606 135.154,33.120 135.791,33.365 135.697 C 33.504 135.643,33.600 135.732,33.600 135.914 C 33.600 136.167,33.647 136.182,33.867 136.000 C 34.079 135.823,34.133 135.823,34.133 136.000 C 34.133 136.175,34.190 136.174,34.404 135.996 C 34.553 135.873,34.720 135.816,34.775 135.871 C 34.878 135.975,34.113 
136.446,33.956 136.376 C 33.907 136.355,33.838 136.475,33.803 136.645 C 33.744 136.930,33.761 136.931,34.031 136.661 C 34.405 136.287,35.225 136.587,35.080 137.044 C 35.001 137.295,35.015 137.298,35.183 137.067 C 35.356 136.830,35.397 136.830,35.548 137.065 C 35.675 137.262,35.629 137.364,35.371 137.460 C 35.179 137.531,35.022 137.629,35.022 137.678 C 35.022 137.727,35.156 137.715,35.320 137.652 C 35.540 137.568,35.589 137.611,35.510 137.818 C 35.436 138.012,35.495 138.085,35.701 138.054 C 36.059 138.002,36.089 137.668,35.789 137.075 C 35.668 136.835,35.617 136.595,35.675 136.541 C 35.734 136.488,35.818 136.544,35.861 136.667 C 35.905 136.789,36.063 136.889,36.212 136.889 C 36.416 136.889,36.466 137.039,36.412 137.499 C 36.373 137.834,36.404 138.069,36.482 138.021 C 36.559 137.974,36.622 137.779,36.622 137.590 C 36.622 137.395,36.744 137.244,36.901 137.244 C 37.098 137.244,37.147 137.141,37.067 136.890 C 36.973 136.595,37.029 136.542,37.402 136.572 C 37.648 136.593,37.815 136.644,37.772 136.687 C 37.729 136.730,37.833 136.879,38.003 137.018 C 38.172 137.157,38.220 137.226,38.109 137.171 C 37.998 137.116,37.805 137.193,37.681 137.343 C 37.556 137.493,37.350 137.576,37.222 137.527 C 37.094 137.478,36.934 137.527,36.867 137.635 C 36.796 137.750,36.825 137.783,36.936 137.715 C 37.041 137.650,37.253 137.723,37.407 137.876 C 37.560 138.030,37.780 138.098,37.896 138.027 C 38.036 137.940,38.068 137.993,37.994 138.187 C 37.932 138.347,37.960 138.526,38.055 138.584 C 38.150 138.643,38.174 138.778,38.109 138.884 C 38.043 138.991,38.071 139.027,38.172 138.965 C 38.283 138.895,38.303 138.576,38.223 138.137 C 38.151 137.744,38.161 137.422,38.246 137.422 C 38.331 137.422,38.400 137.500,38.400 137.596 C 38.400 137.708,38.560 137.708,38.856 137.596 C 39.562 137.327,39.975 137.379,39.924 137.729 C 39.888 137.968,40.490 138.489,40.801 138.489 C 41.015 138.489,40.996 137.911,40.776 137.690 C 40.526 137.441,40.552 137.418,41.041 137.455 C 41.493 137.490,41.547 137.539,41.369 137.757 C 
41.191 137.973,41.218 138.001,41.526 137.922 C 41.895 137.827,41.922 137.870,41.998 138.667 C 42.017 138.862,42.140 139.022,42.272 139.022 C 42.432 139.022,42.475 138.903,42.400 138.667 C 42.327 138.436,42.370 138.306,42.521 138.298 C 42.650 138.291,42.949 138.249,43.186 138.205 C 43.616 138.126,43.616 138.127,43.280 138.530 C 43.095 138.752,43.050 138.840,43.182 138.725 C 43.313 138.610,43.538 138.562,43.681 138.616 C 43.894 138.698,43.898 138.744,43.704 138.866 C 43.542 138.969,43.625 139.018,43.966 139.019 C 44.241 139.021,44.419 138.948,44.363 138.857 C 44.307 138.766,44.343 138.640,44.444 138.578 C 44.545 138.515,44.590 138.403,44.544 138.328 C 44.497 138.253,44.693 138.205,44.978 138.222 C 45.263 138.239,45.461 138.197,45.419 138.128 C 45.376 138.059,45.492 137.867,45.675 137.701 C 45.917 137.482,46.045 137.456,46.138 137.607 C 46.236 137.766,46.298 137.764,46.399 137.602 C 46.484 137.464,46.572 137.551,46.647 137.850 C 46.757 138.290,47.111 138.506,47.111 138.133 C 47.111 138.036,47.044 137.956,46.963 137.956 C 46.881 137.956,46.815 137.836,46.815 137.689 C 46.815 137.536,46.973 137.423,47.185 137.425 C 47.491 137.427,47.509 137.458,47.289 137.603 C 47.065 137.750,47.062 137.803,47.267 137.936 C 47.410 138.028,47.454 138.188,47.373 138.319 C 47.282 138.465,47.306 138.500,47.439 138.417 C 47.559 138.343,47.644 138.400,47.644 138.555 C 47.644 138.938,48.372 139.128,48.609 138.806 C 48.714 138.663,48.920 138.580,49.067 138.620 C 49.304 138.686,49.304 138.673,49.067 138.505 C 48.714 138.256,48.230 138.261,48.072 138.516 C 47.996 138.639,48.023 138.674,48.138 138.602 C 48.244 138.537,48.377 138.557,48.433 138.648 C 48.489 138.739,48.395 138.793,48.223 138.767 C 48.051 138.742,47.910 138.649,47.908 138.561 C 47.907 138.472,47.840 138.296,47.760 138.170 C 47.676 138.038,47.705 137.830,47.829 137.681 C 48.108 137.345,49.067 137.336,49.067 137.670 C 49.067 138.028,49.539 138.155,49.782 137.862 C 49.896 137.724,50.077 137.629,50.184 137.650 C 50.291 137.671,50.402 
137.529,50.431 137.333 C 50.459 137.138,50.437 137.078,50.382 137.200 C 50.273 137.441,49.778 137.510,49.778 137.284 C 49.778 137.062,50.491 136.514,50.651 136.613 C 50.731 136.662,50.760 136.897,50.714 137.135 C 50.663 137.400,50.724 137.598,50.871 137.647 C 51.003 137.691,50.976 137.707,50.811 137.683 C 50.647 137.659,50.471 137.745,50.422 137.874 C 50.333 138.105,50.814 138.105,51.188 137.874 C 51.279 137.818,51.400 137.848,51.458 137.941 C 51.515 138.034,51.715 138.099,51.902 138.085 C 52.089 138.071,52.205 138.000,52.160 137.926 C 52.115 137.853,51.946 137.844,51.786 137.905 C 51.567 137.989,51.522 137.943,51.607 137.722 C 51.672 137.554,51.639 137.445,51.532 137.469 C 51.257 137.531,50.991 136.751,51.235 136.600 C 51.350 136.529,51.383 136.559,51.313 136.671 C 51.248 136.777,51.276 136.915,51.375 136.976 C 51.474 137.037,51.556 136.976,51.556 136.840 C 51.556 136.583,52.156 136.541,52.396 136.782 C 52.472 136.857,52.453 136.902,52.356 136.883 C 51.781 136.767,51.721 136.931,52.206 137.289 C 52.505 137.509,52.767 137.769,52.789 137.867 C 52.811 137.964,52.870 138.084,52.919 138.133 C 52.967 138.182,52.981 138.102,52.948 137.956 C 52.915 137.808,52.994 137.711,53.124 137.738 C 53.254 137.765,53.308 137.703,53.245 137.602 C 53.182 137.500,53.044 137.470,52.938 137.536 C 52.829 137.603,52.796 137.573,52.862 137.466 C 52.927 137.361,53.119 137.299,53.290 137.326 C 53.460 137.354,53.696 137.300,53.814 137.206 C 53.932 137.113,54.107 137.084,54.202 137.143 C 54.297 137.202,54.427 137.167,54.490 137.065 C 54.553 136.963,54.538 136.892,54.458 136.906 C 53.887 137.005,53.742 136.712,54.133 136.247 C 54.255 136.102,54.380 136.092,54.532 136.216 C 54.736 136.383,55.164 135.726,55.020 135.467 C 54.917 135.282,55.448 134.937,55.558 135.116 C 55.619 135.214,55.745 135.248,55.837 135.191 C 55.930 135.133,55.964 135.019,55.913 134.937 C 55.862 134.854,55.961 134.810,56.132 134.839 C 56.304 134.868,56.695 134.654,57.002 134.364 C 57.512 133.881,57.993 133.908,57.688 134.402 C 
57.626 134.502,57.824 134.631,58.136 134.694 C 58.531 134.773,58.626 134.850,58.459 134.956 C 58.279 135.070,58.298 135.106,58.539 135.108 C 58.713 135.110,58.902 134.991,58.958 134.844 C 59.014 134.698,59.252 134.578,59.486 134.578 C 59.720 134.578,59.911 134.509,59.911 134.426 C 59.911 134.342,60.111 134.058,60.356 133.795 C 60.865 133.245,60.914 132.866,60.533 132.444 C 60.295 132.181,60.295 132.148,60.533 132.148 C 60.680 132.148,60.800 132.220,60.800 132.307 C 60.800 132.394,60.881 132.415,60.981 132.354 C 61.080 132.292,61.122 132.179,61.075 132.102 C 61.027 132.025,61.186 132.000,61.428 132.046 C 61.914 132.139,62.037 132.419,61.575 132.380 C 61.414 132.367,61.260 132.458,61.232 132.582 C 61.205 132.707,61.223 132.727,61.273 132.627 C 61.322 132.526,61.441 132.444,61.536 132.444 C 61.632 132.444,61.665 132.517,61.610 132.607 C 61.555 132.696,61.650 132.742,61.821 132.709 C 61.993 132.676,62.190 132.643,62.260 132.636 C 62.329 132.628,62.369 132.389,62.348 132.104 C 62.318 131.684,62.265 131.625,62.063 131.792 C 61.859 131.961,61.839 131.924,61.949 131.576 C 62.041 131.287,62.021 131.193,61.886 131.277 C 61.581 131.465,61.653 131.272,61.990 131.000 C 62.170 130.853,62.239 130.841,62.162 130.969 C 61.980 131.273,62.639 131.601,62.903 131.337 C 63.015 131.225,63.272 131.122,63.475 131.107 C 63.811 131.083,63.801 131.115,63.371 131.457 C 62.942 131.799,62.932 131.835,63.258 131.835 C 63.456 131.835,63.576 131.767,63.525 131.683 C 63.473 131.600,63.579 131.557,63.760 131.588 C 63.996 131.629,64.073 131.552,64.033 131.315 C 63.999 131.118,64.052 131.033,64.166 131.104 C 64.276 131.172,64.356 131.072,64.356 130.865 C 64.356 130.670,64.448 130.453,64.561 130.383 C 64.697 130.299,64.717 130.335,64.622 130.489 C 64.527 130.643,64.548 130.679,64.684 130.594 C 64.821 130.510,64.835 130.364,64.724 130.157 C 64.553 129.837,64.723 129.370,64.993 129.420 C 65.078 129.436,65.175 129.363,65.210 129.258 C 65.245 129.153,65.127 129.064,64.948 129.061 C 64.639 129.056,64.639 
129.046,64.937 128.873 C 65.110 128.772,65.209 128.620,65.156 128.535 C 65.104 128.450,65.142 128.330,65.242 128.268 C 65.341 128.207,65.421 128.062,65.419 127.945 C 65.418 127.818,65.348 127.840,65.244 128.000 C 65.098 128.226,65.072 128.217,65.069 127.944 C 65.067 127.706,65.156 127.649,65.407 127.729 C 65.859 127.872,66.049 127.539,65.648 127.305 C 65.456 127.193,65.424 127.120,65.566 127.117 C 65.694 127.114,65.755 127.040,65.701 126.954 C 65.648 126.867,65.800 126.759,66.040 126.713 C 66.280 126.667,66.513 126.718,66.558 126.826 C 66.602 126.934,66.645 126.882,66.653 126.711 C 66.660 126.540,66.592 126.400,66.500 126.400 C 66.408 126.400,66.345 126.240,66.360 126.044 C 66.375 125.849,66.408 125.689,66.433 125.689 C 66.558 125.689,67.026 126.784,66.952 126.903 C 66.722 127.275,67.157 127.336,67.505 126.980 C 67.805 126.674,67.834 126.578,67.628 126.578 C 67.278 126.578,67.465 126.277,67.965 126.036 C 68.342 125.854,68.344 125.857,68.010 126.124 C 67.821 126.276,67.755 126.400,67.864 126.400 C 67.974 126.400,68.189 126.274,68.343 126.121 C 68.657 125.807,68.715 125.611,68.444 125.778 C 68.347 125.838,68.267 125.809,68.267 125.714 C 68.267 125.618,68.225 125.431,68.174 125.298 C 68.123 125.165,68.195 124.900,68.335 124.709 C 68.576 124.380,68.584 124.386,68.500 124.827 C 68.451 125.083,68.502 125.348,68.613 125.416 C 68.753 125.503,68.779 125.401,68.699 125.082 C 68.625 124.787,68.657 124.622,68.789 124.622 C 68.901 124.622,68.970 124.482,68.941 124.311 C 68.905 124.098,68.988 124.017,69.203 124.053 C 69.428 124.091,69.547 123.954,69.625 123.564 C 69.685 123.266,69.803 123.012,69.889 123.000 C 69.974 122.988,70.124 122.968,70.222 122.956 C 70.320 122.943,70.400 122.853,70.400 122.756 C 70.400 122.658,70.320 122.578,70.222 122.578 C 70.124 122.578,70.064 122.498,70.089 122.400 C 70.113 122.302,70.213 122.249,70.311 122.281 C 70.409 122.314,70.295 122.134,70.058 121.881 C 69.650 121.446,69.646 121.422,69.984 121.422 C 70.357 121.422,70.455 120.805,70.121 120.564 C 
69.892 120.398,69.815 119.947,70.040 120.086 C 70.142 120.149,70.222 120.037,70.222 119.833 C 70.222 119.631,70.300 119.467,70.394 119.467 C 70.489 119.467,70.522 119.607,70.467 119.778 C 70.382 120.040,70.396 120.047,70.558 119.822 C 70.689 119.641,70.751 119.627,70.753 119.778 C 70.754 119.900,70.848 120.000,70.961 120.000 C 71.099 120.000,71.085 119.898,70.916 119.689 C 70.638 119.342,70.222 118.629,70.222 118.498 C 70.222 118.451,70.394 118.320,70.605 118.208 C 70.926 118.036,70.970 118.047,70.882 118.277 C 70.824 118.428,70.876 118.634,70.998 118.735 C 71.252 118.946,71.257 118.070,71.005 117.661 C 70.741 117.234,70.767 116.688,71.034 117.052 C 71.314 117.436,71.520 117.409,71.683 116.967 C 71.753 116.777,71.933 116.622,72.083 116.622 C 72.233 116.622,72.356 116.542,72.356 116.444 C 72.356 116.277,72.097 116.192,71.765 116.250 C 71.684 116.264,71.669 116.192,71.732 116.090 C 71.795 115.989,71.921 115.951,72.012 116.008 C 72.103 116.064,72.178 116.025,72.178 115.922 C 72.178 115.818,72.038 115.728,71.867 115.721 C 71.598 115.710,71.604 115.685,71.911 115.540 C 72.201 115.402,72.167 115.386,71.727 115.451 C 71.258 115.521,71.182 115.475,71.151 115.099 C 71.127 114.805,71.207 114.667,71.402 114.667 C 71.638 114.667,71.649 114.714,71.463 114.937 C 71.273 115.167,71.289 115.189,71.575 115.084 C 71.760 115.016,71.975 114.854,72.053 114.725 C 72.131 114.595,72.276 114.489,72.375 114.489 C 72.474 114.489,72.505 114.568,72.446 114.665 C 72.383 114.765,72.455 114.795,72.613 114.734 C 72.765 114.676,72.889 114.557,72.889 114.470 C 72.889 114.383,72.889 114.251,72.889 114.178 C 72.889 114.104,72.809 114.034,72.711 114.022 C 72.613 114.010,72.513 113.984,72.489 113.964 C 72.252 113.773,71.822 113.856,71.862 114.086 C 71.889 114.238,71.791 114.398,71.644 114.440 C 71.486 114.485,71.450 114.456,71.556 114.369 C 71.653 114.288,71.739 114.142,71.747 114.044 C 71.791 113.451,72.243 112.657,72.355 112.975 C 72.425 113.176,72.519 113.234,72.589 113.120 C 72.654 113.016,72.542 
112.722,72.342 112.467 C 72.141 112.212,72.034 111.910,72.105 111.796 C 72.182 111.672,72.155 111.637,72.040 111.709 C 71.934 111.774,71.797 111.747,71.735 111.647 C 71.674 111.548,71.714 111.467,71.824 111.467 C 71.947 111.467,71.978 111.321,71.904 111.089 C 71.822 110.832,71.848 110.750,71.984 110.834 C 72.358 111.066,73.103 111.135,72.974 110.927 C 72.907 110.819,72.737 110.776,72.595 110.830 C 72.453 110.885,72.382 110.857,72.437 110.768 C 72.492 110.679,72.416 110.485,72.268 110.337 C 72.030 110.100,72.004 107.367,72.044 86.723 L 72.090 63.378 72.134 86.722 C 72.163 102.228,72.237 110.029,72.356 109.956 C 72.473 109.883,72.533 101.894,72.533 86.567 C 72.533 71.167,72.473 63.289,72.356 63.289 C 72.258 63.289,72.178 63.209,72.178 63.111 C 72.178 63.013,72.278 62.953,72.400 62.978 C 72.522 63.002,72.598 62.909,72.568 62.770 C 72.521 62.552,72.555 62.552,72.819 62.770 C 73.101 63.002,73.107 62.989,72.904 62.608 C 72.759 62.335,72.583 62.230,72.386 62.298 C 72.160 62.376,72.171 62.337,72.434 62.134 C 72.624 61.987,72.704 61.869,72.612 61.872 C 72.325 61.882,71.821 62.340,71.953 62.471 C 72.022 62.540,71.943 62.545,71.778 62.481 C 71.613 62.418,71.495 62.284,71.517 62.183 C 71.564 61.960,71.325 61.894,70.819 61.991 M14.858 63.247 C 14.884 63.483,14.964 63.729,15.037 63.793 C 15.109 63.858,15.120 63.777,15.061 63.613 C 14.954 63.312,15.310 62.862,15.527 63.024 C 15.592 63.072,15.603 63.056,15.552 62.989 C 15.298 62.654,14.813 62.835,14.858 63.247 M65.847 63.145 C 65.858 63.355,65.960 63.557,66.075 63.595 C 66.208 63.640,66.248 63.531,66.187 63.296 C 66.053 62.786,65.822 62.683,65.847 63.145 M67.599 63.380 C 67.494 63.930,67.292 64.069,66.912 63.856 C 66.713 63.745,66.685 63.654,66.822 63.570 C 66.932 63.501,67.022 63.544,67.022 63.664 C 67.022 63.882,67.287 63.718,67.292 63.496 C 67.295 63.359,67.551 62.933,67.630 62.933 C 67.660 62.933,67.646 63.135,67.599 63.380 M62.803 63.660 C 62.755 63.845,62.650 63.956,62.570 63.906 C 62.490 63.857,62.379 63.891,62.323 63.981 C 
62.266 64.072,62.355 64.127,62.520 64.103 C 62.684 64.078,62.871 63.911,62.934 63.731 C 63.038 63.435,63.060 63.432,63.163 63.702 C 63.226 63.866,63.380 63.999,63.506 63.997 C 63.665 63.995,63.653 63.943,63.467 63.822 C 63.239 63.675,63.245 63.649,63.511 63.647 C 64.007 63.643,63.870 63.381,63.356 63.351 C 63.035 63.332,62.864 63.428,62.803 63.660 M59.437 63.731 C 59.437 63.868,59.504 63.965,59.585 63.946 C 59.666 63.927,59.732 74.256,59.731 86.901 C 59.730 102.349,59.788 109.854,59.909 109.779 C 60.029 109.705,60.089 102.088,60.089 87.012 C 60.089 72.026,60.149 64.356,60.267 64.356 C 60.364 64.356,60.444 64.236,60.444 64.089 C 60.444 63.942,60.364 63.812,60.267 63.800 C 59.918 63.756,59.820 63.726,59.630 63.605 C 59.524 63.538,59.437 63.594,59.437 63.731 M18.565 63.842 C 18.498 63.951,18.554 64.110,18.688 64.195 C 18.882 64.318,18.868 64.351,18.622 64.353 C 18.316 64.356,18.198 63.995,18.430 63.763 C 18.616 63.576,18.700 63.625,18.565 63.842 M22.567 64.198 C 22.410 64.491,22.415 64.558,22.589 64.500 C 22.713 64.459,22.833 64.290,22.858 64.124 C 22.917 63.717,22.810 63.744,22.567 64.198 M16.180 87.437 C 16.185 100.174,16.141 110.671,16.084 110.764 C 16.026 110.857,16.028 111.013,16.089 111.111 C 16.149 111.209,16.071 111.289,15.915 111.289 C 15.687 111.289,15.669 111.220,15.822 110.933 C 15.927 110.738,15.960 110.578,15.896 110.578 C 15.831 110.578,15.888 110.445,16.023 110.282 C 16.244 110.014,16.229 110.006,15.867 110.199 C 15.647 110.317,15.465 110.350,15.464 110.273 C 15.462 110.196,15.384 110.253,15.289 110.400 C 15.194 110.547,15.115 110.580,15.114 110.474 C 15.112 110.368,15.152 110.240,15.203 110.190 C 15.253 110.140,15.312 99.786,15.334 87.183 L 15.374 64.267 15.465 87.111 C 15.525 102.162,15.616 109.956,15.733 109.956 C 15.851 109.956,15.926 102.174,15.956 87.056 C 15.990 69.738,15.951 64.187,15.795 64.283 C 15.662 64.365,15.638 64.332,15.726 64.190 C 16.125 63.545,16.173 66.033,16.180 87.437 M27.911 64.552 C 27.911 64.619,28.103 64.662,28.338 64.648 C 
28.574 64.634,28.732 64.567,28.691 64.500 C 28.595 64.345,27.911 64.391,27.911 64.552 M59.464 87.111 C 59.464 99.676,59.485 104.816,59.511 98.533 C 59.537 92.251,59.537 81.971,59.511 75.689 C 59.485 69.407,59.464 74.547,59.464 87.111 M375.568 74.743 C 375.366 74.946,375.126 75.087,375.037 75.057 C 374.947 75.027,374.959 75.107,375.063 75.234 C 375.202 75.404,375.180 75.534,374.984 75.715 C 374.750 75.930,374.729 75.919,374.820 75.626 C 374.947 75.218,374.684 74.706,374.501 75.003 C 374.434 75.111,374.428 75.280,374.489 75.378 C 374.632 75.609,374.465 75.603,374.011 75.360 C 373.725 75.207,373.676 75.213,373.785 75.390 C 373.885 75.552,373.838 75.583,373.620 75.499 C 373.439 75.429,373.359 75.453,373.423 75.558 C 373.483 75.654,373.407 75.733,373.255 75.733 C 373.102 75.733,372.978 75.800,372.978 75.881 C 372.978 75.963,373.098 76.030,373.244 76.030 C 373.391 76.030,373.511 75.972,373.511 75.901 C 373.511 75.671,374.029 75.565,374.155 75.769 C 374.226 75.884,374.197 75.916,374.084 75.847 C 373.866 75.712,373.298 76.421,373.440 76.651 C 373.491 76.733,373.447 76.800,373.344 76.800 C 373.240 76.800,373.153 76.660,373.150 76.489 C 373.145 76.193,373.136 76.193,372.964 76.489 C 372.864 76.660,372.686 76.799,372.569 76.797 C 372.438 76.796,372.459 76.728,372.622 76.622 C 372.800 76.507,372.815 76.449,372.667 76.447 C 372.544 76.446,372.444 76.360,372.444 76.256 C 372.444 76.153,372.537 76.125,372.650 76.195 C 372.774 76.272,372.807 76.244,372.733 76.124 C 372.500 75.747,371.996 76.141,372.062 76.648 C 372.082 76.804,371.395 76.616,371.283 76.435 C 371.234 76.356,371.288 76.233,371.402 76.163 C 371.532 76.082,371.563 76.111,371.484 76.239 C 371.408 76.361,371.470 76.444,371.634 76.444 C 371.968 76.444,371.924 76.082,371.556 75.790 C 371.340 75.619,371.425 75.590,371.998 75.642 C 372.502 75.687,372.621 75.658,372.412 75.541 C 372.071 75.350,371.480 75.403,371.437 75.629 C 371.421 75.713,371.318 75.838,371.209 75.906 C 370.901 76.096,370.782 76.991,371.080 76.876 C 371.374 
76.763,371.830 77.253,371.576 77.409 C 371.481 77.468,371.351 77.434,371.289 77.333 C 371.226 77.232,371.100 77.197,371.007 77.254 C 370.914 77.311,370.885 77.432,370.941 77.523 C 370.997 77.614,370.961 77.689,370.861 77.689 C 370.761 77.689,370.592 77.527,370.486 77.328 C 370.234 76.858,369.980 77.024,370.088 77.589 C 370.175 78.047,369.807 78.578,369.401 78.578 C 369.275 78.578,369.347 78.419,369.566 78.215 C 370.035 77.778,370.056 77.588,369.649 77.432 C 369.412 77.341,369.395 77.282,369.575 77.171 C 369.757 77.058,369.745 77.003,369.517 76.916 C 369.346 76.850,369.271 76.877,369.335 76.981 C 369.396 77.078,369.320 77.206,369.167 77.265 C 369.014 77.323,368.889 77.528,368.889 77.718 C 368.889 77.938,368.807 78.024,368.667 77.953 C 368.544 77.891,368.571 77.946,368.725 78.076 C 369.095 78.387,369.240 78.989,368.995 79.205 C 368.658 79.503,368.193 79.440,368.079 79.081 C 367.973 78.747,367.959 78.748,367.635 79.122 C 367.387 79.408,367.183 79.479,366.850 79.395 C 366.603 79.333,366.400 79.355,366.400 79.444 C 366.400 79.679,366.885 79.829,367.224 79.699 C 367.421 79.624,367.489 79.666,367.435 79.829 C 367.346 80.094,366.617 80.155,366.250 79.928 C 366.060 79.811,366.060 79.851,366.247 80.152 C 366.389 80.378,366.522 80.446,366.594 80.329 C 366.658 80.225,366.870 80.190,367.064 80.252 C 367.512 80.394,367.412 81.277,366.956 81.203 C 366.776 81.173,366.646 81.299,366.612 81.536 C 366.571 81.827,366.469 81.894,366.185 81.819 C 365.911 81.748,365.849 81.783,365.953 81.951 C 366.048 82.104,366.013 82.150,365.847 82.090 C 365.711 82.040,365.620 81.910,365.644 81.800 C 365.669 81.690,365.600 81.600,365.492 81.600 C 365.384 81.600,365.339 81.713,365.392 81.852 C 365.445 81.991,365.369 82.150,365.222 82.206 C 365.075 82.263,365.003 82.386,365.061 82.480 C 365.119 82.574,365.284 82.606,365.428 82.551 C 365.599 82.485,365.690 82.564,365.692 82.781 C 365.694 83.030,365.731 83.053,365.845 82.874 C 365.967 82.682,366.014 82.688,366.099 82.907 C 366.156 83.055,366.115 
83.206,366.008 83.242 C 365.603 83.377,365.743 84.873,366.178 85.064 C 366.398 85.161,366.578 85.386,366.578 85.564 C 366.578 85.756,366.498 85.838,366.380 85.765 C 366.271 85.698,366.113 85.754,366.027 85.888 C 365.915 86.066,365.871 86.012,365.867 85.689 C 365.863 85.377,365.816 85.315,365.711 85.482 C 365.628 85.612,365.509 85.712,365.447 85.704 C 365.047 85.651,364.998 85.719,365.149 86.115 C 365.238 86.349,365.262 86.589,365.203 86.648 C 365.145 86.707,365.192 86.756,365.310 86.756 C 365.469 86.756,365.475 86.847,365.332 87.114 C 365.101 87.545,365.469 87.543,365.807 87.111 C 365.922 86.964,366.256 86.844,366.549 86.844 C 366.842 86.844,367.049 86.877,367.009 86.917 C 366.969 86.957,367.036 87.117,367.157 87.273 C 367.356 87.528,367.345 87.530,367.038 87.289 C 366.716 87.037,366.709 87.044,366.913 87.427 C 367.032 87.650,367.197 87.790,367.281 87.738 C 367.365 87.686,367.394 87.795,367.346 87.980 C 367.297 88.164,367.345 88.369,367.451 88.435 C 367.742 88.615,367.683 88.893,367.379 88.776 C 367.233 88.720,367.146 88.621,367.185 88.557 C 367.225 88.493,367.142 88.283,367.001 88.090 C 366.696 87.672,366.707 88.208,367.014 88.701 C 367.118 88.868,367.099 88.982,366.957 89.029 C 366.793 89.084,366.790 89.161,366.946 89.349 C 367.059 89.485,367.222 89.552,367.308 89.499 C 367.395 89.446,367.388 89.606,367.293 89.857 C 367.143 90.250,367.168 90.311,367.475 90.311 C 367.757 90.311,367.813 90.219,367.748 89.863 C 367.694 89.565,367.733 89.456,367.864 89.537 C 367.986 89.613,368.022 89.558,367.958 89.392 C 367.900 89.239,367.955 89.126,368.089 89.126 C 368.223 89.126,368.278 89.239,368.220 89.392 C 368.156 89.557,368.191 89.613,368.313 89.537 C 368.421 89.471,368.560 89.500,368.623 89.602 C 368.686 89.703,368.632 89.765,368.502 89.738 C 368.368 89.710,368.288 89.816,368.316 89.987 C 368.343 90.150,368.283 90.335,368.182 90.397 C 368.057 90.475,368.057 90.619,368.184 90.855 C 368.356 91.178,368.290 91.277,367.942 91.217 C 367.862 91.203,367.842 91.266,367.898 91.357 C 
367.954 91.448,368.161 91.480,368.358 91.429 C 368.634 91.357,368.691 91.402,368.606 91.623 C 368.545 91.782,368.584 91.911,368.692 91.911 C 368.800 91.911,368.889 91.838,368.889 91.748 C 368.889 91.658,368.942 91.445,369.008 91.274 C 369.083 91.078,369.058 91.007,368.939 91.080 C 368.836 91.144,368.660 91.086,368.549 90.952 C 368.302 90.655,368.680 90.450,369.556 90.405 C 370.246 90.370,370.325 90.731,369.778 91.426 C 369.313 92.017,369.325 92.073,369.955 92.293 C 370.249 92.395,370.444 92.551,370.389 92.640 C 370.335 92.728,370.337 92.800,370.395 92.800 C 370.453 92.800,370.573 92.620,370.661 92.400 C 370.775 92.116,370.825 92.086,370.833 92.297 C 370.844 92.593,371.730 92.919,372.252 92.819 C 372.461 92.779,372.456 92.844,372.228 93.193 C 371.966 93.592,371.972 93.618,372.329 93.645 C 372.582 93.664,372.631 93.627,372.478 93.532 C 372.296 93.419,372.302 93.348,372.508 93.177 C 372.724 92.997,372.760 93.049,372.728 93.504 C 372.697 93.959,372.755 94.044,373.095 94.044 C 373.515 94.044,373.512 93.518,373.092 93.505 C 372.882 93.498,373.673 92.995,373.911 92.984 C 373.984 92.981,374.044 93.114,374.044 93.280 C 374.044 93.515,374.092 93.535,374.258 93.369 C 374.633 92.993,374.783 93.141,374.421 93.530 C 374.048 93.929,373.637 93.852,373.805 93.415 C 373.875 93.234,373.823 93.184,373.639 93.254 C 373.404 93.344,373.403 93.418,373.631 93.875 C 373.919 94.453,374.222 94.713,374.222 94.383 C 374.222 94.245,374.345 94.278,374.560 94.473 C 374.772 94.664,374.940 94.710,375.011 94.596 C 375.087 94.473,375.275 94.471,375.587 94.590 C 375.841 94.686,376.092 94.723,376.143 94.672 C 376.195 94.620,376.128 94.578,375.994 94.578 C 375.810 94.578,375.801 94.528,375.957 94.372 C 376.113 94.216,376.229 94.221,376.437 94.394 C 376.638 94.561,376.711 94.565,376.711 94.411 C 376.711 94.296,376.784 94.246,376.874 94.302 C 376.963 94.357,377.075 94.302,377.122 94.179 C 377.170 94.056,377.200 94.130,377.189 94.343 C 377.179 94.556,377.113 94.695,377.042 94.651 C 376.826 94.518,376.725 
94.846,376.898 95.120 C 377.013 95.302,377.062 95.312,377.064 95.152 C 377.066 95.009,377.229 94.956,377.508 95.006 C 377.751 95.050,377.991 95.020,378.042 94.938 C 378.196 94.688,379.019 95.179,379.021 95.522 C 379.022 95.698,379.095 95.798,379.185 95.743 C 379.274 95.687,379.388 95.743,379.437 95.866 C 379.509 96.043,379.530 96.043,379.542 95.867 C 379.549 95.744,379.645 95.644,379.755 95.644 C 379.893 95.644,379.886 95.727,379.733 95.911 C 379.612 96.058,379.585 96.178,379.674 96.178 C 379.763 96.178,379.847 96.338,379.862 96.533 C 379.877 96.729,379.823 96.889,379.741 96.889 C 379.660 96.889,379.543 97.020,379.482 97.181 C 379.389 97.421,379.416 97.434,379.633 97.254 C 379.777 97.134,379.981 97.089,380.086 97.153 C 380.190 97.218,380.253 97.175,380.227 97.058 C 380.200 96.940,380.286 96.805,380.419 96.757 C 380.597 96.692,380.627 96.773,380.533 97.068 C 380.458 97.303,380.475 97.424,380.574 97.363 C 380.666 97.307,380.741 97.370,380.741 97.504 C 380.741 97.705,380.671 97.702,380.342 97.486 C 380.058 97.300,379.909 97.282,379.821 97.424 C 379.665 97.676,379.760 97.769,380.274 97.868 C 380.823 97.974,381.054 98.364,380.660 98.521 C 380.409 98.621,380.424 98.644,380.745 98.654 C 380.959 98.661,381.181 98.742,381.239 98.835 C 381.296 98.928,381.181 98.972,380.983 98.934 C 380.750 98.889,380.622 98.959,380.622 99.132 C 380.622 99.279,380.702 99.349,380.800 99.289 C 380.898 99.228,380.978 99.260,380.978 99.358 C 380.978 99.457,380.845 99.612,380.682 99.703 C 380.420 99.850,380.430 99.883,380.771 99.991 C 380.983 100.058,381.156 100.192,381.156 100.289 C 381.156 100.386,381.249 100.408,381.364 100.337 C 381.503 100.250,381.534 100.305,381.458 100.504 C 381.396 100.667,381.422 100.800,381.517 100.800 C 381.611 100.800,381.683 100.980,381.676 101.200 C 381.664 101.582,381.657 101.584,381.511 101.244 C 381.427 101.049,381.350 100.969,381.340 101.067 C 381.330 101.164,381.225 101.122,381.105 100.973 C 380.863 100.670,380.549 100.629,380.326 100.873 C 380.240 
100.967,380.271 100.981,380.400 100.907 C 380.522 100.837,380.622 100.868,380.622 100.978 C 380.622 101.087,380.702 101.127,380.800 101.067 C 380.898 101.006,380.978 101.042,380.978 101.145 C 380.978 101.338,380.734 101.405,380.270 101.337 C 380.118 101.315,380.192 101.453,380.448 101.666 C 380.690 101.868,380.789 102.035,380.667 102.039 C 380.544 102.042,380.444 102.117,380.444 102.206 C 380.444 102.294,380.620 102.321,380.835 102.265 C 381.182 102.174,381.197 102.194,380.968 102.440 C 380.827 102.593,380.657 102.685,380.590 102.645 C 380.523 102.605,380.411 102.666,380.340 102.780 C 380.260 102.910,380.289 102.941,380.417 102.861 C 380.530 102.792,380.631 102.799,380.642 102.878 C 380.832 104.251,380.615 104.688,379.998 104.176 C 379.783 103.997,379.729 104.010,379.711 104.245 C 379.699 104.404,379.679 104.633,379.667 104.756 C 379.636 105.066,379.158 105.041,378.876 104.716 C 378.746 104.566,378.689 104.541,378.748 104.658 C 378.948 105.056,378.647 105.778,378.281 105.778 C 378.064 105.778,377.982 105.701,378.061 105.573 C 378.135 105.453,378.111 105.415,378.002 105.483 C 377.899 105.546,377.770 105.481,377.715 105.338 C 377.660 105.195,377.702 105.024,377.808 104.959 C 377.932 104.882,377.891 104.778,377.691 104.666 C 377.321 104.459,376.342 105.283,376.697 105.502 C 376.813 105.574,376.877 105.527,376.849 105.391 C 376.822 105.262,376.900 105.176,377.022 105.200 C 377.144 105.224,377.224 105.154,377.200 105.044 C 377.176 104.934,377.243 104.812,377.350 104.772 C 377.467 104.729,377.531 104.914,377.511 105.239 C 377.494 105.535,377.547 105.778,377.629 105.778 C 377.711 105.778,377.778 105.868,377.778 105.977 C 377.778 106.124,377.715 106.125,377.539 105.979 C 377.408 105.870,377.255 105.827,377.199 105.882 C 377.143 105.938,376.874 105.838,376.600 105.659 C 376.326 105.480,376.159 105.425,376.228 105.537 C 376.297 105.649,376.237 105.785,376.094 105.840 C 375.951 105.895,375.779 105.851,375.712 105.743 C 375.633 105.615,375.672 105.596,375.822 105.689 C 375.965 
105.777,376.012 105.763,375.943 105.652 C 375.760 105.355,375.033 105.482,374.967 105.822 C 374.925 106.043,374.988 106.101,375.190 106.024 C 375.428 105.932,375.446 105.987,375.301 106.368 C 375.178 106.692,375.189 106.845,375.342 106.907 C 375.476 106.962,375.456 106.997,375.289 107.003 C 375.142 107.007,374.867 106.893,374.677 106.750 C 374.336 106.492,374.336 106.489,374.677 106.475 C 374.867 106.467,374.922 106.420,374.799 106.371 C 374.676 106.321,374.628 106.195,374.693 106.091 C 374.770 105.966,374.724 105.954,374.561 106.055 C 374.424 106.140,374.091 106.186,373.822 106.158 C 373.328 106.106,373.161 106.362,373.539 106.595 C 373.651 106.665,373.697 106.646,373.641 106.554 C 373.584 106.463,373.678 106.387,373.850 106.387 C 374.022 106.387,374.163 106.489,374.163 106.614 C 374.163 106.754,374.005 106.812,373.748 106.765 C 373.507 106.722,373.331 106.778,373.327 106.901 C 373.324 107.016,373.215 106.921,373.087 106.689 C 372.855 106.272,372.852 106.271,372.754 106.645 C 372.700 106.852,372.548 107.021,372.417 107.019 C 372.235 107.017,372.242 106.975,372.444 106.844 C 372.672 106.697,372.666 106.672,372.400 106.669 C 372.050 106.666,371.964 106.314,372.311 106.306 C 372.438 106.303,372.403 106.224,372.230 106.123 C 372.049 106.018,371.966 105.822,372.024 105.640 C 372.101 105.401,372.084 105.388,371.948 105.578 C 371.841 105.728,371.679 105.767,371.531 105.678 C 371.329 105.556,371.329 105.582,371.533 105.833 C 371.816 106.181,371.568 106.239,371.200 105.911 C 370.991 105.725,370.985 105.735,371.163 105.972 C 371.279 106.128,371.322 106.308,371.258 106.372 C 371.193 106.436,371.234 106.489,371.348 106.489 C 371.462 106.489,371.556 106.566,371.556 106.661 C 371.556 107.053,371.012 106.587,370.881 106.084 C 370.784 105.709,370.624 105.531,370.365 105.512 C 370.120 105.494,370.022 105.566,370.081 105.720 C 370.131 105.850,370.083 105.956,369.975 105.956 C 369.866 105.956,369.787 105.896,369.799 105.822 C 369.857 105.456,369.757 105.236,369.582 105.344 C 369.441 
105.431,369.440 105.366,369.576 105.111 C 369.680 104.917,369.720 104.712,369.664 104.656 C 369.513 104.506,369.088 104.951,369.202 105.139 C 369.257 105.230,369.197 105.236,369.064 105.154 C 368.894 105.049,368.859 105.089,368.939 105.295 C 369.009 105.480,368.971 105.552,368.829 105.498 C 368.708 105.451,368.610 105.261,368.610 105.075 C 368.610 104.889,368.672 104.776,368.749 104.824 C 368.826 104.871,368.889 104.818,368.889 104.707 C 368.889 104.241,368.440 104.696,368.389 105.214 C 368.322 105.893,368.082 105.930,367.613 105.333 C 367.420 105.089,367.149 104.889,367.009 104.889 C 366.870 104.889,366.756 104.729,366.756 104.533 C 366.756 104.180,366.566 104.071,366.342 104.295 C 366.278 104.359,366.312 104.516,366.417 104.643 C 366.692 104.974,366.441 105.322,366.120 105.055 C 365.926 104.894,365.847 104.912,365.761 105.136 C 365.700 105.293,365.575 105.422,365.482 105.422 C 365.389 105.422,365.362 105.342,365.422 105.244 C 365.585 104.981,365.408 105.029,365.173 105.313 C 365.002 105.518,365.109 105.549,365.811 105.501 C 366.380 105.462,366.570 105.496,366.394 105.607 C 366.251 105.698,366.213 105.779,366.311 105.788 C 366.409 105.796,366.329 105.867,366.133 105.946 C 365.921 106.032,365.678 106.009,365.530 105.891 C 365.326 105.727,365.306 105.768,365.417 106.117 C 365.533 106.481,365.505 106.525,365.226 106.417 C 365.047 106.349,364.963 106.357,365.040 106.435 C 365.118 106.514,365.435 106.672,365.746 106.786 C 366.082 106.911,366.181 107.001,365.990 107.009 C 365.813 107.016,365.718 107.104,365.780 107.203 C 365.841 107.302,365.957 107.343,366.038 107.293 C 366.119 107.243,366.236 107.335,366.299 107.498 C 366.361 107.661,366.309 107.859,366.184 107.939 C 366.029 108.037,366.090 108.084,366.375 108.086 C 366.671 108.088,366.764 108.012,366.693 107.828 C 366.638 107.684,366.672 107.518,366.768 107.459 C 366.864 107.400,366.920 107.457,366.893 107.587 C 366.866 107.716,366.950 107.800,367.080 107.773 C 367.209 107.747,367.269 107.800,367.212 107.892 C 367.155 
107.983,367.169 108.092,367.243 108.133 C 367.317 108.175,367.280 108.185,367.161 108.156 C 367.042 108.128,366.894 108.186,366.832 108.286 C 366.623 108.625,366.920 108.672,367.377 108.372 C 367.888 108.037,368.092 108.096,368.229 108.619 C 368.355 109.103,368.205 109.075,368.006 108.578 C 367.876 108.256,367.843 108.240,367.833 108.499 C 367.826 108.708,367.744 108.774,367.600 108.687 C 367.472 108.611,367.491 108.694,367.644 108.882 C 367.791 109.063,367.898 109.258,367.881 109.316 C 367.865 109.375,367.898 109.462,367.954 109.511 C 368.011 109.560,367.991 109.486,367.910 109.348 C 367.802 109.161,367.839 109.124,368.054 109.206 C 368.214 109.268,368.391 109.241,368.449 109.148 C 368.507 109.054,368.674 108.978,368.821 108.978 C 368.968 108.978,369.037 109.059,368.976 109.158 C 368.915 109.258,368.770 109.285,368.654 109.219 C 368.538 109.152,368.508 109.169,368.587 109.255 C 368.666 109.341,368.906 109.414,369.121 109.417 C 369.349 109.420,369.491 109.520,369.462 109.658 C 369.410 109.911,369.539 109.857,370.361 109.288 C 370.633 109.100,370.843 109.013,370.829 109.095 C 370.727 109.669,370.887 109.915,371.058 109.447 C 371.197 109.065,371.281 108.998,371.397 109.180 C 371.509 109.357,371.551 109.363,371.553 109.200 C 371.557 108.876,372.387 108.927,372.514 109.259 C 372.659 109.635,372.661 109.630,372.348 109.630 C 372.192 109.630,372.113 109.527,372.165 109.393 C 372.215 109.262,372.145 109.156,372.010 109.156 C 371.767 109.156,371.734 109.435,371.938 109.778 C 371.995 109.876,372.021 110.094,371.993 110.263 C 371.923 110.699,372.590 110.567,372.997 110.065 C 373.351 109.627,373.280 109.217,372.829 109.097 C 372.665 109.054,372.751 109.010,373.022 108.998 C 373.385 108.983,373.511 109.062,373.511 109.304 C 373.511 109.483,373.620 109.630,373.753 109.630 C 373.901 109.630,373.962 109.502,373.910 109.304 C 373.854 109.087,373.921 108.978,374.113 108.978 C 374.271 108.978,374.400 109.062,374.400 109.166 C 374.400 109.270,374.308 109.297,374.195 109.227 C 374.059 
109.144,374.038 109.179,374.133 109.333 C 374.247 109.517,374.321 109.504,374.491 109.271 C 374.609 109.110,374.812 108.978,374.942 108.978 C 375.078 108.978,374.965 109.199,374.677 109.501 C 374.181 110.018,374.181 110.022,374.599 109.932 C 374.989 109.847,374.998 109.860,374.711 110.095 C 374.332 110.406,374.318 110.519,374.659 110.519 C 374.801 110.519,374.882 110.461,374.839 110.391 C 374.795 110.320,374.894 110.153,375.058 110.020 C 375.231 109.880,375.304 109.866,375.231 109.988 C 375.162 110.103,375.187 110.248,375.286 110.309 C 375.386 110.371,375.467 110.255,375.467 110.047 C 375.467 109.839,375.368 109.696,375.244 109.724 C 375.122 109.752,375.022 109.696,375.022 109.599 C 375.022 109.502,375.116 109.432,375.230 109.445 C 375.345 109.457,375.401 109.370,375.355 109.250 C 375.300 109.106,375.450 109.045,375.805 109.066 C 376.417 109.102,376.905 109.440,376.787 109.747 C 376.743 109.862,376.888 110.069,377.109 110.207 C 377.474 110.434,377.489 110.431,377.274 110.170 C 377.143 110.012,377.091 109.793,377.158 109.685 C 377.225 109.576,377.132 109.366,376.951 109.218 C 376.689 109.003,376.681 108.967,376.912 109.038 C 377.071 109.087,377.289 109.040,377.396 108.932 C 377.615 108.714,377.547 109.338,377.309 109.734 C 377.220 109.883,377.274 109.876,377.477 109.712 C 377.642 109.577,377.778 109.353,377.778 109.212 C 377.778 109.072,377.859 109.007,377.958 109.068 C 378.058 109.130,378.098 109.246,378.049 109.327 C 377.946 109.492,378.833 109.760,378.984 109.609 C 379.038 109.555,378.988 109.511,378.874 109.511 C 378.606 109.511,378.605 109.037,378.872 108.872 C 378.998 108.794,379.029 108.823,378.952 108.948 C 378.871 109.079,378.923 109.114,379.096 109.048 C 379.245 108.991,379.349 108.852,379.328 108.739 C 379.306 108.622,379.511 108.533,379.806 108.533 C 380.090 108.533,380.361 108.422,380.407 108.286 C 380.463 108.122,380.415 108.086,380.262 108.181 C 380.103 108.279,380.077 108.251,380.177 108.089 C 380.291 107.906,380.368 107.922,380.543 108.161 C 380.666 
108.329,380.811 108.392,380.867 108.302 C 380.923 108.212,380.915 108.085,380.850 108.020 C 380.522 107.692,380.634 107.204,381.024 107.263 C 381.243 107.297,381.382 107.396,381.333 107.484 C 381.284 107.572,381.184 107.624,381.111 107.600 C 381.038 107.576,380.978 107.622,380.978 107.704 C 380.978 107.786,381.318 107.836,381.733 107.814 C 382.764 107.761,382.790 107.755,382.679 107.576 C 382.558 107.380,383.011 106.982,383.172 107.142 C 383.236 107.207,383.289 107.166,383.289 107.052 C 383.289 106.938,383.200 106.844,383.090 106.844 C 382.976 106.844,382.933 106.686,382.989 106.471 C 383.068 106.170,383.021 106.115,382.743 106.188 C 382.554 106.238,382.444 106.205,382.499 106.117 C 382.676 105.830,383.338 105.927,383.225 106.222 C 383.168 106.369,383.191 106.489,383.275 106.489 C 383.439 106.489,383.720 105.955,383.673 105.733 C 383.657 105.660,383.744 105.599,383.867 105.597 C 384.026 105.595,384.018 105.550,383.841 105.437 C 383.649 105.316,383.714 105.218,384.129 105.003 C 384.425 104.850,384.729 104.782,384.806 104.852 C 384.883 104.921,384.888 104.876,384.816 104.753 C 384.745 104.629,384.600 104.581,384.494 104.647 C 384.382 104.716,384.351 104.684,384.421 104.572 C 384.487 104.465,384.709 104.410,384.915 104.449 C 385.263 104.516,385.269 104.500,385.002 104.205 C 384.806 103.988,384.774 103.831,384.899 103.706 C 385.024 103.580,384.994 103.450,384.803 103.292 C 384.650 103.164,384.469 103.116,384.401 103.184 C 384.333 103.252,384.395 103.384,384.538 103.476 C 384.757 103.617,384.760 103.670,384.558 103.801 C 384.353 103.933,384.353 103.974,384.558 104.066 C 384.691 104.126,384.631 104.133,384.425 104.083 C 384.192 104.027,383.956 104.112,383.803 104.307 C 383.577 104.595,383.583 104.600,383.871 104.375 C 384.319 104.023,384.385 104.193,383.990 104.680 C 383.655 105.093,383.272 105.085,383.375 104.666 C 383.590 103.800,383.702 103.639,384.022 103.740 C 384.205 103.799,384.356 103.771,384.356 103.678 C 384.356 103.418,383.896 103.099,383.733 103.246 C 383.653 
103.318,383.643 103.281,383.711 103.163 C 383.778 103.045,383.964 102.999,384.124 103.060 C 384.335 103.141,384.374 103.106,384.267 102.933 C 384.158 102.758,384.200 102.726,384.425 102.812 C 384.606 102.882,384.686 102.858,384.621 102.753 C 384.561 102.656,384.296 102.623,384.034 102.680 C 383.764 102.739,383.641 102.720,383.751 102.637 C 383.859 102.555,383.936 102.264,383.923 101.990 C 383.894 101.409,384.110 101.190,384.664 101.239 C 384.996 101.268,385.025 101.230,384.841 101.009 C 384.574 100.686,384.540 99.339,384.794 99.158 C 384.911 99.075,384.895 99.027,384.749 99.025 C 384.580 99.023,384.568 98.935,384.701 98.685 C 384.834 98.438,384.815 98.294,384.632 98.142 C 384.430 97.974,384.401 98.003,384.479 98.301 C 384.586 98.710,384.400 98.764,384.000 98.439 C 383.676 98.176,383.544 97.422,383.822 97.422 C 383.931 97.422,383.965 97.513,383.897 97.623 C 383.821 97.746,383.869 97.790,384.020 97.736 C 384.156 97.688,384.407 97.720,384.578 97.808 C 384.777 97.910,384.889 97.902,384.889 97.784 C 384.889 97.683,384.804 97.600,384.701 97.600 C 384.597 97.600,384.562 97.520,384.622 97.422 C 384.683 97.324,384.567 97.244,384.366 97.244 C 384.137 97.244,383.999 97.128,383.997 96.933 C 383.995 96.694,383.960 96.677,383.843 96.861 C 383.733 97.035,383.668 96.974,383.604 96.637 C 383.542 96.312,383.574 96.223,383.713 96.336 C 383.837 96.437,383.997 96.405,384.140 96.249 C 384.273 96.105,384.291 96.000,384.185 96.000 C 384.083 96.000,384.000 95.847,384.000 95.660 C 384.000 95.404,383.892 95.332,383.560 95.368 C 383.100 95.419,382.468 94.899,382.644 94.615 C 382.699 94.525,382.548 94.489,382.306 94.535 C 382.051 94.584,381.867 94.536,381.867 94.421 C 381.867 94.312,382.042 94.222,382.256 94.222 C 382.636 94.222,382.637 94.216,382.302 93.971 C 382.046 93.784,381.924 93.772,381.826 93.926 C 381.747 94.051,381.693 94.059,381.692 93.947 C 381.690 93.844,381.789 93.652,381.911 93.521 C 382.090 93.328,382.087 93.307,381.896 93.416 C 381.550 93.613,380.089 93.671,380.089 93.489 C 
380.089 93.403,380.169 93.333,380.267 93.333 C 380.364 93.333,380.435 93.273,380.423 93.200 C 380.364 92.825,380.485 92.786,380.889 93.050 C 381.310 93.326,381.333 93.323,381.333 93.002 C 381.333 92.795,381.219 92.671,381.041 92.686 C 380.881 92.700,380.715 92.591,380.672 92.444 C 380.604 92.207,380.617 92.207,380.796 92.444 C 380.965 92.669,380.988 92.655,380.940 92.356 C 380.904 92.128,380.747 92.000,380.501 92.000 C 380.291 92.000,380.052 91.927,379.970 91.838 C 379.884 91.744,379.915 91.730,380.044 91.804 C 380.167 91.875,380.267 91.847,380.267 91.744 C 380.267 91.501,379.966 91.500,379.817 91.742 C 379.655 92.004,378.958 91.875,378.809 91.556 C 378.740 91.409,378.714 91.584,378.750 91.944 C 378.802 92.470,378.762 92.579,378.548 92.497 C 378.354 92.423,378.339 92.359,378.492 92.265 C 378.639 92.174,378.650 92.032,378.530 91.800 C 378.435 91.617,378.379 91.332,378.406 91.168 C 378.438 90.967,378.367 90.902,378.190 90.970 C 377.975 91.053,377.970 91.099,378.163 91.222 C 378.339 91.334,378.309 91.376,378.044 91.388 C 377.693 91.403,377.693 91.404,378.039 91.540 C 378.355 91.663,378.343 91.686,377.912 91.768 C 377.648 91.819,377.475 91.791,377.527 91.708 C 377.579 91.624,377.543 91.556,377.448 91.556 C 377.352 91.556,377.241 91.656,377.200 91.778 C 377.159 91.900,377.083 91.779,377.032 91.510 C 376.954 91.106,376.981 91.055,377.184 91.224 C 377.380 91.387,377.408 91.369,377.318 91.136 C 377.257 90.976,377.130 90.844,377.036 90.844 C 376.942 90.844,376.916 90.684,376.978 90.489 C 377.051 90.257,377.009 90.132,376.856 90.131 C 376.684 90.129,376.693 90.082,376.889 89.956 C 377.042 89.857,377.067 89.782,376.948 89.781 C 376.834 89.779,376.594 89.730,376.415 89.672 C 376.144 89.584,376.134 89.599,376.356 89.758 C 376.573 89.914,376.534 89.951,376.148 89.953 C 375.887 89.954,375.634 89.856,375.584 89.733 C 375.524 89.583,375.490 89.598,375.481 89.780 C 375.462 90.144,374.271 90.142,374.041 89.778 C 373.935 89.610,373.871 89.591,373.869 89.728 C 373.868 89.847,373.748 
89.899,373.604 89.844 C 373.412 89.770,373.368 89.573,373.440 89.119 C 373.507 88.697,373.477 88.533,373.347 88.614 C 373.242 88.679,373.156 88.656,373.156 88.563 C 373.156 88.470,372.960 88.345,372.721 88.285 C 372.396 88.203,372.306 88.081,372.360 87.797 C 372.400 87.589,372.376 87.476,372.306 87.546 C 372.235 87.617,372.174 87.847,372.170 88.059 C 372.165 88.271,372.068 88.425,371.954 88.402 C 371.832 88.377,371.791 88.476,371.853 88.639 C 371.912 88.793,372.009 88.852,372.069 88.770 C 372.207 88.582,372.641 88.588,372.952 88.782 C 373.155 88.909,373.155 88.957,372.952 89.086 C 372.608 89.304,372.402 89.280,372.551 89.039 C 372.637 88.898,372.558 88.864,372.301 88.931 C 372.087 88.987,371.836 88.924,371.720 88.784 C 371.607 88.648,371.464 88.569,371.402 88.607 C 371.340 88.645,371.277 88.384,371.263 88.027 C 371.249 87.670,371.274 87.511,371.318 87.674 C 371.362 87.837,371.525 88.018,371.680 88.078 C 371.911 88.167,371.922 88.139,371.743 87.923 C 371.623 87.778,371.567 87.593,371.617 87.510 C 371.668 87.428,371.636 87.245,371.547 87.103 C 371.436 86.927,371.382 86.915,371.381 87.066 C 371.379 87.198,371.217 87.257,370.978 87.212 C 370.758 87.171,370.520 87.132,370.449 87.124 C 370.378 87.117,370.398 86.907,370.493 86.657 C 370.621 86.320,370.612 86.235,370.458 86.330 C 370.319 86.416,370.288 86.361,370.363 86.165 C 370.432 85.987,370.394 85.888,370.267 85.913 C 370.153 85.937,370.046 85.775,370.031 85.554 C 370.010 85.245,370.057 85.196,370.232 85.342 C 370.410 85.489,370.439 85.447,370.363 85.158 C 370.310 84.953,370.336 84.798,370.422 84.813 C 370.845 84.891,371.022 84.762,371.022 84.376 C 371.022 83.884,370.178 83.588,370.011 84.022 C 369.902 84.308,369.422 84.354,369.422 84.078 C 369.422 83.975,369.492 83.933,369.576 83.985 C 369.661 84.038,369.881 83.909,370.065 83.699 C 370.249 83.489,370.300 83.375,370.178 83.447 C 370.056 83.518,369.956 83.492,369.956 83.388 C 369.956 83.285,369.860 83.220,369.744 83.244 C 369.627 83.269,369.503 83.129,369.468 82.933 C 
369.423 82.685,369.451 82.647,369.560 82.805 C 369.682 82.983,369.821 82.951,370.192 82.659 C 370.453 82.453,370.677 82.331,370.689 82.387 C 370.701 82.443,370.721 82.562,370.733 82.652 C 370.794 83.096,371.022 83.014,371.022 82.548 C 371.022 81.989,370.634 81.819,370.178 82.179 C 370.031 82.295,369.956 82.294,369.956 82.177 C 369.956 81.905,370.413 81.586,370.588 81.735 C 370.674 81.807,370.650 81.743,370.536 81.592 C 370.266 81.235,370.319 80.568,370.627 80.450 C 370.759 80.400,370.827 80.294,370.779 80.216 C 370.731 80.138,370.844 80.062,371.031 80.048 C 371.218 80.034,371.425 80.110,371.491 80.217 C 371.562 80.333,371.528 80.360,371.405 80.284 C 371.029 80.051,370.801 80.368,371.104 80.703 C 371.356 80.982,371.357 81.040,371.118 81.239 C 370.969 81.362,370.895 81.537,370.951 81.629 C 371.129 81.917,371.525 81.431,371.476 80.985 C 371.444 80.693,371.504 80.593,371.671 80.657 C 371.989 80.779,371.982 80.280,371.661 80.014 C 371.402 79.799,371.384 79.526,371.605 79.172 C 371.769 78.910,372.474 79.084,372.340 79.354 C 372.285 79.465,372.366 79.400,372.520 79.210 C 372.724 78.960,372.801 78.935,372.803 79.122 C 372.805 79.338,372.832 79.336,372.978 79.111 C 373.121 78.890,373.151 78.905,373.156 79.200 C 373.160 79.495,373.190 79.510,373.333 79.289 C 373.459 79.095,373.506 79.086,373.508 79.255 C 373.510 79.383,373.611 79.437,373.733 79.375 C 373.856 79.313,373.800 79.388,373.610 79.542 C 373.298 79.796,373.295 79.822,373.577 79.822 C 373.748 79.822,373.838 79.902,373.778 80.000 C 373.717 80.098,373.761 80.178,373.875 80.178 C 373.989 80.178,374.041 80.071,373.991 79.939 C 373.941 79.808,373.968 79.591,374.051 79.457 C 374.134 79.322,374.134 79.129,374.052 79.028 C 373.959 78.914,374.136 78.725,374.519 78.531 C 374.903 78.335,375.082 78.302,374.994 78.442 C 374.917 78.566,374.876 78.807,374.905 78.978 C 374.933 79.149,374.857 79.289,374.737 79.289 C 374.617 79.289,374.570 79.340,374.633 79.403 C 374.696 79.466,374.862 79.422,375.002 79.306 C 375.176 79.161,375.294 
79.153,375.372 79.280 C 375.545 79.560,375.822 79.504,375.822 79.190 C 375.822 79.037,375.742 78.962,375.644 79.022 C 375.547 79.083,375.467 79.047,375.467 78.944 C 375.467 78.668,375.756 78.714,376.085 79.044 C 376.351 79.310,376.393 79.306,376.622 78.999 C 376.858 78.684,376.866 78.688,376.770 79.067 C 376.715 79.287,376.739 79.467,376.824 79.466 C 376.908 79.466,376.987 79.188,376.999 78.848 C 377.013 78.440,376.952 78.268,376.821 78.341 C 376.705 78.406,376.712 78.355,376.840 78.218 C 377.192 77.838,377.956 77.882,377.956 78.283 C 377.956 78.577,377.915 78.590,377.644 78.383 C 377.473 78.252,377.393 78.215,377.465 78.301 C 377.538 78.386,377.500 78.584,377.382 78.739 C 377.264 78.895,377.237 78.963,377.323 78.890 C 377.408 78.818,377.601 78.858,377.751 78.979 C 377.942 79.134,377.986 79.137,377.898 78.990 C 377.829 78.875,377.847 78.734,377.939 78.677 C 378.031 78.620,378.150 78.714,378.202 78.887 C 378.255 79.059,378.272 78.978,378.240 78.707 C 378.207 78.436,378.252 78.170,378.339 78.116 C 378.426 78.062,378.476 78.124,378.449 78.253 C 378.422 78.384,378.519 78.462,378.667 78.430 C 378.813 78.397,378.875 78.429,378.803 78.501 C 378.731 78.573,378.811 78.745,378.981 78.885 C 379.265 79.118,379.258 79.131,378.889 79.052 C 378.669 79.005,378.529 79.032,378.579 79.113 C 378.718 79.338,378.268 79.432,377.829 79.270 C 377.570 79.175,377.392 79.199,377.311 79.340 C 377.243 79.459,377.254 79.495,377.335 79.422 C 377.416 79.348,377.660 79.404,377.876 79.545 C 378.216 79.768,378.315 79.769,378.601 79.552 C 378.784 79.413,379.156 79.298,379.428 79.295 C 379.700 79.291,379.961 79.189,380.008 79.067 C 380.132 78.744,381.096 78.710,381.096 79.029 C 381.096 79.225,381.040 79.236,380.852 79.080 C 380.650 78.913,380.625 78.976,380.706 79.439 C 380.767 79.781,380.731 80.000,380.614 80.000 C 380.509 80.000,380.475 80.084,380.539 80.186 C 380.618 80.315,380.727 80.312,380.890 80.177 C 381.077 80.022,381.117 80.070,381.080 80.405 C 381.052 80.659,381.126 80.830,381.264 80.830 C 
381.391 80.830,381.459 80.770,381.414 80.698 C 381.220 80.384,381.552 80.055,382.009 80.109 C 382.273 80.140,382.345 80.121,382.168 80.068 C 381.915 79.991,381.891 79.918,382.055 79.720 C 382.169 79.583,382.326 79.510,382.404 79.558 C 382.481 79.606,382.505 79.494,382.457 79.309 C 382.409 79.125,382.475 78.908,382.604 78.828 C 382.903 78.643,383.475 78.598,383.371 78.767 C 383.327 78.838,383.408 78.941,383.551 78.996 C 383.861 79.115,384.027 78.909,383.851 78.624 C 383.559 78.151,382.549 78.426,382.288 79.050 C 381.995 79.752,381.857 79.811,381.906 79.213 C 381.926 78.975,382.019 78.829,382.112 78.886 C 382.207 78.945,382.236 78.870,382.177 78.717 C 382.046 78.376,382.360 78.040,382.692 78.168 C 382.894 78.245,382.905 78.171,382.750 77.762 L 382.559 77.261 382.952 77.609 L 383.344 77.956 383.012 77.422 L 382.681 76.889 383.074 77.228 C 383.517 77.610,383.614 77.455,383.194 77.035 C 382.999 76.841,382.974 76.698,383.107 76.539 C 383.249 76.367,383.179 76.329,382.802 76.374 C 382.532 76.406,382.451 76.389,382.622 76.336 C 383.008 76.218,383.035 75.733,382.656 75.733 C 382.492 75.733,382.431 75.817,382.506 75.939 C 382.581 76.060,382.555 76.096,382.444 76.027 C 382.175 75.861,381.668 76.089,381.806 76.313 C 381.878 76.430,381.835 76.446,381.689 76.356 C 381.530 76.258,381.499 76.281,381.591 76.429 C 381.664 76.549,381.841 76.602,381.983 76.547 C 382.125 76.493,382.195 76.523,382.139 76.613 C 382.083 76.704,382.179 76.859,382.352 76.957 C 382.634 77.117,382.626 77.133,382.279 77.109 C 382.066 77.094,381.930 77.018,381.978 76.941 C 382.026 76.863,381.891 76.800,381.679 76.800 C 381.395 76.800,381.315 76.717,381.380 76.489 C 381.537 75.932,381.554 75.827,381.532 75.501 C 381.521 75.324,381.427 75.231,381.325 75.294 C 381.195 75.374,381.197 75.480,381.329 75.640 C 381.594 75.959,381.139 76.123,380.849 75.813 C 380.679 75.631,380.679 75.563,380.850 75.494 C 380.973 75.444,380.933 75.400,380.756 75.392 C 380.584 75.384,380.447 75.478,380.450 75.600 C 380.458 75.928,380.904 
76.459,381.049 76.314 C 381.118 76.245,381.123 76.321,381.061 76.483 C 380.999 76.645,380.840 76.736,380.708 76.685 C 380.575 76.634,380.507 76.488,380.557 76.359 C 380.606 76.230,380.541 76.057,380.412 75.975 C 380.054 75.745,379.437 75.779,379.437 76.029 C 379.437 76.151,379.516 76.202,379.613 76.142 C 379.711 76.081,379.732 76.125,379.660 76.241 C 379.584 76.365,379.697 76.515,379.944 76.616 C 380.321 76.770,380.308 76.782,379.790 76.755 C 379.479 76.739,379.272 76.649,379.330 76.555 C 379.391 76.457,379.310 76.432,379.142 76.497 C 378.961 76.566,378.865 76.527,378.891 76.394 C 378.914 76.275,378.838 76.196,378.722 76.219 C 378.600 76.242,378.539 76.112,378.578 75.911 C 378.615 75.720,378.570 75.517,378.478 75.460 C 378.386 75.403,378.305 75.502,378.299 75.678 C 378.287 75.972,378.275 75.971,378.152 75.657 C 378.032 75.351,378.006 75.346,377.903 75.613 C 377.838 75.783,377.596 75.911,377.339 75.911 C 376.908 75.911,376.902 75.897,377.200 75.584 C 377.371 75.404,377.591 75.297,377.689 75.346 C 377.787 75.395,377.807 75.369,377.735 75.289 C 377.662 75.209,377.686 75.044,377.788 74.921 C 377.916 74.766,377.915 74.662,377.782 74.580 C 377.677 74.516,377.603 74.528,377.617 74.609 C 377.683 74.991,377.579 75.061,377.369 74.774 C 377.030 74.310,376.018 74.293,375.568 74.743 M376.353 74.778 C 376.354 74.693,376.461 74.711,376.591 74.818 C 376.763 74.961,376.883 74.957,377.035 74.805 C 377.406 74.434,377.261 74.938,376.878 75.348 C 376.534 75.717,376.050 75.729,376.276 75.363 C 376.333 75.270,376.455 75.240,376.546 75.296 C 376.637 75.353,376.711 75.314,376.711 75.210 C 376.711 75.029,376.345 74.945,375.848 75.013 C 375.646 75.041,375.646 75.005,375.848 74.787 C 376.021 74.601,376.126 74.585,376.219 74.731 C 376.291 74.842,376.351 74.864,376.353 74.778 M297.795 75.627 C 297.392 76.055,297.391 76.064,297.757 75.968 C 297.964 75.914,298.133 75.759,298.133 75.624 C 298.133 75.360,298.350 75.298,298.556 75.504 C 298.626 75.574,298.499 75.780,298.275 75.963 L 297.867 76.295 
298.399 76.177 L 298.932 76.060 298.505 76.430 C 298.271 76.634,298.011 76.800,297.928 76.800 C 297.713 76.800,297.740 108.110,297.956 108.444 C 298.050 108.591,298.129 108.647,298.131 108.568 C 298.132 108.489,298.326 108.649,298.562 108.923 C 298.861 109.271,298.928 109.461,298.784 109.552 C 298.671 109.624,302.155 109.685,306.528 109.686 C 313.889 109.689,314.469 109.667,314.364 109.391 C 314.279 109.171,314.322 109.122,314.530 109.201 C 314.902 109.344,315.067 109.157,314.825 108.866 C 314.665 108.673,314.720 108.671,315.138 108.857 C 315.841 109.171,318.756 109.051,318.756 108.708 C 318.756 108.561,318.895 108.492,319.107 108.533 C 319.301 108.570,319.513 108.513,319.579 108.406 C 319.651 108.290,319.617 108.263,319.494 108.339 C 319.381 108.408,319.289 108.381,319.289 108.277 C 319.289 108.174,319.446 108.089,319.639 108.089 C 319.891 108.089,319.962 108.001,319.892 107.778 C 319.809 107.512,319.826 107.515,320.007 107.799 C 320.175 108.062,320.169 108.163,319.976 108.285 C 319.843 108.370,319.813 108.441,319.911 108.444 C 320.009 108.447,319.982 108.518,319.852 108.600 C 319.670 108.716,319.696 108.772,319.966 108.843 C 320.276 108.924,320.316 108.847,320.305 108.175 C 320.294 107.489,320.253 107.420,319.880 107.461 C 319.653 107.486,319.467 107.438,319.467 107.353 C 319.467 107.269,319.551 107.200,319.655 107.200 C 319.758 107.200,319.786 107.100,319.715 106.978 C 319.645 106.856,319.653 106.815,319.733 106.887 C 319.813 106.960,319.988 106.929,320.121 106.818 C 320.313 106.659,320.340 106.678,320.252 106.908 C 320.097 107.312,320.303 107.276,320.525 106.860 C 320.683 106.564,320.650 106.527,320.260 106.570 C 320.015 106.597,319.771 106.550,319.719 106.465 C 319.666 106.380,319.740 106.311,319.882 106.311 C 320.125 106.311,320.443 105.839,320.386 105.562 C 320.372 105.492,320.471 105.478,320.606 105.530 C 320.742 105.582,320.913 105.526,320.987 105.407 C 321.068 105.276,321.048 105.235,320.937 105.304 C 320.835 105.367,320.710 105.259,320.659 105.064 C 
320.608 104.869,320.459 104.737,320.328 104.770 C 320.196 104.803,320.149 104.797,320.223 104.756 C 320.297 104.714,320.319 104.618,320.272 104.542 C 320.225 104.466,320.344 104.260,320.538 104.085 C 320.878 103.777,320.889 103.780,320.889 104.180 C 320.889 104.557,321.126 104.543,321.178 104.163 C 321.190 104.073,321.210 103.927,321.222 103.839 C 321.234 103.750,321.395 103.717,321.580 103.765 C 322.041 103.886,322.239 103.494,321.858 103.215 C 321.613 103.036,321.596 102.957,321.778 102.844 C 321.942 102.743,321.968 102.770,321.867 102.933 C 321.771 103.088,321.792 103.123,321.931 103.037 C 322.302 102.808,321.952 102.578,321.234 102.578 C 320.849 102.578,320.533 102.515,320.533 102.438 C 320.533 102.361,320.815 102.308,321.159 102.319 C 321.503 102.330,321.864 102.244,321.962 102.126 C 322.091 101.970,322.031 101.918,321.736 101.934 C 321.488 101.947,321.352 101.865,321.382 101.720 C 321.409 101.591,321.353 101.533,321.257 101.592 C 321.161 101.652,321.127 101.818,321.182 101.961 C 321.237 102.105,321.199 102.222,321.096 102.222 C 320.732 102.222,320.725 101.909,321.082 101.586 C 321.392 101.306,321.467 101.296,321.608 101.518 C 321.737 101.722,321.773 101.692,321.775 101.378 C 321.777 101.158,321.711 100.978,321.630 100.978 C 321.418 100.978,321.436 100.399,321.657 100.120 C 321.792 99.950,321.773 99.839,321.589 99.723 C 321.379 99.590,321.389 99.561,321.644 99.558 C 321.816 99.557,321.956 99.636,321.956 99.733 C 321.956 99.831,322.054 99.891,322.174 99.867 C 322.293 99.842,322.418 99.996,322.451 100.209 C 322.555 100.888,323.040 101.134,322.990 100.482 C 322.967 100.182,322.865 99.950,322.763 99.968 C 322.661 99.986,322.601 99.892,322.628 99.759 C 322.716 99.343,322.303 98.985,322.013 99.226 C 321.821 99.386,321.713 99.380,321.548 99.202 C 321.419 99.062,321.412 99.014,321.532 99.081 C 321.777 99.218,322.363 98.604,322.178 98.405 C 322.105 98.326,322.364 98.311,322.756 98.373 C 323.147 98.435,325.855 98.486,328.775 98.487 C 333.756 98.489,334.075 
98.508,333.964 98.800 C 333.869 99.049,333.890 99.067,334.069 98.889 C 334.383 98.578,334.668 98.610,334.405 98.927 C 334.286 99.071,334.230 99.291,334.280 99.416 C 334.336 99.556,334.377 99.519,334.386 99.323 C 334.394 99.146,334.472 99.046,334.561 99.101 C 334.650 99.156,334.683 99.351,334.634 99.536 C 334.586 99.720,334.639 99.928,334.751 99.997 C 334.892 100.084,334.856 100.198,334.633 100.367 C 334.376 100.562,334.365 100.612,334.579 100.617 C 334.727 100.620,334.953 100.454,335.081 100.248 C 335.210 100.042,335.235 99.924,335.137 99.984 C 335.038 100.045,334.913 100.022,334.858 99.933 C 334.803 99.844,334.871 99.727,335.009 99.674 C 335.261 99.577,335.467 99.894,335.467 100.381 C 335.467 100.534,335.381 100.590,335.261 100.516 C 335.134 100.438,335.104 100.468,335.183 100.595 C 335.294 100.774,334.710 100.905,334.178 100.821 C 334.104 100.809,334.044 100.885,334.044 100.990 C 334.044 101.102,334.198 101.131,334.422 101.060 C 334.677 100.979,334.760 101.004,334.678 101.137 C 334.612 101.245,334.662 101.335,334.790 101.336 C 334.975 101.338,334.973 101.370,334.781 101.492 C 334.613 101.598,334.599 101.681,334.737 101.766 C 334.845 101.833,334.933 101.805,334.933 101.705 C 334.933 101.604,335.073 101.567,335.244 101.622 C 335.488 101.701,335.502 101.684,335.307 101.546 C 335.164 101.444,335.109 101.241,335.176 101.065 C 335.268 100.825,335.347 100.804,335.544 100.968 C 335.747 101.137,335.777 101.111,335.704 100.830 C 335.600 100.435,335.910 100.155,336.104 100.469 C 336.189 100.607,336.149 100.634,335.983 100.548 C 335.846 100.476,335.889 100.544,336.078 100.698 C 336.268 100.852,336.328 100.978,336.212 100.978 C 336.095 100.978,336.000 101.102,336.000 101.255 C 336.000 101.407,335.924 101.485,335.832 101.428 C 335.739 101.371,335.695 101.490,335.734 101.693 C 335.775 101.905,335.695 102.104,335.547 102.161 C 335.405 102.216,335.289 102.341,335.289 102.441 C 335.289 102.545,335.402 102.528,335.556 102.400 C 335.765 102.226,335.822 102.226,335.822 102.397 C 
335.822 102.528,335.947 102.572,336.133 102.507 C 336.401 102.414,336.397 102.436,336.107 102.661 C 335.922 102.804,335.707 102.883,335.631 102.836 C 335.554 102.788,335.433 102.843,335.363 102.958 C 335.287 103.080,335.311 103.119,335.419 103.052 C 335.521 102.989,335.703 103.056,335.824 103.202 C 336.031 103.451,335.678 103.584,335.067 103.488 C 334.993 103.476,334.933 103.552,334.933 103.656 C 334.933 103.764,335.084 103.798,335.280 103.736 C 335.559 103.648,335.614 103.698,335.558 103.987 C 335.520 104.186,335.544 104.340,335.611 104.331 C 336.150 104.254,336.599 104.376,336.356 104.533 C 336.081 104.711,336.382 104.793,337.067 104.728 C 337.267 104.709,337.256 104.750,337.021 104.894 C 336.849 105.000,336.634 105.040,336.544 104.984 C 336.454 104.929,336.329 104.966,336.267 105.067 C 336.204 105.168,336.084 105.208,336.001 105.156 C 335.917 105.104,335.794 105.233,335.727 105.442 C 335.601 105.840,335.928 106.199,336.241 106.005 C 336.334 105.948,336.353 105.994,336.283 106.107 C 336.210 106.225,336.287 106.364,336.465 106.432 C 336.752 106.542,336.750 106.560,336.441 106.668 C 336.134 106.776,336.139 106.793,336.499 106.887 C 336.714 106.943,336.855 106.933,336.812 106.864 C 336.769 106.795,336.808 106.622,336.898 106.480 C 337.023 106.283,337.063 106.326,337.070 106.667 C 337.081 107.281,337.514 107.859,337.666 107.463 C 337.727 107.302,337.699 107.228,337.600 107.289 C 337.501 107.350,337.472 107.276,337.533 107.118 C 337.592 106.965,337.555 106.738,337.451 106.613 C 337.310 106.442,337.347 106.379,337.599 106.360 C 337.839 106.342,337.904 106.416,337.826 106.619 C 337.746 106.829,337.783 106.863,337.970 106.751 C 338.177 106.626,338.183 106.647,338.002 106.871 C 337.880 107.021,337.836 107.199,337.903 107.266 C 338.109 107.472,338.117 108.434,337.914 108.559 C 337.792 108.635,337.775 108.592,337.868 108.442 C 337.975 108.269,337.932 108.238,337.704 108.325 C 337.533 108.391,337.280 108.444,337.141 108.444 C 337.002 108.444,336.889 108.529,336.889 108.633 C 
336.889 108.736,336.971 108.770,337.071 108.708 C 337.171 108.646,337.231 108.697,337.204 108.821 C 337.178 108.944,337.316 109.075,337.511 109.110 C 337.811 109.165,337.825 109.143,337.600 108.972 C 337.380 108.805,337.404 108.785,337.733 108.857 C 337.953 108.905,338.133 108.863,338.133 108.764 C 338.133 108.665,338.265 108.635,338.425 108.696 C 338.671 108.790,338.681 108.765,338.489 108.533 C 338.363 108.382,338.312 108.207,338.375 108.144 C 338.437 108.081,338.489 108.128,338.489 108.248 C 338.489 108.367,338.558 108.423,338.642 108.371 C 338.726 108.319,338.783 108.454,338.768 108.671 C 338.753 108.889,338.644 109.047,338.526 109.022 C 338.408 108.998,338.311 109.048,338.311 109.133 C 338.311 109.333,339.203 109.511,340.200 109.511 C 341.295 109.511,341.171 109.187,340.044 109.105 C 339.364 109.056,339.459 109.039,340.392 109.045 C 341.792 109.053,341.929 109.008,341.682 108.617 C 341.558 108.421,341.516 108.414,341.514 108.590 C 341.512 108.776,341.457 108.772,341.256 108.570 C 341.115 108.429,341.007 108.064,341.016 107.757 C 341.030 107.249,341.162 107.113,341.569 107.183 C 341.649 107.197,341.669 107.134,341.612 107.042 C 341.555 106.950,341.602 106.844,341.716 106.806 C 341.830 106.768,341.951 106.550,341.986 106.322 C 342.043 105.953,342.017 105.935,341.751 106.154 C 341.480 106.378,341.472 106.364,341.664 106.004 C 341.842 105.670,341.833 105.624,341.604 105.712 C 341.455 105.769,341.333 105.923,341.333 106.053 C 341.333 106.183,341.254 106.339,341.156 106.400 C 340.951 106.527,341.253 105.644,341.529 105.312 C 341.647 105.170,341.629 105.067,341.474 104.995 C 341.287 104.909,341.287 104.869,341.476 104.782 C 341.653 104.701,341.631 104.613,341.387 104.413 C 341.211 104.269,341.179 104.210,341.316 104.281 C 341.477 104.364,341.522 104.340,341.444 104.213 C 341.377 104.104,341.191 104.066,341.032 104.127 C 340.822 104.207,340.782 104.173,340.887 104.003 C 340.985 103.845,340.964 103.810,340.825 103.896 C 340.710 103.967,340.657 104.091,340.707 104.171 C 
340.757 104.252,340.673 104.366,340.521 104.425 C 340.366 104.484,340.300 104.622,340.372 104.737 C 340.453 104.869,340.422 104.896,340.286 104.812 C 340.154 104.730,340.132 104.600,340.230 104.473 C 340.439 104.201,340.375 103.367,340.121 103.061 C 339.976 102.886,339.974 102.764,340.113 102.625 C 340.349 102.389,340.142 101.991,339.871 102.158 C 339.764 102.224,339.729 102.195,339.792 102.093 C 340.024 101.717,340.646 101.689,340.529 102.059 C 340.469 102.247,340.493 102.400,340.581 102.400 C 340.669 102.400,340.741 102.121,340.741 101.781 C 340.741 101.326,340.685 101.207,340.532 101.335 C 340.417 101.430,340.257 101.449,340.177 101.376 C 340.097 101.304,340.084 101.335,340.147 101.446 C 340.212 101.559,340.113 101.688,339.922 101.737 C 339.664 101.805,339.608 101.756,339.691 101.535 C 339.776 101.308,339.744 101.285,339.544 101.430 C 339.351 101.570,339.175 101.497,338.820 101.127 C 338.522 100.817,338.425 100.603,338.553 100.542 C 338.699 100.472,338.693 100.415,338.533 100.341 C 338.178 100.175,338.271 99.963,338.889 99.536 L 339.467 99.137 339.022 99.628 C 338.522 100.180,338.864 100.193,339.449 99.644 C 339.995 99.132,339.979 98.750,339.408 98.703 C 339.147 98.681,338.907 98.584,338.874 98.487 C 338.841 98.390,338.737 98.311,338.641 98.311 C 338.546 98.311,338.530 98.411,338.607 98.533 C 338.761 98.780,338.849 99.200,338.747 99.200 C 338.610 99.200,337.778 98.218,337.778 98.057 C 337.778 97.964,337.882 98.030,338.009 98.204 C 338.224 98.497,338.259 98.499,338.484 98.228 C 338.618 98.067,338.653 97.954,338.563 97.977 C 338.325 98.036,337.422 97.570,337.422 97.387 C 337.422 97.302,337.568 97.310,337.746 97.405 C 338.291 97.697,338.758 97.529,338.732 97.049 C 338.696 96.381,338.683 96.356,338.390 96.356 C 338.237 96.356,338.149 96.416,338.193 96.489 C 338.237 96.562,338.147 96.740,337.992 96.883 C 337.742 97.114,337.723 97.109,337.827 96.839 C 337.892 96.671,337.874 96.533,337.787 96.533 C 337.700 96.533,337.589 96.633,337.540 96.756 C 337.486 96.888,337.445 
96.853,337.436 96.667 C 337.427 96.469,337.552 96.356,337.778 96.356 C 337.973 96.356,338.133 96.271,338.133 96.167 C 338.133 96.064,338.209 96.026,338.302 96.083 C 338.394 96.140,338.439 96.025,338.401 95.826 C 338.305 95.327,338.137 95.088,337.959 95.198 C 337.876 95.249,337.836 95.150,337.869 94.979 C 337.901 94.807,337.940 94.667,337.955 94.667 C 337.970 94.667,338.016 94.467,338.058 94.222 C 338.114 93.898,338.089 93.847,337.968 94.036 C 337.876 94.178,337.835 94.349,337.876 94.416 C 337.918 94.483,337.845 94.626,337.714 94.735 C 337.408 94.989,336.800 94.470,336.800 93.956 C 336.801 93.481,337.109 93.618,337.346 94.198 C 337.534 94.658,337.537 94.659,337.640 94.265 C 337.698 94.045,337.672 93.911,337.583 93.966 C 337.495 94.021,337.422 93.981,337.422 93.877 C 337.422 93.774,337.542 93.689,337.689 93.689 C 337.836 93.689,337.956 93.600,337.956 93.492 C 337.956 93.384,337.856 93.334,337.733 93.381 C 337.597 93.433,337.501 93.299,337.485 93.032 C 337.450 92.460,337.311 92.345,336.982 92.619 C 336.762 92.802,336.711 92.798,336.708 92.600 C 336.706 92.400,336.676 92.402,336.545 92.610 C 336.456 92.749,336.257 92.882,336.102 92.905 C 335.947 92.928,335.866 92.872,335.923 92.781 C 335.979 92.691,336.119 92.673,336.235 92.742 C 336.356 92.815,336.342 92.742,336.202 92.569 C 336.069 92.405,335.909 92.302,335.847 92.341 C 335.784 92.379,335.718 92.145,335.700 91.820 C 335.679 91.446,335.761 91.193,335.922 91.131 C 336.359 90.964,336.212 90.619,335.742 90.709 C 335.483 90.758,335.321 90.713,335.342 90.596 C 335.362 90.488,335.258 90.427,335.111 90.459 C 334.964 90.492,334.904 90.459,334.976 90.387 C 335.049 90.314,335.019 90.148,334.910 90.016 C 334.773 89.851,334.771 89.778,334.903 89.778 C 335.008 89.778,335.167 89.909,335.257 90.069 C 335.438 90.394,335.822 90.252,335.822 89.862 C 335.822 89.711,335.715 89.652,335.554 89.714 C 335.361 89.788,335.316 89.736,335.395 89.531 C 335.455 89.373,335.589 89.244,335.693 89.244 C 335.797 89.244,335.830 89.193,335.767 89.130 C 
335.704 89.067,335.529 89.119,335.378 89.244 C 335.146 89.437,335.121 89.426,335.218 89.173 C 335.281 89.009,335.263 88.898,335.177 88.926 C 335.092 88.955,335.022 88.898,335.022 88.800 C 335.022 88.702,335.082 88.642,335.156 88.667 C 335.229 88.691,335.289 88.637,335.289 88.546 C 335.289 88.456,335.149 88.345,334.978 88.300 C 334.729 88.235,334.723 88.215,334.948 88.198 C 335.106 88.187,335.313 87.904,335.420 87.555 C 335.545 87.149,335.549 86.971,335.431 87.044 C 335.317 87.115,335.297 87.009,335.378 86.756 C 335.476 86.446,335.450 86.388,335.263 86.501 C 335.064 86.621,335.064 86.594,335.266 86.345 C 335.593 85.943,335.268 85.960,334.830 86.369 C 334.638 86.548,334.379 86.660,334.253 86.618 C 334.089 86.563,334.115 86.474,334.346 86.299 C 334.537 86.154,334.577 86.053,334.444 86.050 C 334.322 86.047,334.222 85.929,334.222 85.788 C 334.222 85.647,334.143 85.483,334.046 85.423 C 333.943 85.360,333.914 85.447,333.974 85.635 C 334.048 85.864,333.967 85.829,333.690 85.511 C 333.477 85.267,333.362 85.179,333.434 85.316 C 333.521 85.481,333.496 85.523,333.361 85.439 C 333.045 85.244,333.109 85.153,333.706 84.945 C 334.110 84.804,334.193 84.713,334.017 84.602 C 333.885 84.518,333.864 84.449,333.970 84.447 C 334.076 84.446,334.163 84.324,334.163 84.178 C 334.163 83.909,333.994 83.838,333.521 83.906 C 333.296 83.938,333.296 83.903,333.521 83.631 C 333.758 83.343,333.747 83.336,333.378 83.533 C 333.155 83.651,332.978 83.671,332.978 83.577 C 332.978 83.484,333.078 83.366,333.200 83.315 C 333.357 83.249,333.318 83.134,333.067 82.921 C 332.744 82.646,332.741 82.629,333.042 82.734 C 333.323 82.831,333.356 82.781,333.261 82.402 C 333.199 82.156,333.214 81.956,333.295 81.956 C 333.547 81.956,333.493 80.742,333.236 80.643 C 333.095 80.590,333.031 80.642,333.080 80.771 C 333.130 80.901,333.017 80.990,332.804 80.990 C 332.606 80.990,332.443 80.908,332.442 80.806 C 332.440 80.705,332.361 80.742,332.267 80.889 C 332.119 81.117,332.094 81.110,332.092 80.844 C 332.090 80.669,332.224 
80.530,332.400 80.525 C 332.593 80.520,332.508 80.428,332.178 80.283 C 331.884 80.155,331.572 80.091,331.484 80.141 C 331.395 80.191,331.380 80.140,331.450 80.027 C 331.519 79.915,331.478 79.822,331.359 79.822 C 331.239 79.822,331.192 79.771,331.255 79.708 C 331.318 79.645,331.506 79.705,331.674 79.841 C 331.923 80.043,331.953 80.044,331.834 79.847 C 331.744 79.699,331.783 79.537,331.933 79.430 C 332.122 79.295,332.108 79.278,331.867 79.355 C 331.662 79.421,331.556 79.362,331.556 79.184 C 331.556 79.035,331.476 78.962,331.378 79.022 C 331.187 79.140,331.129 78.885,331.306 78.709 C 331.467 78.548,331.924 78.769,331.811 78.952 C 331.757 79.039,331.846 79.111,332.010 79.111 C 332.230 79.111,332.286 79.225,332.225 79.547 C 332.131 80.037,332.328 80.158,332.978 80.011 C 333.239 79.952,333.446 79.764,333.480 79.556 L 333.539 79.200 333.245 79.556 C 333.084 79.751,332.998 79.818,333.055 79.703 C 333.112 79.589,332.974 79.349,332.748 79.170 C 332.522 78.991,332.277 78.687,332.203 78.494 C 332.118 78.270,332.025 78.215,331.946 78.341 C 331.866 78.469,331.700 78.377,331.481 78.085 C 331.293 77.835,331.217 77.678,331.311 77.737 C 331.406 77.795,331.600 77.770,331.742 77.680 C 331.944 77.552,331.914 77.516,331.604 77.514 C 331.153 77.511,331.206 76.490,331.672 76.213 C 331.803 76.135,331.910 75.975,331.908 75.858 C 331.907 75.713,331.857 75.722,331.753 75.886 C 331.642 76.061,331.565 76.071,331.474 75.922 C 331.404 75.810,331.440 75.606,331.553 75.469 C 331.727 75.260,331.689 75.254,331.303 75.433 C 331.052 75.549,330.847 75.702,330.846 75.772 C 330.845 75.842,330.711 75.849,330.549 75.786 C 330.386 75.724,330.298 75.747,330.354 75.836 C 330.415 75.934,329.658 75.965,328.472 75.912 L 326.489 75.824 328.383 75.773 C 331.278 75.695,331.454 75.236,328.583 75.251 L 326.489 75.261 328.444 75.373 L 330.400 75.485 328.222 75.520 C 326.751 75.544,326.044 75.620,326.044 75.755 C 326.044 75.902,325.981 75.902,325.806 75.757 C 325.510 75.511,325.321 75.673,325.465 76.048 C 325.540 
76.242,325.494 76.298,325.317 76.230 C 325.132 76.159,325.055 76.280,325.039 76.667 C 325.026 76.993,325.072 77.113,325.157 76.975 C 325.234 76.850,325.431 76.799,325.593 76.862 C 325.756 76.924,325.834 77.064,325.767 77.173 C 325.681 77.312,325.599 77.299,325.492 77.130 C 325.389 76.969,325.338 76.959,325.336 77.101 C 325.335 77.217,325.413 77.362,325.511 77.422 C 325.609 77.483,325.689 77.612,325.689 77.710 C 325.689 77.808,325.614 77.841,325.522 77.785 C 325.187 77.577,324.993 78.281,325.305 78.571 C 325.679 78.917,325.723 79.416,325.391 79.543 C 325.250 79.597,325.185 79.562,325.246 79.465 C 325.305 79.368,325.232 79.289,325.082 79.289 C 324.866 79.289,324.843 79.380,324.973 79.721 C 325.073 79.985,325.074 80.176,324.974 80.210 C 324.884 80.241,324.782 80.451,324.748 80.675 C 324.667 81.217,324.132 81.825,323.876 81.667 C 323.745 81.586,323.710 81.639,323.778 81.815 C 323.836 81.966,323.770 82.307,323.631 82.574 C 323.493 82.841,323.424 83.131,323.478 83.219 C 323.532 83.306,323.432 83.384,323.255 83.392 C 323.056 83.400,323.021 83.441,323.163 83.498 C 323.332 83.566,323.341 83.652,323.197 83.825 C 323.052 84.000,323.062 84.051,323.235 84.025 C 323.662 83.960,323.897 83.724,323.795 83.461 C 323.740 83.317,323.775 83.200,323.873 83.200 C 324.166 83.200,324.281 83.739,324.010 83.843 C 323.872 83.896,323.789 84.095,323.826 84.285 C 323.863 84.476,323.817 84.585,323.724 84.528 C 323.576 84.436,323.487 84.811,323.505 85.453 C 323.509 85.567,323.399 85.618,323.262 85.565 C 323.077 85.494,323.063 85.409,323.209 85.234 C 323.361 85.050,323.323 85.004,323.036 85.026 C 322.833 85.041,322.661 85.176,322.654 85.327 C 322.645 85.518,322.596 85.493,322.489 85.244 C 322.354 84.929,322.335 84.945,322.324 85.378 C 322.317 85.647,322.211 85.868,322.089 85.869 C 321.935 85.871,321.945 85.921,322.122 86.034 C 322.342 86.173,322.343 86.239,322.125 86.520 C 321.899 86.811,321.914 86.809,322.271 86.505 C 322.490 86.318,322.722 86.218,322.786 86.283 C 322.851 86.347,322.797 
86.400,322.667 86.400 C 322.536 86.400,322.480 86.450,322.541 86.511 C 322.602 86.573,322.548 86.873,322.421 87.178 C 322.293 87.483,322.156 87.813,322.117 87.911 C 322.077 88.009,321.987 88.070,321.916 88.046 C 321.846 88.023,321.866 88.209,321.962 88.461 C 322.157 88.974,322.059 89.244,321.678 89.244 C 321.538 89.244,321.422 89.329,321.422 89.433 C 321.422 89.536,321.342 89.572,321.244 89.511 C 321.147 89.451,321.067 89.491,321.067 89.600 C 321.067 89.709,320.992 89.753,320.901 89.696 C 320.685 89.563,320.479 89.901,320.629 90.144 C 320.703 90.264,320.825 90.223,320.966 90.030 C 321.174 89.747,321.194 89.746,321.303 90.029 C 321.380 90.231,321.350 90.288,321.212 90.202 C 321.072 90.116,321.045 90.176,321.128 90.392 C 321.225 90.646,321.178 90.691,320.892 90.616 C 320.605 90.541,320.533 90.609,320.533 90.956 C 320.533 91.335,320.578 91.366,320.889 91.200 C 321.352 90.952,321.398 91.565,320.944 91.934 C 320.778 92.068,320.696 92.088,320.762 91.979 C 320.827 91.870,320.730 91.645,320.547 91.479 C 320.324 91.277,320.174 91.241,320.094 91.369 C 320.029 91.475,319.918 91.526,319.848 91.483 C 319.778 91.439,319.721 91.552,319.721 91.732 C 319.721 91.997,319.784 92.026,320.051 91.884 C 320.317 91.741,320.401 91.778,320.478 92.074 C 320.531 92.275,320.657 92.389,320.759 92.326 C 320.875 92.254,320.889 92.300,320.798 92.447 C 320.693 92.618,320.734 92.652,320.949 92.569 C 321.268 92.447,321.361 92.707,321.064 92.891 C 320.964 92.952,320.919 93.060,320.963 93.131 C 321.006 93.202,320.894 93.271,320.713 93.284 C 320.459 93.303,320.401 93.213,320.460 92.888 C 320.502 92.657,320.456 92.417,320.357 92.356 C 320.258 92.296,320.178 92.328,320.178 92.428 C 320.178 92.529,320.038 92.567,319.867 92.513 C 319.593 92.427,319.588 92.445,319.822 92.656 C 320.486 93.254,320.625 94.403,320.032 94.397 C 319.797 94.395,319.784 94.362,319.975 94.242 C 320.143 94.135,320.156 94.052,320.019 93.967 C 319.911 93.900,319.822 93.930,319.822 94.034 C 319.822 94.138,319.738 94.222,319.634 94.222 C 
319.530 94.222,319.491 94.149,319.546 94.060 C 319.601 93.971,319.524 93.916,319.374 93.938 C 319.074 93.982,319.006 94.298,319.289 94.333 C 319.387 94.346,319.551 94.366,319.655 94.378 C 319.758 94.390,319.786 94.492,319.716 94.605 C 319.633 94.740,319.675 94.766,319.839 94.681 C 319.977 94.610,319.934 94.677,319.744 94.831 C 319.554 94.985,319.289 95.111,319.156 95.111 C 319.022 95.111,318.964 95.195,319.027 95.297 C 319.090 95.399,319.051 95.592,318.940 95.725 C 318.800 95.894,318.796 96.004,318.926 96.084 C 319.030 96.148,319.188 96.015,319.279 95.789 L 319.444 95.378 319.461 95.822 C 319.478 96.282,319.839 95.919,319.930 95.350 C 320.009 94.864,320.338 94.462,320.308 94.889 C 320.293 95.109,320.224 95.289,320.156 95.289 C 320.087 95.289,320.053 95.429,320.079 95.600 C 320.106 95.771,320.039 95.968,319.930 96.038 C 319.822 96.108,319.733 96.259,319.733 96.373 C 319.733 96.647,318.977 96.926,318.840 96.703 C 318.746 96.552,319.128 96.448,319.524 96.517 C 319.605 96.531,319.623 96.465,319.565 96.371 C 319.413 96.124,318.781 96.211,318.673 96.493 C 318.622 96.625,318.487 96.676,318.373 96.606 C 318.234 96.520,318.196 96.591,318.256 96.822 C 318.306 97.011,318.234 97.271,318.097 97.399 C 317.885 97.597,317.901 97.677,318.207 97.942 C 318.457 98.159,318.509 98.309,318.378 98.440 C 318.249 98.569,318.248 98.662,318.377 98.741 C 318.698 98.940,318.382 99.378,317.918 99.378 C 317.582 99.378,317.500 99.448,317.583 99.664 C 317.660 99.865,317.616 99.921,317.436 99.852 C 317.131 99.735,317.183 100.201,317.541 100.797 C 317.751 101.146,317.734 101.219,317.386 101.455 L 316.994 101.721 317.337 102.360 C 317.683 103.007,317.539 103.615,317.103 103.345 C 316.988 103.274,317.048 103.174,317.256 103.093 C 317.572 102.969,317.568 102.957,317.200 102.946 C 316.980 102.939,316.800 103.029,316.800 103.145 C 316.800 103.262,316.680 103.202,316.533 103.012 C 316.310 102.724,316.284 102.718,316.374 102.978 C 316.435 103.152,316.396 103.289,316.286 103.289 C 316.177 103.289,316.089 
103.374,316.089 103.477 C 316.089 103.581,316.169 103.616,316.267 103.556 C 316.364 103.495,316.450 103.590,316.457 103.767 C 316.468 104.054,316.486 104.051,316.622 103.733 C 316.753 103.429,316.777 103.423,316.788 103.689 C 316.794 103.860,316.877 104.000,316.972 104.000 C 317.067 104.000,317.099 104.120,317.042 104.266 C 316.986 104.412,316.869 104.487,316.781 104.433 C 316.694 104.379,316.614 104.479,316.605 104.656 C 316.592 104.890,316.555 104.864,316.470 104.561 L 316.352 104.144 316.008 104.524 C 315.668 104.900,315.668 104.903,316.010 104.774 C 316.315 104.660,316.327 104.679,316.113 104.943 C 315.903 105.201,315.918 105.259,316.227 105.376 C 316.529 105.490,316.546 105.546,316.336 105.740 C 316.133 105.929,316.089 105.919,316.089 105.685 C 316.089 105.529,316.009 105.451,315.911 105.511 C 315.813 105.572,315.733 105.502,315.733 105.356 C 315.733 105.127,314.861 105.089,309.203 105.073 C 305.611 105.063,302.609 104.992,302.532 104.914 C 302.372 104.754,302.349 77.168,302.508 76.676 C 302.594 76.408,302.580 76.399,302.419 76.622 C 302.286 76.807,302.227 81.106,302.225 90.701 C 302.223 99.714,302.160 104.551,302.044 104.622 C 301.927 104.695,301.867 100.038,301.867 90.785 C 301.867 79.791,301.820 76.820,301.644 76.753 C 301.467 76.685,301.440 79.516,301.511 90.689 C 301.595 103.959,301.583 104.711,301.289 104.711 C 300.769 104.711,300.811 76.551,301.332 76.117 C 301.598 75.896,301.641 75.756,301.503 75.556 C 301.351 75.335,301.328 75.350,301.370 75.643 C 301.420 75.981,299.345 76.109,299.022 75.788 C 298.973 75.739,299.435 75.727,300.049 75.761 C 300.967 75.811,301.156 75.775,301.114 75.556 C 301.014 75.033,298.298 75.091,297.795 75.627 M344.962 75.596 C 344.775 75.815,344.742 75.902,344.889 75.790 C 345.036 75.678,345.216 75.579,345.289 75.571 C 345.440 75.554,345.489 75.386,345.377 75.273 C 345.335 75.232,345.149 75.377,344.962 75.596 M346.578 75.378 C 346.517 75.476,346.513 75.629,346.568 75.718 C 346.684 75.905,346.326 75.877,346.000 75.673 C 345.849 
75.579,345.778 75.652,345.778 75.901 C 345.778 76.102,345.873 76.267,345.990 76.267 C 346.106 76.267,346.050 76.377,345.865 76.512 C 345.680 76.648,345.405 76.734,345.253 76.704 C 345.102 76.674,344.858 76.637,344.711 76.622 C 344.526 76.603,344.512 76.567,344.667 76.504 C 344.980 76.378,344.944 76.112,344.620 76.160 C 344.212 76.220,343.927 76.556,344.162 76.702 C 344.572 76.955,344.419 108.042,344.007 108.263 C 343.816 108.365,343.566 108.428,343.452 108.402 C 343.338 108.377,343.208 108.476,343.164 108.622 C 343.093 108.859,343.106 108.859,343.283 108.622 C 343.452 108.397,343.477 108.410,343.447 108.711 C 343.355 109.642,344.253 110.060,344.768 109.326 C 345.018 108.968,345.022 108.969,345.022 109.400 C 345.022 109.699,344.944 109.809,344.778 109.745 C 344.632 109.689,344.533 109.768,344.533 109.943 C 344.533 110.176,344.602 110.198,344.877 110.051 C 345.334 109.806,345.472 109.818,345.310 110.089 C 345.209 110.257,345.245 110.255,345.460 110.081 C 345.683 109.901,345.759 109.899,345.817 110.072 C 345.865 110.216,345.932 110.184,346.010 109.982 C 346.081 109.796,346.059 109.714,345.954 109.779 C 345.857 109.839,345.778 109.754,345.778 109.591 C 345.778 109.401,345.682 109.332,345.511 109.398 C 345.359 109.456,345.244 109.402,345.244 109.272 C 345.244 109.123,345.477 109.051,345.916 109.065 C 347.057 109.100,347.292 109.419,346.649 110.062 L 346.276 110.435 346.671 110.310 C 346.971 110.214,347.037 110.242,346.942 110.425 C 346.873 110.558,346.994 110.467,347.210 110.222 C 347.426 109.978,347.678 109.800,347.770 109.827 C 347.862 109.853,347.887 109.793,347.825 109.693 C 347.763 109.593,347.802 109.511,347.911 109.511 C 348.020 109.511,348.053 109.419,347.983 109.306 C 347.907 109.183,347.934 109.149,348.049 109.220 C 348.155 109.285,348.292 109.258,348.354 109.158 C 348.501 108.921,351.822 108.918,351.822 109.156 C 351.822 109.253,351.745 109.333,351.650 109.333 C 351.555 109.333,351.523 109.451,351.578 109.594 C 351.633 109.738,351.767 109.838,351.875 109.817 
C 351.984 109.795,352.076 109.898,352.081 110.044 C 352.087 110.239,352.326 110.319,352.978 110.343 C 353.603 110.366,353.735 110.337,353.422 110.245 C 352.979 110.114,352.979 110.113,353.378 110.007 C 353.598 109.948,353.820 109.969,353.872 110.053 C 353.924 110.138,354.098 110.157,354.257 110.095 C 354.429 110.029,354.502 110.057,354.437 110.162 C 354.376 110.260,354.436 110.341,354.569 110.341 C 354.759 110.341,354.767 110.271,354.607 110.015 C 354.369 109.634,354.194 109.600,354.086 109.911 C 354.043 110.033,354.016 109.887,354.026 109.586 C 354.044 109.059,354.489 108.769,354.489 109.285 C 354.489 109.421,354.568 109.483,354.665 109.423 C 354.764 109.362,354.796 109.430,354.739 109.578 C 354.629 109.865,355.296 109.787,355.479 109.492 C 355.531 109.407,355.430 109.383,355.254 109.439 C 354.956 109.533,354.954 109.523,355.234 109.292 C 355.531 109.045,355.568 108.711,355.336 108.341 C 355.270 108.235,355.332 108.148,355.474 108.148 C 355.617 108.148,355.735 108.235,355.736 108.341 C 355.738 108.447,355.814 108.414,355.906 108.268 C 356.032 108.069,356.125 108.054,356.277 108.206 C 356.429 108.358,356.384 108.440,356.097 108.531 C 355.781 108.631,355.753 108.698,355.935 108.917 C 356.112 109.130,356.105 109.214,355.900 109.344 C 355.681 109.483,355.682 109.508,355.911 109.522 C 356.058 109.531,356.312 109.564,356.477 109.596 C 356.672 109.634,356.742 109.568,356.680 109.405 C 356.627 109.268,356.673 109.156,356.781 109.156 C 356.889 109.156,356.978 109.262,356.978 109.393 C 356.978 109.675,357.609 109.662,357.897 109.374 C 358.111 109.160,357.777 108.547,357.530 108.699 C 357.465 108.739,357.226 108.632,356.999 108.460 C 356.690 108.227,356.657 108.148,356.870 108.148 C 357.027 108.148,357.156 108.220,357.156 108.307 C 357.156 108.394,357.226 108.422,357.313 108.368 C 357.400 108.314,357.570 108.390,357.691 108.535 C 357.941 108.837,358.503 108.887,358.667 108.622 C 358.727 108.525,358.632 108.465,358.455 108.489 C 358.278 108.513,358.119 108.453,358.101 108.356 
C 358.083 108.258,358.203 108.201,358.367 108.230 C 358.605 108.271,358.616 108.247,358.422 108.116 C 358.287 108.025,358.232 107.862,358.299 107.753 C 358.510 107.411,358.714 107.536,358.922 108.133 C 359.070 108.556,359.164 108.646,359.274 108.469 C 359.400 108.264,359.439 108.264,359.521 108.469 C 359.600 108.668,359.651 108.663,359.806 108.444 C 359.962 108.224,359.995 108.221,359.997 108.430 C 359.999 108.568,360.053 108.629,360.117 108.564 C 360.182 108.499,360.090 108.286,359.912 108.090 C 359.735 107.894,359.671 107.733,359.772 107.733 C 359.966 107.733,359.988 106.993,359.797 106.844 C 359.735 106.796,359.795 106.652,359.931 106.526 C 360.083 106.385,360.178 106.370,360.178 106.488 C 360.178 106.618,360.306 106.611,360.578 106.466 C 360.920 106.283,360.947 106.290,360.766 106.510 C 360.629 106.676,360.612 106.860,360.717 107.028 C 360.807 107.172,360.837 107.449,360.782 107.644 C 360.722 107.864,360.816 107.796,361.029 107.467 C 361.219 107.173,361.350 106.793,361.320 106.622 C 361.243 106.189,361.537 106.231,361.652 106.670 C 361.704 106.867,361.653 107.129,361.540 107.252 C 361.426 107.375,361.308 107.593,361.278 107.738 C 361.230 107.967,361.442 107.755,362.539 106.478 C 362.670 106.325,362.746 106.105,362.707 105.989 C 362.669 105.873,362.744 105.772,362.874 105.765 C 363.008 105.758,362.974 105.698,362.794 105.628 C 362.494 105.509,362.496 105.481,362.828 105.126 C 363.021 104.918,363.130 104.700,363.071 104.641 C 363.011 104.582,363.048 104.533,363.151 104.533 C 363.255 104.533,363.385 104.414,363.441 104.269 C 363.497 104.123,363.456 104.023,363.349 104.047 C 363.243 104.070,363.115 103.978,363.066 103.842 C 363.016 103.706,363.049 103.640,363.138 103.695 C 363.230 103.752,363.290 103.392,363.276 102.876 C 363.253 102.012,363.279 101.954,363.698 101.928 C 364.020 101.908,364.075 101.944,363.894 102.059 C 363.692 102.187,363.702 102.217,363.947 102.219 C 364.113 102.221,364.328 102.362,364.424 102.533 C 364.588 102.826,364.606 102.824,364.727 102.510 
C 364.798 102.326,364.763 102.027,364.650 101.846 C 364.398 101.442,364.381 101.095,364.620 101.243 C 364.721 101.305,364.751 101.234,364.690 101.076 C 364.595 100.827,364.877 100.703,365.371 100.777 C 365.441 100.787,365.448 100.664,365.386 100.502 C 365.310 100.305,365.341 100.251,365.481 100.337 C 365.595 100.408,365.689 100.381,365.689 100.277 C 365.689 100.037,365.389 100.032,365.243 100.270 C 365.181 100.369,365.044 100.397,364.938 100.331 C 364.823 100.260,364.796 100.294,364.872 100.417 C 364.942 100.530,364.914 100.622,364.810 100.622 C 364.552 100.622,364.574 100.350,364.851 100.121 C 365.087 99.924,365.112 99.320,364.904 98.830 C 364.786 98.552,364.267 98.427,364.267 98.677 C 364.267 98.748,364.164 98.846,364.038 98.894 C 363.904 98.946,363.810 98.849,363.810 98.660 C 363.810 98.482,363.872 98.376,363.949 98.424 C 364.026 98.471,364.089 98.434,364.089 98.341 C 364.089 98.095,363.627 97.956,363.438 98.144 C 363.342 98.240,363.266 98.109,363.250 97.819 C 363.233 97.503,363.266 97.433,363.345 97.617 C 363.412 97.774,363.646 97.886,363.867 97.869 C 364.087 97.851,364.227 97.876,364.179 97.924 C 364.035 98.069,364.472 98.493,364.651 98.382 C 364.748 98.322,364.726 98.170,364.598 98.015 C 364.438 97.822,364.432 97.717,364.578 97.627 C 364.803 97.488,364.774 96.441,364.540 96.227 C 364.456 96.151,364.440 96.181,364.504 96.292 C 364.568 96.404,364.505 96.540,364.364 96.595 C 364.191 96.661,364.140 96.607,364.207 96.431 C 364.288 96.221,364.177 96.186,363.648 96.251 C 363.018 96.329,363.001 96.318,363.282 96.008 C 363.459 95.812,363.730 95.715,363.966 95.765 C 364.323 95.840,364.328 95.829,364.031 95.640 C 363.853 95.527,363.661 95.480,363.604 95.536 C 363.548 95.593,363.406 95.508,363.289 95.348 C 363.168 95.182,362.984 95.114,362.861 95.190 C 362.702 95.288,362.725 95.381,362.950 95.545 C 363.203 95.731,363.212 95.794,363.005 95.923 C 362.699 96.114,362.233 96.197,362.399 96.031 C 362.465 95.965,362.592 95.901,362.681 95.889 C 363.076 95.835,363.097 
95.808,362.858 95.656 C 362.718 95.568,362.578 95.309,362.546 95.081 C 362.500 94.756,362.526 94.724,362.663 94.933 C 362.809 95.154,362.839 95.157,362.842 94.947 C 362.843 94.808,362.704 94.535,362.533 94.339 C 362.362 94.143,362.317 94.038,362.432 94.106 C 362.548 94.173,362.692 94.147,362.754 94.047 C 362.818 93.944,362.678 93.867,362.425 93.867 C 361.852 93.867,361.422 94.036,361.422 94.263 C 361.422 94.367,361.535 94.350,361.689 94.222 C 361.873 94.069,361.956 94.063,361.956 94.200 C 361.956 94.397,361.682 94.484,361.333 94.398 C 361.236 94.374,361.059 94.352,360.941 94.348 C 360.823 94.344,360.768 94.274,360.818 94.193 C 360.869 94.111,360.812 94.044,360.692 94.044 C 360.572 94.044,360.525 93.993,360.588 93.930 C 360.651 93.867,360.826 93.919,360.978 94.044 C 361.209 94.237,361.235 94.227,361.141 93.981 C 361.076 93.813,361.117 93.689,361.236 93.689 C 361.350 93.689,361.394 93.609,361.333 93.511 C 361.273 93.413,361.143 93.333,361.046 93.333 C 360.948 93.333,360.917 93.412,360.977 93.509 C 361.039 93.610,360.967 93.640,360.809 93.579 C 360.657 93.521,360.533 93.397,360.533 93.304 C 360.533 93.211,360.613 93.184,360.711 93.244 C 360.814 93.308,360.888 93.143,360.886 92.855 C 360.885 92.580,360.824 92.436,360.751 92.533 C 360.410 92.992,359.703 93.500,359.695 93.292 C 359.691 93.168,359.650 92.947,359.606 92.800 C 359.561 92.653,359.558 92.593,359.600 92.667 C 359.789 93.005,360.216 92.785,360.118 92.400 C 360.026 92.032,360.037 92.026,360.254 92.316 C 360.467 92.600,360.502 92.604,360.601 92.346 C 360.785 91.867,360.726 91.690,360.419 91.807 C 360.171 91.902,360.162 91.877,360.361 91.638 C 360.555 91.405,360.551 91.367,360.341 91.412 C 360.202 91.442,360.038 91.537,359.975 91.623 C 359.913 91.709,359.764 91.719,359.645 91.645 C 359.479 91.542,359.484 91.476,359.670 91.358 C 359.862 91.237,359.864 91.205,359.678 91.203 C 359.546 91.201,359.501 91.110,359.574 90.992 C 359.664 90.847,359.601 90.823,359.369 90.912 C 359.110 91.012,359.133 90.946,359.473 90.619 C 
359.931 90.179,360.348 90.158,359.987 90.594 C 359.872 90.732,359.808 90.850,359.845 90.858 C 360.551 90.998,360.767 91.002,360.563 90.870 C 360.372 90.746,360.380 90.695,360.607 90.608 C 360.762 90.549,360.888 90.398,360.886 90.272 C 360.884 90.100,360.846 90.104,360.728 90.289 C 360.638 90.432,360.479 90.476,360.349 90.396 C 360.176 90.289,360.195 90.205,360.428 90.034 C 360.633 89.885,360.668 89.772,360.535 89.690 C 360.402 89.608,360.442 89.472,360.658 89.273 C 360.998 88.959,361.215 88.243,360.913 88.430 C 360.816 88.490,360.678 88.445,360.607 88.331 C 360.527 88.201,360.555 88.170,360.684 88.250 C 360.797 88.319,360.889 88.298,360.889 88.203 C 360.889 88.108,360.793 87.998,360.675 87.958 C 360.509 87.903,360.509 87.858,360.675 87.755 C 360.793 87.683,360.889 87.708,360.889 87.812 C 360.889 87.915,360.974 88.000,361.077 88.000 C 361.181 88.000,361.208 87.908,361.139 87.795 C 361.059 87.667,361.090 87.638,361.220 87.718 C 361.334 87.789,361.382 87.921,361.326 88.012 C 361.269 88.103,361.328 88.179,361.456 88.181 C 361.614 88.182,361.626 88.225,361.494 88.314 C 361.386 88.386,361.299 88.743,361.301 89.108 C 361.303 89.653,361.348 89.735,361.549 89.568 C 361.748 89.403,361.751 89.337,361.564 89.218 C 361.386 89.106,361.384 89.071,361.556 89.069 C 361.678 89.068,361.778 88.982,361.778 88.878 C 361.778 88.775,361.698 88.740,361.600 88.800 C 361.330 88.967,361.388 88.720,361.689 88.419 C 361.926 88.182,361.956 88.203,361.956 88.610 C 361.956 88.861,362.040 89.067,362.144 89.067 C 362.247 89.067,362.288 88.996,362.235 88.909 C 362.181 88.822,362.276 88.642,362.446 88.509 C 362.726 88.290,362.732 88.296,362.512 88.578 C 362.377 88.749,362.349 88.889,362.448 88.889 C 362.698 88.889,362.867 88.390,362.661 88.263 C 362.556 88.198,362.557 88.079,362.666 87.946 C 363.837 86.507,364.074 86.053,363.325 86.680 C 363.034 86.924,363.047 86.893,363.369 86.571 C 363.977 85.966,363.988 85.253,363.388 85.216 C 363.020 85.193,362.997 85.216,363.252 85.359 C 363.599 85.553,363.671 
86.117,363.333 85.993 C 363.211 85.948,363.130 85.824,363.153 85.718 C 363.204 85.485,362.637 85.666,362.447 85.943 C 362.358 86.073,362.316 86.066,362.314 85.920 C 362.312 85.803,362.447 85.630,362.614 85.537 C 362.861 85.399,362.876 85.319,362.694 85.100 C 362.559 84.937,362.539 84.790,362.643 84.726 C 362.737 84.668,362.846 84.715,362.884 84.831 C 363.002 85.184,363.555 84.840,363.568 84.406 C 363.580 84.020,363.588 84.018,363.734 84.356 C 363.868 84.665,363.884 84.649,363.861 84.232 C 363.834 83.732,363.378 83.859,363.378 84.367 C 363.378 84.723,363.074 84.835,362.954 84.523 C 362.885 84.344,362.920 84.292,363.055 84.376 C 363.168 84.446,363.107 84.298,362.919 84.049 C 362.692 83.745,362.534 83.662,362.447 83.797 C 362.365 83.924,362.316 83.877,362.314 83.670 C 362.312 83.462,362.402 83.375,362.556 83.434 C 362.691 83.485,362.907 83.434,363.036 83.319 C 363.227 83.151,363.226 83.171,363.034 83.425 C 362.821 83.706,362.841 83.730,363.221 83.660 C 363.454 83.617,363.704 83.576,363.778 83.569 C 363.851 83.562,363.905 83.416,363.897 83.244 C 363.889 83.059,363.847 83.023,363.793 83.156 C 363.744 83.278,363.626 83.378,363.530 83.378 C 363.435 83.378,363.408 83.294,363.471 83.192 C 363.535 83.090,363.479 82.870,363.349 82.703 C 363.137 82.434,363.144 82.426,363.406 82.631 C 363.578 82.765,363.745 82.792,363.804 82.696 C 363.988 82.398,363.222 82.337,362.984 82.630 C 362.794 82.865,362.798 82.887,363.005 82.762 C 363.175 82.660,363.214 82.678,363.128 82.817 C 363.058 82.930,362.840 83.022,362.643 83.022 C 362.327 83.022,362.316 82.992,362.549 82.759 C 362.816 82.492,362.757 81.778,362.468 81.778 C 362.382 81.778,362.311 81.688,362.311 81.578 C 362.311 81.440,362.394 81.447,362.578 81.600 C 362.757 81.749,362.844 81.757,362.844 81.625 C 362.844 81.512,363.070 81.455,363.378 81.491 C 363.671 81.525,363.911 81.479,363.911 81.388 C 363.911 81.297,363.771 81.236,363.600 81.254 C 362.937 81.321,362.717 81.229,362.522 80.802 C 362.411 80.559,362.372 80.225,362.436 80.060 C 
362.525 79.829,362.480 79.788,362.239 79.880 C 362.067 79.946,361.888 80.000,361.842 80.000 C 361.795 80.000,361.816 80.096,361.888 80.213 C 361.961 80.330,361.765 80.244,361.454 80.023 C 361.143 79.801,360.889 79.530,360.889 79.421 C 360.889 79.311,361.006 79.376,361.150 79.566 C 361.293 79.756,361.415 79.831,361.419 79.733 C 361.424 79.636,361.489 79.652,361.564 79.771 C 361.662 79.925,361.766 79.882,361.930 79.619 C 362.056 79.417,362.083 79.299,361.989 79.357 C 361.895 79.415,361.716 79.340,361.592 79.190 C 361.467 79.040,361.262 78.957,361.136 79.005 C 360.984 79.063,360.944 78.996,361.018 78.805 C 361.095 78.603,361.052 78.545,360.875 78.613 C 360.693 78.683,360.619 78.571,360.614 78.220 C 360.610 77.862,360.678 77.755,360.866 77.828 C 361.033 77.892,361.079 77.853,360.996 77.719 C 360.926 77.605,360.748 77.511,360.601 77.511 C 360.455 77.511,360.384 77.431,360.444 77.333 C 360.505 77.236,360.410 77.143,360.233 77.128 C 360.056 77.114,359.831 77.083,359.733 77.060 C 359.636 77.037,359.456 77.010,359.333 76.998 C 359.211 76.987,359.111 76.893,359.111 76.790 C 359.111 76.686,359.211 76.658,359.333 76.727 C 359.499 76.820,359.498 76.790,359.326 76.605 C 359.132 76.397,359.073 76.396,358.946 76.602 C 358.863 76.736,358.715 76.796,358.618 76.736 C 358.520 76.676,358.399 76.785,358.348 76.980 C 358.297 77.174,358.158 77.333,358.039 77.333 C 357.876 77.333,357.873 77.273,358.025 77.090 C 358.163 76.924,358.173 76.710,358.058 76.423 C 357.893 76.011,357.889 76.010,357.878 76.390 C 357.872 76.604,357.785 76.829,357.686 76.891 C 357.587 76.952,357.551 77.077,357.608 77.168 C 357.664 77.259,357.634 77.333,357.542 77.333 C 357.450 77.333,357.333 77.172,357.281 76.974 C 357.229 76.776,356.940 76.529,356.638 76.423 C 356.336 76.318,356.140 76.149,356.203 76.049 C 356.266 75.946,356.208 75.901,356.069 75.947 C 355.933 75.992,355.796 76.163,355.765 76.325 C 355.723 76.541,355.795 76.593,356.032 76.516 C 356.298 76.429,356.309 76.444,356.090 76.598 C 355.921 76.717,355.763 
76.723,355.655 76.615 C 355.561 76.521,355.320 76.450,355.120 76.457 C 354.759 76.469,354.759 76.471,355.111 76.622 C 355.401 76.747,355.411 76.777,355.166 76.788 C 355.001 76.794,354.815 76.719,354.754 76.619 C 354.692 76.520,354.584 76.475,354.513 76.519 C 354.441 76.563,354.267 76.521,354.125 76.425 C 353.917 76.285,353.932 76.262,354.204 76.304 C 354.389 76.332,354.575 76.259,354.616 76.141 C 354.664 76.004,354.553 75.953,354.307 76.001 C 353.982 76.063,353.958 76.034,354.148 75.805 C 354.330 75.585,354.325 75.546,354.119 75.590 C 353.978 75.621,353.891 75.534,353.921 75.392 C 353.966 75.184,353.928 75.180,353.699 75.369 C 353.547 75.496,353.422 75.529,353.421 75.444 C 353.421 75.359,353.232 75.524,353.002 75.812 C 352.573 76.348,350.735 76.665,350.478 76.248 C 350.423 76.161,350.628 76.089,350.933 76.089 C 351.238 76.089,351.445 76.020,351.393 75.935 C 351.340 75.850,351.077 75.805,350.808 75.835 C 350.539 75.865,350.254 75.794,350.176 75.678 C 350.098 75.562,350.077 75.707,350.129 76.000 C 350.181 76.293,350.177 76.433,350.120 76.311 C 350.062 76.189,349.881 76.090,349.719 76.092 C 349.462 76.094,349.458 76.118,349.689 76.267 C 349.926 76.420,349.926 76.441,349.689 76.454 C 349.509 76.464,349.538 76.519,349.778 76.622 C 350.095 76.759,350.082 76.776,349.655 76.788 C 349.392 76.794,349.119 76.708,349.050 76.595 C 348.966 76.459,349.002 76.438,349.156 76.533 C 349.319 76.634,349.346 76.608,349.245 76.445 C 349.133 76.264,349.043 76.287,348.838 76.551 C 348.619 76.831,348.592 76.837,348.679 76.583 C 348.737 76.415,348.708 76.231,348.614 76.173 C 348.521 76.115,348.438 76.213,348.430 76.390 C 348.423 76.566,348.379 76.620,348.333 76.508 C 348.287 76.396,348.113 76.324,347.947 76.349 C 347.781 76.373,347.750 76.358,347.880 76.314 C 348.009 76.271,348.071 76.122,348.018 75.982 C 347.945 75.791,348.024 75.755,348.341 75.834 C 348.811 75.952,349.029 75.748,348.810 75.395 C 348.709 75.231,348.630 75.221,348.545 75.359 C 348.478 75.467,348.343 75.556,348.246 75.556 C 
348.148 75.556,348.107 75.492,348.155 75.415 C 348.203 75.337,348.089 75.262,347.902 75.248 C 347.715 75.234,347.515 75.299,347.458 75.392 C 347.400 75.485,347.279 75.515,347.188 75.459 C 347.097 75.403,347.022 75.486,347.022 75.644 C 347.022 75.803,347.085 75.893,347.162 75.846 C 347.239 75.798,347.302 75.900,347.302 76.072 C 347.302 76.488,346.930 76.476,346.887 76.059 C 346.788 75.112,346.769 75.069,346.578 75.378 M349.690 75.333 C 349.691 75.407,350.071 75.480,350.535 75.496 C 351.135 75.517,351.275 75.482,351.022 75.375 C 350.572 75.185,349.688 75.157,349.690 75.333 M351.911 75.390 C 351.644 75.514,351.710 75.531,352.178 75.458 C 352.768 75.366,352.782 75.376,352.450 75.640 C 352.257 75.793,351.997 75.880,351.872 75.832 C 351.747 75.784,351.644 75.825,351.644 75.923 C 351.644 76.021,351.864 76.065,352.133 76.021 C 352.402 75.977,352.665 75.934,352.718 75.926 C 352.770 75.918,352.825 75.751,352.840 75.556 C 352.869 75.169,352.521 75.107,351.911 75.390 M381.706 75.289 C 381.699 75.387,381.772 75.587,381.867 75.733 C 381.992 75.927,382.040 75.936,382.042 75.767 C 382.043 75.639,382.108 75.574,382.186 75.622 C 382.264 75.670,382.444 75.633,382.586 75.540 C 382.774 75.416,382.714 75.392,382.366 75.449 C 382.097 75.494,381.850 75.437,381.802 75.320 C 381.755 75.205,381.712 75.191,381.706 75.289 M300.581 75.505 C 300.314 75.545,299.834 75.546,299.514 75.507 C 299.195 75.467,299.413 75.434,300.000 75.433 C 300.587 75.432,300.848 75.464,300.581 75.505 M326.140 75.960 C 326.316 75.719,326.341 75.733,326.317 76.059 C 326.301 76.267,326.234 76.492,326.167 76.559 C 326.099 76.627,326.046 76.768,326.047 76.874 C 326.049 76.980,326.118 76.960,326.200 76.830 C 326.318 76.643,326.377 76.661,326.474 76.913 C 326.541 77.089,326.536 77.174,326.463 77.100 C 326.390 77.027,326.224 77.149,326.094 77.372 C 325.882 77.737,325.863 77.742,325.903 77.422 C 325.927 77.227,325.881 76.962,325.801 76.834 C 325.721 76.706,325.633 76.386,325.605 76.123 C 325.563 75.724,325.587 75.694,325.746 
75.942 C 325.918 76.210,325.956 76.212,326.140 75.960 M357.378 75.999 C 357.086 76.331,357.097 76.444,357.422 76.444 C 357.570 76.444,357.689 76.286,357.689 76.089 C 357.689 75.672,357.671 75.667,357.378 75.999 M345.067 76.159 C 345.067 76.547,345.524 76.630,345.553 76.247 C 345.567 76.062,345.463 75.911,345.322 75.911 C 345.182 75.911,345.067 76.023,345.067 76.159 M358.222 76.326 C 358.222 76.456,358.339 76.563,358.481 76.563 C 358.623 76.563,358.703 76.503,358.658 76.430 C 358.613 76.357,358.676 76.257,358.799 76.207 C 358.922 76.157,358.842 76.111,358.622 76.103 C 358.400 76.095,358.222 76.194,358.222 76.326 M370.551 76.300 C 370.611 76.488,370.534 76.630,370.346 76.679 C 370.064 76.754,370.063 76.760,370.345 76.779 C 370.739 76.806,370.866 76.580,370.638 76.258 C 370.487 76.045,370.472 76.052,370.551 76.300 M298.709 92.490 L 298.756 108.179 298.311 108.200 C 298.067 108.212,297.967 108.185,298.089 108.140 C 298.264 108.076,298.311 104.772,298.311 92.430 C 298.311 82.241,298.372 76.800,298.487 76.800 C 298.601 76.800,298.679 82.293,298.709 92.490 M343.018 92.444 C 343.018 101.098,343.040 104.638,343.067 100.311 C 343.093 95.984,343.093 88.904,343.067 84.578 C 343.040 80.251,343.018 83.791,343.018 92.444 M343.730 92.444 C 343.730 101.098,343.751 104.638,343.778 100.311 C 343.804 95.984,343.804 88.904,343.778 84.578 C 343.751 80.251,343.730 83.791,343.730 92.444 M384.244 77.235 C 384.085 77.577,384.095 77.687,384.288 77.762 C 384.463 77.828,384.533 77.703,384.533 77.328 C 384.533 76.689,384.502 76.679,384.244 77.235 M360.144 77.500 C 360.189 77.633,360.122 77.700,359.989 77.656 C 359.861 77.613,359.721 77.473,359.678 77.344 C 359.633 77.211,359.700 77.144,359.833 77.189 C 359.962 77.232,360.102 77.372,360.144 77.500 M359.748 77.867 C 360.032 77.867,360.044 77.902,359.892 78.297 C 359.746 78.678,359.438 78.655,359.503 78.267 C 359.539 78.055,359.456 77.972,359.244 78.008 C 359.073 78.037,358.933 77.972,358.933 77.864 C 358.933 77.756,359.013 77.717,359.111 77.778 C 
359.209 77.838,359.295 77.783,359.303 77.655 C 359.314 77.476,359.338 77.474,359.407 77.644 C 359.456 77.767,359.610 77.867,359.748 77.867 M369.600 77.678 C 369.600 77.888,369.253 78.113,369.126 77.985 C 368.999 77.858,369.223 77.511,369.433 77.511 C 369.525 77.511,369.600 77.586,369.600 77.678 M352.767 78.402 C 352.932 78.581,352.920 78.611,352.711 78.539 C 351.772 78.220,351.644 78.207,351.642 78.432 C 351.640 78.591,351.597 78.604,351.508 78.471 C 351.437 78.364,351.258 78.230,351.111 78.173 C 350.964 78.117,351.230 78.094,351.700 78.123 C 352.171 78.151,352.651 78.277,352.767 78.402 M350.756 78.410 C 350.756 78.514,350.839 78.547,350.941 78.484 C 351.043 78.421,351.236 78.460,351.370 78.571 C 351.529 78.703,351.651 78.711,351.723 78.594 C 351.848 78.393,352.356 78.533,352.356 78.769 C 352.356 78.853,352.222 78.871,352.060 78.809 C 351.872 78.736,351.807 78.765,351.883 78.888 C 351.949 78.994,351.902 79.124,351.779 79.177 C 351.656 79.229,351.789 79.239,352.074 79.198 C 352.360 79.157,352.651 79.030,352.722 78.916 C 352.806 78.780,352.892 78.840,352.971 79.090 C 353.038 79.300,353.168 79.425,353.260 79.368 C 353.352 79.311,353.387 79.198,353.337 79.117 C 353.287 79.037,353.370 78.923,353.521 78.865 C 353.679 78.805,353.750 78.835,353.688 78.935 C 353.628 79.032,353.659 79.111,353.757 79.111 C 353.855 79.111,353.977 79.042,354.030 78.957 C 354.082 78.873,354.343 78.845,354.609 78.896 C 355.084 78.987,355.086 78.991,354.691 79.137 C 354.239 79.304,354.336 79.644,354.836 79.644 C 355.015 79.644,355.205 79.758,355.258 79.896 C 355.312 80.034,355.580 80.258,355.855 80.392 C 356.156 80.539,356.344 80.766,356.328 80.960 C 356.308 81.191,356.354 81.228,356.492 81.090 C 356.631 80.952,356.593 80.796,356.359 80.538 C 356.063 80.210,356.061 80.178,356.334 80.178 C 356.498 80.178,356.679 80.058,356.736 79.911 C 356.792 79.764,356.923 79.644,357.026 79.644 C 357.130 79.644,357.163 79.697,357.099 79.760 C 357.035 79.824,357.122 79.981,357.292 80.110 C 357.530 80.291,357.542 
80.346,357.344 80.350 C 357.154 80.354,357.173 80.418,357.415 80.595 C 357.596 80.727,357.697 80.909,357.641 81.000 C 357.585 81.091,357.882 81.162,358.309 81.160 C 358.912 81.156,359.038 81.106,358.892 80.929 C 358.789 80.804,358.756 80.651,358.819 80.588 C 358.882 80.525,358.933 80.572,358.933 80.692 C 358.933 80.816,359.029 80.855,359.156 80.782 C 359.278 80.712,359.323 80.715,359.256 80.790 C 359.189 80.864,359.203 81.107,359.287 81.329 C 359.386 81.588,359.369 81.804,359.240 81.933 C 359.092 82.082,359.098 82.133,359.264 82.133 C 359.387 82.133,359.437 82.215,359.376 82.314 C 359.315 82.413,359.182 82.444,359.081 82.381 C 358.980 82.319,358.997 82.454,359.119 82.681 C 359.287 82.996,359.290 83.111,359.129 83.164 C 358.970 83.218,358.968 83.295,359.123 83.481 C 359.442 83.865,359.454 85.317,359.138 85.196 C 358.974 85.133,358.917 85.187,358.976 85.349 C 359.026 85.487,359.156 85.580,359.265 85.556 C 359.546 85.494,359.404 86.218,359.098 86.412 C 358.921 86.524,358.911 86.573,359.067 86.575 C 359.189 86.577,359.289 86.644,359.289 86.726 C 359.289 86.807,359.115 86.861,358.903 86.845 C 358.690 86.830,358.367 86.966,358.185 87.148 C 358.003 87.331,357.778 87.433,357.684 87.375 C 357.587 87.315,357.565 87.401,357.632 87.575 C 357.712 87.782,357.682 87.839,357.541 87.752 C 357.408 87.670,357.333 87.755,357.333 87.990 C 357.333 88.191,357.255 88.356,357.159 88.356 C 356.977 88.356,356.739 89.080,356.721 89.689 C 356.708 90.147,355.235 90.250,354.917 89.815 C 354.770 89.614,354.688 89.600,354.588 89.761 C 354.488 89.923,354.349 89.905,354.020 89.689 C 353.604 89.417,352.855 89.419,352.136 89.695 C 351.983 89.754,351.805 89.717,351.741 89.612 C 351.674 89.503,351.022 89.422,350.215 89.422 L 348.806 89.422 348.754 84.933 L 348.702 80.444 348.707 84.978 L 348.711 89.511 350.222 89.611 L 351.733 89.712 350.236 89.745 C 349.205 89.767,348.649 89.859,348.450 90.040 C 348.190 90.275,348.160 90.258,348.160 89.870 C 348.160 89.510,348.204 89.473,348.424 89.652 C 348.643 
89.829,348.669 89.813,348.574 89.561 C 348.490 89.336,348.375 81.015,348.436 79.541 C 348.441 79.435,348.544 79.340,348.667 79.329 C 349.139 79.288,349.506 79.175,349.639 79.028 C 349.715 78.945,349.698 78.919,349.600 78.971 C 349.502 79.024,349.262 79.002,349.067 78.924 C 348.794 78.814,348.779 78.778,349.002 78.768 C 349.162 78.761,349.522 78.636,349.802 78.490 C 350.401 78.177,350.756 78.148,350.756 78.410 M350.138 78.570 C 350.009 78.778,351.053 79.344,351.345 79.225 C 351.461 79.178,351.406 79.133,351.223 79.125 C 350.932 79.113,350.920 79.072,351.134 78.800 C 351.355 78.518,351.348 78.512,351.069 78.731 C 350.804 78.939,350.744 78.932,350.650 78.687 C 350.527 78.366,350.297 78.313,350.138 78.570 M368.444 78.578 C 368.505 78.676,368.456 78.756,368.336 78.756 C 368.068 78.756,368.281 78.990,368.591 79.036 C 368.715 79.054,368.828 78.918,368.842 78.735 C 368.856 78.545,368.752 78.400,368.601 78.400 C 368.454 78.400,368.384 78.480,368.444 78.578 M324.089 78.715 C 324.089 78.814,324.215 78.944,324.370 79.003 C 324.757 79.152,324.800 79.142,324.800 78.904 C 324.800 78.790,324.747 78.749,324.683 78.813 C 324.619 78.878,324.459 78.841,324.327 78.732 C 324.177 78.608,324.089 78.601,324.089 78.715 M348.800 79.168 C 348.800 79.193,348.640 79.225,348.444 79.240 C 348.238 79.256,348.089 79.157,348.089 79.005 C 348.089 78.798,348.164 78.783,348.444 78.933 C 348.640 79.038,348.800 79.144,348.800 79.168 M355.580 78.880 C 355.375 78.961,355.375 79.000,355.580 79.130 C 355.770 79.250,355.755 79.284,355.511 79.286 C 355.340 79.288,355.200 79.169,355.200 79.022 C 355.200 78.876,355.340 78.762,355.511 78.770 C 355.728 78.779,355.749 78.813,355.580 78.880 M356.441 78.939 C 356.538 78.782,356.600 78.796,356.674 78.990 C 356.741 79.164,356.677 79.239,356.486 79.211 C 356.327 79.188,356.179 79.074,356.156 78.958 C 356.128 78.809,356.144 78.807,356.213 78.949 C 356.284 79.098,356.345 79.095,356.441 78.939 M371.720 79.145 C 371.593 79.657,371.621 79.786,371.840 79.702 C 372.131 
79.590,372.164 79.356,371.911 79.200 C 371.813 79.140,371.727 79.115,371.720 79.145 M379.978 79.372 C 379.923 79.515,379.919 79.735,379.969 79.861 C 380.020 79.986,380.067 79.904,380.075 79.678 C 380.083 79.453,380.169 79.317,380.267 79.378 C 380.364 79.438,380.444 79.403,380.444 79.299 C 380.444 79.018,380.093 79.072,379.978 79.372 M381.610 79.629 C 381.457 79.876,381.156 79.880,381.156 79.634 C 381.156 79.530,381.249 79.504,381.364 79.574 C 381.509 79.664,381.532 79.600,381.442 79.362 C 381.343 79.103,381.362 79.071,381.521 79.229 C 381.635 79.343,381.675 79.523,381.610 79.629 M328.474 79.548 C 328.474 79.690,328.416 79.771,328.345 79.727 C 328.273 79.683,328.169 79.767,328.112 79.915 C 328.006 80.190,328.629 80.182,329.081 79.902 C 329.221 79.816,329.218 79.896,329.072 80.168 C 328.912 80.467,328.809 80.511,328.626 80.358 C 328.474 80.233,328.348 80.224,328.280 80.335 C 328.139 80.562,328.348 80.671,328.844 80.628 C 329.304 80.588,329.313 80.607,329.049 81.100 C 328.892 81.394,328.900 81.437,329.093 81.321 C 329.290 81.202,329.289 81.231,329.089 81.477 C 328.955 81.642,328.905 81.778,328.978 81.778 C 329.051 81.778,329.006 81.903,328.879 82.056 C 328.560 82.441,328.527 83.249,328.841 82.988 C 329.022 82.838,329.025 82.742,328.856 82.538 C 328.673 82.317,328.728 82.292,329.226 82.371 C 329.819 82.464,330.204 82.667,329.788 82.667 C 329.646 82.667,329.606 82.782,329.680 82.978 C 329.774 83.222,329.752 83.243,329.581 83.073 C 329.411 82.904,329.296 82.935,329.049 83.221 C 328.747 83.568,328.767 84.185,329.073 83.996 C 329.154 83.946,329.271 83.988,329.333 84.089 C 329.396 84.190,329.541 84.216,329.657 84.147 C 329.776 84.075,329.762 84.150,329.623 84.321 C 329.356 84.650,328.889 84.728,328.889 84.444 C 328.889 84.347,328.799 84.267,328.689 84.267 C 328.551 84.267,328.558 84.349,328.711 84.533 C 329.022 84.908,329.526 84.865,329.932 84.429 C 330.247 84.090,331.022 84.173,331.022 84.546 C 331.022 84.595,330.867 84.552,330.677 84.450 C 330.448 84.328,330.296 
84.325,330.223 84.444 C 330.162 84.542,330.237 84.622,330.390 84.622 C 330.542 84.622,330.673 84.682,330.681 84.756 C 330.690 84.829,330.716 85.065,330.740 85.281 C 330.764 85.496,330.717 85.631,330.636 85.581 C 330.555 85.531,330.489 85.608,330.489 85.752 C 330.489 85.897,330.389 86.051,330.267 86.096 C 330.095 86.159,330.101 86.233,330.293 86.422 C 330.429 86.557,330.488 86.572,330.423 86.457 C 330.279 86.200,330.593 85.978,330.863 86.145 C 331.005 86.233,331.002 86.303,330.851 86.396 C 330.718 86.478,330.874 86.636,331.276 86.827 C 331.987 87.164,332.022 87.251,331.650 87.744 C 331.308 88.197,331.314 88.313,331.689 88.411 C 331.945 88.478,331.935 88.496,331.636 88.513 C 331.435 88.524,331.175 88.437,331.058 88.320 C 330.892 88.154,330.844 88.176,330.844 88.419 C 330.844 88.613,330.924 88.683,331.052 88.604 C 331.192 88.517,331.223 88.572,331.147 88.771 C 331.017 89.110,331.301 89.182,331.438 88.844 C 331.507 88.676,331.572 88.684,331.708 88.880 C 331.807 89.022,331.848 89.202,331.800 89.280 C 331.752 89.358,331.822 89.422,331.956 89.422 C 332.089 89.422,332.354 89.547,332.544 89.699 C 332.883 89.971,332.883 89.972,332.533 89.809 C 331.977 89.548,331.104 89.943,331.583 90.239 C 331.710 90.318,331.740 90.288,331.661 90.161 C 331.578 90.025,331.696 89.956,332.009 89.956 C 332.424 89.956,332.469 90.010,332.373 90.395 C 332.281 90.759,332.317 90.817,332.576 90.734 C 332.839 90.649,332.847 90.663,332.622 90.825 C 332.299 91.058,331.852 91.084,331.988 90.863 C 332.043 90.776,331.970 90.659,331.828 90.605 C 331.611 90.521,331.616 90.604,331.858 91.112 C 332.017 91.445,332.214 91.677,332.296 91.626 C 332.378 91.576,332.444 91.623,332.444 91.731 C 332.444 91.855,332.200 91.894,331.778 91.837 C 331.411 91.788,331.251 91.790,331.422 91.842 C 331.593 91.894,331.733 92.017,331.733 92.114 C 331.733 92.212,331.898 92.238,332.104 92.173 C 332.442 92.066,332.456 92.091,332.261 92.455 C 332.075 92.803,332.086 92.839,332.349 92.738 C 332.515 92.674,332.785 92.617,332.948 92.610 C 
333.190 92.600,333.181 92.572,332.902 92.463 C 332.595 92.343,332.591 92.317,332.857 92.214 C 333.021 92.152,333.156 92.182,333.156 92.283 C 333.156 92.383,333.256 92.407,333.378 92.335 C 333.500 92.264,333.460 92.366,333.289 92.561 C 332.863 93.048,332.895 93.285,333.333 92.889 C 333.719 92.540,333.830 92.755,333.467 93.146 C 333.288 93.339,333.291 93.359,333.482 93.251 C 333.658 93.150,333.704 93.232,333.661 93.564 C 333.622 93.861,333.768 94.199,334.091 94.562 C 334.360 94.864,334.509 95.111,334.422 95.111 C 334.335 95.111,334.129 94.906,333.964 94.655 C 333.723 94.287,333.616 94.239,333.410 94.410 C 333.213 94.573,333.156 94.573,333.156 94.409 C 333.156 94.082,332.707 94.590,332.586 95.053 C 332.512 95.335,332.552 95.414,332.731 95.345 C 332.878 95.289,332.978 95.366,332.978 95.537 C 332.978 95.791,332.518 95.822,328.790 95.822 C 326.043 95.822,324.639 95.761,324.711 95.644 C 324.779 95.535,326.222 95.467,328.449 95.467 C 331.944 95.467,332.084 95.453,332.275 95.096 C 332.384 94.892,332.427 94.679,332.370 94.622 C 332.313 94.565,332.267 94.614,332.267 94.732 C 332.267 94.891,331.285 94.946,328.400 94.948 C 326.273 94.949,324.413 94.951,324.267 94.952 C 324.120 94.953,323.840 94.916,323.644 94.868 C 323.449 94.821,323.249 94.775,323.200 94.766 C 323.151 94.757,323.229 94.676,323.373 94.584 C 323.546 94.475,323.681 94.491,323.770 94.632 C 323.850 94.758,323.907 94.636,323.908 94.334 C 323.911 93.927,323.960 93.863,324.150 94.021 C 324.389 94.220,324.622 94.124,324.622 93.827 C 324.622 93.739,324.530 93.725,324.417 93.795 C 324.289 93.874,324.260 93.843,324.340 93.713 C 324.411 93.599,324.527 93.541,324.599 93.585 C 324.670 93.629,324.844 93.592,324.986 93.502 C 325.195 93.370,325.197 93.338,325.000 93.336 C 324.800 93.334,324.798 93.284,324.985 93.058 C 325.167 92.838,325.243 92.829,325.363 93.014 C 325.461 93.165,325.491 93.097,325.452 92.816 C 325.388 92.361,324.684 91.674,324.438 91.826 C 324.357 91.876,324.234 91.824,324.164 91.710 C 324.072 91.563,324.175 
91.539,324.522 91.626 C 324.892 91.719,324.975 91.693,324.870 91.518 C 324.781 91.372,324.852 91.385,325.067 91.556 C 325.262 91.711,325.346 91.730,325.268 91.600 C 325.195 91.478,325.219 91.378,325.323 91.378 C 325.426 91.378,325.511 91.258,325.511 91.111 C 325.511 90.964,325.651 90.843,325.822 90.842 C 326.065 90.840,326.080 90.805,325.892 90.686 C 325.721 90.578,325.709 90.497,325.851 90.410 C 325.960 90.342,326.003 90.192,325.945 90.077 C 325.888 89.961,325.962 90.016,326.111 90.199 C 326.372 90.520,326.380 90.518,326.353 90.154 C 326.337 89.947,326.281 89.772,326.229 89.764 C 326.176 89.757,325.996 89.724,325.828 89.692 C 325.660 89.660,325.529 89.686,325.538 89.750 C 325.584 90.095,325.472 90.550,325.391 90.348 C 325.316 90.160,325.248 90.160,325.018 90.348 C 324.817 90.513,324.834 90.430,325.080 90.053 C 325.391 89.575,325.393 89.539,325.111 89.647 C 324.940 89.713,324.800 89.689,324.800 89.594 C 324.800 89.500,324.980 89.420,325.200 89.417 C 325.590 89.411,325.591 89.404,325.255 89.150 C 324.840 88.836,325.194 88.799,325.683 89.104 C 325.873 89.223,326.077 89.271,326.137 89.211 C 326.318 89.030,326.376 87.822,326.205 87.822 C 326.116 87.822,326.036 88.002,326.027 88.222 C 326.010 88.603,326.003 88.601,325.882 88.178 C 325.812 87.933,325.820 87.707,325.901 87.674 C 325.981 87.641,325.997 87.533,325.935 87.434 C 325.863 87.316,325.945 87.318,326.171 87.439 C 326.441 87.584,326.519 87.568,326.519 87.371 C 326.519 87.231,326.361 87.066,326.169 87.005 C 325.890 86.917,325.869 86.862,326.065 86.736 C 326.272 86.603,326.269 86.547,326.044 86.383 C 325.819 86.218,325.825 86.204,326.089 86.289 C 326.260 86.344,326.400 86.311,326.400 86.217 C 326.400 86.122,326.485 86.044,326.588 86.044 C 326.692 86.044,326.727 86.124,326.667 86.222 C 326.541 86.426,326.615 86.443,326.996 86.296 C 327.145 86.239,327.223 86.080,327.170 85.942 C 327.117 85.804,326.990 85.743,326.887 85.806 C 326.781 85.872,326.753 85.837,326.823 85.724 C 326.890 85.616,327.049 85.567,327.176 85.615 C 
327.303 85.664,327.407 85.588,327.407 85.447 C 327.407 85.301,327.301 85.230,327.161 85.284 C 326.976 85.355,326.943 85.268,327.028 84.931 C 327.091 84.676,327.056 84.431,326.945 84.363 C 326.838 84.296,326.797 84.166,326.854 84.074 C 326.911 83.981,327.032 83.951,327.123 84.008 C 327.214 84.064,327.289 84.025,327.289 83.922 C 327.289 83.818,327.186 83.733,327.060 83.733 C 326.913 83.733,326.829 83.479,326.823 83.022 C 326.819 82.631,326.887 82.311,326.974 82.311 C 327.061 82.311,327.086 82.237,327.030 82.146 C 326.974 82.055,327.024 81.921,327.141 81.848 C 327.290 81.756,327.252 81.672,327.010 81.566 C 326.775 81.463,326.890 81.440,327.378 81.492 C 327.769 81.533,327.989 81.528,327.867 81.480 C 327.532 81.348,327.600 80.890,327.956 80.883 C 328.246 80.878,328.247 80.863,327.966 80.649 C 327.526 80.315,327.807 79.544,328.459 79.295 C 328.467 79.291,328.474 79.405,328.474 79.548 M324.444 79.708 C 324.444 79.852,324.344 80.011,324.222 80.060 C 324.083 80.116,324.075 80.155,324.200 80.164 C 324.509 80.185,324.855 79.700,324.631 79.561 C 324.528 79.498,324.444 79.564,324.444 79.708 M348.190 79.814 C 348.190 80.005,348.155 80.125,348.112 80.082 C 347.666 79.636,347.556 80.592,347.556 84.900 C 347.556 87.709,347.489 89.443,347.378 89.511 C 347.261 89.583,347.200 88.069,347.200 85.100 L 347.200 80.578 347.689 80.029 C 347.958 79.727,348.181 79.477,348.184 79.474 C 348.188 79.470,348.190 79.623,348.190 79.814 M378.765 79.772 C 378.701 79.940,378.704 80.022,378.773 79.953 C 378.842 79.884,379.002 79.967,379.129 80.136 C 379.336 80.412,379.349 80.399,379.252 80.017 C 379.177 79.718,379.206 79.627,379.350 79.716 C 379.463 79.786,379.556 79.758,379.556 79.655 C 379.556 79.344,378.892 79.443,378.765 79.772 M368.133 80.104 C 367.964 80.273,367.624 79.999,367.750 79.794 C 367.826 79.673,367.925 79.676,368.053 79.804 C 368.157 79.908,368.194 80.043,368.133 80.104 M362.660 80.096 C 362.475 80.319,362.476 80.411,362.665 80.585 C 362.850 80.756,362.876 80.741,362.796 80.511 C 362.740 
80.351,362.784 80.131,362.894 80.022 C 363.004 79.912,363.047 79.822,362.991 79.822 C 362.934 79.822,362.786 79.946,362.660 80.096 M324.704 81.849 C 324.678 82.084,324.604 82.444,324.539 82.649 C 324.464 82.885,324.498 83.022,324.632 83.022 C 324.791 83.022,324.789 83.088,324.622 83.289 C 324.457 83.488,324.454 83.556,324.611 83.556 C 324.729 83.556,324.763 83.649,324.689 83.769 C 324.558 83.981,324.376 83.739,324.446 83.447 C 324.462 83.382,324.380 83.270,324.263 83.198 C 324.100 83.097,324.101 82.977,324.265 82.670 C 324.414 82.391,324.422 82.217,324.289 82.085 C 324.139 81.934,324.420 81.516,324.731 81.428 C 324.742 81.425,324.730 81.614,324.704 81.849 M370.128 81.556 C 370.116 81.837,369.597 82.367,369.469 82.229 C 369.394 82.150,369.433 82.049,369.556 82.005 C 369.678 81.962,369.778 81.813,369.778 81.674 C 369.778 81.536,369.858 81.422,369.956 81.422 C 370.053 81.422,370.131 81.482,370.128 81.556 M362.844 81.945 C 362.844 82.036,362.999 82.100,363.188 82.085 C 363.376 82.071,363.492 81.996,363.444 81.919 C 363.322 81.722,362.844 81.742,362.844 81.945 M358.212 83.422 C 358.126 83.795,358.083 84.821,358.156 84.747 C 358.276 84.628,358.405 83.378,358.298 83.378 C 358.256 83.378,358.218 83.398,358.212 83.422 M370.844 84.345 C 370.844 84.486,370.765 84.650,370.669 84.710 C 370.568 84.772,370.538 84.701,370.599 84.543 C 370.657 84.391,370.616 84.267,370.508 84.267 C 370.400 84.267,370.311 84.344,370.311 84.438 C 370.311 84.533,370.131 84.703,369.911 84.818 C 369.519 85.022,369.520 85.017,369.942 84.557 C 370.414 84.044,370.844 83.943,370.844 84.345 M323.200 84.390 C 323.200 84.491,322.980 84.542,322.711 84.505 C 322.233 84.439,322.231 84.442,322.628 84.649 C 323.041 84.863,323.599 84.606,323.321 84.329 C 323.255 84.262,323.200 84.290,323.200 84.390 M326.610 85.010 C 326.436 85.321,326.178 85.606,326.036 85.643 C 325.878 85.684,325.899 85.633,326.089 85.513 C 326.260 85.404,326.400 85.216,326.400 85.093 C 326.400 84.900,326.735 84.444,326.876 84.444 C 326.904 
84.444,326.784 84.699,326.610 85.010 M329.688 84.979 C 329.627 85.078,329.706 85.207,329.863 85.268 C 330.112 85.363,330.102 85.411,329.786 85.632 C 329.586 85.772,329.423 85.982,329.425 86.099 C 329.427 86.226,329.495 86.204,329.596 86.044 C 329.689 85.898,329.859 85.778,329.973 85.778 C 330.088 85.778,330.250 85.593,330.334 85.366 C 330.469 85.002,330.443 84.968,330.110 85.074 C 329.857 85.155,329.773 85.129,329.855 84.997 C 329.922 84.889,329.937 84.800,329.888 84.800 C 329.839 84.800,329.749 84.881,329.688 84.979 M369.689 85.867 C 369.628 85.964,369.708 86.044,369.867 86.044 C 370.025 86.044,370.107 86.121,370.049 86.214 C 369.903 86.451,369.422 86.238,369.422 85.937 C 369.422 85.800,369.507 85.689,369.610 85.689 C 369.714 85.689,369.749 85.769,369.689 85.867 M358.672 86.036 C 358.620 86.120,358.398 86.144,358.178 86.089 C 357.789 85.992,357.790 85.999,358.222 86.356 C 358.495 86.582,358.709 86.653,358.778 86.541 C 358.841 86.440,358.988 86.299,359.105 86.226 C 359.261 86.129,359.244 86.065,359.043 85.988 C 358.891 85.930,358.724 85.951,358.672 86.036 M329.590 86.507 C 329.432 86.703,329.418 86.773,329.556 86.690 C 329.697 86.605,329.778 86.662,329.778 86.844 C 329.778 87.003,329.853 87.086,329.945 87.029 C 330.036 86.972,330.194 87.068,330.295 87.241 C 330.396 87.414,330.481 87.460,330.483 87.344 C 330.486 87.227,330.403 87.079,330.299 87.015 C 330.194 86.950,330.050 86.738,329.978 86.544 C 329.853 86.208,329.835 86.206,329.590 86.507 M361.845 87.511 C 361.971 88.036,361.696 88.208,361.561 87.689 C 361.486 87.400,361.580 86.841,361.682 86.967 C 361.707 86.997,361.781 87.242,361.845 87.511 M365.965 87.156 C 365.752 87.846,366.309 88.362,366.535 87.683 C 366.577 87.558,366.544 87.468,366.461 87.482 C 366.100 87.546,366.017 87.448,366.222 87.200 C 366.382 87.008,366.386 86.933,366.238 86.933 C 366.126 86.933,366.003 87.033,365.965 87.156 M335.289 87.255 C 335.289 87.334,335.182 87.545,335.052 87.723 C 334.823 88.035,334.815 88.035,334.812 87.712 C 334.811 
87.528,334.897 87.319,335.005 87.247 C 335.255 87.079,335.289 87.080,335.289 87.255 M370.963 87.548 C 370.963 87.690,370.852 87.849,370.717 87.901 C 370.581 87.953,370.519 87.917,370.579 87.820 C 370.639 87.724,370.583 87.639,370.455 87.632 C 370.327 87.625,370.382 87.551,370.578 87.467 C 370.773 87.383,370.940 87.308,370.948 87.301 C 370.956 87.295,370.963 87.405,370.963 87.548 M358.146 87.764 C 358.084 87.927,358.135 88.125,358.261 88.205 C 358.423 88.308,358.428 88.351,358.277 88.353 C 358.161 88.354,358.014 88.272,357.950 88.169 C 357.825 87.966,357.964 87.467,358.147 87.467 C 358.209 87.467,358.209 87.600,358.146 87.764 M321.468 88.235 C 321.484 88.608,321.554 88.950,321.625 88.993 C 321.881 89.152,321.904 88.786,321.675 88.178 L 321.440 87.556 321.468 88.235 M322.685 88.388 C 322.436 88.493,322.440 88.510,322.711 88.521 C 323.007 88.533,323.007 88.548,322.711 88.851 C 322.348 89.222,322.003 88.733,322.244 88.189 C 322.312 88.036,322.349 87.831,322.325 87.733 C 322.302 87.636,322.437 87.716,322.626 87.911 C 322.931 88.227,322.938 88.280,322.685 88.388 M325.407 88.063 C 325.277 88.403,324.957 88.462,325.022 88.133 C 325.068 87.905,325.370 87.622,325.466 87.718 C 325.495 87.747,325.469 87.903,325.407 88.063 M330.543 87.963 C 330.402 88.052,330.386 88.187,330.496 88.360 C 330.636 88.583,330.662 88.577,330.664 88.324 C 330.665 88.160,330.807 87.989,330.978 87.945 C 331.198 87.887,331.211 87.858,331.022 87.846 C 330.876 87.836,330.660 87.889,330.543 87.963 M372.978 88.063 C 373.222 88.192,373.497 88.257,373.588 88.207 C 373.770 88.108,373.227 87.853,372.800 87.837 C 372.653 87.832,372.733 87.933,372.978 88.063 M356.442 89.022 C 356.439 89.288,356.414 89.295,356.267 89.067 C 356.166 88.911,356.093 88.887,356.092 89.009 C 356.090 89.124,355.989 89.253,355.867 89.295 C 355.744 89.337,355.784 89.350,355.956 89.324 C 356.127 89.299,356.267 89.350,356.267 89.439 C 356.267 89.527,356.127 89.606,355.956 89.612 C 355.690 89.623,355.696 89.647,356.000 89.778 C 356.539 
90.009,356.550 90.001,356.569 89.333 C 356.579 88.991,356.555 88.711,356.516 88.711 C 356.477 88.711,356.443 88.851,356.442 89.022 M357.437 88.989 C 357.121 89.305,357.037 89.310,357.037 89.013 C 357.037 88.886,357.185 88.771,357.365 88.758 C 357.650 88.736,357.659 88.766,357.437 88.989 M322.101 89.769 C 322.148 89.910,322.098 89.955,321.979 89.881 C 321.868 89.813,321.778 89.835,321.778 89.930 C 321.778 90.026,321.867 90.133,321.975 90.169 C 322.104 90.212,322.083 90.309,321.916 90.448 C 321.716 90.614,321.658 90.613,321.652 90.442 C 321.648 90.321,321.626 90.142,321.602 90.044 C 321.456 89.450,321.911 89.199,322.101 89.769 M360.136 89.662 C 360.260 89.739,360.315 89.877,360.258 89.969 C 360.200 90.063,360.041 90.022,359.893 89.874 C 359.663 89.644,359.590 89.654,359.283 89.962 C 358.892 90.353,358.809 91.200,359.162 91.200 C 359.288 91.200,359.402 91.351,359.416 91.536 C 359.434 91.768,359.350 91.847,359.143 91.793 C 358.724 91.684,358.602 89.956,359.013 89.956 C 359.165 89.956,359.289 89.836,359.289 89.690 C 359.289 89.418,359.723 89.404,360.136 89.662 M351.766 89.894 C 351.702 89.957,351.789 90.115,351.958 90.243 C 352.164 90.400,352.190 90.479,352.034 90.483 C 351.906 90.486,351.839 90.549,351.884 90.622 C 351.930 90.696,351.235 90.716,350.339 90.668 L 348.711 90.580 350.273 90.527 C 352.244 90.461,352.166 90.146,350.167 90.100 L 348.711 90.067 350.222 89.956 C 351.053 89.894,351.767 89.829,351.807 89.811 C 351.848 89.793,351.829 89.830,351.766 89.894 M376.439 90.302 C 376.489 90.383,376.410 90.547,376.265 90.668 C 376.052 90.845,375.999 90.840,375.997 90.644 C 375.995 90.452,375.958 90.457,375.822 90.667 C 375.680 90.887,375.649 90.870,375.647 90.571 C 375.644 90.206,376.252 90.000,376.439 90.302 M348.391 90.565 C 348.151 90.747,348.128 90.832,348.302 90.901 C 348.451 90.961,348.423 90.999,348.222 91.008 C 348.051 91.016,347.911 90.946,347.911 90.853 C 347.911 90.760,348.051 90.603,348.222 90.503 C 348.668 90.243,348.767 90.280,348.391 90.565 M358.578 90.646 
C 358.578 91.058,357.953 91.251,357.680 90.923 C 357.407 90.593,357.621 90.311,358.144 90.311 C 358.450 90.311,358.578 90.409,358.578 90.646 M375.467 90.470 C 375.467 90.557,375.347 90.675,375.200 90.731 C 374.929 90.835,374.836 90.646,375.052 90.430 C 375.222 90.260,375.467 90.283,375.467 90.470 M377.524 90.735 C 377.560 90.920,377.644 91.120,377.712 91.180 C 377.779 91.240,377.781 91.140,377.715 90.958 C 377.625 90.706,377.662 90.653,377.874 90.734 C 378.027 90.792,378.107 90.768,378.053 90.680 C 377.998 90.592,378.054 90.483,378.177 90.438 C 378.300 90.393,378.188 90.366,377.930 90.378 C 377.570 90.395,377.475 90.479,377.524 90.735 M369.258 90.600 C 369.233 90.958,369.722 91.247,369.838 90.944 C 369.894 90.798,369.851 90.623,369.743 90.556 C 369.623 90.482,369.595 90.515,369.672 90.639 C 369.742 90.752,369.721 90.844,369.625 90.844 C 369.530 90.844,369.412 90.744,369.362 90.622 C 369.306 90.483,369.267 90.475,369.258 90.600 M357.585 90.998 C 357.638 91.083,357.533 91.286,357.353 91.449 C 357.044 91.729,357.028 91.723,357.068 91.339 C 357.125 90.789,357.360 90.634,357.585 90.998 M370.553 91.467 C 370.609 91.613,370.573 91.733,370.473 91.733 C 370.372 91.733,370.347 91.826,370.417 91.939 C 370.496 92.066,370.466 92.096,370.339 92.017 C 370.098 91.868,370.060 91.200,370.292 91.200 C 370.379 91.200,370.497 91.320,370.553 91.467 M336.089 91.514 C 335.856 91.685,335.779 92.089,335.979 92.089 C 336.066 92.089,336.178 91.929,336.229 91.733 C 336.334 91.334,336.334 91.334,336.089 91.514 M358.933 92.176 C 358.933 92.183,358.779 92.320,358.590 92.482 C 358.269 92.756,358.248 92.756,358.269 92.471 C 358.285 92.262,358.933 91.975,358.933 92.176 M372.800 92.221 C 372.653 92.271,372.398 92.431,372.232 92.575 C 371.961 92.811,371.933 92.802,371.957 92.489 C 371.976 92.227,372.116 92.139,372.525 92.135 C 372.823 92.131,372.947 92.170,372.800 92.221 M324.157 92.999 C 323.889 93.279,323.638 93.475,323.599 93.436 C 323.479 93.316,323.911 92.622,324.106 92.622 C 324.206 
92.622,324.239 92.543,324.179 92.446 C 324.116 92.344,324.190 92.317,324.357 92.381 C 324.600 92.474,324.570 92.569,324.157 92.999 M351.778 92.489 C 351.753 92.611,351.816 92.697,351.917 92.679 C 352.019 92.662,352.102 92.804,352.102 92.995 C 352.102 93.205,352.011 93.307,351.873 93.254 C 351.747 93.206,351.644 93.253,351.644 93.358 C 351.644 93.475,351.823 93.505,352.104 93.434 C 352.399 93.360,352.523 93.385,352.450 93.502 C 352.383 93.611,352.449 93.642,352.613 93.579 C 352.765 93.521,352.889 93.397,352.889 93.304 C 352.889 93.211,352.814 93.181,352.723 93.237 C 352.632 93.293,352.512 93.265,352.456 93.174 C 352.345 92.994,352.808 93.016,352.992 93.199 C 353.054 93.261,353.247 93.223,353.422 93.115 C 353.681 92.952,353.822 93.007,354.202 93.418 C 354.458 93.693,354.670 93.847,354.673 93.759 C 354.676 93.672,354.794 93.800,354.933 94.044 C 355.101 94.337,355.190 94.393,355.194 94.207 C 355.197 94.053,355.316 93.926,355.459 93.926 C 355.601 93.926,355.676 93.993,355.626 94.074 C 355.576 94.156,355.719 94.228,355.945 94.235 C 356.320 94.246,356.326 94.259,356.010 94.383 C 355.677 94.513,355.679 94.522,356.046 94.618 C 356.256 94.673,356.572 94.606,356.747 94.470 C 357.007 94.268,357.012 94.240,356.776 94.322 C 356.582 94.389,356.348 94.230,356.074 93.846 C 355.848 93.529,355.754 93.325,355.864 93.393 C 355.974 93.461,356.115 93.436,356.176 93.336 C 356.378 93.010,356.649 93.147,356.532 93.516 C 356.469 93.714,356.484 93.864,356.564 93.850 C 357.058 93.764,357.314 93.888,357.209 94.162 C 357.123 94.386,357.157 94.420,357.348 94.301 C 357.966 93.915,358.390 94.165,358.042 94.710 C 357.864 94.989,357.851 94.981,357.923 94.636 C 357.981 94.361,357.933 94.276,357.761 94.342 C 357.582 94.411,357.565 94.523,357.695 94.767 C 357.794 94.953,357.985 95.055,358.128 95.000 C 358.339 94.919,358.355 95.005,358.217 95.483 C 358.126 95.803,358.089 96.104,358.136 96.151 C 358.184 96.198,358.222 96.146,358.222 96.035 C 358.222 95.924,358.345 95.880,358.494 95.938 C 358.730 
96.028,358.724 96.076,358.450 96.297 C 358.161 96.530,358.169 96.536,358.533 96.359 C 358.828 96.216,358.933 96.219,358.933 96.368 C 358.933 96.480,359.047 96.528,359.185 96.475 C 359.324 96.421,359.488 96.511,359.551 96.675 C 359.614 96.838,359.580 96.999,359.477 97.034 C 359.373 97.068,359.289 97.174,359.289 97.270 C 359.289 97.365,359.369 97.394,359.467 97.333 C 359.564 97.273,359.646 97.328,359.647 97.456 C 359.649 97.598,359.701 97.620,359.781 97.511 C 359.946 97.286,360.356 97.198,360.356 97.387 C 360.356 97.465,360.266 97.619,360.156 97.728 C 359.922 97.962,359.113 97.669,359.108 97.349 C 359.107 97.243,359.031 97.273,358.941 97.416 C 358.836 97.582,358.845 97.760,358.965 97.905 C 359.069 98.031,359.091 98.133,359.013 98.133 C 358.935 98.133,359.065 98.346,359.302 98.606 C 359.675 99.015,359.687 99.062,359.388 98.954 C 359.057 98.834,359.055 98.844,359.353 99.173 C 359.540 99.380,359.803 99.481,360.011 99.426 C 360.202 99.377,360.464 99.425,360.595 99.533 C 360.726 99.642,360.868 99.695,360.911 99.652 C 360.972 99.591,360.966 100.364,360.903 100.584 C 360.895 100.612,360.720 100.544,360.513 100.433 C 360.149 100.239,360.140 100.259,360.241 101.094 C 360.300 101.581,360.265 102.072,360.161 102.222 C 360.014 102.434,359.998 102.413,360.080 102.119 C 360.152 101.861,360.101 101.748,359.914 101.748 C 359.766 101.748,359.644 101.845,359.644 101.963 C 359.644 102.082,359.504 102.288,359.333 102.423 C 359.051 102.644,359.045 102.638,359.266 102.356 C 359.643 101.875,359.403 101.988,358.920 102.518 C 358.683 102.778,358.601 102.925,358.739 102.846 C 358.907 102.748,358.947 102.768,358.861 102.906 C 358.792 103.019,358.824 103.111,358.933 103.111 C 359.043 103.111,359.077 103.200,359.010 103.308 C 358.926 103.445,358.844 103.432,358.741 103.265 C 358.620 103.070,358.550 103.068,358.364 103.253 C 358.179 103.439,358.186 103.513,358.401 103.649 C 358.636 103.797,358.636 103.817,358.400 103.822 C 358.186 103.827,358.180 103.857,358.370 103.978 C 358.563 104.100,358.556 
104.148,358.337 104.232 C 358.144 104.306,358.045 104.198,357.993 103.857 C 357.925 103.411,357.918 103.417,357.893 103.941 C 357.879 104.250,357.967 104.544,358.089 104.593 C 358.211 104.643,358.151 104.691,357.956 104.700 C 357.669 104.713,357.650 104.748,357.858 104.880 C 358.000 104.970,358.193 104.996,358.286 104.939 C 358.379 104.881,358.402 104.919,358.338 105.023 C 358.274 105.126,358.062 105.169,357.866 105.118 C 357.581 105.044,357.511 105.113,357.511 105.470 C 357.511 105.960,356.773 106.242,356.267 105.945 C 356.081 105.836,356.070 105.787,356.228 105.783 C 356.378 105.780,356.429 105.609,356.376 105.289 C 356.332 105.020,356.289 104.722,356.281 104.627 C 356.273 104.532,356.137 104.639,355.979 104.865 C 355.790 105.135,355.574 105.244,355.347 105.185 C 354.918 105.073,354.505 105.924,354.877 106.153 C 355.000 106.229,355.141 106.252,355.190 106.203 C 355.239 106.154,355.177 106.051,355.053 105.975 C 354.877 105.866,354.889 105.784,355.107 105.603 C 355.261 105.476,355.428 105.414,355.480 105.465 C 355.654 105.639,355.341 106.489,355.102 106.489 C 354.972 106.489,354.808 106.581,354.739 106.694 C 354.654 106.832,354.694 106.852,354.861 106.757 C 355.060 106.644,355.067 106.662,354.896 106.846 C 354.733 107.023,354.623 107.029,354.436 106.874 C 354.300 106.762,354.124 106.729,354.044 106.802 C 353.964 106.874,353.949 106.793,354.011 106.622 C 354.086 106.413,354.033 106.311,353.851 106.311 C 353.701 106.311,353.625 106.386,353.681 106.477 C 353.738 106.568,353.690 106.700,353.575 106.771 C 353.437 106.857,353.415 106.822,353.511 106.666 C 353.623 106.486,353.531 106.458,353.102 106.544 C 352.662 106.632,352.569 106.602,352.649 106.394 C 352.708 106.239,352.645 106.133,352.493 106.133 C 352.352 106.133,352.237 106.015,352.237 105.869 C 352.237 105.689,352.302 105.660,352.444 105.778 C 352.584 105.894,352.652 105.875,352.652 105.722 C 352.652 105.596,352.505 105.482,352.326 105.469 C 352.022 105.446,351.858 106.083,351.994 106.756 C 352.013 106.853,351.965 
106.869,351.887 106.790 C 351.808 106.712,351.766 106.392,351.793 106.079 C 351.841 105.526,351.823 105.511,351.121 105.511 C 350.553 105.511,350.400 105.577,350.400 105.822 C 350.400 105.993,350.327 106.133,350.239 106.133 C 350.150 106.133,350.118 105.978,350.168 105.787 C 350.237 105.523,350.173 105.448,349.894 105.469 C 349.638 105.488,349.580 105.558,349.701 105.704 C 349.884 105.924,349.745 106.462,349.428 106.756 C 349.171 106.994,348.746 107.075,348.879 106.861 C 348.936 106.768,348.902 106.641,348.803 106.580 C 348.704 106.518,348.622 106.562,348.622 106.678 C 348.622 106.837,348.560 106.836,348.363 106.673 C 348.217 106.551,348.005 106.521,347.874 106.602 C 347.584 106.783,347.127 106.858,347.287 106.698 C 347.353 106.632,347.481 106.568,347.570 106.556 C 347.964 106.502,347.987 106.475,347.763 106.333 C 347.573 106.213,347.584 106.161,347.817 106.071 C 347.987 106.006,348.062 106.033,347.999 106.135 C 347.939 106.232,348.058 106.311,348.262 106.311 C 348.581 106.311,348.607 106.259,348.442 105.951 C 348.336 105.752,348.153 105.638,348.036 105.697 C 347.918 105.756,347.964 105.688,348.138 105.546 C 348.518 105.235,348.606 93.654,348.232 93.203 C 348.083 93.023,348.105 92.995,348.328 93.081 C 348.588 93.180,348.616 93.834,348.610 99.505 C 348.607 102.978,348.652 105.867,348.711 105.926 C 348.770 105.985,348.761 105.883,348.690 105.699 C 348.617 105.508,348.632 105.407,348.725 105.465 C 348.824 105.526,348.869 103.176,348.840 99.539 L 348.791 93.511 349.818 93.477 L 350.844 93.444 349.838 93.380 C 348.323 93.284,348.363 92.889,349.888 92.889 C 350.559 92.889,351.103 92.812,351.097 92.718 C 351.087 92.582,351.388 92.389,351.778 92.279 C 351.802 92.272,351.802 92.367,351.778 92.489 M373.222 92.585 C 373.270 92.663,373.156 92.738,372.969 92.752 C 372.782 92.766,372.583 92.702,372.526 92.611 C 372.470 92.519,372.583 92.444,372.779 92.444 C 372.975 92.444,373.174 92.508,373.222 92.585 M319.811 92.978 C 319.802 93.027,319.768 93.207,319.735 93.378 C 319.701 
93.555,319.784 93.689,319.927 93.689 C 320.065 93.689,320.177 93.629,320.175 93.556 C 320.172 93.391,319.834 92.855,319.811 92.978 M362.070 93.100 C 361.946 93.224,362.402 93.689,362.648 93.689 C 362.750 93.689,362.790 93.575,362.736 93.436 C 362.628 93.154,362.221 92.949,362.070 93.100 M347.556 99.556 C 347.556 103.348,347.493 105.422,347.378 105.422 C 347.263 105.422,347.200 103.348,347.200 99.556 C 347.200 95.763,347.263 93.689,347.378 93.689 C 347.493 93.689,347.556 95.763,347.556 99.556 M375.004 94.093 C 375.073 94.023,375.076 94.106,375.011 94.277 C 374.886 94.603,374.400 94.383,374.400 94.001 C 374.400 93.892,374.493 93.900,374.639 94.021 C 374.770 94.130,374.934 94.162,375.004 94.093 M375.875 94.163 C 375.772 94.432,375.533 94.474,375.393 94.247 C 375.272 94.051,375.399 93.926,375.716 93.926 C 375.854 93.926,375.925 94.033,375.875 94.163 M354.233 94.206 C 354.178 94.294,353.953 94.329,353.733 94.282 C 353.365 94.205,353.360 94.215,353.663 94.407 C 353.927 94.575,354.050 94.560,354.279 94.330 C 354.436 94.173,354.513 94.044,354.449 94.044 C 354.385 94.044,354.287 94.117,354.233 94.206 M337.333 95.087 C 337.650 95.291,337.652 95.304,337.355 95.210 C 337.172 95.152,336.928 95.182,336.815 95.277 C 336.671 95.396,336.600 95.349,336.583 95.120 C 336.555 94.754,336.799 94.743,337.333 95.087 M359.236 95.467 C 359.445 95.467,359.463 95.523,359.312 95.705 C 359.203 95.836,359.167 95.996,359.231 96.061 C 359.296 96.125,359.265 96.178,359.163 96.178 C 358.938 96.178,358.468 95.646,358.689 95.642 C 358.775 95.640,358.724 95.553,358.578 95.447 C 358.356 95.288,358.366 95.273,358.637 95.361 C 358.816 95.419,359.086 95.467,359.236 95.467 M378.880 96.213 C 378.763 96.331,378.667 96.587,378.667 96.782 C 378.667 97.059,378.714 97.090,378.880 96.924 C 378.997 96.807,379.240 96.711,379.420 96.711 C 379.691 96.711,379.714 96.651,379.556 96.356 C 379.337 95.946,379.180 95.913,378.880 96.213 M384.711 96.192 C 384.506 96.336,384.541 96.351,384.867 96.259 C 385.154 96.178,385.250 
96.203,385.167 96.337 C 385.100 96.445,385.137 96.533,385.249 96.533 C 385.482 96.533,385.395 96.138,385.144 96.058 C 385.053 96.029,384.858 96.090,384.711 96.192 M330.978 96.311 C 329.829 96.342,327.949 96.342,326.800 96.311 C 325.651 96.280,326.591 96.254,328.889 96.254 C 331.187 96.254,332.127 96.280,330.978 96.311 M320.356 96.411 C 320.356 96.442,320.103 96.702,319.795 96.989 C 319.243 97.503,319.250 97.312,319.805 96.698 C 320.059 96.418,320.356 96.263,320.356 96.411 M359.843 98.178 C 359.855 98.300,359.855 98.460,359.843 98.533 C 359.812 98.734,360.135 98.694,360.239 98.484 C 360.288 98.384,360.308 98.404,360.283 98.529 C 360.213 98.875,359.552 98.809,359.578 98.459 C 359.593 98.260,359.509 98.196,359.327 98.266 C 359.147 98.335,359.097 98.300,359.181 98.164 C 359.352 97.887,359.817 97.897,359.843 98.178 M319.422 98.540 C 319.422 98.813,319.317 98.946,319.102 98.946 C 318.926 98.946,318.832 98.863,318.894 98.762 C 319.001 98.587,319.353 98.155,319.400 98.140 C 319.412 98.136,319.422 98.316,319.422 98.540 M322.605 98.928 C 322.436 99.368,322.858 99.443,323.257 99.044 L 323.635 98.667 323.170 98.667 C 322.914 98.667,322.660 98.784,322.605 98.928 M365.263 99.127 C 365.203 99.283,365.199 99.483,365.254 99.572 C 365.392 99.794,365.689 99.776,365.689 99.545 C 365.689 99.442,365.600 99.412,365.492 99.478 C 365.360 99.560,365.334 99.477,365.415 99.222 C 365.554 98.786,365.425 98.705,365.263 99.127 M323.822 99.378 C 323.891 99.489,325.780 99.556,328.899 99.556 C 332.092 99.556,333.867 99.492,333.867 99.378 C 333.867 99.263,332.056 99.200,328.790 99.200 C 325.450 99.200,323.750 99.261,323.822 99.378 M385.499 99.535 C 385.593 99.689,385.502 99.716,385.170 99.632 C 384.856 99.553,384.711 99.589,384.711 99.744 C 384.711 99.868,384.760 99.922,384.819 99.863 C 384.878 99.803,385.090 99.907,385.290 100.093 C 385.490 100.280,385.714 100.395,385.788 100.349 C 385.961 100.242,385.790 99.584,385.551 99.436 C 385.443 99.369,385.421 99.410,385.499 99.535 M318.222 100.010 C 318.222 
100.327,318.290 100.425,318.444 100.332 C 318.583 100.249,318.570 100.318,318.411 100.514 C 318.207 100.765,318.198 100.898,318.368 101.170 C 318.553 101.466,318.531 101.511,318.205 101.511 C 317.947 101.511,317.860 101.429,317.929 101.250 C 317.984 101.107,317.951 100.941,317.857 100.883 C 317.749 100.816,317.746 100.666,317.849 100.482 C 317.951 100.299,317.950 100.066,317.845 99.871 C 317.718 99.633,317.743 99.556,317.949 99.556 C 318.124 99.556,318.222 99.720,318.222 100.010 M360.000 99.851 C 360.000 100.007,360.128 100.090,360.311 100.055 C 360.482 100.022,360.622 99.924,360.622 99.837 C 360.622 99.750,360.482 99.658,360.311 99.633 C 360.134 99.607,360.000 99.701,360.000 99.851 M338.899 100.573 C 338.929 100.791,339.009 101.024,339.077 101.092 C 339.266 101.280,339.218 100.746,339.019 100.444 C 338.880 100.233,338.855 100.259,338.899 100.573 M364.521 100.602 C 364.574 100.689,364.497 100.860,364.350 100.983 C 364.123 101.170,364.100 101.156,364.202 100.890 C 364.288 100.665,364.251 100.598,364.073 100.658 C 363.935 100.704,363.826 100.941,363.831 101.186 C 363.836 101.430,363.743 101.630,363.626 101.630 C 363.508 101.630,363.453 101.523,363.503 101.393 C 363.553 101.262,363.496 101.156,363.377 101.156 C 363.102 101.156,363.927 100.458,364.212 100.450 C 364.328 100.447,364.467 100.515,364.521 100.602 M321.778 100.829 C 321.778 100.932,321.900 100.969,322.049 100.912 C 322.272 100.826,322.285 100.872,322.123 101.175 C 322.015 101.377,321.973 101.588,322.030 101.645 C 322.087 101.702,322.135 101.635,322.136 101.496 C 322.138 101.284,322.163 101.283,322.294 101.490 C 322.416 101.682,322.477 101.650,322.575 101.343 C 322.643 101.128,322.663 100.915,322.618 100.870 C 322.436 100.688,321.778 100.656,321.778 100.829 M358.859 101.625 C 358.798 101.786,358.787 101.958,358.836 102.006 C 358.885 102.055,358.975 101.964,359.037 101.804 C 359.098 101.644,359.214 101.554,359.295 101.604 C 359.376 101.654,359.492 101.613,359.554 101.514 C 359.615 101.415,359.509 
101.333,359.318 101.333 C 359.128 101.333,358.921 101.465,358.859 101.625 M384.118 101.464 C 384.055 101.566,384.125 101.749,384.273 101.872 C 384.638 102.175,384.763 102.005,384.470 101.605 C 384.324 101.405,384.188 101.351,384.118 101.464 M339.802 102.850 C 339.640 103.097,339.473 103.265,339.430 103.223 C 339.325 103.117,339.785 102.400,339.958 102.400 C 340.034 102.400,339.964 102.602,339.802 102.850 M363.454 102.635 C 363.312 103.006,363.490 103.248,363.729 103.009 C 363.830 102.908,363.832 102.738,363.735 102.581 C 363.599 102.361,363.556 102.369,363.454 102.635 M384.910 102.778 C 384.898 102.937,384.996 103.156,385.126 103.264 C 385.305 103.413,385.363 103.398,385.363 103.205 C 385.363 103.064,385.302 102.986,385.228 103.032 C 385.154 103.078,385.057 102.974,385.012 102.802 C 384.944 102.540,384.927 102.536,384.910 102.778 M321.242 103.104 C 321.232 103.452,320.533 103.809,320.533 103.467 C 320.533 103.369,320.633 103.288,320.756 103.286 C 320.904 103.284,320.890 103.228,320.713 103.116 C 320.506 102.985,320.534 102.955,320.847 102.979 C 321.065 102.996,321.243 103.052,321.242 103.104 M379.271 103.414 C 379.271 103.638,379.315 103.828,379.369 103.836 C 379.642 103.874,380.031 103.978,380.293 104.084 C 380.521 104.176,380.580 104.105,380.560 103.768 C 380.542 103.473,380.453 103.365,380.282 103.431 C 380.134 103.487,380.078 103.450,380.147 103.339 C 380.215 103.229,380.058 103.121,379.767 103.078 C 379.347 103.017,379.271 103.068,379.271 103.414 M359.188 103.840 C 358.985 103.920,358.947 103.980,359.096 103.988 C 359.235 103.994,359.300 104.048,359.241 104.107 C 359.011 104.337,358.733 103.975,358.945 103.719 C 359.058 103.583,359.237 103.524,359.342 103.588 C 359.457 103.659,359.396 103.759,359.188 103.840 M337.244 104.186 C 337.244 104.268,337.162 104.385,337.062 104.447 C 336.962 104.509,336.902 104.454,336.929 104.324 C 336.956 104.195,336.872 104.111,336.742 104.138 C 336.613 104.165,336.555 104.108,336.615 104.012 C 336.736 103.816,337.244 
103.957,337.244 104.186 M317.465 104.966 C 317.361 105.077,317.131 105.168,316.954 105.168 C 316.661 105.168,316.666 105.131,317.015 104.760 C 317.279 104.479,317.439 104.416,317.526 104.558 C 317.596 104.671,317.569 104.855,317.465 104.966 M362.735 104.967 C 362.679 105.114,362.518 105.176,362.356 105.114 C 362.178 105.046,362.119 105.078,362.196 105.202 C 362.263 105.310,362.136 105.532,361.914 105.697 L 361.511 105.996 361.911 105.904 C 362.131 105.853,362.363 105.896,362.427 106.000 C 362.492 106.105,362.460 106.137,362.354 106.071 C 362.249 106.006,362.135 106.038,362.101 106.142 C 362.026 106.367,361.422 106.172,361.422 105.923 C 361.422 105.691,362.413 104.711,362.647 104.711 C 362.750 104.711,362.789 104.826,362.735 104.967 M336.889 105.266 C 336.889 105.156,336.966 105.067,337.061 105.067 C 337.156 105.067,337.187 105.187,337.131 105.333 C 337.067 105.499,336.817 105.592,336.470 105.579 C 336.163 105.568,336.052 105.522,336.224 105.477 C 336.407 105.429,336.486 105.308,336.412 105.187 C 336.343 105.070,336.420 105.085,336.588 105.222 C 336.784 105.381,336.889 105.396,336.889 105.266 M302.578 105.644 C 302.578 105.947,302.475 106.044,302.154 106.044 L 301.730 106.044 302.109 105.663 C 302.595 105.175,302.578 105.176,302.578 105.644 M383.370 105.612 C 383.314 105.703,383.366 105.778,383.486 105.778 C 383.606 105.778,383.667 105.815,383.622 105.860 C 383.404 106.077,382.781 105.735,382.988 105.512 C 383.219 105.261,383.536 105.344,383.370 105.612 M367.781 105.778 C 368.052 106.212,367.705 106.211,367.321 105.778 C 367.148 105.582,367.119 105.578,367.214 105.761 C 367.300 105.926,367.274 105.967,367.136 105.882 C 367.021 105.811,366.974 105.679,367.030 105.588 C 367.200 105.313,367.546 105.401,367.781 105.778 M320.144 105.791 C 320.109 105.896,320.004 105.982,319.911 105.982 C 319.818 105.982,319.714 105.896,319.679 105.791 C 319.643 105.686,319.748 105.600,319.911 105.600 C 320.074 105.600,320.179 105.686,320.144 105.791 M351.422 105.900 C 351.251 
105.945,350.971 105.945,350.800 105.900 C 350.629 105.856,350.769 105.819,351.111 105.819 C 351.453 105.819,351.593 105.856,351.422 105.900 M366.539 106.339 C 366.501 106.453,366.276 106.575,366.038 106.611 C 365.619 106.675,365.617 106.669,365.959 106.411 C 366.384 106.090,366.632 106.059,366.539 106.339 M370.889 106.641 C 370.957 106.900,370.901 106.957,370.644 106.890 C 370.269 106.792,370.272 106.798,370.419 106.416 C 370.549 106.077,370.769 106.183,370.889 106.641 M350.486 106.671 C 350.432 106.758,350.264 106.781,350.113 106.723 C 349.897 106.640,349.887 106.586,350.065 106.476 C 350.335 106.309,350.626 106.444,350.486 106.671 M355.836 106.607 C 355.812 106.770,355.652 106.924,355.481 106.950 C 355.185 106.993,355.185 106.978,355.479 106.653 C 355.857 106.236,355.891 106.231,355.836 106.607 M341.195 107.835 C 341.168 108.159,341.247 108.267,341.513 108.267 C 341.721 108.267,341.867 108.148,341.867 107.978 C 341.867 107.740,341.819 107.729,341.596 107.914 C 341.371 108.101,341.343 108.092,341.430 107.865 C 341.488 107.715,341.467 107.549,341.383 107.497 C 341.299 107.446,341.215 107.598,341.195 107.835 M380.444 107.682 C 380.298 107.736,380.038 107.902,379.867 108.051 C 379.696 108.200,379.556 108.245,379.556 108.152 C 379.556 107.858,379.968 107.555,380.347 107.570 C 380.603 107.579,380.632 107.613,380.444 107.682 M381.333 108.167 C 381.333 108.308,381.413 108.473,381.511 108.533 C 381.609 108.594,381.689 108.478,381.689 108.277 C 381.689 108.076,381.609 107.911,381.511 107.911 C 381.413 107.911,381.333 108.026,381.333 108.167 M357.054 108.724 C 357.505 109.175,357.375 109.369,356.851 109.026 C 356.586 108.853,356.428 108.806,356.499 108.921 C 356.570 109.037,356.534 109.189,356.420 109.260 C 356.295 109.337,356.259 109.311,356.331 109.195 C 356.397 109.089,356.354 108.943,356.237 108.871 C 356.088 108.778,356.127 108.698,356.367 108.604 C 356.556 108.530,356.725 108.464,356.743 108.457 C 356.760 108.450,356.900 108.570,357.054 108.724 M366.334 108.885 C 
366.391 109.034,366.549 109.156,366.686 109.156 C 367.033 109.156,366.994 109.023,366.582 108.802 C 366.295 108.649,366.249 108.664,366.334 108.885 M297.600 109.067 C 297.466 109.228,297.452 109.333,297.563 109.333 C 297.665 109.333,297.795 109.233,297.852 109.111 C 297.935 108.933,297.976 108.933,298.059 109.111 C 298.116 109.233,298.286 109.333,298.437 109.333 C 298.660 109.333,298.669 109.284,298.489 109.067 C 298.367 108.920,298.167 108.800,298.044 108.800 C 297.922 108.800,297.722 108.920,297.600 109.067 M347.501 109.465 C 347.218 109.747,347.200 109.746,347.200 109.450 C 347.200 109.276,347.280 109.184,347.378 109.244 C 347.476 109.305,347.556 109.256,347.556 109.136 C 347.556 109.017,347.611 108.974,347.678 109.041 C 347.746 109.109,347.666 109.299,347.501 109.465 M370.006 109.281 C 369.889 109.585,369.662 109.574,369.540 109.256 C 369.476 109.088,369.553 109.010,369.768 109.026 C 369.957 109.040,370.057 109.148,370.006 109.281 M315.911 109.342 C 315.911 109.461,316.491 109.517,317.511 109.497 C 318.391 109.479,319.111 109.395,319.111 109.310 C 319.111 109.225,318.391 109.156,317.511 109.156 C 316.512 109.156,315.911 109.226,315.911 109.342 M348.667 109.273 C 348.544 109.323,348.444 109.446,348.444 109.548 C 348.444 109.669,348.526 109.664,348.683 109.534 C 348.814 109.426,348.974 109.389,349.039 109.453 C 349.103 109.518,349.156 109.477,349.156 109.363 C 349.156 109.146,349.037 109.124,348.667 109.273 M349.559 109.867 L 349.518 110.578 350.226 110.578 C 350.615 110.578,350.933 110.493,350.933 110.390 C 350.933 110.286,351.006 110.246,351.094 110.301 C 351.183 110.356,351.170 110.218,351.064 109.995 C 350.921 109.691,350.736 109.596,350.325 109.615 C 349.916 109.634,349.890 109.621,350.222 109.562 C 351.002 109.425,351.054 109.156,350.302 109.156 C 349.608 109.156,349.600 109.164,349.559 109.867 M351.111 109.345 C 351.111 109.681,351.473 110.222,351.695 110.219 C 351.853 110.218,351.849 110.177,351.680 110.070 C 351.553 109.990,351.436 109.751,351.420 
109.540 C 351.391 109.158,351.111 108.982,351.111 109.345 M353.563 109.590 C 352.394 110.201,352.242 110.261,352.612 109.965 C 352.802 109.813,353.026 109.689,353.111 109.689 C 353.196 109.689,353.208 109.597,353.139 109.484 C 353.060 109.356,353.090 109.327,353.217 109.405 C 353.330 109.475,353.422 109.450,353.422 109.349 C 353.422 109.249,353.545 109.214,353.696 109.272 C 353.916 109.356,353.890 109.419,353.563 109.590 M72.492 109.967 C 72.345 110.114,72.526 110.578,72.730 110.578 C 72.817 110.578,72.879 110.518,72.868 110.444 C 72.812 110.088,72.913 109.983,73.083 110.222 C 73.251 110.459,73.263 110.459,73.199 110.222 C 73.121 109.939,72.680 109.779,72.492 109.967 M350.756 110.133 C 350.756 110.311,350.578 110.400,350.222 110.400 C 349.867 110.400,349.689 110.311,349.689 110.133 C 349.689 109.956,349.867 109.867,350.222 109.867 C 350.578 109.867,350.756 109.956,350.756 110.133 M373.249 110.037 C 373.098 110.281,373.232 110.464,373.494 110.374 C 373.602 110.337,373.616 110.212,373.528 110.070 C 373.417 109.891,373.344 109.883,373.249 110.037 M15.113 111.981 C 14.787 112.072,14.733 112.143,14.908 112.251 C 15.055 112.342,15.255 112.288,15.433 112.110 C 15.591 111.952,15.683 111.830,15.638 111.840 C 15.593 111.850,15.356 111.913,15.113 111.981 M72.634 111.981 C 72.526 112.155,73.059 112.422,73.211 112.271 C 73.262 112.220,73.190 112.177,73.052 112.175 C 72.844 112.173,72.847 112.142,73.067 112.000 C 73.296 111.851,73.292 111.827,73.033 111.825 C 72.867 111.823,72.688 111.894,72.634 111.981 M72.054 112.470 C 71.938 112.517,71.883 112.663,71.933 112.794 C 71.983 112.924,71.956 113.073,71.872 113.125 C 71.788 113.176,71.704 113.025,71.684 112.787 C 71.659 112.478,71.736 112.360,71.958 112.370 C 72.128 112.377,72.171 112.422,72.054 112.470 M72.382 113.591 C 72.421 113.708,72.511 113.798,72.582 113.791 C 72.795 113.769,72.754 113.525,72.524 113.449 C 72.407 113.410,72.343 113.474,72.382 113.591 M28.853 116.149 C 28.904 116.281,28.865 116.501,28.767 116.638 C 28.626 
116.837,28.610 116.824,28.689 116.578 C 28.763 116.347,28.687 116.263,28.394 116.254 C 28.016 116.242,28.014 116.235,28.356 116.089 C 28.551 116.005,28.723 115.930,28.737 115.924 C 28.751 115.917,28.803 116.018,28.853 116.149 M71.289 116.366 C 71.289 116.518,71.369 116.594,71.467 116.533 C 71.564 116.473,71.644 116.463,71.644 116.512 C 71.644 116.798,71.277 116.754,71.114 116.449 C 70.984 116.207,70.981 116.089,71.105 116.089 C 71.206 116.089,71.289 116.214,71.289 116.366 M17.036 117.207 C 17.180 117.380,17.144 117.443,16.890 117.462 C 16.666 117.479,16.586 117.403,16.651 117.233 C 16.771 116.921,16.797 116.919,17.036 117.207 M57.998 117.417 C 58.049 117.685,58.012 117.813,57.902 117.745 C 57.799 117.681,57.778 117.723,57.851 117.842 C 57.922 117.957,58.046 118.010,58.125 117.960 C 58.205 117.911,58.312 118.030,58.363 118.224 C 58.423 118.453,58.368 118.578,58.206 118.578 C 58.068 118.578,57.956 118.498,57.956 118.400 C 57.956 118.302,57.836 118.222,57.689 118.222 C 57.387 118.222,57.331 117.944,57.600 117.778 C 57.698 117.717,57.778 117.513,57.778 117.323 C 57.778 116.841,57.898 116.892,57.998 117.417 M59.022 117.602 C 59.022 117.919,58.945 117.991,58.679 117.921 C 58.307 117.824,58.006 117.310,58.362 117.380 C 58.481 117.403,58.559 117.325,58.535 117.206 C 58.511 117.082,58.605 117.033,58.758 117.092 C 58.903 117.148,59.022 117.378,59.022 117.602 M57.646 119.111 C 57.730 119.111,57.742 119.019,57.672 118.906 C 57.593 118.779,57.623 118.749,57.750 118.827 C 57.863 118.897,57.956 119.030,57.956 119.122 C 57.956 119.402,57.480 119.308,57.230 118.978 C 57.030 118.713,57.032 118.700,57.244 118.889 C 57.381 119.011,57.562 119.111,57.646 119.111 M31.444 119.809 C 31.480 119.816,31.400 119.957,31.266 120.122 C 31.054 120.383,31.061 120.408,31.316 120.312 C 31.686 120.174,32.326 120.599,32.026 120.784 C 31.912 120.854,31.776 120.798,31.723 120.660 C 31.663 120.502,31.464 120.439,31.191 120.492 C 30.822 120.562,30.756 120.506,30.756 120.125 C 30.756 119.697,30.800 
119.676,31.444 119.809 M71.172 120.090 C 71.015 120.429,71.016 120.533,71.178 120.533 C 71.294 120.533,71.406 120.333,71.426 120.089 C 71.472 119.533,71.430 119.534,71.172 120.090 M57.022 120.993 C 56.853 121.162,56.513 120.888,56.639 120.683 C 56.714 120.562,56.814 120.565,56.942 120.693 C 57.046 120.797,57.083 120.932,57.022 120.993 M31.207 120.877 C 31.151 120.968,31.286 121.076,31.508 121.116 C 31.883 121.185,31.881 121.189,31.480 121.173 C 31.061 121.156,30.701 120.711,31.107 120.711 C 31.218 120.711,31.264 120.786,31.207 120.877 M69.995 120.972 C 70.071 121.055,70.039 121.070,69.923 121.003 C 69.687 120.868,69.451 121.167,69.415 121.649 C 69.401 121.838,69.483 121.936,69.612 121.887 C 69.744 121.836,69.808 121.922,69.774 122.101 C 69.742 122.266,69.710 122.443,69.702 122.495 C 69.673 122.703,69.037 122.576,69.037 122.362 C 69.037 122.236,69.090 122.133,69.154 122.133 C 69.218 122.133,69.256 121.873,69.238 121.556 C 69.192 120.748,69.544 120.476,69.995 120.972 M55.191 122.677 C 55.019 122.867,54.756 123.022,54.606 123.022 C 54.415 123.022,54.489 122.860,54.853 122.480 C 55.404 121.905,55.722 122.090,55.191 122.677 M68.458 122.538 C 68.436 122.750,68.192 122.709,68.116 122.480 C 68.076 122.363,68.140 122.299,68.258 122.338 C 68.375 122.377,68.465 122.467,68.458 122.538 M52.872 123.050 C 52.797 123.171,52.823 123.207,52.935 123.137 C 53.040 123.073,53.157 123.116,53.197 123.234 C 53.242 123.370,53.311 123.337,53.384 123.147 C 53.517 122.800,53.078 122.717,52.872 123.050 M35.133 123.348 C 35.180 123.780,35.229 123.812,35.354 123.494 C 35.478 123.179,35.494 123.179,35.742 123.484 C 35.899 123.678,35.914 123.750,35.778 123.668 C 35.656 123.595,35.556 123.619,35.556 123.723 C 35.556 123.826,35.481 123.911,35.390 123.911 C 35.169 123.911,34.667 123.398,34.667 123.172 C 34.667 122.925,35.106 123.090,35.133 123.348 M51.207 123.378 C 51.035 123.830,51.038 124.322,51.212 124.148 C 51.321 124.039,51.373 123.800,51.326 123.619 C 51.279 123.437,51.226 123.329,51.207 123.378 
M18.958 124.142 C 18.925 124.357,18.828 124.493,18.742 124.444 C 18.556 124.339,18.474 123.902,18.601 123.696 C 18.766 123.429,19.018 123.745,18.958 124.142 M52.367 123.892 C 52.313 123.980,52.392 124.099,52.543 124.157 C 52.770 124.244,52.778 124.306,52.587 124.513 C 52.280 124.844,52.076 124.699,52.167 124.213 C 52.262 123.708,52.251 123.733,52.366 123.733 C 52.421 123.733,52.421 123.805,52.367 123.892 M51.127 124.711 C 50.967 124.956,50.908 125.156,50.996 125.156 C 51.204 125.156,51.649 124.623,51.444 124.619 C 51.359 124.618,51.409 124.539,51.556 124.444 C 51.702 124.350,51.731 124.271,51.620 124.269 C 51.509 124.268,51.288 124.467,51.127 124.711 M50.398 124.625 C 50.337 124.724,50.192 124.749,50.077 124.680 C 49.957 124.608,49.972 124.683,50.111 124.855 C 50.370 125.174,50.763 125.278,50.578 124.978 C 50.517 124.880,50.597 124.800,50.756 124.800 C 50.914 124.800,50.994 124.720,50.933 124.622 C 50.792 124.393,50.541 124.395,50.398 124.625 M20.844 124.924 C 21.045 125.036,21.005 125.333,20.790 125.333 C 20.686 125.333,20.651 125.253,20.711 125.156 C 20.772 125.058,20.688 124.978,20.525 124.978 C 20.330 124.978,20.265 125.072,20.334 125.254 C 20.395 125.412,20.365 125.483,20.265 125.421 C 20.168 125.361,20.089 125.192,20.089 125.046 C 20.089 124.899,20.009 124.828,19.911 124.889 C 19.813 124.949,19.733 124.913,19.733 124.807 C 19.733 124.634,20.465 124.711,20.844 124.924 M67.484 124.909 C 67.401 125.124,67.451 125.168,67.687 125.086 C 67.985 124.982,67.985 124.989,67.686 125.221 C 67.499 125.366,67.417 125.584,67.483 125.755 C 67.549 125.929,67.503 126.044,67.367 126.044 C 67.243 126.044,67.189 125.996,67.248 125.937 C 67.307 125.878,67.282 125.638,67.193 125.404 C 67.087 125.124,67.094 124.978,67.215 124.978 C 67.316 124.978,67.349 124.898,67.289 124.800 C 67.228 124.702,67.272 124.622,67.386 124.622 C 67.504 124.622,67.546 124.746,67.484 124.909 M38.835 125.283 C 38.899 125.452,38.895 125.532,38.826 125.463 C 38.756 125.393,38.579 125.436,38.432 125.557 C 
38.198 125.749,38.191 125.726,38.377 125.378 C 38.633 124.897,38.683 124.887,38.835 125.283 M44.622 125.212 C 44.622 125.489,45.099 125.499,45.531 125.232 C 45.659 125.153,45.690 125.228,45.620 125.447 C 45.477 125.898,46.034 126.266,46.427 125.979 C 46.614 125.842,46.658 125.664,46.564 125.422 C 46.452 125.136,46.423 125.126,46.412 125.367 C 46.396 125.753,46.107 125.935,45.940 125.664 C 45.860 125.534,45.889 125.504,46.017 125.583 C 46.130 125.653,46.222 125.625,46.222 125.522 C 46.222 125.418,46.122 125.332,46.000 125.331 C 45.851 125.329,45.865 125.269,46.044 125.149 C 46.245 125.015,46.184 124.991,45.800 125.055 C 45.520 125.102,45.247 125.183,45.195 125.235 C 45.142 125.287,44.992 125.241,44.861 125.132 C 44.678 124.981,44.622 125.000,44.622 125.212 M49.169 126.163 C 49.146 126.326,48.986 126.480,48.814 126.505 C 48.518 126.549,48.518 126.534,48.812 126.209 C 49.190 125.791,49.225 125.787,49.169 126.163 M22.143 127.973 C 22.206 128.203,22.409 128.543,22.595 128.729 C 22.971 129.104,23.260 129.193,22.970 128.844 C 22.869 128.722,22.779 128.562,22.771 128.489 C 22.762 128.416,22.709 128.376,22.652 128.400 C 22.595 128.424,22.432 128.244,22.289 128.000 C 22.035 127.565,22.032 127.564,22.143 127.973 M21.882 128.400 C 22.003 128.669,22.224 128.889,22.374 128.889 C 22.609 128.889,22.612 128.859,22.390 128.667 C 22.249 128.544,22.027 128.324,21.898 128.178 C 21.701 127.955,21.698 127.992,21.882 128.400 M64.913 128.302 C 64.709 128.384,64.709 128.426,64.913 128.572 C 65.099 128.704,65.083 128.721,64.844 128.645 C 64.672 128.589,64.533 128.634,64.533 128.746 C 64.533 128.857,64.491 128.906,64.440 128.855 C 64.259 128.674,64.576 128.178,64.863 128.192 C 65.077 128.202,65.091 128.232,64.913 128.302 M63.918 129.411 C 63.861 129.503,63.956 129.657,64.128 129.754 C 64.300 129.850,64.381 129.989,64.308 130.063 C 64.234 130.136,64.048 130.042,63.895 129.854 C 63.657 129.562,63.630 129.557,63.712 129.820 C 63.771 130.006,63.724 130.112,63.594 130.087 C 63.475 130.063,63.398 
130.144,63.422 130.267 C 63.447 130.389,63.407 130.479,63.333 130.468 C 63.260 130.456,63.160 130.462,63.111 130.481 C 62.646 130.663,62.397 130.617,62.590 130.386 C 62.705 130.247,62.911 130.133,63.048 130.133 C 63.186 130.133,63.374 129.933,63.467 129.689 C 63.560 129.444,63.722 129.244,63.828 129.244 C 63.934 129.244,63.975 129.320,63.918 129.411 M63.556 130.795 C 63.360 130.842,63.060 131.005,62.889 131.157 C 62.502 131.500,62.476 131.327,62.857 130.946 C 63.011 130.792,63.311 130.676,63.524 130.687 C 63.884 130.706,63.886 130.714,63.556 130.795 M26.224 132.441 C 26.165 132.537,26.198 132.766,26.297 132.951 C 26.395 133.135,26.439 133.337,26.394 133.399 C 26.348 133.461,26.364 133.472,26.428 133.424 C 26.492 133.376,26.672 133.436,26.828 133.557 C 27.081 133.754,27.084 133.743,26.856 133.453 C 26.653 133.194,26.647 133.100,26.827 132.989 C 27.118 132.809,27.114 132.612,26.815 132.423 C 26.503 132.225,26.355 132.229,26.224 132.441 M60.138 132.749 C 59.976 132.807,59.992 132.936,60.203 133.258 C 60.378 133.524,60.410 133.689,60.287 133.689 C 60.178 133.689,60.089 133.576,60.089 133.437 C 60.089 133.299,59.989 133.152,59.867 133.110 C 59.694 133.052,59.700 132.969,59.892 132.736 C 60.028 132.571,60.188 132.489,60.248 132.553 C 60.307 132.618,60.258 132.706,60.138 132.749 M29.398 133.422 C 29.289 133.704,28.978 133.792,28.978 133.541 C 28.978 133.353,29.172 133.156,29.357 133.156 C 29.436 133.156,29.454 133.276,29.398 133.422 M58.535 133.806 C 58.369 133.873,58.382 133.963,58.588 134.169 C 58.738 134.319,58.807 134.493,58.740 134.554 C 58.673 134.616,58.582 134.566,58.538 134.443 C 58.494 134.321,58.373 134.273,58.268 134.337 C 58.156 134.407,58.130 134.371,58.205 134.250 C 58.276 134.135,58.230 134.043,58.099 134.042 C 57.930 134.040,57.940 133.992,58.133 133.867 C 58.280 133.772,58.480 133.699,58.578 133.706 C 58.676 133.712,58.656 133.757,58.535 133.806 M59.836 133.989 C 59.818 134.111,59.734 134.274,59.651 134.350 C 59.567 134.427,59.556 134.389,59.626 134.267 
C 59.706 134.129,59.649 134.044,59.477 134.044 C 59.325 134.044,59.200 133.982,59.200 133.905 C 59.200 133.828,59.350 133.765,59.534 133.765 C 59.718 133.765,59.854 133.866,59.836 133.989 M56.806 134.749 C 56.373 135.201,56.361 135.253,56.656 135.368 C 56.955 135.484,56.950 135.516,56.580 135.817 C 56.250 136.087,56.229 136.160,56.456 136.247 C 56.607 136.304,56.686 136.281,56.632 136.194 C 56.479 135.945,57.000 135.828,57.235 136.058 C 57.352 136.173,57.433 136.187,57.416 136.089 C 57.348 135.709,57.655 135.028,57.763 135.319 C 57.837 135.515,57.970 135.555,58.222 135.455 C 58.524 135.335,58.530 135.312,58.267 135.301 C 58.096 135.295,57.956 135.209,57.956 135.111 C 57.956 134.468,57.280 134.255,56.806 134.749 M55.630 135.503 C 55.394 135.696,55.295 135.804,55.411 135.745 C 55.528 135.686,55.705 135.779,55.806 135.952 C 55.971 136.236,55.990 136.237,55.995 135.969 C 55.998 135.805,56.112 135.645,56.249 135.613 C 56.445 135.568,56.452 135.513,56.279 135.354 C 56.114 135.203,55.954 135.239,55.630 135.503 M32.475 135.478 C 32.590 135.593,32.641 135.798,32.589 135.933 C 32.538 136.068,32.584 136.178,32.692 136.178 C 33.012 136.178,32.916 135.515,32.578 135.386 C 32.339 135.295,32.315 135.317,32.475 135.478 M31.856 135.933 C 31.728 136.072,31.536 136.131,31.429 136.065 C 31.313 135.994,31.285 136.027,31.359 136.148 C 31.489 136.358,32.291 136.324,32.236 136.111 C 32.139 135.738,32.066 135.704,31.856 135.933 M54.074 136.011 C 53.858 136.212,53.728 136.453,53.785 136.545 C 53.842 136.638,53.693 136.624,53.453 136.515 C 52.910 136.267,53.305 135.684,54.034 135.659 L 54.467 135.644 54.074 136.011 M32.267 136.533 C 32.206 136.631,32.277 136.711,32.423 136.711 C 32.570 136.711,32.740 136.631,32.800 136.533 C 32.860 136.436,32.790 136.356,32.643 136.356 C 32.497 136.356,32.327 136.436,32.267 136.533 M37.296 136.831 C 37.473 136.902,37.478 136.981,37.319 137.173 C 37.193 137.324,37.183 137.422,37.293 137.422 C 37.509 137.422,37.709 136.968,37.558 136.817 C 37.500 
136.759,37.365 136.717,37.259 136.725 C 37.153 136.733,37.170 136.781,37.296 136.831 M43.381 137.687 C 43.477 137.627,43.556 137.669,43.556 137.778 C 43.556 137.888,43.416 137.955,43.244 137.927 C 43.073 137.899,42.826 137.943,42.696 138.025 C 42.555 138.112,42.373 138.088,42.251 137.965 C 42.094 137.806,42.119 137.785,42.358 137.876 C 42.602 137.969,42.645 137.928,42.553 137.688 C 42.450 137.421,42.485 137.408,42.821 137.588 C 43.034 137.701,43.286 137.746,43.381 137.687 M45.471 137.631 C 45.379 137.907,44.444 138.029,44.444 137.765 C 44.444 137.653,44.552 137.603,44.683 137.653 C 44.814 137.704,45.034 137.674,45.172 137.586 C 45.505 137.375,45.554 137.383,45.471 137.631 M47.941 137.719 C 47.720 137.940,47.821 138.056,48.267 138.093 C 48.760 138.133,48.869 137.876,48.430 137.708 C 48.082 137.574,48.085 137.574,47.941 137.719 M40.044 138.607 C 39.737 138.731,39.766 138.963,40.089 138.963 C 40.236 138.963,40.356 138.856,40.356 138.726 C 40.356 138.596,40.336 138.495,40.311 138.503 C 40.287 138.511,40.167 138.557,40.044 138.607 " stroke="none" fill="#049cdc" fill-rule="evenodd"></path><path id="path1" d="M357.689 75.822 C 357.689 75.969,357.819 76.089,357.977 76.089 C 358.214 76.089,358.226 76.041,358.044 75.822 C 357.923 75.676,357.793 75.556,357.756 75.556 C 357.719 75.556,357.689 75.676,357.689 75.822 M321.956 85.600 C 321.774 85.818,321.786 85.867,322.023 85.867 C 322.181 85.867,322.311 85.747,322.311 85.600 C 322.311 85.453,322.281 85.333,322.244 85.333 C 322.207 85.333,322.077 85.453,321.956 85.600 M365.333 87.534 C 365.333 87.571,365.453 87.700,365.600 87.822 C 365.818 88.003,365.867 87.991,365.867 87.755 C 365.867 87.596,365.747 87.467,365.600 87.467 C 365.453 87.467,365.333 87.497,365.333 87.534 M321.067 88.267 C 320.885 88.485,320.898 88.533,321.134 88.533 C 321.292 88.533,321.422 88.413,321.422 88.267 C 321.422 88.120,321.392 88.000,321.355 88.000 C 321.318 88.000,321.188 88.120,321.067 88.267 M320.178 90.933 C 319.997 91.152,320.009 91.200,320.245 91.200 
C 320.404 91.200,320.533 91.080,320.533 90.933 C 320.533 90.787,320.503 90.667,320.466 90.667 C 320.429 90.667,320.300 90.787,320.178 90.933 M31.647 102.236 L 31.655 113.271 32.080 113.697 C 32.364 113.980,32.527 114.376,32.568 114.883 C 32.615 115.465,32.723 115.686,33.026 115.820 C 33.346 115.962,33.422 116.141,33.422 116.750 C 33.422 117.399,33.482 117.519,33.851 117.612 C 34.107 117.676,34.324 117.893,34.388 118.149 C 34.456 118.422,34.624 118.578,34.848 118.578 C 35.105 118.578,35.200 118.698,35.200 119.022 C 35.200 119.378,35.289 119.467,35.644 119.467 C 36.000 119.467,36.089 119.556,36.089 119.911 C 36.089 120.288,36.171 120.356,36.633 120.356 C 36.965 120.356,37.132 120.428,37.062 120.541 C 36.866 120.858,37.286 121.169,37.976 121.215 C 38.742 121.267,38.981 121.379,38.834 121.617 C 38.776 121.710,38.835 121.765,38.965 121.738 C 39.094 121.711,39.183 121.780,39.162 121.892 C 39.137 122.026,39.523 122.101,40.300 122.112 C 41.280 122.127,41.519 122.192,41.736 122.502 C 41.900 122.736,42.122 122.842,42.332 122.787 C 42.516 122.739,42.628 122.763,42.580 122.841 C 42.340 123.230,45.757 122.938,46.160 122.535 C 46.544 122.151,47.661 121.990,48.978 122.130 C 49.173 122.151,49.276 122.110,49.206 122.038 C 49.010 121.838,49.822 121.282,50.409 121.215 C 50.795 121.171,50.948 121.050,50.990 120.756 C 51.031 120.466,51.166 120.356,51.479 120.356 C 51.717 120.356,51.911 120.271,51.911 120.167 C 51.911 120.064,51.982 120.023,52.069 120.076 C 52.219 120.169,52.717 119.706,52.706 119.484 C 52.703 119.425,52.805 119.402,52.932 119.431 C 53.267 119.508,53.741 118.971,53.619 118.653 C 53.562 118.504,53.591 118.429,53.684 118.486 C 53.978 118.667,54.578 117.893,54.578 117.333 C 54.578 116.841,54.743 116.712,55.301 116.768 C 55.356 116.774,55.421 116.405,55.444 115.949 C 55.493 115.012,55.492 115.013,55.939 114.948 C 56.253 114.902,56.269 114.404,56.315 103.050 L 56.363 91.200 44.001 91.200 L 31.639 91.200 31.647 102.236 M368.889 91.978 C 368.889 92.015,369.009 92.145,369.156 
92.267 C 369.374 92.448,369.422 92.436,369.422 92.200 C 369.422 92.041,369.302 91.911,369.156 91.911 C 369.009 91.911,368.889 91.941,368.889 91.978 M319.289 93.600 C 319.108 93.818,319.120 93.867,319.356 93.867 C 319.515 93.867,319.644 93.747,319.644 93.600 C 319.644 93.453,319.614 93.333,319.577 93.333 C 319.540 93.333,319.411 93.453,319.289 93.600 M318.400 96.267 C 318.219 96.485,318.231 96.533,318.467 96.533 C 318.626 96.533,318.756 96.413,318.756 96.267 C 318.756 96.120,318.725 96.000,318.688 96.000 C 318.652 96.000,318.522 96.120,318.400 96.267 M377.956 104.267 C 377.774 104.485,377.786 104.533,378.023 104.533 C 378.181 104.533,378.311 104.413,378.311 104.267 C 378.311 104.120,378.281 104.000,378.244 104.000 C 378.207 104.000,378.077 104.120,377.956 104.267 M381.689 108.200 C 381.689 108.436,381.737 108.448,381.956 108.267 C 382.102 108.145,382.222 108.015,382.222 107.978 C 382.222 107.941,382.102 107.911,381.956 107.911 C 381.809 107.911,381.689 108.041,381.689 108.200 M344.000 109.756 C 344.000 109.793,344.120 109.923,344.267 110.044 C 344.485 110.226,344.533 110.214,344.533 109.977 C 344.533 109.819,344.413 109.689,344.267 109.689 C 344.120 109.689,344.000 109.719,344.000 109.756 M355.022 109.977 C 355.022 110.214,355.070 110.226,355.289 110.044 C 355.436 109.923,355.556 109.793,355.556 109.756 C 355.556 109.719,355.436 109.689,355.289 109.689 C 355.142 109.689,355.022 109.819,355.022 109.977 M57.067 117.600 C 56.885 117.818,56.898 117.867,57.134 117.867 C 57.292 117.867,57.422 117.747,57.422 117.600 C 57.422 117.453,57.392 117.333,57.355 117.333 C 57.318 117.333,57.188 117.453,57.067 117.600 M69.689 123.311 C 69.689 123.547,69.737 123.559,69.956 123.378 C 70.102 123.256,70.222 123.126,70.222 123.089 C 70.222 123.052,70.102 123.022,69.956 123.022 C 69.809 123.022,69.689 123.152,69.689 123.311 M39.467 124.711 C 39.467 124.858,39.596 124.978,39.755 124.978 C 39.991 124.978,40.003 124.930,39.822 124.711 C 39.700 124.564,39.571 124.444,39.534 124.444 C 39.497 
124.444,39.467 124.564,39.467 124.711 M18.667 124.867 C 18.667 124.904,18.787 125.034,18.933 125.156 C 19.152 125.337,19.200 125.325,19.200 125.088 C 19.200 124.930,19.080 124.800,18.933 124.800 C 18.787 124.800,18.667 124.830,18.667 124.867 M25.778 132.867 C 25.778 132.904,25.898 133.034,26.044 133.156 C 26.263 133.337,26.311 133.325,26.311 133.088 C 26.311 132.930,26.191 132.800,26.044 132.800 C 25.898 132.800,25.778 132.830,25.778 132.867 M59.022 134.866 C 59.022 135.102,59.070 135.115,59.289 134.933 C 59.436 134.812,59.556 134.682,59.556 134.645 C 59.556 134.608,59.436 134.578,59.289 134.578 C 59.142 134.578,59.022 134.708,59.022 134.866 M37.333 138.200 C 37.333 138.237,37.453 138.367,37.600 138.489 C 37.818 138.670,37.867 138.658,37.867 138.422 C 37.867 138.263,37.747 138.133,37.600 138.133 C 37.453 138.133,37.333 138.164,37.333 138.200 " stroke="none" fill="#94daf5" fill-rule="evenodd"></path><path id="path2" d="M36.978 39.200 C 36.978 39.556,36.889 39.644,36.533 39.644 L 36.089 39.644 36.089 43.111 L 36.089 46.578 36.533 46.578 C 36.890 46.578,36.978 46.666,36.978 47.026 L 36.978 47.474 40.400 47.426 C 43.797 47.378,43.823 47.375,43.879 46.978 C 43.920 46.688,44.055 46.578,44.368 46.578 L 44.800 46.578 44.800 43.111 L 44.800 39.644 44.356 39.644 C 44.000 39.644,43.911 39.556,43.911 39.200 L 43.911 38.756 40.444 38.756 L 36.978 38.756 36.978 39.200 M46.042 47.025 C 45.929 47.162,45.888 47.357,45.951 47.459 C 46.021 47.572,45.854 47.644,45.522 47.644 C 45.060 47.644,44.978 47.712,44.978 48.089 C 44.978 48.444,44.889 48.533,44.533 48.533 L 44.089 48.533 44.089 52.089 L 44.089 55.644 37.422 55.644 L 30.756 55.644 30.756 56.089 C 30.756 56.444,30.667 56.533,30.311 56.533 C 29.897 56.533,29.867 56.593,29.867 57.410 C 29.867 58.183,29.820 58.293,29.467 58.343 L 29.067 58.400 29.020 65.822 L 28.974 73.244 29.420 73.244 C 29.837 73.244,29.867 73.303,29.867 74.133 C 29.867 74.963,29.896 75.022,30.311 75.022 C 30.667 75.022,30.756 75.111,30.756 75.467 L 30.756 75.911 
36.978 75.911 L 43.200 75.911 43.200 80.356 L 43.200 84.800 43.644 84.800 C 44.001 84.800,44.089 84.889,44.089 85.247 L 44.089 85.693 50.622 85.647 L 57.156 85.600 57.212 85.200 C 57.253 84.910,57.389 84.800,57.701 84.800 C 58.100 84.800,58.133 84.733,58.133 83.923 C 58.133 83.150,58.180 83.040,58.533 82.990 L 58.933 82.933 58.970 77.644 C 58.993 74.365,58.943 72.356,58.837 72.356 C 58.743 72.356,58.687 72.296,58.711 72.222 C 58.736 72.149,58.684 72.076,58.598 72.060 C 58.352 72.014,58.139 71.596,58.136 71.156 C 58.134 70.848,58.030 70.756,57.689 70.756 C 57.333 70.756,57.244 70.667,57.244 70.311 L 57.244 69.867 53.689 69.867 L 50.133 69.867 50.133 64.889 L 50.133 59.911 52.800 59.911 L 55.467 59.911 55.467 59.467 C 55.467 59.111,55.556 59.022,55.911 59.022 C 56.267 59.022,56.356 58.933,56.356 58.578 C 56.356 58.333,56.436 58.123,56.533 58.111 C 56.631 58.099,56.811 58.079,56.933 58.067 C 57.104 58.050,57.168 56.932,57.207 53.289 L 57.259 48.533 56.807 48.533 C 56.444 48.533,56.356 48.446,56.356 48.089 C 56.356 47.733,56.267 47.644,55.911 47.644 C 55.554 47.644,55.467 47.556,55.467 47.195 L 55.467 46.746 50.857 46.761 C 47.144 46.774,46.208 46.825,46.042 47.025 M59.249 63.117 C 59.273 63.430,59.298 63.433,59.593 63.166 C 59.982 62.814,59.984 62.837,59.568 62.805 C 59.332 62.788,59.232 62.885,59.249 63.117 M124.954 81.911 C 124.940 86.580,124.893 90.211,124.850 89.979 C 124.806 89.748,124.623 89.521,124.444 89.474 C 124.265 89.427,124.071 89.196,124.012 88.961 C 123.943 88.689,123.776 88.533,123.552 88.533 C 123.295 88.533,123.200 88.413,123.200 88.089 C 123.200 87.737,123.110 87.644,122.768 87.644 C 122.455 87.644,122.320 87.534,122.279 87.244 C 122.243 86.987,122.079 86.824,121.822 86.788 C 121.533 86.747,121.422 86.611,121.422 86.299 C 121.422 85.958,121.330 85.867,120.984 85.867 C 120.557 85.867,120.075 85.318,120.303 85.090 C 120.364 85.028,119.964 84.978,119.413 84.978 C 118.649 84.978,118.433 84.921,118.503 84.740 C 118.732 84.141,118.178 84.083,112.690 
84.131 L 107.289 84.178 107.232 84.578 C 107.182 84.931,107.072 84.978,106.299 84.978 C 105.482 84.978,105.422 85.008,105.422 85.422 C 105.422 85.810,105.344 85.867,104.813 85.867 C 104.234 85.867,103.644 86.232,103.644 86.590 C 103.644 86.681,103.444 86.756,103.200 86.756 C 102.848 86.756,102.756 86.845,102.756 87.188 C 102.756 87.500,102.645 87.635,102.356 87.677 C 102.014 87.725,101.948 87.857,101.901 88.578 C 101.853 89.335,101.803 89.422,101.422 89.422 C 101.035 89.422,100.990 89.508,100.931 90.356 C 100.895 90.869,100.851 87.667,100.833 83.239 L 100.800 75.190 96.222 75.209 C 91.727 75.228,91.175 75.295,91.196 75.819 C 91.198 75.870,91.200 81.551,91.200 88.444 L 91.200 100.978 90.756 100.978 C 90.400 100.978,90.311 101.067,90.311 101.422 C 90.311 101.778,90.222 101.867,89.867 101.867 C 89.511 101.867,89.422 101.956,89.422 102.311 L 89.422 102.756 87.012 102.756 C 85.383 102.756,84.638 102.695,84.716 102.570 C 85.157 101.856,83.421 101.662,82.734 102.349 C 82.318 102.765,82.311 102.825,82.311 106.230 L 82.311 109.689 82.756 109.689 C 83.111 109.689,83.200 109.778,83.200 110.133 L 83.200 110.578 88.000 110.578 L 92.800 110.578 92.800 110.133 C 92.800 109.699,92.830 109.689,94.133 109.689 C 95.437 109.689,95.467 109.679,95.467 109.244 C 95.467 108.889,95.556 108.800,95.911 108.800 C 96.267 108.800,96.356 108.711,96.356 108.356 C 96.356 107.998,96.444 107.911,96.808 107.911 C 97.391 107.911,98.133 107.168,98.133 106.586 C 98.133 106.222,98.220 106.133,98.578 106.133 C 98.993 106.133,99.022 106.074,99.022 105.244 C 99.022 104.415,99.052 104.356,99.467 104.356 C 99.902 104.356,99.912 104.326,99.945 102.889 C 99.973 101.667,99.989 101.593,100.042 102.444 C 100.102 103.393,100.138 103.467,100.542 103.467 C 100.946 103.467,100.978 103.531,100.978 104.356 C 100.978 105.185,101.007 105.244,101.422 105.244 C 101.780 105.244,101.867 105.333,101.867 105.697 C 101.867 105.946,102.067 106.349,102.311 106.594 C 102.556 106.838,102.756 107.109,102.756 107.195 C 102.756 
107.546,103.351 107.911,103.924 107.911 C 104.455 107.911,104.533 107.968,104.533 108.356 C 104.533 108.708,104.623 108.800,104.965 108.800 C 105.278 108.800,105.413 108.910,105.454 109.200 C 105.506 109.567,105.617 109.604,106.800 109.652 C 108.039 109.702,108.089 109.722,108.089 110.144 L 108.089 110.584 112.489 110.536 L 116.889 110.489 116.946 110.089 C 116.998 109.719,117.093 109.689,118.215 109.689 C 119.361 109.689,119.434 109.664,119.540 109.244 C 119.623 108.914,119.776 108.795,120.137 108.779 C 121.079 108.740,121.156 108.708,121.156 108.356 C 121.156 108.100,121.293 107.984,121.644 107.944 C 121.989 107.904,122.164 107.760,122.241 107.455 C 122.308 107.187,122.478 107.022,122.686 107.022 C 122.871 107.022,123.032 106.942,123.044 106.844 C 123.057 106.747,123.077 106.567,123.089 106.444 C 123.101 106.322,123.284 106.198,123.496 106.167 C 123.810 106.123,123.902 105.954,123.999 105.252 C 124.099 104.527,124.185 104.379,124.548 104.309 L 124.978 104.227 124.978 106.958 L 124.978 109.689 129.422 109.689 L 133.867 109.689 133.867 101.689 C 133.867 93.628,133.870 93.577,134.444 93.668 C 134.518 93.679,134.568 93.629,134.557 93.556 C 134.447 92.862,135.399 91.936,136.276 91.883 C 137.795 91.790,138.451 91.893,138.425 92.220 C 138.411 92.390,138.580 92.609,138.800 92.707 C 139.045 92.815,139.200 93.040,139.200 93.287 C 139.200 93.597,139.301 93.689,139.644 93.689 L 140.089 93.689 140.089 101.689 L 140.089 109.689 144.444 109.689 L 148.800 109.689 148.800 99.992 L 148.800 90.295 148.356 89.851 C 148.001 89.496,147.911 89.228,147.911 88.525 C 147.911 87.704,147.881 87.644,147.467 87.644 C 147.111 87.644,147.022 87.556,147.022 87.200 C 147.022 86.844,146.933 86.756,146.578 86.756 C 146.222 86.756,146.133 86.667,146.133 86.311 C 146.133 85.956,146.044 85.867,145.689 85.867 C 145.333 85.867,145.244 85.778,145.244 85.422 C 145.244 85.070,145.155 84.978,144.812 84.978 C 144.500 84.978,144.365 84.867,144.323 84.578 L 144.267 84.178 139.956 84.130 L 135.644 84.083 
135.644 84.530 C 135.644 84.930,135.572 84.978,134.969 84.978 C 134.597 84.978,134.197 85.074,134.080 85.191 C 133.914 85.357,133.867 84.136,133.867 79.695 C 133.867 76.554,133.818 73.858,133.759 73.703 C 133.664 73.458,133.105 73.422,129.314 73.422 L 124.978 73.422 124.954 81.911 M192.706 74.584 C 192.594 74.719,192.554 74.912,192.617 75.014 C 192.693 75.138,192.190 75.207,191.122 75.222 C 189.057 75.251,189.071 75.249,189.044 75.511 C 189.032 75.633,189.012 75.809,189.000 75.901 C 188.988 75.993,188.638 76.093,188.222 76.123 C 187.639 76.165,187.427 76.269,187.291 76.578 C 187.184 76.823,186.960 76.978,186.713 76.978 C 186.403 76.978,186.311 77.079,186.311 77.422 C 186.311 77.778,186.222 77.867,185.867 77.867 C 185.511 77.867,185.422 77.956,185.422 78.311 C 185.422 78.667,185.333 78.756,184.978 78.756 C 184.626 78.756,184.533 78.845,184.533 79.188 C 184.533 79.500,184.423 79.635,184.133 79.677 C 183.792 79.725,183.725 79.857,183.679 80.578 C 183.630 81.340,183.583 81.422,183.190 81.422 L 182.756 81.422 182.756 85.333 L 182.756 89.244 183.200 89.244 C 183.615 89.244,183.644 89.304,183.644 90.133 C 183.644 90.963,183.674 91.022,184.089 91.022 C 184.444 91.022,184.533 91.111,184.533 91.467 C 184.533 91.822,184.622 91.911,184.978 91.911 C 185.333 91.911,185.422 92.000,185.422 92.356 C 185.422 92.711,185.511 92.800,185.867 92.800 C 186.222 92.800,186.311 92.889,186.311 93.244 C 186.311 93.600,186.400 93.689,186.756 93.689 C 187.000 93.689,187.200 93.763,187.200 93.854 C 187.200 94.210,187.788 94.578,188.356 94.578 C 188.840 94.578,188.964 94.654,189.010 94.978 C 189.062 95.345,189.173 95.382,190.356 95.430 C 191.592 95.480,191.644 95.500,191.644 95.919 C 191.644 96.324,191.708 96.356,192.533 96.356 C 193.363 96.356,193.422 96.385,193.422 96.800 C 193.422 97.215,193.481 97.244,194.311 97.244 C 195.141 97.244,195.200 97.274,195.200 97.689 C 195.200 98.041,195.290 98.133,195.632 98.133 C 195.945 98.133,196.080 98.244,196.121 98.533 C 196.157 98.790,196.321 98.954,196.578 
98.990 C 197.004 99.051,197.213 100.948,196.800 101.000 C 196.702 101.012,196.522 101.032,196.400 101.044 C 196.278 101.057,196.168 101.167,196.156 101.289 C 196.143 101.411,196.123 101.591,196.111 101.689 C 196.099 101.787,195.889 101.867,195.644 101.867 C 195.289 101.867,195.200 101.956,195.200 102.311 L 195.200 102.756 192.453 102.756 C 189.759 102.756,189.696 102.747,189.261 102.311 C 188.911 101.962,188.634 101.866,187.964 101.864 C 187.118 101.861,186.663 101.706,186.607 101.402 C 186.591 101.316,186.518 101.264,186.444 101.289 C 186.371 101.313,186.311 101.253,186.311 101.156 C 186.311 101.053,185.751 100.978,184.987 100.978 L 183.663 100.978 183.609 101.911 C 183.580 102.424,183.494 102.823,183.419 102.796 C 183.013 102.653,182.800 103.282,182.770 104.709 C 182.752 105.541,182.660 106.249,182.565 106.283 C 182.053 106.462,181.852 106.923,181.903 107.799 L 181.956 108.711 182.789 108.765 C 183.492 108.810,183.631 108.880,183.678 109.209 C 183.730 109.577,183.835 109.603,185.467 109.651 C 187.184 109.701,187.200 109.706,187.200 110.146 L 187.200 110.591 192.400 110.540 C 197.464 110.490,197.600 110.480,197.600 110.133 C 197.600 109.813,197.736 109.773,198.978 109.726 C 200.300 109.676,200.356 109.657,200.356 109.247 C 200.356 108.863,200.442 108.814,201.200 108.765 C 201.921 108.719,202.053 108.652,202.101 108.311 C 202.142 108.022,202.277 107.911,202.590 107.911 C 202.933 107.911,203.022 107.819,203.022 107.467 C 203.022 107.091,203.105 107.022,203.556 107.022 C 203.849 107.022,204.079 106.962,204.068 106.889 C 203.994 106.423,204.145 106.164,204.533 106.090 L 204.978 106.005 204.978 107.847 L 204.978 109.689 209.860 109.689 L 214.742 109.689 214.752 101.748 C 214.762 94.044,214.773 93.799,215.114 93.560 C 215.314 93.420,215.514 93.012,215.578 92.613 C 215.688 91.922,215.702 91.911,216.452 91.911 C 216.888 91.911,217.259 91.816,217.319 91.689 C 217.401 91.511,217.443 91.511,217.526 91.689 C 217.586 91.817,217.960 91.911,218.415 91.911 C 219.134 
91.911,219.200 91.949,219.200 92.356 C 219.200 92.711,219.289 92.800,219.644 92.800 C 220.069 92.800,220.089 92.848,220.089 93.854 C 220.089 94.866,220.358 95.467,220.813 95.467 C 220.912 95.467,220.978 98.320,220.978 102.578 L 220.978 109.689 225.333 109.689 L 229.689 109.689 229.722 106.533 C 229.740 104.798,229.784 103.798,229.820 104.311 C 229.880 105.165,229.922 105.244,230.320 105.244 C 230.666 105.244,230.756 105.336,230.756 105.689 C 230.756 106.044,230.844 106.133,231.200 106.133 C 231.558 106.133,231.644 106.222,231.644 106.586 C 231.644 106.835,231.841 107.235,232.081 107.475 C 232.321 107.715,232.721 107.911,232.970 107.911 C 233.334 107.911,233.422 107.998,233.422 108.356 C 233.422 108.699,233.514 108.800,233.825 108.800 C 234.071 108.800,234.295 108.955,234.402 109.200 C 234.590 109.628,234.622 109.637,236.044 109.668 C 236.916 109.688,236.978 109.718,236.978 110.133 L 236.978 110.578 241.333 110.578 L 245.689 110.578 245.689 110.141 C 245.689 109.722,245.742 109.702,246.978 109.652 C 248.151 109.605,248.272 109.565,248.322 109.209 C 248.369 108.880,248.508 108.810,249.211 108.765 C 249.920 108.719,250.053 108.652,250.101 108.311 C 250.142 108.022,250.277 107.911,250.590 107.911 C 250.933 107.911,251.022 107.819,251.022 107.467 C 251.022 107.111,251.111 107.022,251.467 107.022 C 251.822 107.022,251.911 106.933,251.911 106.578 C 251.911 106.222,252.000 106.133,252.356 106.133 C 252.770 106.133,252.800 106.074,252.800 105.244 C 252.800 104.415,252.830 104.356,253.244 104.356 C 253.659 104.356,253.689 104.296,253.689 103.467 C 253.689 102.637,253.719 102.578,254.133 102.578 L 254.578 102.578 254.608 100.311 C 254.636 98.173,254.644 98.129,254.741 99.527 C 254.828 100.779,254.907 101.061,255.244 101.334 C 255.596 101.618,255.644 101.821,255.644 103.006 C 255.644 104.327,255.654 104.356,256.089 104.356 C 256.524 104.356,256.533 104.385,256.533 105.697 C 256.533 106.891,256.582 107.087,256.978 107.483 C 257.332 107.837,257.422 108.106,257.422 108.808 L 
257.422 109.689 261.778 109.689 L 266.133 109.689 266.133 109.244 C 266.133 108.889,266.222 108.800,266.578 108.800 C 267.012 108.800,267.022 108.770,267.022 107.475 C 267.022 106.298,267.072 106.100,267.467 105.705 C 267.862 105.310,267.911 105.113,267.911 103.919 C 267.911 102.563,267.975 102.448,268.667 102.557 C 268.740 102.568,268.791 102.518,268.779 102.444 C 268.560 101.059,268.837 99.022,269.244 99.022 C 269.658 99.022,269.691 98.956,269.725 98.089 C 269.745 97.576,269.805 98.156,269.858 99.378 C 269.952 101.513,269.971 101.602,270.356 101.657 C 270.731 101.710,270.873 102.778,270.749 104.629 C 270.738 104.793,271.255 105.244,271.454 105.244 C 271.572 105.244,271.644 105.748,271.644 106.578 C 271.644 107.881,271.654 107.911,272.089 107.911 C 272.504 107.911,272.533 107.970,272.533 108.803 L 272.533 109.695 276.933 109.648 L 281.333 109.600 281.388 108.756 C 281.431 108.074,281.505 107.911,281.768 107.911 C 282.195 107.911,282.311 107.571,282.311 106.317 C 282.311 105.464,282.368 105.271,282.640 105.200 C 282.979 105.111,283.194 104.319,283.198 103.144 C 283.199 102.791,283.379 102.320,283.644 101.971 C 284.016 101.485,284.089 101.194,284.089 100.205 C 284.089 99.163,284.129 99.022,284.425 99.022 C 284.862 99.022,284.978 98.689,284.978 97.428 C 284.978 96.575,285.034 96.382,285.306 96.311 C 285.664 96.218,285.867 95.515,285.867 94.372 C 285.867 93.927,285.960 93.642,286.127 93.578 C 286.348 93.493,286.554 93.120,286.680 92.578 C 286.697 92.504,286.721 91.930,286.733 91.301 C 286.754 90.259,286.791 90.153,287.156 90.101 C 287.587 90.040,287.593 90.018,287.624 88.489 C 287.642 87.603,287.690 87.467,287.989 87.467 C 288.476 87.467,288.702 85.476,288.367 84.133 C 288.361 84.109,286.396 84.089,284.000 84.089 L 279.644 84.089 279.644 85.061 C 279.644 85.801,279.587 86.011,279.406 85.942 C 279.161 85.848,278.722 86.244,278.770 86.515 C 279.013 87.869,278.718 90.311,278.311 90.311 L 277.867 90.311 277.867 92.521 C 277.867 94.677,277.857 94.732,277.467 94.788 C 
277.117 94.837,277.054 94.978,276.963 95.911 C 276.906 96.498,276.846 96.878,276.830 96.756 C 276.813 96.633,276.640 96.533,276.444 96.533 C 276.115 96.533,276.089 96.415,276.089 94.906 C 276.089 93.454,276.042 93.217,275.655 92.709 C 275.299 92.242,275.213 91.907,275.183 90.855 C 275.162 90.149,275.052 89.477,274.938 89.363 C 274.824 89.249,274.696 89.076,274.654 88.978 C 274.612 88.880,274.518 88.820,274.444 88.844 C 274.371 88.869,274.305 88.349,274.298 87.689 C 274.281 86.222,274.217 85.850,273.952 85.687 C 273.840 85.617,273.754 85.509,273.763 85.447 C 273.771 85.384,273.718 85.336,273.644 85.340 C 273.571 85.344,273.471 85.084,273.422 84.763 L 273.333 84.178 269.822 84.130 L 266.311 84.081 266.311 84.530 C 266.311 84.889,266.223 84.978,265.867 84.978 L 265.422 84.978 265.422 86.756 L 265.422 88.533 264.978 88.533 C 264.534 88.533,264.533 88.534,264.533 90.299 C 264.533 91.994,264.517 92.066,264.133 92.121 C 263.754 92.175,263.731 92.267,263.683 93.900 C 263.635 95.524,263.609 95.625,263.238 95.677 C 262.870 95.730,262.838 95.847,262.746 97.511 L 262.647 99.289 262.612 97.903 C 262.582 96.697,262.522 96.461,262.144 96.084 C 261.742 95.681,261.707 95.501,261.655 93.558 C 261.603 91.594,261.576 91.456,261.205 91.293 C 260.833 91.130,260.807 90.997,260.761 88.982 C 260.713 86.918,260.697 86.842,260.311 86.788 C 259.937 86.735,259.911 86.644,259.911 85.410 L 259.911 84.089 255.556 84.089 L 251.200 84.089 251.200 85.480 L 251.200 86.872 250.667 86.793 C 250.245 86.732,250.133 86.626,250.133 86.291 C 250.133 85.957,250.039 85.867,249.689 85.867 C 249.333 85.867,249.244 85.778,249.244 85.422 C 249.244 85.008,249.184 84.978,248.368 84.978 C 247.594 84.978,247.485 84.931,247.435 84.578 L 247.378 84.178 241.778 84.178 C 237.428 84.178,236.173 84.227,236.156 84.400 C 236.143 84.522,236.123 84.702,236.111 84.800 C 236.099 84.898,235.689 84.978,235.200 84.978 C 234.370 84.978,234.311 85.007,234.311 85.422 C 234.311 85.789,234.225 85.867,233.821 85.867 C 233.525 
85.867,233.243 86.006,233.111 86.217 C 232.991 86.409,232.635 86.634,232.319 86.715 C 231.937 86.814,231.713 86.998,231.650 87.265 C 231.598 87.485,231.494 87.659,231.419 87.651 C 231.266 87.635,230.787 88.039,230.794 88.178 C 230.838 89.159,230.736 89.422,230.311 89.422 C 229.897 89.422,229.867 89.482,229.867 90.306 C 229.867 91.009,229.814 91.168,229.606 91.089 C 229.462 91.034,229.362 90.936,229.383 90.872 C 229.405 90.808,229.351 90.742,229.264 90.726 C 228.961 90.670,228.805 90.216,228.803 89.378 C 228.800 88.596,228.767 88.533,228.356 88.533 C 227.941 88.533,227.911 88.474,227.911 87.644 C 227.911 86.815,227.881 86.756,227.467 86.756 C 227.111 86.756,227.022 86.667,227.022 86.311 C 227.022 85.956,226.933 85.867,226.578 85.867 C 226.222 85.867,226.133 85.778,226.133 85.422 C 226.133 85.016,226.067 84.978,225.353 84.978 C 224.667 84.978,224.558 84.923,224.460 84.530 L 224.347 84.083 220.040 84.130 L 215.733 84.178 215.677 84.578 C 215.635 84.870,215.501 84.978,215.180 84.978 C 214.937 84.978,214.539 85.178,214.295 85.422 C 213.807 85.910,213.468 86.008,213.618 85.618 C 213.670 85.482,213.557 85.261,213.367 85.128 C 213.176 84.994,212.990 84.726,212.952 84.531 C 212.887 84.193,212.716 84.176,208.931 84.130 L 204.978 84.082 204.978 88.594 L 204.978 93.106 204.543 93.023 C 204.268 92.971,204.089 92.800,204.054 92.559 C 204.016 92.292,203.853 92.161,203.511 92.122 C 203.127 92.077,203.022 91.972,203.022 91.633 C 203.022 91.293,202.929 91.200,202.590 91.200 C 202.277 91.200,202.142 91.090,202.101 90.800 C 202.053 90.459,201.921 90.392,201.200 90.346 C 200.438 90.297,200.356 90.249,200.356 89.857 C 200.356 89.455,200.290 89.422,199.479 89.422 C 198.705 89.422,198.596 89.375,198.546 89.022 C 198.494 88.655,198.383 88.618,197.200 88.570 C 195.964 88.520,195.911 88.500,195.911 88.081 C 195.911 87.676,195.848 87.644,195.022 87.644 C 194.193 87.644,194.133 87.615,194.133 87.200 C 194.133 86.858,194.041 86.755,193.733 86.753 C 193.310 86.750,192.848 86.526,192.874 86.336 C 
192.882 86.273,192.838 86.229,192.775 86.237 C 192.585 86.263,192.361 85.801,192.358 85.378 C 192.356 85.064,192.254 84.978,191.881 84.978 C 191.621 84.978,191.466 84.919,191.538 84.847 C 191.611 84.775,191.804 84.715,191.968 84.713 C 192.271 84.711,192.299 84.628,192.335 83.600 C 192.355 83.028,192.364 83.022,193.244 83.022 C 194.074 83.022,194.133 82.993,194.133 82.578 L 194.133 82.133 196.000 82.133 L 197.867 82.133 197.867 82.578 C 197.867 83.015,197.890 83.022,199.299 83.022 C 200.249 83.022,200.693 83.085,200.617 83.208 C 200.332 83.669,200.937 83.923,202.247 83.893 C 203.533 83.863,204.084 83.624,203.827 83.208 C 203.764 83.106,203.797 83.022,203.901 83.022 C 204.004 83.022,204.079 82.962,204.068 82.889 C 203.909 81.886,204.124 80.356,204.424 80.356 C 204.872 80.356,204.972 79.896,204.975 77.822 L 204.978 76.089 204.000 76.089 C 203.076 76.089,203.022 76.065,203.022 75.644 L 203.022 75.200 201.145 75.200 C 199.886 75.200,199.306 75.139,199.383 75.014 C 199.821 74.305,193.286 73.885,192.706 74.584 M379.014 74.741 C 379.154 75.124,379.911 75.371,379.911 75.035 C 379.911 74.930,379.791 74.844,379.644 74.844 C 379.498 74.844,379.378 74.738,379.378 74.607 C 379.378 74.477,379.265 74.370,379.128 74.370 C 378.949 74.370,378.917 74.477,379.014 74.741 M148.978 96.889 L 148.978 109.689 153.333 109.689 L 157.689 109.689 157.689 102.133 L 157.689 94.578 158.133 94.578 C 158.548 94.578,158.578 94.519,158.578 93.689 C 158.578 92.859,158.607 92.800,159.022 92.800 C 159.378 92.800,159.467 92.711,159.467 92.356 C 159.467 91.942,159.527 91.911,160.344 91.911 C 161.164 91.911,161.226 91.879,161.295 91.422 L 161.369 90.933 161.396 91.422 C 161.421 91.887,161.466 91.911,162.311 91.911 C 163.141 91.911,163.200 91.941,163.200 92.356 C 163.200 92.711,163.289 92.800,163.644 92.800 L 164.089 92.800 164.089 101.247 L 164.089 109.695 168.844 109.647 L 173.600 109.600 173.639 101.291 C 173.661 96.720,173.621 92.990,173.551 93.001 C 172.837 93.108,172.812 93.039,172.761 90.800 C 172.725 
89.218,172.650 88.616,172.489 88.600 C 172.367 88.588,172.187 88.568,172.089 88.556 C 171.991 88.543,171.911 88.223,171.911 87.844 C 171.911 87.144,171.550 86.628,171.208 86.840 C 171.094 86.910,171.022 86.743,171.022 86.410 C 171.022 85.953,170.954 85.867,170.590 85.867 C 170.277 85.867,170.142 85.756,170.101 85.467 C 170.053 85.125,169.921 85.059,169.200 85.012 C 168.438 84.964,168.356 84.916,168.356 84.524 L 168.356 84.089 164.000 84.089 L 159.644 84.089 159.644 84.533 C 159.644 84.948,159.585 84.978,158.756 84.978 C 157.926 84.978,157.867 85.007,157.867 85.422 C 157.867 85.775,157.777 85.867,157.434 85.867 C 157.099 85.867,156.985 85.976,156.927 86.356 C 156.887 86.624,156.841 86.224,156.827 85.467 L 156.800 84.089 152.889 84.089 L 148.978 84.089 148.978 96.889 M377.422 89.606 C 377.422 89.700,377.496 89.778,377.587 89.778 C 377.678 89.778,377.788 89.918,377.833 90.089 C 377.897 90.334,377.919 90.320,377.935 90.019 C 377.946 89.809,377.836 89.592,377.689 89.536 C 377.542 89.479,377.422 89.511,377.422 89.606 M326.146 90.653 C 325.851 90.978,325.851 90.993,326.148 90.950 C 326.319 90.924,326.479 90.770,326.503 90.607 C 326.558 90.231,326.523 90.236,326.146 90.653 M114.311 91.467 C 114.311 91.822,114.400 91.911,114.756 91.911 C 115.111 91.911,115.200 92.000,115.200 92.356 C 115.200 92.711,115.289 92.800,115.644 92.800 C 116.079 92.800,116.089 92.830,116.089 94.133 C 116.089 95.437,116.099 95.467,116.533 95.467 L 116.978 95.467 116.978 97.333 L 116.978 99.200 116.533 99.200 C 116.119 99.200,116.089 99.260,116.089 100.081 C 116.089 100.783,115.999 101.052,115.644 101.406 C 115.400 101.651,115.200 102.054,115.200 102.303 C 115.200 102.667,115.113 102.756,114.756 102.756 C 114.400 102.756,114.311 102.844,114.311 103.200 C 114.311 103.637,114.287 103.644,112.889 103.644 C 111.490 103.644,111.467 103.637,111.467 103.200 C 111.467 102.844,111.378 102.756,111.022 102.756 C 110.616 102.756,110.474 102.525,110.557 102.000 C 110.568 101.927,110.378 101.867,110.133 101.867 C 
109.772 101.867,109.685 101.775,109.668 101.378 C 109.629 100.436,109.597 100.354,109.200 100.180 C 108.807 100.008,108.800 99.951,108.800 96.847 L 108.800 93.689 109.244 93.689 C 109.659 93.689,109.689 93.630,109.689 92.800 C 109.689 91.970,109.719 91.911,110.133 91.911 C 110.489 91.911,110.578 91.822,110.578 91.467 L 110.578 91.022 112.444 91.022 L 114.311 91.022 114.311 91.467 M243.200 91.467 C 243.200 91.822,243.289 91.911,243.644 91.911 C 244.000 91.911,244.089 92.000,244.089 92.356 C 244.089 92.711,244.178 92.800,244.533 92.800 C 244.961 92.800,244.978 92.843,244.978 93.934 C 244.978 95.067,245.258 95.644,245.681 95.383 C 245.806 95.306,245.867 95.914,245.867 97.234 L 245.867 99.200 245.422 99.200 C 244.988 99.200,244.978 99.230,244.978 100.533 C 244.978 101.837,244.968 101.867,244.533 101.867 C 244.178 101.867,244.089 101.956,244.089 102.311 C 244.089 102.667,244.000 102.756,243.644 102.756 C 243.289 102.756,243.200 102.844,243.200 103.200 C 243.200 103.637,243.176 103.644,241.778 103.644 C 240.379 103.644,240.356 103.637,240.356 103.200 C 240.356 102.844,240.267 102.756,239.911 102.756 C 239.556 102.756,239.467 102.667,239.467 102.311 C 239.467 101.958,239.377 101.867,239.032 101.867 C 238.639 101.867,238.592 101.785,238.543 101.022 C 238.497 100.302,238.430 100.169,238.089 100.121 C 237.691 100.064,237.689 100.047,237.689 96.877 L 237.689 93.689 238.133 93.689 C 238.487 93.689,238.578 93.599,238.578 93.253 C 238.578 93.013,238.778 92.616,239.022 92.372 C 239.267 92.127,239.467 91.724,239.467 91.475 L 239.467 91.022 241.333 91.022 L 243.200 91.022 243.200 91.467 M380.978 91.383 C 380.978 91.478,381.052 91.556,381.143 91.556 C 381.233 91.556,381.344 91.696,381.389 91.867 C 381.453 92.112,381.474 92.097,381.490 91.797 C 381.502 91.587,381.391 91.370,381.244 91.313 C 381.098 91.257,380.978 91.289,380.978 91.383 M325.257 93.320 C 324.962 93.645,324.962 93.660,325.259 93.616 C 325.430 93.591,325.590 93.437,325.614 93.274 C 325.669 92.898,325.635 92.902,325.257 
93.320 M351.762 94.000 C 351.651 94.184,352.023 94.484,352.400 94.514 C 352.782 94.544,352.786 94.500,352.432 94.146 C 352.128 93.843,351.889 93.790,351.762 94.000 M364.012 94.969 C 363.939 95.136,363.931 95.356,363.995 95.458 C 364.142 95.696,364.800 95.703,364.800 95.467 C 364.800 95.369,364.686 95.289,364.546 95.289 C 364.407 95.289,364.259 95.149,364.218 94.978 C 364.149 94.688,364.135 94.687,364.012 94.969 M378.425 97.689 C 378.481 97.836,378.698 97.946,378.908 97.935 C 379.208 97.919,379.223 97.897,378.978 97.833 C 378.807 97.788,378.667 97.678,378.667 97.587 C 378.667 97.496,378.589 97.422,378.494 97.422 C 378.400 97.422,378.368 97.542,378.425 97.689 M365.363 100.778 C 365.232 100.861,365.112 101.000,365.096 101.087 C 365.080 101.173,364.999 101.222,364.917 101.194 C 364.834 101.167,364.799 101.242,364.839 101.361 C 364.940 101.664,365.356 101.776,365.294 101.483 C 365.267 101.352,365.339 101.263,365.455 101.285 C 365.571 101.307,365.654 101.167,365.640 100.974 C 365.625 100.781,365.610 100.623,365.607 100.625 C 365.603 100.626,365.493 100.695,365.363 100.778 M385.736 101.645 C 385.539 101.689,385.440 101.862,385.462 102.126 C 385.503 102.611,385.888 102.539,386.054 102.015 C 386.186 101.599,386.147 101.554,385.736 101.645 M370.311 104.717 C 370.311 104.811,370.385 104.889,370.476 104.889 C 370.566 104.889,370.677 105.029,370.722 105.200 C 370.786 105.445,370.808 105.431,370.824 105.130 C 370.835 104.921,370.724 104.703,370.578 104.647 C 370.431 104.591,370.311 104.622,370.311 104.717 M382.960 107.547 C 383.036 107.776,383.280 107.817,383.302 107.604 C 383.310 107.534,383.220 107.444,383.102 107.404 C 382.985 107.365,382.921 107.429,382.960 107.547 M358.696 108.919 C 358.518 109.097,358.554 109.689,358.743 109.689 C 358.957 109.689,359.467 109.179,359.467 108.965 C 359.467 108.776,358.875 108.740,358.696 108.919 M32.533 119.828 C 32.533 119.923,32.607 120.000,32.698 120.000 C 32.789 120.000,32.899 120.140,32.944 120.311 C 33.008 120.557,33.030 120.542,33.046 
120.241 C 33.057 120.032,32.947 119.814,32.800 119.758 C 32.653 119.702,32.533 119.733,32.533 119.828 M54.805 120.006 C 54.828 120.318,54.853 120.322,55.148 120.055 C 55.537 119.703,55.539 119.726,55.123 119.694 C 54.887 119.676,54.787 119.774,54.805 120.006 M33.422 120.717 C 33.422 120.811,33.496 120.889,33.587 120.889 C 33.678 120.889,33.788 121.029,33.833 121.200 C 33.897 121.445,33.919 121.431,33.935 121.130 C 33.946 120.921,33.836 120.703,33.689 120.647 C 33.542 120.591,33.422 120.622,33.422 120.717 M66.252 128.474 C 66.074 128.652,66.109 129.244,66.298 129.244 C 66.512 129.244,67.022 128.734,67.022 128.521 C 67.022 128.332,66.430 128.296,66.252 128.474 M62.972 132.117 C 63.000 132.248,62.915 132.333,62.783 132.306 C 62.491 132.244,62.603 132.660,62.906 132.761 C 63.025 132.801,63.100 132.726,63.072 132.594 C 63.045 132.463,63.130 132.378,63.261 132.406 C 63.392 132.433,63.468 132.358,63.428 132.239 C 63.327 131.936,62.911 131.824,62.972 132.117 M57.627 135.991 C 57.703 136.220,57.947 136.261,57.969 136.049 C 57.976 135.978,57.886 135.888,57.769 135.849 C 57.652 135.810,57.588 135.874,57.627 135.991 M55.585 136.474 C 55.407 136.652,55.443 137.244,55.632 137.244 C 55.846 137.244,56.356 136.734,56.356 136.521 C 56.356 136.332,55.763 136.296,55.585 136.474 " stroke="none" fill="#3c4394" fill-rule="evenodd"></path><path id="path3" d="M67.269 62.076 C 67.218 62.160,67.110 62.188,67.030 62.138 C 66.813 62.004,66.527 62.442,66.587 62.816 C 66.663 63.288,66.741 63.317,67.124 63.018 C 67.318 62.866,67.361 62.771,67.222 62.797 C 67.088 62.823,66.937 62.732,66.887 62.594 C 66.823 62.419,66.884 62.379,67.087 62.461 C 67.247 62.525,67.498 62.626,67.644 62.686 C 67.791 62.745,67.911 62.700,67.911 62.586 C 67.911 62.472,67.847 62.418,67.769 62.467 C 67.691 62.515,67.523 62.490,67.395 62.410 C 67.231 62.309,67.221 62.230,67.359 62.145 C 67.467 62.078,67.556 62.091,67.556 62.174 C 67.556 62.257,67.717 62.337,67.915 62.351 C 68.113 62.366,68.228 62.303,68.171 62.211 C 68.009 
61.950,68.263 62.016,68.560 62.313 C 68.708 62.460,68.915 62.528,69.020 62.463 C 69.125 62.398,69.160 62.427,69.098 62.526 C 69.037 62.626,68.784 62.669,68.538 62.622 C 68.269 62.571,68.089 62.618,68.089 62.740 C 68.089 63.162,68.645 63.132,69.436 62.668 C 70.156 62.246,70.246 62.228,70.184 62.516 C 70.144 62.697,70.017 62.824,69.901 62.800 C 69.784 62.776,69.688 62.896,69.686 63.067 C 69.684 63.313,69.651 63.327,69.528 63.133 C 69.436 62.987,69.280 62.945,69.143 63.030 C 68.969 63.137,68.997 63.206,69.256 63.307 C 69.755 63.502,70.082 63.494,69.956 63.289 C 69.800 63.038,70.285 63.067,70.542 63.324 C 70.702 63.484,70.756 63.485,70.756 63.330 C 70.756 63.213,70.891 63.166,71.067 63.222 C 71.329 63.307,71.336 63.293,71.111 63.131 C 70.863 62.952,70.986 62.850,71.363 62.922 C 71.453 62.939,71.434 62.842,71.321 62.705 C 71.037 62.363,71.259 62.194,71.689 62.424 C 71.893 62.533,71.997 62.540,71.936 62.440 C 71.877 62.345,72.030 62.267,72.276 62.267 C 72.549 62.267,72.800 62.413,72.924 62.644 C 73.107 62.989,73.098 63.000,72.819 62.770 C 72.555 62.552,72.521 62.552,72.568 62.770 C 72.598 62.909,72.522 63.002,72.400 62.978 C 72.278 62.953,72.178 63.013,72.178 63.111 C 72.178 63.209,72.258 63.289,72.356 63.289 C 72.601 63.289,72.602 109.803,72.356 109.955 C 72.239 110.028,72.164 102.169,72.133 86.721 L 72.088 63.378 72.044 86.724 C 72.005 107.341,72.031 110.101,72.268 110.338 C 72.416 110.486,72.492 110.679,72.437 110.768 C 72.382 110.857,72.453 110.885,72.595 110.830 C 72.737 110.776,72.907 110.819,72.974 110.927 C 73.103 111.135,72.358 111.066,71.984 110.834 C 71.848 110.750,71.822 110.832,71.904 111.089 C 71.978 111.321,71.947 111.467,71.824 111.467 C 71.714 111.467,71.674 111.548,71.735 111.647 C 71.797 111.747,71.934 111.774,72.040 111.709 C 72.155 111.637,72.182 111.672,72.105 111.796 C 72.034 111.910,72.141 112.212,72.342 112.467 C 72.542 112.722,72.654 113.016,72.589 113.120 C 72.519 113.234,72.425 113.176,72.355 112.975 C 72.243 112.657,71.791 113.451,71.747 
114.044 C 71.739 114.142,71.653 114.288,71.556 114.369 C 71.450 114.456,71.486 114.485,71.644 114.440 C 71.791 114.398,71.889 114.238,71.862 114.086 C 71.822 113.856,72.252 113.773,72.489 113.964 C 72.513 113.984,72.613 114.010,72.711 114.022 C 72.809 114.034,72.889 114.104,72.889 114.178 C 72.889 114.251,72.889 114.383,72.889 114.470 C 72.889 114.557,72.765 114.676,72.613 114.734 C 72.455 114.795,72.383 114.765,72.446 114.665 C 72.505 114.568,72.467 114.489,72.360 114.489 C 72.254 114.489,72.124 114.601,72.071 114.738 C 72.018 114.875,71.816 115.029,71.622 115.080 C 71.333 115.155,71.307 115.126,71.478 114.919 C 71.645 114.719,71.629 114.667,71.402 114.667 C 71.207 114.667,71.127 114.805,71.151 115.099 C 71.182 115.475,71.258 115.521,71.727 115.451 C 72.167 115.386,72.201 115.402,71.911 115.540 C 71.604 115.685,71.598 115.710,71.867 115.721 C 72.038 115.728,72.178 115.818,72.178 115.922 C 72.178 116.025,72.103 116.064,72.012 116.008 C 71.921 115.951,71.807 115.970,71.758 116.049 C 71.709 116.128,71.824 116.204,72.012 116.219 C 72.290 116.240,72.356 116.128,72.356 115.633 C 72.356 115.297,72.436 115.012,72.533 115.000 C 72.631 114.988,72.811 114.968,72.932 114.956 C 73.253 114.923,73.338 113.129,73.037 112.757 C 72.867 112.548,72.863 112.503,73.022 112.599 C 73.144 112.672,73.244 112.660,73.244 112.571 C 73.244 112.482,73.064 112.322,72.844 112.215 L 72.444 112.020 72.842 111.901 L 73.240 111.782 73.241 86.848 L 73.243 61.914 71.244 61.917 C 70.066 61.919,69.139 62.000,68.988 62.115 C 68.793 62.264,68.764 62.258,68.866 62.087 C 68.968 61.918,68.799 61.871,68.182 61.894 C 67.731 61.911,67.321 61.993,67.269 62.076 M71.111 62.241 C 71.111 62.351,70.982 62.388,70.819 62.326 C 70.579 62.234,70.566 62.260,70.746 62.477 C 71.003 62.787,70.847 63.135,70.510 63.006 C 70.146 62.866,70.374 62.044,70.776 62.044 C 70.960 62.044,71.111 62.133,71.111 62.241 M14.879 62.869 C 14.640 63.108,14.736 110.961,14.975 111.258 C 15.152 111.476,15.152 111.506,14.978 111.401 C 14.827 
111.311,14.756 111.393,14.756 111.655 C 14.756 111.972,14.836 112.026,15.202 111.956 C 15.569 111.886,15.611 111.915,15.440 112.121 C 15.325 112.259,15.125 112.331,14.994 112.280 C 14.859 112.229,14.756 112.307,14.756 112.461 C 14.756 112.621,14.840 112.680,14.961 112.605 C 15.099 112.520,15.117 112.557,15.018 112.718 C 14.806 113.061,14.720 114.844,14.915 114.844 C 15.004 114.844,15.038 114.995,14.990 115.180 C 14.879 115.606,15.192 115.807,15.599 115.571 C 15.890 115.401,15.888 115.388,15.570 115.383 C 15.383 115.380,15.283 115.324,15.349 115.259 C 15.414 115.193,15.340 114.945,15.184 114.706 C 14.979 114.393,14.954 114.219,15.095 114.079 C 15.235 113.938,15.293 113.980,15.303 114.231 C 15.311 114.430,15.356 114.481,15.409 114.350 C 15.459 114.224,15.401 114.002,15.280 113.856 C 15.159 113.711,15.116 113.536,15.184 113.468 C 15.252 113.399,15.374 113.450,15.454 113.580 C 15.544 113.725,15.642 113.748,15.710 113.638 C 15.771 113.540,15.711 113.417,15.577 113.366 C 15.423 113.307,15.365 113.105,15.419 112.819 C 15.516 112.315,15.938 111.879,16.137 112.078 C 16.208 112.149,16.112 112.319,15.924 112.457 C 15.736 112.595,15.647 112.768,15.726 112.843 C 15.806 112.917,15.907 112.877,15.951 112.754 C 15.994 112.632,16.087 112.566,16.155 112.608 C 16.427 112.776,16.329 111.844,16.051 111.619 C 15.797 111.412,15.773 111.416,15.890 111.644 C 15.965 111.791,15.909 111.767,15.764 111.591 C 15.620 111.414,15.468 111.334,15.425 111.413 C 15.382 111.491,15.377 111.442,15.413 111.303 C 15.449 111.164,15.387 110.994,15.274 110.924 C 15.125 110.832,15.122 110.708,15.263 110.466 C 15.369 110.283,15.458 110.196,15.461 110.273 C 15.464 110.350,15.647 110.317,15.867 110.199 C 16.229 110.006,16.244 110.014,16.023 110.282 C 15.888 110.445,15.831 110.578,15.896 110.578 C 15.960 110.578,15.927 110.738,15.822 110.933 C 15.669 111.220,15.687 111.289,15.915 111.289 C 16.071 111.289,16.149 111.209,16.089 111.111 C 16.028 111.013,16.026 110.857,16.084 110.764 C 16.254 110.489,16.188 
64.295,16.017 64.124 C 15.929 64.036,15.804 64.064,15.726 64.190 C 15.638 64.332,15.662 64.365,15.795 64.283 C 15.951 64.187,15.990 69.738,15.956 87.056 C 15.926 102.174,15.851 109.956,15.733 109.956 C 15.616 109.956,15.525 102.162,15.465 87.111 L 15.374 64.267 15.334 87.156 C 15.312 99.744,15.252 110.044,15.200 110.044 C 15.148 110.044,15.108 99.772,15.110 87.217 L 15.114 64.390 15.611 63.858 C 16.020 63.420,16.169 63.359,16.445 63.513 C 16.688 63.649,16.800 63.644,16.849 63.496 C 16.898 63.351,17.016 63.344,17.259 63.474 C 17.452 63.578,17.600 63.586,17.600 63.494 C 17.600 63.266,16.911 63.109,16.788 63.308 C 16.732 63.399,16.612 63.426,16.521 63.370 C 16.154 63.143,16.397 63.002,17.136 63.012 C 17.797 63.021,17.908 63.073,17.855 63.350 C 17.797 63.648,17.827 63.655,18.175 63.424 C 18.614 63.133,18.793 62.756,18.493 62.756 C 18.382 62.756,18.336 62.830,18.392 62.921 C 18.449 63.012,18.413 63.137,18.314 63.198 C 18.215 63.260,18.133 63.196,18.133 63.056 C 18.133 62.860,17.830 62.800,16.797 62.791 C 15.816 62.782,15.492 62.835,15.578 62.989 C 15.659 63.133,15.620 63.129,15.457 62.978 C 15.189 62.729,15.046 62.702,14.879 62.869 M19.028 63.099 C 18.775 63.573,18.798 63.726,19.088 63.486 C 19.338 63.278,19.482 63.378,19.825 64.000 C 19.936 64.201,19.887 64.192,19.626 63.965 C 19.389 63.757,19.199 63.714,19.018 63.828 C 18.825 63.951,18.817 63.995,18.988 63.997 C 19.116 63.999,19.184 64.060,19.139 64.133 C 19.032 64.306,20.018 64.308,20.191 64.135 C 20.264 64.062,20.450 64.101,20.606 64.221 C 20.779 64.354,20.758 64.285,20.550 64.041 C 20.340 63.795,20.294 63.644,20.429 63.644 C 20.565 63.644,20.604 63.512,20.533 63.289 C 20.471 63.092,20.501 62.933,20.602 62.933 C 20.958 62.933,21.676 63.344,21.583 63.494 C 21.531 63.578,21.368 63.527,21.220 63.379 C 21.073 63.232,20.865 63.111,20.759 63.111 C 20.653 63.111,20.759 63.297,20.994 63.524 C 21.230 63.752,21.564 63.916,21.738 63.889 C 21.912 63.863,22.007 63.917,21.950 64.009 C 21.893 64.102,21.939 64.178,22.053 64.178 C 
22.167 64.178,22.215 64.297,22.159 64.442 C 22.100 64.595,22.148 64.689,22.273 64.665 C 22.392 64.641,22.472 64.714,22.451 64.826 C 22.428 64.947,22.555 64.993,22.763 64.939 C 22.955 64.888,23.248 64.956,23.412 65.090 C 23.669 65.299,23.697 65.295,23.611 65.066 C 23.556 64.919,23.621 64.695,23.756 64.570 C 23.957 64.382,24.002 64.398,24.012 64.660 C 24.023 64.938,24.043 64.934,24.168 64.622 C 24.247 64.427,24.242 64.179,24.157 64.073 C 24.073 63.966,24.049 63.832,24.106 63.775 C 24.163 63.719,23.765 63.688,23.221 63.706 C 22.617 63.727,22.297 63.677,22.397 63.576 C 22.706 63.268,22.246 62.693,21.909 62.966 C 21.750 63.095,21.675 63.109,21.741 62.999 C 21.906 62.724,19.734 62.722,19.388 62.997 C 19.191 63.153,19.162 63.149,19.265 62.978 C 19.339 62.856,19.357 62.756,19.306 62.756 C 19.254 62.756,19.130 62.910,19.028 63.099 M59.583 63.171 L 59.200 63.578 59.200 87.167 C 59.200 105.918,59.246 110.756,59.422 110.758 C 59.560 110.760,59.577 110.811,59.467 110.892 C 59.221 111.072,59.125 111.712,59.366 111.563 C 59.470 111.498,59.556 111.526,59.556 111.623 C 59.556 111.721,59.476 111.851,59.378 111.911 C 59.280 111.972,59.203 112.116,59.206 112.233 C 59.209 112.349,59.329 112.289,59.473 112.098 C 59.617 111.908,59.686 111.598,59.626 111.409 C 59.552 111.177,59.583 111.107,59.723 111.193 C 59.836 111.263,59.915 111.453,59.899 111.616 C 59.850 112.114,59.904 112.356,60.064 112.356 C 60.148 112.356,60.179 112.156,60.132 111.911 C 60.034 111.396,60.130 111.356,60.521 111.746 C 60.775 112.000,60.800 112.000,60.800 111.746 C 60.800 111.592,60.733 111.467,60.652 111.467 C 60.445 111.467,60.456 110.891,60.668 110.622 C 60.798 110.458,60.734 110.398,60.422 110.394 L 60.000 110.388 60.401 110.053 L 60.803 109.719 60.757 86.788 C 60.711 63.904,60.712 63.858,61.067 64.054 C 61.478 64.281,62.744 64.340,62.869 64.137 C 62.916 64.062,63.034 64.000,63.132 64.000 C 63.230 64.000,63.268 64.068,63.216 64.152 C 63.164 64.236,63.321 64.266,63.564 64.220 C 63.807 64.173,63.995 64.045,63.982 
63.934 C 63.927 63.479,64.012 63.275,64.205 63.395 C 64.328 63.470,64.362 63.444,64.291 63.329 C 64.226 63.223,64.262 63.080,64.373 63.012 C 64.612 62.864,64.912 63.281,64.758 63.547 C 64.698 63.650,64.822 63.583,65.032 63.398 C 65.603 62.898,65.413 62.815,63.647 62.794 C 62.440 62.780,62.067 62.828,62.163 62.987 C 62.260 63.148,62.228 63.151,62.034 62.999 C 61.540 62.611,60.008 62.718,59.583 63.171 M65.661 63.283 C 65.710 63.537,65.638 63.856,65.495 64.015 C 65.277 64.259,65.279 64.296,65.510 64.296 C 65.657 64.296,65.738 64.257,65.690 64.208 C 65.641 64.160,65.844 63.972,66.140 63.792 C 66.591 63.518,66.648 63.409,66.494 63.121 C 66.209 62.589,65.551 62.717,65.661 63.283 M66.369 63.354 C 66.507 63.276,66.503 63.318,66.356 63.476 C 66.072 63.782,65.663 63.439,65.887 63.082 C 66.003 62.897,66.050 62.913,66.098 63.158 C 66.134 63.337,66.250 63.421,66.369 63.354 M67.435 63.156 C 67.358 63.278,67.293 63.431,67.292 63.496 C 67.287 63.718,67.022 63.882,67.022 63.664 C 67.022 63.544,66.932 63.501,66.822 63.570 C 66.685 63.654,66.713 63.745,66.912 63.856 C 67.292 64.069,67.494 63.930,67.599 63.380 C 67.693 62.885,67.647 62.822,67.435 63.156 M63.822 63.511 C 63.822 63.584,63.682 63.646,63.511 63.647 C 63.245 63.649,63.239 63.675,63.467 63.822 C 63.653 63.943,63.665 63.995,63.506 63.997 C 63.380 63.999,63.226 63.866,63.163 63.702 C 63.060 63.432,63.038 63.435,62.934 63.731 C 62.871 63.911,62.684 64.078,62.520 64.103 C 62.355 64.127,62.266 64.072,62.323 63.981 C 62.379 63.891,62.490 63.857,62.570 63.906 C 62.650 63.956,62.755 63.845,62.803 63.660 C 62.889 63.333,63.822 63.196,63.822 63.511 M59.956 63.753 C 60.029 63.767,60.169 63.788,60.267 63.800 C 60.364 63.812,60.444 63.942,60.444 64.089 C 60.444 64.236,60.364 64.356,60.267 64.356 C 60.149 64.356,60.089 72.026,60.089 87.012 C 60.089 102.088,60.029 109.705,59.909 109.779 C 59.788 109.854,59.730 102.349,59.731 86.901 C 59.732 74.256,59.666 63.927,59.585 63.946 C 59.504 63.965,59.437 63.868,59.437 63.731 C 59.437 
63.594,59.524 63.538,59.630 63.605 C 59.736 63.673,59.882 63.739,59.956 63.753 M18.430 63.763 C 18.198 63.995,18.316 64.356,18.622 64.353 C 18.868 64.351,18.882 64.318,18.688 64.195 C 18.554 64.110,18.498 63.951,18.565 63.842 C 18.700 63.625,18.616 63.576,18.430 63.763 M24.290 63.770 C 24.100 63.959,24.364 64.163,24.672 64.064 C 24.859 64.004,24.902 64.025,24.783 64.117 C 24.524 64.316,24.303 65.244,24.515 65.244 C 24.607 65.244,24.722 65.144,24.771 65.022 C 24.843 64.844,24.864 64.844,24.875 65.022 C 24.883 65.144,24.966 65.244,25.061 65.244 C 25.156 65.244,25.186 65.122,25.129 64.972 C 25.039 64.738,25.091 64.730,25.495 64.914 C 25.868 65.084,26.010 65.081,26.183 64.900 C 26.357 64.716,26.325 64.694,26.015 64.787 C 25.803 64.851,25.584 64.829,25.528 64.738 C 25.472 64.648,25.565 64.584,25.735 64.598 C 25.905 64.611,26.024 64.522,26.000 64.400 C 25.976 64.278,26.036 64.178,26.133 64.178 C 26.294 64.178,26.373 64.458,26.364 64.993 C 26.362 65.099,26.466 65.185,26.595 65.185 C 26.723 65.185,26.788 65.119,26.737 65.037 C 26.687 64.956,26.775 64.889,26.933 64.889 C 27.092 64.889,27.180 64.956,27.129 65.037 C 27.079 65.119,27.147 65.185,27.280 65.185 C 27.413 65.185,27.483 65.085,27.437 64.963 C 27.373 64.798,27.544 64.767,28.103 64.842 C 28.659 64.917,28.820 64.888,28.723 64.731 C 28.651 64.614,28.439 64.568,28.252 64.627 C 28.064 64.686,27.911 64.657,27.911 64.561 C 27.911 64.465,28.115 64.409,28.365 64.437 C 28.737 64.478,28.809 64.419,28.765 64.110 C 28.705 63.691,27.762 63.551,27.134 63.867 C 26.902 63.985,26.838 63.977,26.922 63.840 C 26.998 63.717,26.874 63.644,26.588 63.644 C 26.338 63.644,26.132 63.744,26.131 63.867 C 26.129 64.026,26.091 64.032,25.997 63.887 C 25.864 63.681,24.474 63.585,24.290 63.770 M22.858 64.124 C 22.833 64.290,22.713 64.459,22.589 64.500 C 22.415 64.558,22.410 64.491,22.567 64.198 C 22.810 63.744,22.917 63.717,22.858 64.124 M59.511 98.533 C 59.485 104.816,59.464 99.676,59.464 87.111 C 59.464 74.547,59.485 69.407,59.511 75.689 C 59.537 
81.971,59.537 92.251,59.511 98.533 M28.259 87.541 C 28.252 106.694,28.215 109.878,28.006 109.422 C 27.786 108.940,27.761 108.928,27.748 109.294 C 27.740 109.518,27.613 109.746,27.467 109.802 C 27.115 109.937,27.126 110.333,27.485 110.471 C 27.660 110.538,27.728 110.692,27.660 110.868 C 27.599 111.027,27.643 111.269,27.758 111.408 C 27.919 111.601,27.917 111.709,27.751 111.874 C 27.581 112.042,27.559 112.025,27.646 111.796 C 27.707 111.635,27.678 111.453,27.580 111.393 C 27.483 111.333,27.349 111.455,27.283 111.664 C 27.207 111.904,27.251 112.101,27.403 112.197 C 27.821 112.462,28.088 112.370,28.101 111.956 C 28.113 111.576,28.121 111.574,28.257 111.911 C 28.336 112.107,28.365 112.327,28.323 112.400 C 28.203 112.608,28.587 112.557,28.716 112.348 C 28.779 112.246,28.740 112.053,28.629 111.919 C 28.490 111.752,28.485 111.640,28.614 111.561 C 28.716 111.498,28.800 111.246,28.800 111.001 C 28.800 110.750,28.723 110.605,28.622 110.667 C 28.524 110.727,28.444 110.692,28.444 110.588 C 28.444 110.485,28.504 110.400,28.578 110.400 C 28.651 110.400,28.711 100.213,28.711 87.763 C 28.711 69.772,28.666 65.126,28.489 65.126 C 28.312 65.126,28.265 69.732,28.259 87.541 M374.504 74.513 C 374.555 74.595,374.458 74.610,374.289 74.545 C 374.052 74.454,374.009 74.496,374.100 74.734 C 374.170 74.916,374.147 74.997,374.042 74.932 C 373.946 74.872,373.867 74.928,373.867 75.056 C 373.867 75.232,373.489 75.282,372.311 75.262 C 370.838 75.236,370.267 75.403,370.317 75.847 C 370.329 75.950,369.326 76.367,368.927 76.426 C 368.857 76.436,368.800 76.544,368.800 76.665 C 368.800 76.786,368.560 76.928,368.267 76.980 C 367.840 77.056,367.734 77.164,367.737 77.519 C 367.740 77.877,367.654 77.963,367.293 77.959 C 366.919 77.956,366.826 78.059,366.735 78.578 C 366.674 78.920,366.614 79.130,366.601 79.045 C 366.588 78.960,366.459 78.989,366.313 79.109 C 366.167 79.230,366.092 79.400,366.146 79.487 C 366.199 79.574,366.158 79.644,366.055 79.644 C 365.951 79.644,365.867 79.929,365.867 80.277 C 365.867 
80.734,365.928 80.874,366.089 80.780 C 366.211 80.708,366.159 80.824,365.972 81.036 C 365.786 81.248,365.535 81.422,365.414 81.422 C 365.293 81.422,365.145 81.549,365.086 81.703 C 364.898 82.194,364.959 82.333,365.289 82.164 C 365.460 82.076,365.711 82.043,365.847 82.092 C 366.013 82.151,366.047 82.104,365.953 81.951 C 365.849 81.783,365.911 81.748,366.185 81.819 C 366.469 81.894,366.571 81.827,366.612 81.536 C 366.646 81.299,366.776 81.173,366.956 81.203 C 367.412 81.277,367.512 80.394,367.064 80.252 C 366.870 80.190,366.658 80.225,366.594 80.329 C 366.522 80.446,366.389 80.378,366.247 80.152 C 366.060 79.851,366.060 79.811,366.250 79.928 C 366.617 80.155,367.346 80.094,367.435 79.829 C 367.489 79.666,367.421 79.624,367.224 79.699 C 366.885 79.829,366.400 79.679,366.400 79.444 C 366.400 79.355,366.603 79.333,366.850 79.395 C 367.183 79.479,367.387 79.408,367.635 79.122 C 367.959 78.748,367.973 78.747,368.079 79.081 C 368.193 79.440,368.658 79.503,368.995 79.205 C 369.240 78.989,369.095 78.387,368.725 78.076 C 368.571 77.946,368.544 77.891,368.667 77.953 C 368.807 78.024,368.889 77.938,368.889 77.718 C 368.889 77.528,369.014 77.323,369.167 77.265 C 369.320 77.206,369.396 77.078,369.335 76.981 C 369.271 76.877,369.346 76.850,369.517 76.916 C 369.745 77.003,369.757 77.058,369.575 77.171 C 369.395 77.282,369.412 77.341,369.649 77.432 C 370.056 77.588,370.035 77.778,369.566 78.215 C 369.347 78.419,369.275 78.578,369.401 78.578 C 369.807 78.578,370.175 78.047,370.088 77.589 C 369.980 77.024,370.234 76.858,370.486 77.328 C 370.592 77.527,370.761 77.689,370.861 77.689 C 370.961 77.689,370.997 77.614,370.941 77.523 C 370.885 77.432,370.914 77.311,371.007 77.254 C 371.100 77.197,371.226 77.232,371.289 77.333 C 371.351 77.434,371.481 77.468,371.576 77.409 C 371.830 77.253,371.374 76.763,371.080 76.876 C 370.782 76.991,370.901 76.096,371.209 75.906 C 371.318 75.838,371.381 75.719,371.349 75.642 C 371.258 75.430,372.075 75.352,372.412 75.541 C 372.621 75.658,372.502 
75.687,371.998 75.642 C 371.425 75.590,371.340 75.619,371.556 75.790 C 371.924 76.082,371.968 76.444,371.634 76.444 C 371.470 76.444,371.408 76.361,371.484 76.239 C 371.563 76.111,371.532 76.082,371.402 76.163 C 371.288 76.233,371.234 76.356,371.283 76.435 C 371.395 76.616,372.082 76.804,372.062 76.648 C 371.996 76.141,372.500 75.747,372.733 76.124 C 372.807 76.244,372.774 76.272,372.650 76.195 C 372.537 76.125,372.444 76.153,372.444 76.256 C 372.444 76.360,372.544 76.446,372.667 76.447 C 372.815 76.449,372.800 76.507,372.622 76.622 C 372.459 76.728,372.438 76.796,372.569 76.797 C 372.686 76.799,372.864 76.660,372.964 76.489 C 373.136 76.193,373.145 76.193,373.150 76.489 C 373.153 76.660,373.240 76.800,373.344 76.800 C 373.447 76.800,373.491 76.733,373.440 76.651 C 373.298 76.421,373.866 75.712,374.084 75.847 C 374.197 75.916,374.226 75.884,374.155 75.769 C 374.029 75.565,373.511 75.671,373.511 75.901 C 373.511 75.972,373.391 76.030,373.244 76.030 C 373.098 76.030,372.978 75.963,372.978 75.881 C 372.978 75.800,373.102 75.733,373.255 75.733 C 373.407 75.733,373.490 75.665,373.438 75.581 C 373.386 75.497,373.622 75.450,373.962 75.477 C 374.304 75.504,374.536 75.453,374.480 75.363 C 374.424 75.273,374.434 75.111,374.501 75.003 C 374.684 74.706,374.947 75.218,374.820 75.626 C 374.729 75.919,374.750 75.930,374.984 75.715 C 375.180 75.534,375.202 75.404,375.063 75.234 C 374.928 75.069,374.929 75.018,375.068 75.057 C 375.176 75.087,375.383 74.951,375.528 74.756 C 375.674 74.560,375.857 74.465,375.936 74.543 C 376.014 74.622,375.961 74.765,375.817 74.862 C 375.588 75.015,375.589 75.033,375.822 75.009 C 376.369 74.950,376.711 75.028,376.711 75.210 C 376.711 75.314,376.637 75.353,376.546 75.296 C 376.455 75.240,376.333 75.270,376.276 75.363 C 376.050 75.729,376.534 75.717,376.878 75.348 C 377.291 74.905,377.379 74.512,376.994 74.831 C 376.819 74.976,376.707 74.982,376.626 74.850 C 376.561 74.746,376.434 74.707,376.343 74.763 C 376.252 74.819,376.178 74.786,376.178 74.689 C 
376.178 74.423,377.263 74.508,377.368 74.782 C 377.418 74.913,377.532 74.975,377.622 74.920 C 377.711 74.865,377.730 74.733,377.664 74.627 C 377.591 74.509,377.628 74.485,377.759 74.566 C 377.909 74.659,377.918 74.765,377.788 74.921 C 377.686 75.044,377.662 75.209,377.735 75.289 C 377.807 75.369,377.787 75.395,377.689 75.346 C 377.591 75.297,377.371 75.404,377.200 75.584 C 376.902 75.897,376.908 75.911,377.339 75.911 C 377.596 75.911,377.838 75.783,377.903 75.613 C 378.006 75.346,378.032 75.351,378.152 75.657 C 378.275 75.971,378.287 75.972,378.299 75.678 C 378.305 75.502,378.386 75.403,378.478 75.460 C 378.570 75.517,378.615 75.720,378.578 75.911 C 378.539 76.112,378.600 76.242,378.722 76.219 C 378.838 76.196,378.914 76.275,378.891 76.394 C 378.865 76.527,378.961 76.566,379.142 76.497 C 379.310 76.432,379.391 76.457,379.330 76.555 C 379.272 76.649,379.479 76.739,379.790 76.755 C 380.308 76.782,380.321 76.770,379.944 76.616 C 379.697 76.515,379.584 76.365,379.660 76.241 C 379.732 76.125,379.711 76.081,379.613 76.142 C 379.516 76.202,379.437 76.151,379.437 76.029 C 379.437 75.779,380.054 75.745,380.412 75.975 C 380.541 76.057,380.606 76.230,380.557 76.359 C 380.507 76.488,380.575 76.634,380.708 76.685 C 380.840 76.736,380.999 76.645,381.061 76.483 C 381.123 76.321,381.118 76.245,381.049 76.314 C 380.904 76.459,380.458 75.928,380.450 75.600 C 380.447 75.478,380.584 75.379,380.756 75.381 C 380.954 75.382,380.993 75.431,380.864 75.514 C 380.576 75.700,381.032 76.097,381.326 75.916 C 381.487 75.816,381.493 75.743,381.349 75.654 C 381.240 75.587,381.196 75.457,381.252 75.365 C 381.407 75.115,380.196 75.167,379.921 75.422 C 379.729 75.600,379.706 75.600,379.807 75.422 C 379.890 75.275,379.806 75.200,379.562 75.200 C 379.331 75.200,379.128 75.034,379.024 74.761 C 378.859 74.326,378.843 74.323,376.635 74.343 C 375.412 74.355,374.453 74.431,374.504 74.513 M297.427 92.199 C 297.413 109.148,297.413 109.164,297.770 108.941 C 298.069 108.755,298.178 108.769,298.435 109.026 C 
298.712 109.303,298.714 109.333,298.453 109.333 C 298.293 109.333,298.130 109.234,298.089 109.113 C 298.036 108.952,297.935 108.973,297.719 109.189 C 297.299 109.609,297.344 109.689,298.000 109.686 C 298.912 109.682,299.052 109.492,298.562 108.923 C 298.326 108.649,298.132 108.489,298.131 108.568 C 298.129 108.647,298.050 108.591,297.956 108.444 C 297.740 108.110,297.713 76.800,297.928 76.800 C 298.011 76.800,298.271 76.634,298.505 76.430 L 298.932 76.060 298.399 76.177 L 297.867 76.295 298.291 75.951 C 298.754 75.575,298.614 75.362,297.849 75.278 L 297.441 75.233 297.427 92.199 M300.000 75.383 L 298.933 75.514 300.045 75.490 C 300.683 75.477,301.156 75.542,301.156 75.644 C 301.156 75.751,300.708 75.797,300.045 75.761 C 299.433 75.727,298.973 75.739,299.022 75.788 C 299.345 76.109,301.420 75.981,301.370 75.643 C 301.328 75.350,301.351 75.335,301.503 75.556 C 301.641 75.756,301.598 75.896,301.332 76.117 C 300.811 76.551,300.769 104.711,301.289 104.711 C 301.583 104.711,301.595 103.959,301.511 90.689 C 301.440 79.516,301.467 76.685,301.644 76.753 C 301.820 76.820,301.867 79.796,301.867 90.810 C 301.867 100.010,301.927 104.722,302.042 104.607 C 302.154 104.495,302.241 99.374,302.281 90.571 C 302.315 82.948,302.376 76.391,302.416 76.000 L 302.489 75.289 301.778 75.270 C 301.387 75.260,300.587 75.311,300.000 75.383 M325.135 75.289 C 325.124 75.289,325.096 75.542,325.073 75.851 C 325.041 76.273,325.093 76.389,325.281 76.317 C 325.419 76.264,325.492 76.119,325.445 75.994 C 325.318 75.665,325.529 75.527,325.806 75.757 C 325.981 75.902,326.044 75.902,326.044 75.755 C 326.044 75.617,326.710 75.566,328.222 75.591 L 330.400 75.626 328.444 75.727 L 326.489 75.829 328.472 75.914 C 329.658 75.966,330.415 75.934,330.354 75.836 C 330.298 75.747,330.386 75.724,330.549 75.786 C 330.711 75.849,330.844 75.822,330.844 75.728 C 330.844 75.633,330.960 75.556,331.101 75.556 C 331.242 75.556,331.394 75.496,331.439 75.422 C 331.484 75.349,330.089 75.289,328.339 75.289 C 326.588 
75.289,325.146 75.289,325.135 75.289 M331.556 75.467 C 331.387 75.670,331.383 75.792,331.541 75.983 C 331.655 76.120,331.678 76.260,331.591 76.294 C 331.187 76.453,331.198 77.511,331.604 77.514 C 331.914 77.516,331.944 77.552,331.742 77.680 C 331.600 77.770,331.406 77.795,331.311 77.737 C 331.217 77.678,331.293 77.835,331.481 78.085 C 331.700 78.377,331.866 78.469,331.946 78.341 C 332.025 78.215,332.118 78.270,332.203 78.494 C 332.277 78.687,332.522 78.991,332.748 79.170 C 332.974 79.349,333.106 79.581,333.041 79.686 C 332.974 79.795,333.006 79.826,333.116 79.758 C 333.222 79.692,333.353 79.711,333.408 79.799 C 333.509 79.962,332.652 80.158,332.321 80.048 C 332.222 80.015,332.178 79.790,332.224 79.549 C 332.286 79.225,332.231 79.111,332.010 79.111 C 331.846 79.111,331.757 79.039,331.811 78.952 C 331.924 78.769,331.467 78.548,331.306 78.709 C 331.129 78.885,331.187 79.140,331.378 79.022 C 331.476 78.962,331.556 79.035,331.556 79.184 C 331.556 79.362,331.662 79.421,331.867 79.355 C 332.108 79.278,332.122 79.295,331.933 79.430 C 331.783 79.537,331.744 79.699,331.834 79.847 C 331.953 80.044,331.923 80.043,331.674 79.841 C 331.506 79.705,331.318 79.645,331.255 79.708 C 331.192 79.771,331.239 79.822,331.359 79.822 C 331.478 79.822,331.519 79.915,331.450 80.027 C 331.380 80.140,331.395 80.191,331.484 80.141 C 331.572 80.091,331.884 80.155,332.178 80.283 C 332.508 80.428,332.593 80.520,332.400 80.525 C 332.224 80.530,332.090 80.669,332.092 80.844 C 332.094 81.110,332.119 81.117,332.267 80.889 C 332.361 80.742,332.440 80.705,332.442 80.806 C 332.443 80.908,332.606 80.990,332.804 80.990 C 333.017 80.990,333.130 80.901,333.080 80.771 C 333.031 80.642,333.095 80.590,333.236 80.643 C 333.493 80.742,333.547 81.956,333.295 81.956 C 333.214 81.956,333.199 82.156,333.261 82.402 C 333.356 82.781,333.323 82.831,333.042 82.734 C 332.741 82.629,332.744 82.646,333.067 82.921 C 333.318 83.134,333.357 83.249,333.200 83.315 C 333.078 83.366,332.978 83.484,332.978 83.577 C 332.978 
83.671,333.155 83.651,333.378 83.533 C 333.747 83.336,333.758 83.343,333.521 83.631 C 333.296 83.903,333.296 83.938,333.521 83.906 C 333.994 83.838,334.163 83.909,334.163 84.178 C 334.163 84.324,334.076 84.446,333.970 84.447 C 333.864 84.449,333.885 84.518,334.017 84.602 C 334.192 84.713,334.105 84.806,333.693 84.950 C 333.383 85.058,333.180 85.228,333.242 85.329 C 333.403 85.589,333.555 85.555,333.417 85.289 C 333.354 85.167,333.477 85.267,333.690 85.511 C 333.967 85.829,334.048 85.864,333.974 85.635 C 333.914 85.447,333.943 85.360,334.046 85.423 C 334.143 85.483,334.222 85.647,334.222 85.788 C 334.222 85.929,334.322 86.047,334.444 86.050 C 334.578 86.054,334.541 86.151,334.350 86.296 C 334.077 86.503,334.071 86.550,334.306 86.640 C 334.455 86.697,334.578 86.680,334.578 86.602 C 334.578 86.524,334.765 86.360,334.995 86.237 C 335.562 85.933,335.592 84.730,335.042 84.345 C 334.712 84.114,334.658 84.119,334.537 84.399 C 334.462 84.570,334.451 84.351,334.511 83.911 L 334.621 83.111 334.244 83.450 C 334.036 83.636,333.867 83.729,333.867 83.656 C 333.867 83.583,334.034 83.388,334.238 83.223 C 334.833 82.741,334.512 81.470,333.822 81.579 C 333.749 81.591,333.689 81.115,333.689 80.523 C 333.689 79.805,333.626 79.485,333.499 79.563 C 333.385 79.633,333.332 79.549,333.366 79.351 C 333.399 79.158,333.329 79.041,333.195 79.068 C 333.070 79.094,332.922 78.994,332.865 78.847 C 332.809 78.700,332.678 78.632,332.573 78.697 C 332.448 78.774,332.431 78.698,332.524 78.474 C 332.923 77.501,332.846 76.845,332.286 76.430 C 331.864 76.118,331.765 75.935,331.846 75.614 C 331.966 75.136,331.870 75.087,331.556 75.467 M343.728 75.370 C 343.797 75.481,343.722 75.506,343.534 75.434 C 343.302 75.345,343.254 75.386,343.338 75.605 C 343.412 75.798,343.371 75.863,343.214 75.803 C 342.783 75.638,342.760 76.452,342.758 92.430 C 342.756 106.827,342.703 110.567,342.553 106.892 C 342.502 105.658,341.828 104.765,341.431 105.407 C 341.187 105.803,340.994 106.500,341.156 106.400 C 341.254 106.339,341.333 
106.183,341.333 106.053 C 341.333 105.923,341.455 105.769,341.604 105.712 C 341.833 105.624,341.842 105.670,341.664 106.004 C 341.472 106.364,341.480 106.378,341.751 106.154 C 342.017 105.935,342.043 105.953,341.986 106.322 C 341.951 106.550,341.830 106.768,341.716 106.806 C 341.602 106.844,341.555 106.950,341.612 107.042 C 341.669 107.134,341.649 107.197,341.569 107.183 C 341.162 107.113,341.030 107.249,341.016 107.757 C 341.007 108.064,341.115 108.429,341.256 108.570 C 341.457 108.772,341.512 108.776,341.514 108.590 C 341.516 108.414,341.558 108.421,341.682 108.617 C 341.929 109.008,341.792 109.053,340.392 109.045 C 339.459 109.039,339.364 109.056,340.044 109.105 C 341.171 109.187,341.295 109.511,340.200 109.511 C 339.203 109.511,338.311 109.333,338.311 109.133 C 338.311 109.048,338.408 108.998,338.526 109.022 C 338.644 109.047,338.753 108.889,338.768 108.671 C 338.783 108.454,338.726 108.319,338.642 108.371 C 338.558 108.423,338.489 108.367,338.489 108.248 C 338.489 108.128,338.437 108.081,338.375 108.144 C 338.312 108.207,338.363 108.382,338.489 108.533 C 338.681 108.765,338.671 108.790,338.425 108.696 C 338.265 108.635,338.133 108.665,338.133 108.764 C 338.133 108.863,337.953 108.905,337.733 108.857 C 337.404 108.785,337.380 108.805,337.600 108.972 C 337.825 109.143,337.811 109.165,337.511 109.110 C 337.316 109.075,337.178 108.944,337.204 108.821 C 337.231 108.697,337.171 108.646,337.071 108.708 C 336.971 108.770,336.889 108.736,336.889 108.633 C 336.889 108.529,337.002 108.444,337.141 108.444 C 337.280 108.444,337.533 108.391,337.704 108.325 C 337.932 108.238,337.975 108.269,337.868 108.442 C 337.775 108.592,337.792 108.635,337.914 108.559 C 338.117 108.434,338.109 107.472,337.903 107.266 C 337.836 107.199,337.880 107.021,338.002 106.871 C 338.183 106.647,338.177 106.626,337.970 106.751 C 337.783 106.863,337.746 106.829,337.826 106.619 C 337.904 106.416,337.839 106.342,337.599 106.360 C 337.347 106.379,337.310 106.442,337.451 106.613 C 337.555 106.738,337.592 
106.965,337.533 107.118 C 337.472 107.276,337.501 107.350,337.600 107.289 C 337.699 107.228,337.727 107.302,337.666 107.463 C 337.514 107.859,337.081 107.281,337.070 106.667 C 337.063 106.326,337.023 106.283,336.898 106.480 C 336.808 106.622,336.773 106.801,336.821 106.878 C 336.868 106.955,336.801 106.977,336.671 106.927 C 336.492 106.859,336.451 107.010,336.502 107.551 C 336.548 108.027,336.638 108.224,336.774 108.143 C 336.886 108.076,336.868 108.156,336.734 108.321 C 336.562 108.533,336.544 108.688,336.674 108.844 C 336.776 108.967,336.869 109.147,336.882 109.244 C 336.895 109.342,337.061 109.397,337.253 109.367 C 337.448 109.335,337.601 109.413,337.603 109.544 C 337.605 109.715,337.652 109.706,337.778 109.511 C 337.874 109.363,337.951 109.335,337.953 109.450 C 337.955 109.592,338.591 109.662,340.013 109.676 C 341.634 109.693,342.044 109.650,341.943 109.471 C 341.840 109.289,341.862 109.289,342.054 109.467 C 342.186 109.589,342.477 109.689,342.702 109.689 C 342.927 109.689,343.112 109.589,343.114 109.467 C 343.116 109.316,343.177 109.330,343.305 109.511 C 343.448 109.713,343.516 109.723,343.584 109.554 C 343.634 109.432,343.748 109.376,343.837 109.432 C 343.927 109.487,344.000 109.444,344.000 109.336 C 344.000 109.228,343.860 109.163,343.689 109.192 C 343.451 109.232,343.390 109.140,343.430 108.800 C 343.473 108.437,343.446 108.404,343.283 108.622 C 343.106 108.859,343.093 108.859,343.164 108.622 C 343.208 108.476,343.338 108.377,343.452 108.402 C 343.566 108.428,343.816 108.365,344.007 108.263 C 344.419 108.042,344.571 77.086,344.162 76.678 C 344.023 76.539,344.036 76.443,344.207 76.336 C 344.540 76.128,344.889 76.093,344.889 76.268 C 344.889 76.349,344.789 76.455,344.667 76.504 C 344.512 76.567,344.526 76.603,344.711 76.622 C 344.858 76.637,345.102 76.674,345.253 76.704 C 345.405 76.734,345.680 76.648,345.865 76.512 C 346.050 76.377,346.106 76.267,345.990 76.267 C 345.873 76.267,345.778 76.102,345.778 75.901 C 345.778 75.652,345.849 75.579,346.000 75.673 C 
346.401 75.924,346.644 75.884,346.551 75.582 C 346.482 75.360,346.529 75.373,346.745 75.639 C 346.902 75.832,346.996 76.079,346.954 76.187 C 346.912 76.296,346.974 76.385,347.090 76.385 C 347.206 76.385,347.302 76.244,347.302 76.072 C 347.302 75.900,347.239 75.798,347.162 75.846 C 347.085 75.893,347.022 75.803,347.022 75.644 C 347.022 75.486,347.097 75.403,347.188 75.459 C 347.279 75.515,347.400 75.485,347.458 75.392 C 347.518 75.294,347.259 75.226,346.851 75.232 C 346.370 75.239,346.177 75.307,346.259 75.443 C 346.348 75.592,346.313 75.593,346.123 75.445 C 345.792 75.188,343.573 75.118,343.728 75.370 M348.837 75.511 C 348.877 75.749,348.777 75.815,348.410 75.790 C 348.075 75.768,347.959 75.829,348.023 75.997 C 348.074 76.128,348.009 76.271,347.880 76.314 C 347.750 76.358,347.781 76.373,347.947 76.349 C 348.113 76.324,348.287 76.396,348.333 76.508 C 348.379 76.620,348.423 76.566,348.430 76.390 C 348.438 76.213,348.521 76.115,348.614 76.173 C 348.708 76.231,348.737 76.415,348.679 76.583 C 348.592 76.837,348.619 76.831,348.838 76.551 C 349.043 76.287,349.133 76.264,349.245 76.445 C 349.346 76.608,349.319 76.634,349.156 76.533 C 349.002 76.438,348.966 76.459,349.050 76.595 C 349.119 76.708,349.392 76.794,349.655 76.788 C 350.082 76.776,350.095 76.759,349.778 76.622 C 349.538 76.519,349.509 76.464,349.689 76.454 C 349.926 76.441,349.926 76.420,349.689 76.267 C 349.458 76.118,349.462 76.094,349.719 76.092 C 349.881 76.090,350.062 76.189,350.120 76.311 C 350.177 76.433,350.181 76.293,350.129 76.000 C 350.077 75.707,350.098 75.562,350.176 75.678 C 350.254 75.794,350.539 75.865,350.808 75.835 C 351.077 75.805,351.340 75.850,351.393 75.935 C 351.445 76.020,351.238 76.089,350.933 76.089 C 350.628 76.089,350.423 76.161,350.478 76.248 C 350.702 76.612,352.566 76.354,352.926 75.910 C 353.295 75.453,353.713 75.306,353.878 75.574 C 353.934 75.665,354.075 75.682,354.190 75.613 C 354.316 75.538,354.305 75.605,354.162 75.781 C 353.953 76.039,353.971 76.066,354.307 76.001 C 354.553 
75.953,354.664 76.004,354.616 76.141 C 354.575 76.259,354.389 76.332,354.204 76.304 C 353.932 76.262,353.917 76.285,354.125 76.425 C 354.267 76.521,354.441 76.563,354.513 76.519 C 354.584 76.475,354.692 76.520,354.754 76.619 C 354.815 76.719,355.001 76.794,355.166 76.788 C 355.411 76.777,355.401 76.747,355.111 76.622 C 354.759 76.471,354.759 76.469,355.120 76.457 C 355.320 76.450,355.561 76.521,355.655 76.615 C 355.763 76.723,355.921 76.717,356.090 76.598 C 356.309 76.444,356.298 76.429,356.032 76.516 C 355.795 76.593,355.723 76.541,355.765 76.325 C 355.796 76.163,355.933 75.992,356.069 75.947 C 356.208 75.901,356.266 75.946,356.203 76.049 C 356.140 76.149,356.336 76.318,356.638 76.423 C 356.940 76.529,357.229 76.776,357.281 76.974 C 357.333 77.172,357.450 77.333,357.542 77.333 C 357.634 77.333,357.664 77.259,357.608 77.168 C 357.551 77.077,357.587 76.952,357.686 76.891 C 357.785 76.829,357.860 76.604,357.853 76.390 C 357.845 76.175,357.798 76.100,357.749 76.222 C 357.646 76.477,357.156 76.526,357.156 76.281 C 357.156 76.192,357.236 76.092,357.335 76.059 C 357.434 76.026,357.547 75.831,357.586 75.626 C 357.656 75.262,357.599 75.253,355.274 75.251 C 353.962 75.251,352.889 75.317,352.889 75.398 C 352.889 75.480,352.684 75.469,352.433 75.373 C 351.845 75.150,350.492 75.151,351.022 75.375 C 351.275 75.482,351.135 75.517,350.535 75.496 C 350.071 75.480,349.691 75.407,349.690 75.333 C 349.689 75.260,349.485 75.200,349.237 75.200 C 348.905 75.200,348.798 75.283,348.837 75.511 M381.532 75.556 C 381.538 75.800,381.486 76.180,381.417 76.400 C 381.314 76.731,381.359 76.800,381.679 76.800 C 381.891 76.800,382.026 76.863,381.978 76.941 C 381.930 77.018,382.066 77.094,382.279 77.109 C 382.626 77.133,382.634 77.117,382.352 76.957 C 382.179 76.859,382.083 76.704,382.139 76.613 C 382.195 76.523,382.125 76.493,381.983 76.547 C 381.841 76.602,381.664 76.549,381.591 76.429 C 381.499 76.281,381.530 76.258,381.689 76.356 C 381.835 76.446,381.878 76.430,381.806 76.313 C 381.668 
76.089,382.175 75.861,382.444 76.027 C 382.555 76.096,382.581 76.060,382.506 75.939 C 382.431 75.817,382.492 75.733,382.656 75.733 C 383.035 75.733,383.008 76.218,382.622 76.336 C 382.451 76.389,382.532 76.406,382.802 76.374 C 383.179 76.329,383.249 76.367,383.107 76.539 C 382.974 76.698,382.999 76.841,383.194 77.035 C 383.601 77.443,383.523 77.617,383.103 77.238 C 382.750 76.918,382.744 76.918,382.867 77.239 C 382.955 77.468,382.930 77.530,382.786 77.441 C 382.632 77.346,382.625 77.434,382.759 77.788 C 382.905 78.171,382.892 78.244,382.692 78.168 C 382.360 78.040,382.046 78.376,382.177 78.717 C 382.236 78.870,382.207 78.945,382.112 78.886 C 382.019 78.829,381.926 78.975,381.906 79.213 C 381.857 79.811,381.995 79.752,382.288 79.050 C 382.549 78.426,383.559 78.151,383.851 78.624 C 384.027 78.909,383.861 79.115,383.551 78.996 C 383.408 78.941,383.327 78.838,383.371 78.767 C 383.475 78.598,382.903 78.643,382.604 78.828 C 382.475 78.908,382.409 79.125,382.457 79.309 C 382.505 79.494,382.481 79.606,382.404 79.558 C 382.326 79.510,382.169 79.583,382.055 79.720 C 381.891 79.918,381.915 79.991,382.168 80.068 C 382.345 80.121,382.273 80.140,382.009 80.109 C 381.518 80.051,381.237 80.375,381.435 80.772 C 381.491 80.885,381.414 80.826,381.263 80.640 C 381.113 80.455,381.039 80.223,381.100 80.124 C 381.163 80.022,381.122 80.000,381.005 80.072 C 380.874 80.153,380.855 80.309,380.953 80.500 C 381.038 80.665,381.072 80.900,381.028 81.022 C 380.969 81.187,381.276 81.244,382.207 81.244 C 383.534 81.244,383.889 80.882,383.244 80.187 C 383.066 79.994,383.068 79.974,383.259 80.083 C 383.434 80.182,383.480 80.106,383.433 79.798 C 383.398 79.567,383.285 79.393,383.181 79.412 C 383.077 79.431,382.993 79.339,382.993 79.207 C 382.993 79.075,383.099 79.008,383.230 79.058 C 383.360 79.108,383.467 79.221,383.467 79.308 C 383.467 79.395,383.564 79.447,383.682 79.422 C 384.042 79.348,384.314 79.039,384.267 78.756 C 384.243 78.609,384.310 78.380,384.416 78.247 C 384.559 78.069,384.539 
77.953,384.340 77.803 C 384.121 77.638,384.110 77.523,384.282 77.185 C 384.631 76.497,384.408 76.089,383.683 76.089 C 383.336 76.089,383.100 76.040,383.160 75.981 C 383.372 75.768,382.852 75.412,382.557 75.568 C 382.398 75.652,382.218 75.670,382.157 75.609 C 382.095 75.547,382.042 75.610,382.038 75.748 C 382.035 75.887,381.917 75.800,381.777 75.556 L 381.522 75.111 381.532 75.556 M381.867 75.362 C 381.867 75.543,382.321 75.516,382.513 75.324 C 382.581 75.256,382.464 75.200,382.252 75.200 C 382.040 75.200,381.867 75.273,381.867 75.362 M298.133 75.644 C 298.133 75.791,298.004 75.911,297.845 75.911 C 297.609 75.911,297.597 75.863,297.778 75.644 C 297.900 75.498,298.029 75.378,298.066 75.378 C 298.103 75.378,298.133 75.498,298.133 75.644 M352.829 75.853 C 352.631 76.050,351.644 76.099,351.644 75.911 C 351.644 75.802,351.724 75.761,351.821 75.821 C 351.918 75.881,352.136 75.853,352.306 75.758 C 352.596 75.595,353.011 75.671,352.829 75.853 M325.605 76.123 C 325.682 76.857,325.882 77.084,326.104 76.689 C 326.384 76.194,326.414 75.586,326.140 75.960 C 325.956 76.212,325.918 76.210,325.746 75.942 C 325.587 75.694,325.563 75.724,325.605 76.123 M345.553 76.247 C 345.538 76.443,345.432 76.546,345.297 76.495 C 344.984 76.375,345.004 75.911,345.322 75.911 C 345.463 75.911,345.567 76.062,345.553 76.247 M358.151 76.524 C 358.182 76.715,358.121 76.975,358.015 77.102 C 357.875 77.270,357.882 77.333,358.039 77.333 C 358.158 77.333,358.297 77.174,358.348 76.980 C 358.399 76.785,358.520 76.676,358.618 76.736 C 358.715 76.796,358.863 76.736,358.946 76.602 C 359.073 76.396,359.132 76.397,359.326 76.605 C 359.498 76.790,359.499 76.820,359.333 76.727 C 359.211 76.658,359.111 76.686,359.111 76.790 C 359.111 76.893,359.211 76.987,359.333 76.998 C 359.456 77.010,359.636 77.037,359.733 77.060 C 359.831 77.083,360.056 77.114,360.233 77.128 C 360.410 77.143,360.505 77.236,360.444 77.333 C 360.384 77.431,360.455 77.511,360.601 77.511 C 360.748 77.511,360.926 77.605,360.996 77.719 C 361.079 
77.853,361.033 77.892,360.866 77.828 C 360.678 77.755,360.610 77.862,360.614 78.220 C 360.619 78.571,360.693 78.683,360.875 78.613 C 361.052 78.545,361.095 78.603,361.018 78.805 C 360.944 78.996,360.984 79.063,361.136 79.005 C 361.262 78.957,361.467 79.040,361.592 79.190 C 361.716 79.340,361.895 79.415,361.989 79.357 C 362.083 79.299,362.056 79.417,361.930 79.619 C 361.766 79.882,361.662 79.925,361.564 79.771 C 361.489 79.652,361.424 79.636,361.419 79.733 C 361.415 79.831,361.293 79.756,361.150 79.566 C 361.006 79.376,360.889 79.311,360.889 79.421 C 360.889 79.530,361.143 79.801,361.454 80.023 C 361.765 80.244,361.961 80.330,361.888 80.213 C 361.816 80.096,361.795 80.000,361.842 80.000 C 361.888 80.000,362.067 79.946,362.239 79.880 C 362.480 79.788,362.525 79.829,362.436 80.060 C 362.286 80.450,362.629 81.195,362.999 81.283 C 363.863 81.488,364.268 80.838,363.467 80.533 C 363.222 80.440,363.022 80.278,363.022 80.172 C 363.022 80.066,362.946 80.026,362.852 80.084 C 362.759 80.142,362.730 80.327,362.789 80.495 C 362.876 80.741,362.851 80.758,362.661 80.582 C 362.467 80.402,362.481 80.304,362.738 80.019 C 363.162 79.550,363.048 78.861,362.533 78.788 C 362.238 78.746,362.133 78.612,362.133 78.278 C 362.133 77.893,362.068 77.838,361.698 77.908 C 361.336 77.978,361.254 77.914,361.209 77.529 C 361.168 77.173,361.043 77.054,360.667 77.010 C 360.321 76.971,360.178 76.853,360.178 76.610 C 360.178 76.421,360.118 76.261,360.044 76.253 C 359.971 76.246,359.719 76.205,359.484 76.162 C 359.250 76.118,358.875 76.203,358.652 76.349 C 358.345 76.550,358.228 76.561,358.171 76.396 C 358.130 76.276,358.121 76.334,358.151 76.524 M370.733 76.658 C 370.685 76.736,370.510 76.791,370.345 76.779 C 370.063 76.760,370.064 76.754,370.346 76.679 C 370.534 76.630,370.611 76.488,370.551 76.300 C 370.472 76.052,370.487 76.045,370.638 76.258 C 370.739 76.400,370.782 76.580,370.733 76.658 M325.038 77.041 C 325.016 77.894,324.970 78.030,324.740 77.942 C 324.356 77.795,323.997 78.341,324.265 78.665 C 
324.386 78.810,324.556 78.886,324.642 78.832 C 324.729 78.778,324.800 78.825,324.800 78.935 C 324.800 79.046,324.659 79.090,324.481 79.034 C 324.223 78.952,324.164 79.033,324.170 79.466 C 324.174 79.760,324.237 80.000,324.311 80.000 C 324.384 80.000,324.444 79.875,324.444 79.723 C 324.444 79.566,324.530 79.498,324.641 79.567 C 324.768 79.646,324.676 79.839,324.380 80.112 C 324.096 80.376,323.960 80.653,324.020 80.846 C 324.110 81.138,324.104 81.138,323.916 80.844 C 323.487 80.178,323.263 80.496,323.235 81.809 C 323.209 83.057,323.036 83.440,322.579 83.265 C 322.445 83.214,322.361 83.406,322.346 83.795 C 322.324 84.394,323.032 84.869,323.269 84.414 C 323.314 84.328,323.328 84.360,323.301 84.484 C 323.273 84.609,323.039 84.696,322.781 84.678 C 322.278 84.642,322.252 84.693,322.489 85.244 C 322.596 85.493,322.645 85.518,322.654 85.327 C 322.661 85.176,322.833 85.041,323.036 85.026 C 323.323 85.004,323.361 85.050,323.209 85.234 C 323.063 85.409,323.077 85.494,323.262 85.565 C 323.399 85.618,323.509 85.567,323.505 85.453 C 323.487 84.811,323.576 84.436,323.724 84.528 C 323.817 84.585,323.863 84.476,323.826 84.285 C 323.789 84.095,323.872 83.896,324.010 83.843 C 324.281 83.739,324.166 83.200,323.873 83.200 C 323.775 83.200,323.740 83.317,323.795 83.461 C 323.897 83.724,323.662 83.960,323.235 84.025 C 323.062 84.051,323.052 84.000,323.197 83.825 C 323.341 83.652,323.332 83.566,323.163 83.498 C 323.021 83.441,323.056 83.400,323.255 83.392 C 323.432 83.384,323.532 83.306,323.478 83.219 C 323.424 83.131,323.493 82.841,323.631 82.574 C 323.770 82.307,323.836 81.966,323.778 81.815 C 323.710 81.639,323.745 81.586,323.876 81.667 C 324.132 81.825,324.667 81.217,324.748 80.675 C 324.782 80.451,324.884 80.241,324.974 80.210 C 325.074 80.176,325.073 79.985,324.973 79.721 C 324.843 79.380,324.866 79.289,325.082 79.289 C 325.232 79.289,325.305 79.368,325.246 79.465 C 325.185 79.562,325.250 79.597,325.391 79.543 C 325.723 79.416,325.679 78.917,325.305 78.571 C 324.993 78.281,325.187 
77.577,325.522 77.785 C 325.614 77.841,325.689 77.808,325.689 77.710 C 325.689 77.612,325.609 77.483,325.511 77.422 C 325.413 77.362,325.340 77.217,325.347 77.101 C 325.357 76.949,325.387 76.952,325.451 77.112 C 325.501 77.235,325.615 77.290,325.704 77.235 C 325.793 77.180,325.870 77.279,325.873 77.456 C 325.878 77.725,325.916 77.711,326.104 77.372 C 326.228 77.149,326.390 77.027,326.463 77.100 C 326.536 77.174,326.540 77.087,326.472 76.908 C 326.357 76.611,326.333 76.608,326.181 76.879 C 326.074 77.071,325.942 77.119,325.808 77.015 C 325.332 76.649,325.048 76.658,325.038 77.041 M298.311 92.430 C 298.311 104.772,298.264 108.076,298.089 108.140 C 297.967 108.185,298.067 108.212,298.311 108.200 L 298.756 108.179 298.709 92.490 C 298.679 82.293,298.601 76.800,298.487 76.800 C 298.372 76.800,298.311 82.241,298.311 92.430 M302.443 90.622 C 302.420 98.273,302.407 104.584,302.415 104.647 C 302.424 104.709,302.325 104.825,302.196 104.905 C 302.067 104.985,304.920 105.063,308.536 105.079 C 316.368 105.114,315.733 105.088,315.733 105.382 C 315.733 105.513,315.813 105.572,315.911 105.511 C 316.009 105.451,316.089 105.529,316.089 105.685 C 316.089 105.919,316.133 105.929,316.336 105.740 C 316.546 105.546,316.529 105.490,316.227 105.376 C 315.918 105.259,315.903 105.201,316.113 104.943 C 316.327 104.679,316.315 104.660,316.010 104.774 C 315.668 104.903,315.668 104.900,316.008 104.524 L 316.352 104.144 316.470 104.561 C 316.555 104.864,316.592 104.890,316.605 104.656 C 316.614 104.479,316.694 104.379,316.781 104.433 C 316.869 104.487,316.986 104.412,317.042 104.266 C 317.099 104.120,317.067 104.000,316.972 104.000 C 316.877 104.000,316.794 103.860,316.788 103.689 C 316.777 103.423,316.753 103.429,316.622 103.733 C 316.486 104.051,316.468 104.054,316.457 103.767 C 316.450 103.590,316.363 103.496,316.264 103.557 C 316.164 103.619,316.122 103.732,316.169 103.808 C 316.291 104.005,315.875 104.502,315.556 104.542 C 315.409 104.560,312.429 104.566,308.934 104.554 L 302.580 104.533 
302.533 90.622 L 302.487 76.711 302.443 90.622 M343.067 100.311 C 343.040 104.638,343.018 101.098,343.018 92.444 C 343.018 83.791,343.040 80.251,343.067 84.578 C 343.093 88.904,343.093 95.984,343.067 100.311 M343.778 100.311 C 343.751 104.638,343.730 101.098,343.730 92.444 C 343.730 83.791,343.751 80.251,343.778 84.578 C 343.804 88.904,343.804 95.984,343.778 100.311 M359.678 77.344 C 359.721 77.473,359.861 77.613,359.989 77.656 C 360.122 77.700,360.189 77.633,360.144 77.500 C 360.102 77.372,359.962 77.232,359.833 77.189 C 359.700 77.144,359.633 77.211,359.678 77.344 M359.303 77.655 C 359.295 77.783,359.209 77.838,359.111 77.778 C 359.013 77.717,358.933 77.756,358.933 77.864 C 358.933 77.972,359.073 78.037,359.244 78.008 C 359.456 77.972,359.539 78.055,359.503 78.267 C 359.438 78.655,359.746 78.678,359.892 78.297 C 360.044 77.902,360.032 77.867,359.748 77.867 C 359.610 77.867,359.456 77.767,359.407 77.644 C 359.338 77.474,359.314 77.476,359.303 77.655 M369.148 77.701 C 369.084 77.805,369.074 77.933,369.126 77.985 C 369.253 78.113,369.600 77.888,369.600 77.678 C 369.600 77.451,369.293 77.466,369.148 77.701 M376.829 78.212 C 376.668 78.412,376.672 78.438,376.851 78.331 C 377.021 78.228,377.056 78.317,376.991 78.680 C 376.875 79.317,377.029 79.540,377.383 79.253 C 377.540 79.126,377.619 79.105,377.559 79.205 C 377.464 79.365,377.699 79.388,378.544 79.303 C 378.623 79.295,378.643 79.216,378.588 79.128 C 378.533 79.039,378.669 79.005,378.889 79.052 C 379.258 79.131,379.265 79.118,378.981 78.885 C 378.811 78.745,378.731 78.573,378.803 78.501 C 378.875 78.429,378.813 78.397,378.667 78.430 C 378.519 78.462,378.422 78.384,378.449 78.253 C 378.476 78.124,378.426 78.062,378.339 78.116 C 378.252 78.170,378.207 78.436,378.240 78.707 C 378.272 78.978,378.255 79.059,378.202 78.887 C 378.150 78.714,378.031 78.620,377.939 78.677 C 377.847 78.734,377.829 78.875,377.898 78.990 C 377.986 79.137,377.942 79.134,377.751 78.979 C 377.601 78.858,377.408 78.818,377.323 78.890 C 377.237 
78.963,377.264 78.895,377.382 78.739 C 377.500 78.584,377.538 78.386,377.465 78.301 C 377.393 78.215,377.473 78.252,377.644 78.383 C 377.915 78.590,377.956 78.578,377.956 78.288 C 377.956 77.879,377.143 77.824,376.829 78.212 M351.111 78.173 C 351.258 78.230,351.437 78.364,351.508 78.471 C 351.597 78.604,351.640 78.591,351.642 78.432 C 351.644 78.207,351.772 78.220,352.711 78.539 C 352.920 78.611,352.932 78.581,352.767 78.402 C 352.651 78.277,352.171 78.151,351.700 78.123 C 351.230 78.094,350.964 78.117,351.111 78.173 M349.802 78.490 C 349.522 78.636,349.162 78.761,349.002 78.768 C 348.779 78.778,348.794 78.814,349.067 78.924 C 349.262 79.002,349.502 79.024,349.600 78.971 C 349.698 78.919,349.715 78.945,349.639 79.028 C 349.506 79.175,349.139 79.288,348.667 79.329 C 348.544 79.340,348.441 79.435,348.436 79.541 C 348.375 81.015,348.490 89.336,348.574 89.561 C 348.669 89.813,348.643 89.829,348.424 89.652 C 348.204 89.473,348.160 89.510,348.160 89.870 C 348.160 90.258,348.190 90.275,348.450 90.040 C 348.664 89.846,349.130 89.786,350.236 89.811 L 351.733 89.844 350.222 89.956 L 348.711 90.067 350.167 90.100 C 352.166 90.146,352.244 90.461,350.273 90.527 L 348.711 90.580 350.339 90.668 C 351.235 90.716,351.930 90.696,351.884 90.622 C 351.839 90.549,351.906 90.486,352.034 90.483 C 352.179 90.480,352.138 90.381,351.927 90.220 C 351.602 89.975,351.614 89.951,352.186 89.711 C 352.747 89.475,352.657 89.460,350.748 89.485 L 348.711 89.511 348.711 84.933 C 348.711 82.416,348.771 80.356,348.844 80.356 C 349.060 80.356,349.639 79.745,349.561 79.600 C 349.522 79.527,350.303 79.467,351.297 79.467 C 352.872 79.467,353.091 79.431,352.997 79.185 C 352.937 79.031,352.888 78.851,352.886 78.785 C 352.885 78.720,352.818 78.770,352.738 78.895 C 352.659 79.021,352.360 79.157,352.074 79.198 C 351.789 79.239,351.656 79.229,351.779 79.177 C 351.902 79.124,351.949 78.994,351.883 78.888 C 351.807 78.765,351.872 78.736,352.060 78.809 C 352.222 78.871,352.356 78.853,352.356 78.769 C 352.356 
78.533,351.848 78.393,351.723 78.594 C 351.651 78.711,351.529 78.703,351.370 78.571 C 351.236 78.460,351.043 78.421,350.941 78.484 C 350.839 78.547,350.756 78.514,350.756 78.410 C 350.756 78.148,350.401 78.177,349.802 78.490 M374.434 78.571 C 374.101 78.757,373.928 78.950,374.035 79.016 C 374.151 79.088,374.164 79.248,374.069 79.433 C 373.984 79.598,373.951 79.833,373.994 79.956 C 374.038 80.078,373.983 80.178,373.871 80.178 C 373.759 80.178,373.717 80.098,373.778 80.000 C 373.838 79.902,373.748 79.822,373.577 79.822 C 373.295 79.822,373.298 79.796,373.610 79.542 C 373.800 79.388,373.856 79.313,373.733 79.375 C 373.611 79.437,373.510 79.383,373.508 79.255 C 373.506 79.086,373.459 79.095,373.333 79.289 C 373.190 79.510,373.160 79.495,373.156 79.200 C 373.151 78.905,373.121 78.890,372.978 79.111 C 372.832 79.336,372.805 79.338,372.803 79.122 C 372.801 78.935,372.724 78.960,372.520 79.210 C 372.366 79.400,372.285 79.465,372.340 79.354 C 372.474 79.084,371.769 78.910,371.605 79.172 C 371.384 79.526,371.402 79.799,371.661 80.014 C 371.982 80.280,371.989 80.779,371.671 80.657 C 371.504 80.593,371.444 80.693,371.476 80.985 C 371.525 81.431,371.129 81.917,370.951 81.629 C 370.895 81.537,370.969 81.362,371.118 81.239 C 371.357 81.040,371.356 80.982,371.104 80.703 C 370.801 80.368,371.029 80.051,371.405 80.284 C 371.528 80.360,371.562 80.333,371.491 80.217 C 371.425 80.110,371.218 80.034,371.031 80.048 C 370.844 80.062,370.731 80.138,370.779 80.216 C 370.827 80.294,370.759 80.400,370.627 80.450 C 370.319 80.568,370.266 81.235,370.536 81.592 C 370.650 81.743,370.674 81.807,370.588 81.735 C 370.413 81.586,369.956 81.905,369.956 82.177 C 369.956 82.289,370.040 82.285,370.184 82.165 C 370.310 82.060,370.672 82.015,370.988 82.065 C 371.403 82.131,371.550 82.091,371.515 81.922 C 371.488 81.794,371.567 81.709,371.689 81.733 C 371.811 81.758,371.911 81.663,371.911 81.523 C 371.911 81.384,372.091 81.244,372.310 81.212 C 372.592 81.172,372.735 81.004,372.799 80.637 C 372.848 
80.352,373.009 80.086,373.156 80.046 C 373.393 79.981,373.393 79.994,373.156 80.162 C 372.952 80.306,373.089 80.351,373.733 80.353 C 374.578 80.356,374.724 80.197,374.598 79.419 C 374.585 79.343,374.788 79.335,375.048 79.400 C 375.383 79.484,375.483 79.459,375.392 79.312 C 375.322 79.198,375.199 79.146,375.119 79.195 C 374.891 79.336,374.774 78.630,374.989 78.415 C 375.255 78.150,375.122 78.187,374.434 78.571 M350.650 78.687 C 350.744 78.932,350.804 78.939,351.069 78.731 C 351.348 78.512,351.355 78.518,351.134 78.800 C 350.920 79.072,350.932 79.113,351.223 79.125 C 351.406 79.133,351.461 79.178,351.345 79.225 C 351.053 79.344,350.009 78.778,350.138 78.570 C 350.297 78.313,350.527 78.366,350.650 78.687 M368.842 78.735 C 368.828 78.918,368.715 79.054,368.591 79.036 C 368.281 78.990,368.068 78.756,368.336 78.756 C 368.456 78.756,368.505 78.676,368.444 78.578 C 368.384 78.480,368.454 78.400,368.601 78.400 C 368.752 78.400,368.856 78.545,368.842 78.735 M348.089 79.005 C 348.089 79.157,348.238 79.256,348.444 79.240 C 348.640 79.225,348.800 79.193,348.800 79.168 C 348.800 79.144,348.640 79.038,348.444 78.933 C 348.164 78.783,348.089 78.798,348.089 79.005 M353.490 78.892 C 352.619 79.462,353.598 80.393,354.969 80.297 C 356.080 80.220,356.838 80.573,356.553 81.035 C 356.495 81.128,356.608 81.246,356.803 81.297 C 357.034 81.357,357.120 81.485,357.051 81.665 C 356.977 81.857,357.085 81.976,357.406 82.056 C 357.660 82.120,357.872 82.303,357.878 82.464 C 357.884 82.624,357.880 83.485,357.868 84.377 C 357.847 85.945,358.235 86.744,358.672 86.036 C 358.724 85.951,358.878 85.925,359.014 85.977 C 359.217 86.055,359.208 86.129,358.964 86.386 C 358.712 86.652,358.624 86.663,358.386 86.461 C 358.021 86.151,357.867 86.159,357.867 86.489 C 357.867 86.636,357.947 86.756,358.044 86.756 C 358.142 86.756,358.222 86.852,358.222 86.969 C 358.222 87.135,358.175 87.135,358.009 86.969 C 357.607 86.567,356.978 86.708,356.978 87.200 C 356.978 87.556,356.889 87.644,356.533 87.644 C 356.178 
87.644,356.089 87.733,356.089 88.089 C 356.089 88.387,355.987 88.539,355.778 88.550 C 355.280 88.577,354.236 88.660,354.070 88.687 C 353.986 88.700,353.962 88.827,354.016 88.969 C 354.082 89.141,354.030 89.194,353.858 89.127 C 353.459 88.975,353.553 89.369,353.988 89.674 C 354.272 89.873,354.438 89.894,354.608 89.753 C 354.780 89.610,354.867 89.630,354.944 89.829 C 355.115 90.277,356.708 90.151,356.721 89.689 C 356.739 89.080,356.977 88.356,357.159 88.356 C 357.255 88.356,357.333 88.191,357.333 87.990 C 357.333 87.755,357.408 87.670,357.541 87.752 C 357.682 87.839,357.712 87.782,357.632 87.575 C 357.565 87.401,357.587 87.315,357.684 87.375 C 357.778 87.433,358.003 87.331,358.185 87.148 C 358.367 86.966,358.690 86.830,358.903 86.845 C 359.115 86.861,359.289 86.807,359.289 86.726 C 359.289 86.644,359.189 86.577,359.067 86.575 C 358.911 86.573,358.921 86.524,359.098 86.412 C 359.404 86.218,359.546 85.494,359.265 85.556 C 359.156 85.580,359.026 85.487,358.976 85.349 C 358.917 85.187,358.974 85.133,359.138 85.196 C 359.454 85.317,359.442 83.865,359.123 83.481 C 358.968 83.295,358.970 83.218,359.129 83.164 C 359.290 83.111,359.287 82.996,359.119 82.681 C 358.997 82.454,358.980 82.319,359.081 82.381 C 359.182 82.444,359.315 82.413,359.376 82.314 C 359.437 82.215,359.387 82.133,359.264 82.133 C 359.098 82.133,359.092 82.082,359.240 81.933 C 359.369 81.804,359.386 81.588,359.287 81.329 C 359.203 81.107,359.189 80.864,359.256 80.790 C 359.323 80.715,359.278 80.712,359.156 80.782 C 359.029 80.855,358.933 80.816,358.933 80.692 C 358.933 80.572,358.882 80.525,358.819 80.588 C 358.756 80.651,358.789 80.804,358.892 80.929 C 359.038 81.106,358.912 81.156,358.309 81.160 C 357.882 81.162,357.585 81.091,357.641 81.000 C 357.697 80.909,357.596 80.727,357.415 80.595 C 357.173 80.418,357.154 80.354,357.344 80.350 C 357.542 80.346,357.530 80.291,357.292 80.110 C 357.122 79.981,357.035 79.824,357.099 79.760 C 357.163 79.697,357.130 79.644,357.026 79.644 C 356.923 79.644,356.793 
79.761,356.739 79.903 C 356.621 80.211,355.342 80.160,355.237 79.843 C 355.200 79.734,355.022 79.644,354.840 79.644 C 354.337 79.644,354.237 79.305,354.691 79.137 C 355.086 78.991,355.084 78.987,354.609 78.896 C 354.343 78.845,354.082 78.873,354.030 78.957 C 353.977 79.042,353.855 79.111,353.757 79.111 C 353.659 79.111,353.628 79.031,353.689 78.933 C 353.818 78.724,353.763 78.712,353.490 78.892 M355.200 79.022 C 355.200 79.169,355.340 79.288,355.511 79.286 C 355.755 79.284,355.770 79.250,355.580 79.130 C 355.375 79.000,355.375 78.961,355.580 78.880 C 355.749 78.813,355.728 78.779,355.511 78.770 C 355.340 78.762,355.200 78.876,355.200 79.022 M356.156 78.958 C 356.179 79.074,356.327 79.188,356.486 79.211 C 356.677 79.239,356.741 79.164,356.674 78.990 C 356.600 78.796,356.538 78.782,356.441 78.939 C 356.345 79.095,356.284 79.098,356.213 78.949 C 356.144 78.807,356.128 78.809,356.156 78.958 M375.467 78.944 C 375.467 79.047,375.547 79.083,375.644 79.022 C 375.742 78.962,375.822 79.037,375.822 79.190 C 375.822 79.648,376.651 79.540,376.770 79.067 C 376.866 78.688,376.858 78.684,376.622 78.999 C 376.393 79.306,376.351 79.310,376.085 79.044 C 375.756 78.714,375.467 78.668,375.467 78.944 M380.008 79.067 C 379.961 79.189,379.700 79.291,379.428 79.295 C 379.156 79.298,378.780 79.416,378.592 79.558 C 378.319 79.765,378.197 79.772,377.978 79.597 C 377.811 79.462,377.839 79.564,378.053 79.861 C 378.426 80.381,379.270 80.568,379.087 80.090 C 379.031 79.944,378.914 79.868,378.828 79.921 C 378.741 79.975,378.718 79.894,378.777 79.743 C 378.895 79.435,379.556 79.360,379.556 79.655 C 379.556 79.758,379.468 79.789,379.361 79.723 C 379.241 79.649,379.190 79.725,379.227 79.922 C 379.272 80.154,379.472 80.251,379.952 80.276 C 380.316 80.294,380.572 80.240,380.519 80.155 C 380.466 80.070,380.509 80.000,380.614 80.000 C 380.731 80.000,380.767 79.781,380.706 79.439 C 380.625 78.976,380.650 78.913,380.852 79.080 C 381.040 79.236,381.096 79.225,381.096 79.029 C 381.096 78.710,380.132 
78.744,380.008 79.067 M372.089 79.458 C 372.089 79.540,371.977 79.649,371.840 79.702 C 371.621 79.786,371.593 79.657,371.720 79.145 C 371.748 79.029,372.089 79.318,372.089 79.458 M380.444 79.299 C 380.444 79.403,380.364 79.438,380.267 79.378 C 380.169 79.317,380.083 79.453,380.075 79.678 C 380.067 79.904,380.020 79.986,379.969 79.861 C 379.919 79.735,379.923 79.515,379.978 79.372 C 380.093 79.072,380.444 79.018,380.444 79.299 M381.442 79.362 C 381.532 79.600,381.509 79.664,381.364 79.574 C 381.249 79.504,381.156 79.530,381.156 79.634 C 381.156 79.880,381.457 79.876,381.610 79.629 C 381.675 79.523,381.635 79.343,381.521 79.229 C 381.362 79.071,381.343 79.103,381.442 79.362 M328.099 79.561 C 327.648 79.903,327.591 80.364,327.966 80.649 C 328.247 80.863,328.246 80.878,327.956 80.883 C 327.600 80.890,327.532 81.348,327.867 81.480 C 327.989 81.528,327.769 81.533,327.378 81.492 C 326.890 81.440,326.775 81.463,327.010 81.566 C 327.252 81.672,327.290 81.756,327.141 81.848 C 327.024 81.921,326.974 82.055,327.030 82.146 C 327.086 82.237,327.061 82.311,326.974 82.311 C 326.887 82.311,326.819 82.631,326.823 83.022 C 326.829 83.479,326.913 83.733,327.060 83.733 C 327.186 83.733,327.289 83.818,327.289 83.922 C 327.289 84.025,327.214 84.064,327.123 84.008 C 327.032 83.951,326.911 83.981,326.854 84.074 C 326.797 84.166,326.838 84.296,326.945 84.363 C 327.056 84.431,327.091 84.676,327.028 84.931 C 326.942 85.274,326.975 85.355,327.169 85.281 C 327.309 85.227,327.500 85.260,327.595 85.355 C 327.705 85.465,327.856 85.412,328.016 85.208 C 328.236 84.928,328.233 84.911,327.992 85.067 C 327.840 85.164,327.868 85.105,328.054 84.935 C 328.352 84.661,328.359 84.612,328.108 84.516 C 327.952 84.456,327.874 84.327,327.934 84.229 C 328.002 84.120,328.103 84.144,328.197 84.292 C 328.318 84.483,328.351 84.471,328.353 84.237 C 328.354 84.074,328.256 83.900,328.133 83.851 C 327.956 83.779,327.956 83.759,328.133 83.747 C 328.277 83.738,328.356 83.445,328.356 82.923 C 328.356 82.477,328.426 
82.156,328.513 82.210 C 328.600 82.263,328.770 82.188,328.891 82.042 C 329.011 81.897,329.051 81.778,328.978 81.778 C 328.905 81.778,328.955 81.642,329.089 81.477 C 329.289 81.231,329.290 81.202,329.093 81.321 C 328.900 81.437,328.892 81.394,329.049 81.100 C 329.313 80.607,329.304 80.588,328.844 80.628 C 328.348 80.671,328.139 80.562,328.280 80.335 C 328.348 80.224,328.474 80.233,328.626 80.358 C 328.809 80.511,328.912 80.467,329.072 80.168 C 329.218 79.896,329.221 79.816,329.081 79.902 C 328.629 80.182,328.006 80.190,328.112 79.915 C 328.169 79.767,328.273 79.683,328.345 79.727 C 328.416 79.771,328.474 79.690,328.474 79.548 C 328.474 79.405,328.467 79.291,328.459 79.295 C 328.451 79.298,328.289 79.418,328.099 79.561 M347.689 80.029 L 347.200 80.578 347.200 85.100 C 347.200 88.069,347.261 89.583,347.378 89.511 C 347.489 89.443,347.556 87.709,347.556 84.900 C 347.556 80.592,347.666 79.636,348.112 80.082 C 348.155 80.125,348.190 80.005,348.190 79.814 C 348.190 79.623,348.188 79.470,348.184 79.474 C 348.181 79.477,347.958 79.727,347.689 80.029 M367.750 79.794 C 367.689 79.894,367.725 80.029,367.831 80.094 C 368.104 80.263,368.281 80.032,368.053 79.804 C 367.925 79.676,367.826 79.673,367.750 79.794 M356.089 80.889 C 356.089 81.221,356.136 81.268,356.302 81.102 C 356.468 80.936,356.468 80.841,356.302 80.676 C 356.136 80.510,356.089 80.557,356.089 80.889 M324.406 81.665 C 324.192 81.828,324.157 81.953,324.289 82.085 C 324.422 82.217,324.414 82.391,324.265 82.670 C 324.101 82.977,324.100 83.097,324.263 83.198 C 324.380 83.270,324.462 83.382,324.446 83.447 C 324.376 83.739,324.558 83.981,324.689 83.769 C 324.763 83.649,324.729 83.556,324.611 83.556 C 324.454 83.556,324.457 83.488,324.622 83.289 C 324.789 83.088,324.791 83.022,324.632 83.022 C 324.498 83.022,324.464 82.885,324.539 82.649 C 324.643 82.323,324.786 81.412,324.731 81.428 C 324.720 81.431,324.574 81.538,324.406 81.665 M362.311 81.578 C 362.311 81.688,362.382 81.778,362.468 81.778 C 362.757 81.778,362.816 
82.492,362.549 82.759 C 362.316 82.992,362.327 83.022,362.643 83.022 C 362.840 83.022,363.058 82.930,363.128 82.817 C 363.214 82.678,363.175 82.660,363.005 82.762 C 362.798 82.887,362.794 82.865,362.984 82.630 C 363.112 82.473,363.364 82.387,363.553 82.437 C 363.834 82.510,363.886 82.439,363.854 82.022 C 363.815 81.521,362.844 81.146,362.844 81.632 C 362.844 81.751,362.746 81.739,362.578 81.600 C 362.394 81.447,362.311 81.440,362.311 81.578 M369.778 81.674 C 369.778 81.813,369.678 81.962,369.556 82.005 C 369.433 82.049,369.394 82.150,369.469 82.229 C 369.543 82.309,369.720 82.220,369.863 82.031 C 370.169 81.626,370.200 81.422,369.956 81.422 C 369.858 81.422,369.778 81.536,369.778 81.674 M363.444 81.919 C 363.492 81.996,363.376 82.071,363.188 82.085 C 362.999 82.100,362.844 82.036,362.844 81.945 C 362.844 81.742,363.322 81.722,363.444 81.919 M328.632 82.522 C 328.231 83.741,328.984 85.108,329.623 84.321 C 329.762 84.150,329.776 84.075,329.657 84.147 C 329.541 84.216,329.396 84.190,329.333 84.089 C 329.271 83.988,329.150 83.948,329.065 84.001 C 328.838 84.142,328.930 83.325,329.169 83.078 C 329.327 82.916,329.423 82.916,329.586 83.078 C 329.751 83.241,329.772 83.218,329.680 82.978 C 329.606 82.782,329.646 82.667,329.788 82.667 C 330.193 82.667,329.826 82.466,329.260 82.376 C 328.899 82.319,328.682 82.370,328.632 82.522 M370.406 82.480 C 370.340 82.586,370.200 82.619,370.094 82.553 C 369.985 82.486,369.951 82.516,370.016 82.621 C 370.079 82.723,370.017 82.851,369.877 82.905 C 369.737 82.958,369.579 82.907,369.526 82.790 C 369.472 82.673,369.452 82.738,369.480 82.933 C 369.509 83.129,369.627 83.269,369.744 83.244 C 369.860 83.220,369.956 83.285,369.956 83.388 C 369.956 83.492,370.056 83.518,370.178 83.447 C 370.300 83.375,370.249 83.489,370.065 83.699 C 369.881 83.909,369.661 84.038,369.576 83.985 C 369.492 83.933,369.422 83.975,369.422 84.078 C 369.422 84.354,369.902 84.308,370.011 84.022 C 370.068 83.874,370.271 83.809,370.528 83.858 C 370.897 83.929,371.142 
83.181,370.845 82.889 C 370.820 82.864,370.790 82.771,370.778 82.681 C 370.739 82.395,370.528 82.281,370.406 82.480 M363.349 82.703 C 363.479 82.870,363.535 83.090,363.471 83.192 C 363.408 83.294,363.442 83.378,363.545 83.378 C 363.649 83.378,363.733 83.344,363.733 83.303 C 363.733 83.263,363.786 83.083,363.851 82.903 C 363.915 82.724,363.902 82.637,363.822 82.710 C 363.743 82.782,363.550 82.742,363.394 82.621 C 363.145 82.427,363.140 82.437,363.349 82.703 M364.978 84.135 C 364.978 85.700,365.221 86.116,365.731 85.422 C 365.802 85.324,365.864 85.444,365.867 85.689 C 365.871 86.012,365.915 86.066,366.027 85.888 C 366.113 85.754,366.271 85.698,366.380 85.765 C 366.498 85.838,366.578 85.756,366.578 85.564 C 366.578 85.386,366.398 85.161,366.178 85.064 C 365.743 84.873,365.603 83.377,366.008 83.242 C 366.115 83.206,366.156 83.055,366.099 82.907 C 366.014 82.688,365.967 82.682,365.845 82.874 C 365.728 83.057,365.694 83.043,365.692 82.811 C 365.690 82.621,365.558 82.521,365.333 82.538 C 365.007 82.562,364.978 82.692,364.978 84.135 M362.938 83.370 C 362.880 83.464,362.716 83.495,362.572 83.440 C 362.400 83.374,362.312 83.452,362.314 83.670 C 362.316 83.877,362.365 83.924,362.447 83.797 C 362.534 83.662,362.692 83.745,362.919 84.049 C 363.107 84.298,363.168 84.446,363.055 84.376 C 362.920 84.292,362.885 84.344,362.954 84.523 C 363.076 84.840,363.378 84.720,363.378 84.356 C 363.378 84.220,363.451 84.065,363.540 84.009 C 363.635 83.951,363.649 84.113,363.572 84.399 C 363.445 84.874,362.844 85.208,362.844 84.803 C 362.844 84.692,362.761 84.653,362.658 84.716 C 362.532 84.794,362.544 84.919,362.694 85.100 C 362.876 85.319,362.861 85.399,362.614 85.537 C 362.447 85.630,362.312 85.803,362.314 85.920 C 362.316 86.066,362.358 86.073,362.447 85.943 C 362.637 85.666,363.204 85.485,363.153 85.718 C 363.130 85.824,363.211 85.948,363.333 85.993 C 363.675 86.118,363.597 85.553,363.244 85.348 C 363.073 85.248,363.013 85.165,363.111 85.164 C 363.420 85.159,363.689 85.410,363.772 85.778 C 
363.816 85.973,363.845 85.556,363.837 84.850 L 363.822 83.567 363.305 83.658 C 362.855 83.737,362.818 83.713,363.016 83.474 C 363.141 83.323,363.198 83.200,363.143 83.200 C 363.088 83.200,362.996 83.277,362.938 83.370 M358.314 84.013 C 358.282 84.363,358.211 84.693,358.156 84.747 C 358.083 84.821,358.126 83.795,358.212 83.422 C 358.285 83.105,358.359 83.532,358.314 84.013 M329.833 84.506 C 329.236 85.136,329.310 87.349,329.931 87.437 C 330.221 87.478,330.324 87.424,330.264 87.259 C 330.217 87.129,330.096 87.039,329.997 87.060 C 329.763 87.110,329.583 86.432,329.794 86.301 C 329.883 86.246,329.956 86.316,329.956 86.457 C 329.956 86.597,330.075 86.811,330.221 86.932 C 330.390 87.073,330.446 87.307,330.377 87.585 C 330.282 87.964,330.320 88.009,330.690 87.946 C 330.921 87.906,331.011 87.908,330.889 87.950 C 330.767 87.992,330.665 88.160,330.664 88.324 C 330.662 88.582,330.638 88.586,330.489 88.356 C 330.362 88.160,330.316 88.286,330.314 88.830 C 330.310 89.831,330.440 90.113,330.856 90.004 C 331.108 89.938,331.200 90.005,331.200 90.255 C 331.200 90.442,331.300 90.703,331.422 90.835 C 331.547 90.970,331.557 91.026,331.444 90.963 C 331.320 90.893,331.244 91.018,331.244 91.292 C 331.244 91.535,331.334 91.734,331.444 91.736 C 331.554 91.737,331.824 91.786,332.044 91.845 C 332.293 91.912,332.444 91.874,332.444 91.744 C 332.444 91.629,332.378 91.576,332.296 91.626 C 332.214 91.677,332.017 91.445,331.858 91.112 C 331.616 90.604,331.611 90.521,331.828 90.605 C 331.970 90.659,332.043 90.776,331.988 90.863 C 331.852 91.084,332.299 91.058,332.622 90.825 C 332.847 90.663,332.839 90.649,332.576 90.734 C 332.317 90.817,332.281 90.759,332.373 90.395 C 332.469 90.010,332.424 89.956,332.009 89.956 C 331.696 89.956,331.578 90.025,331.661 90.161 C 331.740 90.288,331.710 90.318,331.583 90.239 C 331.104 89.943,331.977 89.548,332.533 89.809 C 332.883 89.972,332.883 89.971,332.544 89.699 C 332.354 89.547,332.089 89.422,331.956 89.422 C 331.822 89.422,331.752 89.358,331.800 89.280 C 331.848 
89.202,331.807 89.022,331.708 88.880 C 331.572 88.684,331.507 88.676,331.438 88.844 C 331.301 89.182,331.017 89.110,331.147 88.771 C 331.223 88.572,331.192 88.517,331.052 88.604 C 330.924 88.683,330.844 88.613,330.844 88.419 C 330.844 88.176,330.892 88.154,331.058 88.320 C 331.175 88.437,331.435 88.524,331.636 88.513 C 331.935 88.496,331.945 88.478,331.689 88.411 C 331.314 88.313,331.308 88.197,331.650 87.744 C 332.022 87.251,331.987 87.164,331.276 86.827 C 330.874 86.636,330.718 86.478,330.851 86.396 C 331.002 86.303,331.005 86.233,330.863 86.145 C 330.593 85.978,330.279 86.200,330.423 86.457 C 330.488 86.572,330.429 86.557,330.293 86.422 C 330.101 86.233,330.095 86.159,330.267 86.096 C 330.389 86.051,330.489 85.897,330.489 85.752 C 330.489 85.608,330.555 85.531,330.636 85.581 C 330.717 85.631,330.764 85.496,330.740 85.281 C 330.716 85.065,330.690 84.829,330.681 84.756 C 330.673 84.682,330.542 84.622,330.390 84.622 C 330.237 84.622,330.162 84.542,330.223 84.444 C 330.296 84.326,330.464 84.329,330.722 84.454 C 331.098 84.635,331.099 84.632,330.766 84.365 C 330.324 84.012,330.297 84.016,329.833 84.506 M369.942 84.557 C 369.539 84.996,369.536 85.013,369.896 84.826 C 370.107 84.716,370.316 84.525,370.360 84.402 C 370.404 84.279,370.503 84.236,370.579 84.307 C 370.655 84.378,370.617 84.557,370.494 84.705 C 370.372 84.852,370.322 85.106,370.384 85.267 C 370.474 85.502,370.447 85.520,370.250 85.357 C 370.052 85.192,370.009 85.230,370.031 85.554 C 370.046 85.775,370.153 85.937,370.267 85.913 C 370.394 85.888,370.432 85.987,370.363 86.165 C 370.288 86.361,370.319 86.416,370.458 86.330 C 370.612 86.235,370.621 86.320,370.493 86.657 C 370.398 86.907,370.378 87.117,370.449 87.124 C 370.520 87.132,370.758 87.171,370.978 87.212 C 371.217 87.257,371.379 87.198,371.381 87.066 C 371.382 86.915,371.436 86.927,371.547 87.103 C 371.636 87.245,371.668 87.428,371.617 87.510 C 371.567 87.593,371.623 87.778,371.743 87.923 C 371.922 88.139,371.911 88.167,371.680 88.078 C 371.525 
88.018,371.362 87.837,371.318 87.674 C 371.274 87.511,371.249 87.670,371.263 88.027 C 371.277 88.384,371.340 88.645,371.402 88.607 C 371.464 88.569,371.607 88.648,371.720 88.784 C 371.836 88.924,372.087 88.987,372.301 88.931 C 372.558 88.864,372.637 88.898,372.551 89.039 C 372.402 89.280,372.608 89.304,372.952 89.086 C 373.155 88.957,373.155 88.909,372.952 88.782 C 372.641 88.588,372.207 88.582,372.069 88.770 C 372.009 88.852,371.912 88.793,371.853 88.639 C 371.791 88.476,371.832 88.377,371.954 88.402 C 372.068 88.425,372.165 88.271,372.170 88.059 C 372.174 87.847,372.235 87.617,372.306 87.546 C 372.376 87.476,372.400 87.589,372.360 87.797 C 372.306 88.081,372.396 88.203,372.721 88.285 C 372.960 88.345,373.156 88.470,373.156 88.563 C 373.156 88.656,373.242 88.679,373.347 88.614 C 373.477 88.533,373.507 88.697,373.440 89.119 C 373.368 89.573,373.412 89.770,373.604 89.844 C 373.748 89.899,373.868 89.847,373.869 89.728 C 373.871 89.591,373.935 89.610,374.041 89.778 C 374.271 90.142,375.462 90.144,375.481 89.780 C 375.490 89.598,375.524 89.583,375.584 89.733 C 375.634 89.856,375.887 89.954,376.148 89.953 C 376.534 89.951,376.573 89.914,376.356 89.758 C 376.134 89.599,376.144 89.584,376.415 89.672 C 376.594 89.730,376.834 89.779,376.948 89.781 C 377.067 89.782,377.042 89.857,376.889 89.956 C 376.693 90.082,376.684 90.129,376.856 90.131 C 377.009 90.132,377.051 90.257,376.978 90.489 C 376.916 90.684,376.942 90.844,377.036 90.844 C 377.130 90.844,377.257 90.976,377.318 91.136 C 377.408 91.369,377.380 91.387,377.184 91.224 C 376.981 91.055,376.954 91.106,377.032 91.510 C 377.083 91.779,377.159 91.900,377.200 91.778 C 377.241 91.656,377.352 91.556,377.448 91.556 C 377.543 91.556,377.579 91.624,377.527 91.708 C 377.475 91.791,377.648 91.819,377.912 91.768 C 378.343 91.686,378.355 91.663,378.039 91.540 C 377.693 91.404,377.693 91.403,378.044 91.388 C 378.309 91.376,378.339 91.334,378.163 91.222 C 377.970 91.099,377.975 91.053,378.190 90.970 C 378.367 90.902,378.438 
90.967,378.406 91.168 C 378.379 91.332,378.435 91.617,378.530 91.800 C 378.650 92.032,378.639 92.174,378.492 92.265 C 378.339 92.359,378.354 92.423,378.548 92.497 C 378.762 92.579,378.802 92.470,378.750 91.944 C 378.714 91.584,378.740 91.409,378.809 91.556 C 378.958 91.875,379.655 92.004,379.817 91.742 C 379.966 91.500,380.267 91.501,380.267 91.744 C 380.267 91.847,380.167 91.875,380.044 91.804 C 379.915 91.730,379.884 91.744,379.970 91.838 C 380.052 91.927,380.291 92.000,380.501 92.000 C 380.747 92.000,380.904 92.128,380.940 92.356 C 380.988 92.655,380.965 92.669,380.796 92.444 C 380.617 92.207,380.604 92.207,380.672 92.444 C 380.715 92.591,380.881 92.700,381.041 92.686 C 381.219 92.671,381.333 92.795,381.333 93.002 C 381.333 93.323,381.310 93.326,380.889 93.050 C 380.485 92.786,380.364 92.825,380.423 93.200 C 380.435 93.273,380.364 93.333,380.267 93.333 C 380.169 93.333,380.089 93.403,380.089 93.489 C 380.089 93.671,381.550 93.613,381.896 93.416 C 382.087 93.307,382.090 93.328,381.911 93.521 C 381.789 93.652,381.690 93.844,381.692 93.947 C 381.693 94.059,381.747 94.051,381.826 93.926 C 381.924 93.772,382.046 93.784,382.302 93.971 C 382.637 94.216,382.636 94.222,382.256 94.222 C 382.042 94.222,381.867 94.312,381.867 94.421 C 381.867 94.536,382.051 94.584,382.306 94.535 C 382.548 94.489,382.699 94.525,382.644 94.615 C 382.468 94.899,383.100 95.419,383.560 95.368 C 383.892 95.332,384.000 95.404,384.000 95.660 C 384.000 95.847,384.083 96.000,384.185 96.000 C 384.291 96.000,384.273 96.105,384.140 96.249 C 383.997 96.405,383.837 96.437,383.713 96.336 C 383.574 96.223,383.542 96.312,383.604 96.637 C 383.668 96.974,383.733 97.035,383.843 96.861 C 383.960 96.677,383.995 96.694,383.997 96.933 C 383.999 97.128,384.137 97.244,384.366 97.244 C 384.567 97.244,384.683 97.324,384.622 97.422 C 384.562 97.520,384.597 97.600,384.701 97.600 C 384.804 97.600,384.889 97.683,384.889 97.784 C 384.889 97.902,384.777 97.910,384.578 97.808 C 384.407 97.720,384.156 97.688,384.020 97.736 C 
383.869 97.790,383.821 97.746,383.897 97.623 C 383.965 97.513,383.931 97.422,383.822 97.422 C 383.544 97.422,383.676 98.176,384.000 98.439 C 384.400 98.764,384.586 98.710,384.479 98.301 C 384.401 98.003,384.430 97.974,384.632 98.142 C 384.815 98.294,384.834 98.438,384.701 98.685 C 384.568 98.935,384.580 99.023,384.749 99.025 C 384.895 99.027,384.911 99.075,384.794 99.158 C 384.540 99.339,384.574 100.686,384.841 101.009 C 385.025 101.230,384.996 101.268,384.664 101.239 C 384.110 101.190,383.894 101.409,383.923 101.990 C 383.936 102.264,383.859 102.555,383.751 102.637 C 383.641 102.720,383.764 102.739,384.034 102.680 C 384.296 102.623,384.561 102.656,384.621 102.753 C 384.686 102.858,384.606 102.882,384.425 102.812 C 384.200 102.726,384.158 102.758,384.267 102.933 C 384.374 103.106,384.335 103.141,384.124 103.060 C 383.964 102.999,383.778 103.045,383.711 103.163 C 383.643 103.281,383.653 103.318,383.733 103.246 C 383.896 103.099,384.356 103.418,384.356 103.678 C 384.356 103.771,384.205 103.799,384.022 103.740 C 383.702 103.639,383.590 103.800,383.375 104.666 C 383.272 105.085,383.655 105.093,383.990 104.680 C 384.385 104.193,384.319 104.023,383.871 104.375 C 383.587 104.597,383.579 104.592,383.789 104.324 C 384.023 104.025,384.619 103.990,385.042 104.251 C 385.166 104.328,385.320 104.339,385.384 104.275 C 385.448 104.211,385.383 104.084,385.239 103.993 C 385.076 103.890,385.058 103.827,385.192 103.825 C 385.309 103.823,385.398 103.642,385.389 103.422 C 385.340 102.260,385.435 101.689,385.677 101.689 C 386.397 101.689,386.255 98.800,385.511 98.309 C 385.218 98.116,385.077 97.957,385.199 97.957 C 385.342 97.956,385.410 97.707,385.392 97.244 C 385.376 96.853,385.281 96.533,385.180 96.533 C 385.062 96.533,385.067 96.462,385.195 96.334 C 385.471 96.058,385.276 95.644,384.870 95.644 C 384.620 95.644,384.533 95.517,384.533 95.151 C 384.533 94.509,384.044 93.814,383.663 93.913 C 383.443 93.971,383.369 93.873,383.373 93.529 C 383.379 92.962,382.655 92.748,382.529 93.278 C 
382.487 93.455,382.487 93.307,382.528 92.949 C 382.605 92.289,382.165 91.884,381.689 92.178 C 381.590 92.239,381.454 92.144,381.388 91.967 C 381.202 91.473,380.754 91.192,380.311 91.293 C 380.091 91.344,379.901 91.303,379.889 91.203 C 379.877 91.104,379.857 90.939,379.844 90.838 C 379.821 90.640,379.386 90.318,379.137 90.314 C 379.053 90.312,379.029 90.427,379.083 90.569 C 379.149 90.741,379.096 90.794,378.925 90.728 C 378.784 90.674,378.713 90.558,378.767 90.470 C 378.881 90.287,378.643 90.263,378.238 90.419 C 378.084 90.478,378.001 90.597,378.055 90.683 C 378.108 90.770,378.027 90.792,377.874 90.734 C 377.662 90.653,377.625 90.706,377.715 90.958 C 377.781 91.140,377.779 91.240,377.712 91.180 C 377.459 90.955,377.439 90.329,377.688 90.424 C 378.046 90.562,377.815 89.834,377.449 89.672 C 377.016 89.480,376.954 89.469,376.178 89.444 C 375.544 89.425,375.467 89.374,375.467 88.978 C 375.467 88.563,375.407 88.533,374.578 88.533 C 374.089 88.533,373.689 88.453,373.689 88.356 C 373.689 88.258,373.629 88.190,373.556 88.206 C 373.482 88.221,373.222 88.140,372.978 88.027 C 372.562 87.834,372.572 87.827,373.133 87.929 C 373.549 88.005,373.696 87.978,373.612 87.841 C 373.545 87.733,373.286 87.644,373.035 87.644 C 372.668 87.644,372.599 87.576,372.674 87.288 C 372.743 87.023,372.661 86.905,372.355 86.828 C 372.119 86.769,371.897 86.546,371.837 86.308 C 371.773 86.053,371.578 85.869,371.333 85.835 C 371.015 85.789,370.941 85.675,370.971 85.272 C 371.002 84.861,370.952 84.789,370.704 84.883 C 370.460 84.976,370.444 84.954,370.622 84.775 C 370.744 84.652,370.844 84.447,370.844 84.320 C 370.844 83.941,370.402 84.057,369.942 84.557 M326.613 84.658 C 326.496 84.775,326.400 84.971,326.400 85.093 C 326.400 85.216,326.260 85.404,326.089 85.513 C 325.899 85.633,325.878 85.684,326.036 85.643 C 326.178 85.606,326.436 85.321,326.610 85.010 C 326.950 84.403,326.950 84.321,326.613 84.658 M329.855 84.997 C 329.773 85.129,329.857 85.155,330.110 85.074 C 330.440 84.969,330.468 85.003,330.339 
85.352 C 330.258 85.570,330.039 85.770,329.852 85.796 C 329.529 85.842,329.528 85.832,329.827 85.610 C 330.109 85.401,330.113 85.364,329.861 85.267 C 329.616 85.173,329.638 84.800,329.888 84.800 C 329.937 84.800,329.922 84.889,329.855 84.997 M326.823 85.724 C 326.753 85.837,326.781 85.872,326.887 85.806 C 326.990 85.743,327.117 85.804,327.170 85.942 C 327.223 86.080,327.145 86.239,326.996 86.296 C 326.615 86.443,326.541 86.426,326.667 86.222 C 326.727 86.124,326.692 86.044,326.588 86.044 C 326.485 86.044,326.400 86.122,326.400 86.217 C 326.400 86.311,326.260 86.344,326.089 86.289 C 325.825 86.204,325.819 86.218,326.044 86.383 C 326.269 86.547,326.272 86.603,326.065 86.736 C 325.869 86.862,325.890 86.917,326.169 87.005 C 326.361 87.066,326.519 87.231,326.519 87.371 C 326.519 87.568,326.441 87.584,326.171 87.439 C 325.945 87.318,325.863 87.316,325.935 87.434 C 325.997 87.533,325.981 87.641,325.901 87.674 C 325.820 87.707,325.812 87.933,325.882 88.178 C 326.003 88.601,326.010 88.603,326.027 88.222 C 326.036 88.002,326.116 87.822,326.205 87.822 C 326.376 87.822,326.318 89.030,326.137 89.211 C 326.077 89.271,325.873 89.223,325.683 89.104 C 325.194 88.799,324.840 88.836,325.255 89.150 C 325.591 89.404,325.590 89.411,325.200 89.417 C 324.980 89.420,324.800 89.500,324.800 89.594 C 324.800 89.689,324.940 89.713,325.111 89.647 C 325.393 89.539,325.391 89.575,325.080 90.053 C 324.834 90.430,324.817 90.513,325.018 90.348 C 325.248 90.160,325.316 90.160,325.391 90.348 C 325.472 90.550,325.584 90.095,325.538 89.750 C 325.529 89.686,325.660 89.660,325.828 89.692 C 325.996 89.724,326.198 89.757,326.277 89.764 C 326.356 89.772,326.371 89.859,326.309 89.958 C 326.248 90.058,326.103 90.085,325.988 90.019 C 325.872 89.953,325.837 89.964,325.910 90.044 C 325.982 90.124,325.952 90.297,325.843 90.428 C 325.692 90.611,325.713 90.667,325.934 90.667 C 326.092 90.667,326.222 90.542,326.222 90.390 C 326.222 90.235,326.308 90.165,326.415 90.232 C 326.542 90.310,326.597 90.145,326.575 89.753 C 
326.519 88.744,326.571 88.153,326.711 88.228 C 327.037 88.402,327.386 87.668,327.424 86.727 C 327.464 85.744,327.142 85.207,326.823 85.724 M369.422 85.937 C 369.422 86.238,369.903 86.451,370.049 86.214 C 370.107 86.121,370.025 86.044,369.867 86.044 C 369.708 86.044,369.628 85.964,369.689 85.867 C 369.749 85.769,369.714 85.689,369.610 85.689 C 369.507 85.689,369.422 85.800,369.422 85.937 M321.460 86.455 C 321.451 86.761,321.539 87.129,321.655 87.273 C 321.821 87.478,321.818 87.506,321.644 87.401 C 321.365 87.234,321.367 87.264,321.679 88.151 C 321.834 88.590,321.870 88.921,321.768 88.984 C 321.608 89.083,321.533 88.957,321.579 88.667 C 321.591 88.593,321.520 88.533,321.422 88.533 C 321.255 88.533,321.170 88.792,321.228 89.124 C 321.242 89.205,321.166 89.217,321.059 89.151 C 320.952 89.085,320.908 88.919,320.960 88.782 C 321.013 88.645,320.980 88.533,320.887 88.533 C 320.610 88.533,320.465 89.449,320.685 89.802 C 320.944 90.216,320.945 90.367,320.688 90.208 C 320.566 90.133,320.520 90.181,320.569 90.331 C 320.615 90.467,320.785 90.604,320.947 90.635 C 321.164 90.677,321.211 90.610,321.125 90.384 C 321.045 90.175,321.073 90.116,321.212 90.202 C 321.347 90.285,321.381 90.234,321.313 90.055 C 321.255 89.904,321.141 89.821,321.060 89.871 C 320.980 89.921,320.856 89.868,320.785 89.753 C 320.704 89.623,320.733 89.593,320.861 89.672 C 320.974 89.742,321.067 89.709,321.067 89.600 C 321.067 89.491,321.147 89.451,321.244 89.511 C 321.342 89.572,321.422 89.536,321.422 89.433 C 321.422 89.329,321.538 89.244,321.678 89.244 C 322.059 89.244,322.157 88.974,321.962 88.461 C 321.866 88.209,321.846 88.023,321.916 88.046 C 321.987 88.070,322.068 88.030,322.097 87.959 C 322.125 87.887,322.262 87.557,322.401 87.226 C 322.539 86.894,322.602 86.573,322.541 86.511 C 322.480 86.450,322.536 86.400,322.667 86.400 C 322.797 86.400,322.851 86.347,322.786 86.283 C 322.722 86.218,322.490 86.318,322.271 86.505 C 321.937 86.789,321.913 86.793,322.117 86.529 C 322.393 86.173,322.337 86.063,321.828 
85.965 C 321.539 85.910,321.473 85.998,321.460 86.455 M364.990 86.593 C 364.982 87.144,365.052 87.407,365.206 87.407 C 365.332 87.407,365.435 87.261,365.435 87.081 C 365.435 86.902,365.367 86.756,365.285 86.756 C 365.202 86.756,365.169 86.696,365.212 86.622 C 365.254 86.549,365.224 86.329,365.146 86.133 C 365.041 85.873,364.999 85.997,364.990 86.593 M363.187 86.711 L 362.756 87.200 363.244 86.769 C 363.825 86.257,363.900 86.467,363.347 87.055 L 362.960 87.467 363.436 87.467 C 363.861 87.467,363.911 87.400,363.911 86.844 C 363.911 86.094,363.755 86.066,363.187 86.711 M335.096 87.096 C 334.867 87.349,334.695 88.000,334.857 88.000 C 334.879 88.000,335.063 87.720,335.265 87.378 C 335.654 86.720,335.567 86.576,335.096 87.096 M335.687 86.971 C 335.581 87.098,335.441 87.417,335.375 87.680 C 335.309 87.943,335.123 88.193,334.961 88.236 C 334.701 88.305,334.703 88.317,334.978 88.335 C 335.149 88.346,335.289 88.436,335.289 88.533 C 335.289 88.631,335.229 88.691,335.156 88.667 C 335.082 88.642,335.022 88.702,335.022 88.800 C 335.022 88.898,335.092 88.955,335.177 88.926 C 335.263 88.898,335.281 89.009,335.218 89.173 C 335.121 89.426,335.146 89.437,335.378 89.244 C 335.529 89.119,335.704 89.067,335.767 89.130 C 335.830 89.193,335.797 89.244,335.693 89.244 C 335.589 89.244,335.455 89.373,335.395 89.531 C 335.316 89.736,335.361 89.788,335.554 89.714 C 335.715 89.652,335.822 89.711,335.822 89.862 C 335.822 90.252,335.438 90.394,335.257 90.069 C 335.167 89.909,335.008 89.778,334.903 89.778 C 334.771 89.778,334.773 89.851,334.910 90.016 C 335.019 90.148,335.049 90.314,334.976 90.387 C 334.904 90.459,334.964 90.492,335.111 90.459 C 335.258 90.427,335.362 90.488,335.342 90.596 C 335.321 90.713,335.483 90.758,335.742 90.709 C 336.212 90.619,336.359 90.964,335.922 91.131 C 335.761 91.193,335.679 91.446,335.700 91.820 C 335.718 92.145,335.784 92.379,335.847 92.341 C 335.909 92.302,336.069 92.405,336.202 92.569 C 336.342 92.742,336.356 92.815,336.235 92.742 C 336.119 92.673,335.979 
92.691,335.923 92.781 C 335.866 92.872,335.947 92.928,336.102 92.905 C 336.257 92.882,336.456 92.749,336.545 92.610 C 336.676 92.402,336.706 92.400,336.708 92.600 C 336.711 92.798,336.762 92.802,336.982 92.619 C 337.311 92.345,337.450 92.460,337.485 93.032 C 337.501 93.299,337.597 93.433,337.733 93.381 C 337.856 93.334,337.956 93.384,337.956 93.492 C 337.956 93.600,337.836 93.689,337.689 93.689 C 337.542 93.689,337.422 93.774,337.422 93.877 C 337.422 93.981,337.495 94.021,337.583 93.966 C 337.672 93.911,337.698 94.045,337.640 94.265 C 337.539 94.651,337.530 94.648,337.332 94.164 C 337.106 93.611,336.805 93.519,336.802 94.002 C 336.799 94.558,337.435 94.996,337.766 94.665 C 338.435 93.997,337.997 91.712,337.285 92.153 C 337.214 92.197,337.171 91.686,337.189 91.018 C 337.228 89.634,336.962 89.180,336.341 89.568 C 336.134 89.697,336.000 89.710,336.000 89.602 C 336.000 89.503,336.074 89.422,336.163 89.422 C 336.253 89.422,336.310 88.903,336.289 88.268 C 336.250 87.052,336.024 86.565,335.687 86.971 M365.876 86.995 C 365.773 87.098,365.691 87.266,365.694 87.369 C 365.697 87.472,365.781 87.416,365.881 87.244 C 365.981 87.073,366.148 86.933,366.253 86.933 C 366.375 86.933,366.364 87.029,366.222 87.200 C 366.020 87.444,366.097 87.543,366.444 87.488 C 366.752 87.439,366.548 87.863,366.222 87.948 C 366.027 88.000,365.867 88.117,365.867 88.209 C 365.867 88.301,365.967 88.316,366.089 88.243 C 366.258 88.141,366.257 88.176,366.083 88.392 C 365.778 88.767,365.875 89.133,366.309 89.252 C 366.547 89.317,366.696 89.552,366.764 89.969 C 366.886 90.721,367.079 91.063,367.302 90.926 C 367.393 90.869,367.541 91.017,367.631 91.254 C 367.827 91.768,368.463 91.995,368.613 91.603 C 368.690 91.404,368.625 91.359,368.358 91.429 C 368.161 91.480,367.954 91.448,367.898 91.357 C 367.842 91.266,367.862 91.203,367.942 91.217 C 368.290 91.277,368.356 91.178,368.184 90.855 C 368.057 90.619,368.057 90.475,368.182 90.397 C 368.283 90.335,368.343 90.150,368.316 89.987 C 368.288 89.816,368.368 
89.710,368.502 89.738 C 368.632 89.765,368.686 89.703,368.623 89.602 C 368.560 89.500,368.421 89.471,368.313 89.537 C 368.191 89.613,368.156 89.557,368.220 89.392 C 368.278 89.239,368.223 89.126,368.089 89.126 C 367.955 89.126,367.900 89.239,367.958 89.392 C 368.022 89.558,367.986 89.613,367.864 89.537 C 367.733 89.456,367.694 89.565,367.748 89.863 C 367.813 90.219,367.757 90.311,367.475 90.311 C 367.168 90.311,367.143 90.250,367.293 89.857 C 367.388 89.606,367.395 89.446,367.308 89.499 C 367.222 89.552,367.059 89.485,366.946 89.349 C 366.790 89.161,366.793 89.084,366.957 89.029 C 367.099 88.982,367.118 88.868,367.014 88.701 C 366.707 88.208,366.696 87.672,367.001 88.090 C 367.142 88.283,367.225 88.493,367.185 88.557 C 367.146 88.621,367.233 88.720,367.379 88.776 C 367.683 88.893,367.742 88.615,367.451 88.435 C 367.345 88.369,367.297 88.164,367.346 87.980 C 367.394 87.795,367.365 87.686,367.281 87.738 C 367.197 87.790,367.032 87.650,366.913 87.427 C 366.709 87.044,366.716 87.037,367.038 87.289 C 367.345 87.530,367.356 87.528,367.157 87.273 C 367.036 87.117,366.969 86.957,367.008 86.917 C 367.048 86.877,366.851 86.836,366.572 86.826 C 366.292 86.816,365.979 86.892,365.876 86.995 M361.559 87.144 C 361.452 87.462,361.605 88.000,361.802 88.000 C 361.890 88.000,361.910 87.780,361.845 87.511 C 361.699 86.904,361.657 86.850,361.559 87.144 M362.667 87.540 C 362.877 87.808,362.885 87.896,362.704 87.957 C 362.519 88.020,362.517 88.091,362.698 88.328 C 362.821 88.490,362.852 88.563,362.766 88.490 C 362.571 88.325,362.102 88.695,362.235 88.909 C 362.288 88.996,362.247 89.067,362.144 89.067 C 362.040 89.067,361.956 88.861,361.956 88.610 C 361.956 88.203,361.926 88.182,361.689 88.419 C 361.388 88.720,361.330 88.967,361.600 88.800 C 361.698 88.740,361.778 88.775,361.778 88.878 C 361.778 88.982,361.678 89.068,361.556 89.069 C 361.384 89.071,361.386 89.106,361.564 89.218 C 361.751 89.337,361.748 89.403,361.549 89.568 C 361.348 89.735,361.303 89.653,361.301 89.108 C 361.299 
88.743,361.386 88.386,361.494 88.314 C 361.626 88.225,361.614 88.182,361.456 88.181 C 361.328 88.179,361.269 88.103,361.326 88.012 C 361.382 87.921,361.334 87.789,361.220 87.718 C 361.090 87.638,361.059 87.667,361.139 87.795 C 361.208 87.908,361.187 88.000,361.092 88.000 C 360.997 88.000,360.886 87.904,360.847 87.787 C 360.792 87.620,360.747 87.620,360.644 87.787 C 360.572 87.904,360.597 88.000,360.701 88.000 C 360.804 88.000,360.889 88.085,360.889 88.188 C 360.889 88.292,360.797 88.319,360.684 88.250 C 360.555 88.170,360.527 88.201,360.607 88.331 C 360.678 88.445,360.816 88.490,360.913 88.430 C 361.215 88.243,360.998 88.959,360.658 89.273 C 360.442 89.472,360.402 89.608,360.535 89.690 C 360.668 89.772,360.633 89.885,360.428 90.034 C 360.195 90.205,360.176 90.289,360.349 90.396 C 360.479 90.476,360.638 90.432,360.728 90.289 C 360.830 90.128,360.884 90.117,360.886 90.256 C 360.889 90.508,360.669 90.661,360.163 90.758 C 359.870 90.815,359.832 90.780,359.986 90.594 C 360.304 90.211,359.819 89.921,359.388 90.237 C 359.088 90.456,359.082 90.489,359.344 90.489 C 359.509 90.489,359.644 90.396,359.644 90.281 C 359.644 90.167,359.703 90.133,359.775 90.205 C 359.847 90.277,359.725 90.472,359.504 90.639 C 359.110 90.936,359.123 90.942,360.129 90.933 C 361.082 90.925,361.153 90.896,361.113 90.529 C 361.087 90.286,361.173 90.125,361.335 90.111 C 361.481 90.099,361.692 90.079,361.805 90.067 C 361.918 90.054,362.032 89.897,362.059 89.717 C 362.119 89.308,362.197 89.230,362.602 89.170 C 363.074 89.101,363.165 87.801,362.722 87.453 C 362.422 87.217,362.418 87.223,362.667 87.540 M370.578 87.467 C 370.382 87.551,370.327 87.625,370.455 87.632 C 370.583 87.639,370.639 87.724,370.579 87.820 C 370.519 87.917,370.581 87.953,370.717 87.901 C 370.852 87.849,370.963 87.690,370.963 87.548 C 370.963 87.405,370.956 87.295,370.948 87.301 C 370.940 87.308,370.773 87.383,370.578 87.467 M357.934 87.725 C 357.814 88.038,357.988 88.357,358.277 88.353 C 358.428 88.351,358.423 88.308,358.261 88.205 C 
358.135 88.125,358.084 87.927,358.146 87.764 C 358.209 87.600,358.209 87.467,358.147 87.467 C 358.084 87.467,357.989 87.583,357.934 87.725 M322.325 87.733 C 322.349 87.831,322.312 88.036,322.244 88.189 C 322.003 88.733,322.348 89.222,322.711 88.851 C 323.007 88.548,323.007 88.533,322.711 88.521 C 322.440 88.510,322.436 88.493,322.685 88.388 C 322.938 88.280,322.931 88.227,322.626 87.911 C 322.437 87.716,322.302 87.636,322.325 87.733 M325.239 87.787 C 325.009 87.952,324.936 88.356,325.137 88.356 C 325.294 88.356,325.569 87.821,325.466 87.718 C 325.436 87.688,325.334 87.719,325.239 87.787 M356.569 89.333 C 356.550 90.001,356.539 90.009,356.000 89.778 C 355.696 89.647,355.690 89.623,355.956 89.612 C 356.127 89.606,356.267 89.527,356.267 89.439 C 356.267 89.350,356.127 89.299,355.956 89.324 C 355.784 89.350,355.744 89.337,355.867 89.295 C 355.989 89.253,356.090 89.124,356.092 89.009 C 356.093 88.887,356.166 88.911,356.267 89.067 C 356.414 89.295,356.439 89.288,356.442 89.022 C 356.443 88.851,356.477 88.711,356.516 88.711 C 356.555 88.711,356.579 88.991,356.569 89.333 M357.037 89.013 C 357.037 89.310,357.121 89.305,357.437 88.989 C 357.659 88.766,357.650 88.736,357.365 88.758 C 357.185 88.771,357.037 88.886,357.037 89.013 M321.579 89.630 C 321.567 89.760,321.578 89.947,321.602 90.044 C 321.626 90.142,321.648 90.321,321.652 90.442 C 321.658 90.613,321.716 90.614,321.916 90.448 C 322.083 90.309,322.104 90.212,321.975 90.169 C 321.867 90.133,321.778 90.026,321.778 89.930 C 321.778 89.835,321.868 89.813,321.979 89.881 C 322.098 89.955,322.148 89.910,322.101 89.769 C 321.992 89.443,321.605 89.340,321.579 89.630 M359.289 89.690 C 359.289 89.836,359.165 89.956,359.013 89.956 C 358.581 89.956,358.727 91.684,359.170 91.800 C 359.409 91.863,359.466 91.811,359.388 91.606 C 359.297 91.369,359.323 91.363,359.565 91.563 C 359.802 91.760,359.876 91.757,360.011 91.544 C 360.155 91.316,360.252 91.431,360.201 91.771 C 360.191 91.841,360.301 91.853,360.447 91.797 C 360.728 91.689,360.781 
91.878,360.601 92.346 C 360.502 92.604,360.467 92.600,360.254 92.316 C 360.037 92.026,360.026 92.032,360.118 92.400 C 360.216 92.785,359.789 93.005,359.600 92.667 C 359.558 92.593,359.561 92.653,359.606 92.800 C 359.650 92.947,359.691 93.168,359.695 93.292 C 359.703 93.500,360.410 92.992,360.751 92.533 C 360.824 92.436,360.885 92.580,360.886 92.855 C 360.888 93.143,360.814 93.308,360.711 93.244 C 360.613 93.184,360.533 93.211,360.533 93.304 C 360.533 93.397,360.657 93.521,360.809 93.579 C 360.967 93.640,361.039 93.610,360.977 93.509 C 360.917 93.412,360.948 93.333,361.046 93.333 C 361.143 93.333,361.273 93.413,361.333 93.511 C 361.394 93.609,361.350 93.689,361.236 93.689 C 361.117 93.689,361.076 93.813,361.141 93.981 C 361.235 94.227,361.209 94.237,360.978 94.044 C 360.826 93.919,360.651 93.867,360.588 93.930 C 360.525 93.993,360.572 94.044,360.692 94.044 C 360.812 94.044,360.869 94.111,360.818 94.193 C 360.768 94.274,360.823 94.344,360.941 94.348 C 361.059 94.352,361.236 94.374,361.333 94.398 C 361.682 94.484,361.956 94.397,361.956 94.200 C 361.956 94.063,361.873 94.069,361.689 94.222 C 361.535 94.350,361.422 94.367,361.422 94.263 C 361.422 94.036,361.852 93.867,362.425 93.867 C 362.678 93.867,362.818 93.944,362.754 94.047 C 362.692 94.147,362.548 94.173,362.432 94.106 C 362.317 94.038,362.362 94.143,362.533 94.339 C 362.704 94.535,362.843 94.808,362.842 94.947 C 362.839 95.157,362.809 95.154,362.663 94.933 C 362.526 94.724,362.500 94.756,362.546 95.081 C 362.578 95.309,362.718 95.568,362.858 95.656 C 363.097 95.808,363.076 95.835,362.681 95.889 C 362.592 95.901,362.465 95.965,362.399 96.031 C 362.233 96.197,362.699 96.114,363.005 95.923 C 363.212 95.794,363.203 95.731,362.950 95.545 C 362.725 95.381,362.702 95.288,362.861 95.190 C 363.254 94.947,363.602 95.654,363.270 96.021 C 363.005 96.314,363.027 96.328,363.648 96.251 C 364.177 96.186,364.288 96.221,364.207 96.431 C 364.135 96.620,364.191 96.661,364.409 96.580 C 364.674 96.481,364.681 96.504,364.464 96.771 C 
364.327 96.938,364.267 97.127,364.330 97.189 C 364.393 97.252,364.444 97.206,364.444 97.086 C 364.444 96.966,364.504 96.905,364.578 96.950 C 364.651 96.996,364.730 96.714,364.753 96.325 C 364.794 95.631,364.785 95.619,364.281 95.730 C 363.956 95.801,363.717 95.763,363.632 95.625 C 363.536 95.469,363.595 95.440,363.838 95.526 C 364.167 95.642,364.167 95.637,363.833 95.378 C 363.578 95.181,363.554 95.106,363.744 95.093 C 363.885 95.084,363.800 95.012,363.556 94.933 C 363.129 94.797,363.127 94.790,363.511 94.773 C 363.818 94.760,363.911 94.649,363.911 94.296 C 363.911 94.004,363.830 93.867,363.689 93.919 C 363.567 93.963,363.481 94.073,363.498 94.162 C 363.516 94.251,363.370 94.324,363.175 94.324 C 362.843 94.324,362.840 94.310,363.133 94.095 C 363.305 93.970,363.356 93.867,363.246 93.867 C 363.137 93.867,363.021 93.687,362.990 93.468 C 362.958 93.244,362.783 93.042,362.590 93.005 C 362.259 92.942,362.258 92.951,362.570 93.262 C 362.772 93.464,362.818 93.609,362.692 93.651 C 362.369 93.758,362.044 93.287,362.044 92.711 C 362.044 92.227,361.991 92.178,361.464 92.180 C 361.143 92.181,360.915 92.101,360.953 92.001 C 361.037 91.782,360.615 91.356,360.463 91.507 C 360.404 91.566,360.356 91.518,360.356 91.401 C 360.356 91.283,360.176 91.202,359.956 91.221 C 359.119 91.293,358.933 91.208,358.933 90.756 C 358.933 90.095,359.549 89.530,359.893 89.874 C 360.041 90.022,360.200 90.063,360.258 89.969 C 360.375 89.780,360.063 89.545,359.600 89.473 C 359.421 89.445,359.289 89.537,359.289 89.690 M375.647 90.571 C 375.649 90.870,375.680 90.887,375.822 90.667 C 375.958 90.457,375.995 90.452,375.997 90.644 C 375.999 90.840,376.052 90.845,376.265 90.668 C 376.596 90.393,376.460 90.148,375.996 90.182 C 375.768 90.200,375.645 90.337,375.647 90.571 M348.222 90.503 C 348.051 90.603,347.911 90.760,347.911 90.853 C 347.911 90.946,348.051 91.016,348.222 91.008 C 348.423 90.999,348.451 90.961,348.302 90.901 C 348.128 90.832,348.151 90.747,348.391 90.565 C 348.767 90.280,348.668 90.243,348.222 
90.503 M357.595 90.497 C 357.416 90.787,357.842 91.173,358.229 91.072 C 358.752 90.935,358.682 90.311,358.144 90.311 C 357.905 90.311,357.658 90.395,357.595 90.497 M368.632 90.592 C 368.398 90.763,368.392 90.831,368.592 90.997 C 368.724 91.107,368.892 91.138,368.964 91.065 C 369.036 90.993,369.043 91.174,368.978 91.467 C 368.889 91.871,368.906 91.935,369.050 91.733 C 369.218 91.496,369.240 91.496,369.247 91.733 C 369.259 92.113,370.133 91.088,370.133 90.695 C 370.133 90.329,369.089 90.258,368.632 90.592 M375.052 90.430 C 374.836 90.646,374.929 90.835,375.200 90.731 C 375.347 90.675,375.467 90.557,375.467 90.470 C 375.467 90.283,375.222 90.260,375.052 90.430 M369.625 90.844 C 369.721 90.844,369.742 90.752,369.672 90.639 C 369.595 90.515,369.623 90.482,369.743 90.556 C 369.851 90.623,369.894 90.798,369.838 90.944 C 369.722 91.247,369.233 90.958,369.258 90.600 C 369.267 90.475,369.306 90.483,369.362 90.622 C 369.412 90.744,369.530 90.844,369.625 90.844 M325.091 90.933 C 325.035 91.080,325.060 91.200,325.146 91.200 C 325.233 91.200,325.344 91.100,325.393 90.978 C 325.447 90.845,325.489 90.881,325.497 91.067 C 325.505 91.238,325.426 91.378,325.323 91.378 C 325.219 91.378,325.195 91.478,325.268 91.600 C 325.346 91.730,325.262 91.711,325.067 91.556 C 324.852 91.385,324.781 91.372,324.870 91.518 C 324.975 91.693,324.892 91.719,324.522 91.626 C 324.175 91.539,324.072 91.563,324.164 91.710 C 324.234 91.824,324.356 91.877,324.436 91.828 C 324.515 91.778,324.795 91.907,325.057 92.113 L 325.533 92.487 325.135 92.910 C 324.801 93.266,324.785 93.333,325.036 93.333 C 325.199 93.333,325.333 93.209,325.333 93.056 C 325.333 92.904,325.406 92.824,325.495 92.879 C 325.584 92.934,325.684 92.500,325.718 91.913 C 325.761 91.145,325.712 90.822,325.541 90.757 C 325.212 90.632,325.206 90.634,325.091 90.933 M357.133 90.889 C 356.911 91.324,357.063 91.711,357.353 91.449 C 357.673 91.159,357.657 90.844,357.323 90.844 C 357.231 90.844,357.145 90.864,357.133 90.889 M320.897 91.193 C 320.755 
91.282,320.555 91.306,320.453 91.247 C 320.350 91.187,320.407 91.299,320.578 91.495 C 320.749 91.691,320.850 91.906,320.803 91.972 C 320.755 92.039,320.842 91.982,320.997 91.845 C 321.356 91.530,321.269 90.961,320.897 91.193 M319.802 91.333 C 319.674 92.178,319.699 92.427,319.923 92.513 C 320.063 92.567,320.178 92.529,320.178 92.428 C 320.178 92.328,320.258 92.296,320.357 92.356 C 320.456 92.417,320.502 92.657,320.460 92.888 C 320.401 93.213,320.459 93.303,320.713 93.284 C 320.894 93.271,321.006 93.202,320.963 93.131 C 320.919 93.060,320.964 92.952,321.064 92.891 C 321.361 92.707,321.268 92.447,320.949 92.569 C 320.734 92.652,320.693 92.618,320.798 92.447 C 320.889 92.300,320.875 92.254,320.759 92.326 C 320.657 92.389,320.532 92.281,320.481 92.086 C 320.419 91.850,320.294 91.768,320.105 91.841 C 319.772 91.968,319.731 91.718,320.036 91.413 C 320.201 91.247,320.201 91.200,320.036 91.200 C 319.918 91.200,319.813 91.260,319.802 91.333 M370.133 91.545 C 370.133 91.735,370.226 91.947,370.339 92.017 C 370.466 92.096,370.496 92.066,370.417 91.939 C 370.347 91.826,370.372 91.733,370.473 91.733 C 370.573 91.733,370.609 91.613,370.553 91.467 C 370.407 91.086,370.133 91.137,370.133 91.545 M336.229 91.733 C 336.130 92.114,335.792 92.238,335.867 91.867 C 335.902 91.688,336.162 91.394,336.294 91.381 C 336.310 91.379,336.280 91.538,336.229 91.733 M331.200 92.172 C 331.200 92.397,331.702 92.800,331.982 92.800 C 332.034 92.800,332.166 92.632,332.276 92.428 C 332.454 92.095,332.435 92.068,332.104 92.173 C 331.886 92.242,331.733 92.212,331.733 92.101 C 331.733 91.997,331.613 91.911,331.467 91.911 C 331.320 91.911,331.200 92.028,331.200 92.172 M332.800 92.270 C 332.579 92.410,332.594 92.442,332.889 92.458 C 333.129 92.470,333.067 92.528,332.696 92.636 C 332.142 92.797,331.975 93.215,332.300 93.629 C 332.399 93.756,332.390 93.879,332.275 93.950 C 332.173 94.014,332.089 94.221,332.089 94.410 C 332.089 94.741,331.934 94.756,328.459 94.756 C 324.619 94.756,324.518 94.733,324.815 93.928 C 
324.959 93.537,324.625 93.432,324.316 93.772 C 324.240 93.855,324.278 93.866,324.400 93.796 C 324.522 93.725,324.622 93.739,324.622 93.827 C 324.622 94.124,324.389 94.220,324.150 94.021 C 323.960 93.863,323.911 93.927,323.908 94.334 C 323.907 94.636,323.850 94.758,323.770 94.632 C 323.681 94.491,323.546 94.475,323.373 94.584 C 323.229 94.676,323.151 94.757,323.200 94.766 C 323.249 94.775,323.449 94.821,323.644 94.868 C 323.840 94.916,324.120 94.953,324.267 94.952 C 324.413 94.951,326.273 94.949,328.400 94.948 C 331.285 94.946,332.267 94.891,332.267 94.732 C 332.267 94.614,332.313 94.565,332.370 94.622 C 332.427 94.679,332.384 94.892,332.275 95.096 C 332.084 95.453,331.944 95.467,328.449 95.467 C 326.222 95.467,324.779 95.535,324.711 95.644 C 324.639 95.761,326.043 95.822,328.790 95.822 C 332.518 95.822,332.978 95.791,332.978 95.537 C 332.978 95.366,332.878 95.289,332.731 95.345 C 332.552 95.414,332.512 95.335,332.586 95.053 C 332.707 94.590,333.156 94.082,333.156 94.409 C 333.156 94.573,333.213 94.573,333.410 94.410 C 333.616 94.239,333.723 94.287,333.964 94.655 C 334.129 94.906,334.335 95.111,334.422 95.111 C 334.509 95.111,334.360 94.864,334.091 94.562 C 333.768 94.199,333.622 93.861,333.661 93.564 C 333.704 93.232,333.658 93.150,333.482 93.251 C 333.291 93.359,333.288 93.339,333.467 93.146 C 333.830 92.755,333.719 92.540,333.333 92.889 C 332.895 93.285,332.863 93.048,333.289 92.561 C 333.460 92.366,333.507 92.260,333.393 92.326 C 333.279 92.392,333.159 92.369,333.126 92.273 C 333.088 92.163,332.970 92.162,332.800 92.270 M358.269 92.471 C 358.248 92.756,358.269 92.756,358.590 92.482 C 358.779 92.320,358.933 92.183,358.933 92.176 C 358.933 91.975,358.285 92.262,358.269 92.471 M369.472 92.458 C 369.486 92.646,369.606 92.800,369.737 92.800 C 369.869 92.800,369.927 92.720,369.867 92.622 C 369.563 92.132,370.099 92.532,370.430 93.044 C 370.908 93.783,371.790 93.850,372.235 93.181 C 372.463 92.837,372.466 92.778,372.254 92.818 C 371.731 92.919,370.844 92.593,370.833 
92.297 C 370.825 92.086,370.775 92.116,370.661 92.400 C 370.573 92.620,370.453 92.800,370.395 92.800 C 370.337 92.800,370.335 92.728,370.389 92.640 C 370.444 92.551,370.254 92.397,369.967 92.297 C 369.506 92.137,369.449 92.155,369.472 92.458 M371.957 92.489 C 371.933 92.802,371.961 92.811,372.232 92.575 C 372.398 92.431,372.653 92.271,372.800 92.221 C 372.947 92.170,372.823 92.131,372.525 92.135 C 372.116 92.139,371.976 92.227,371.957 92.489 M324.179 92.446 C 324.239 92.543,324.206 92.622,324.106 92.622 C 323.911 92.622,323.479 93.316,323.599 93.436 C 323.638 93.475,323.889 93.279,324.157 92.999 C 324.570 92.569,324.600 92.474,324.357 92.381 C 324.190 92.317,324.116 92.344,324.179 92.446 M351.409 92.419 C 351.231 92.490,351.090 92.624,351.097 92.718 C 351.103 92.812,350.559 92.889,349.888 92.889 C 348.336 92.889,348.294 93.317,349.841 93.363 L 350.844 93.393 349.778 93.511 L 348.711 93.629 350.187 93.659 C 351.188 93.679,351.845 93.786,352.231 93.992 C 352.544 94.159,352.860 94.279,352.933 94.259 C 353.007 94.239,353.067 94.302,353.067 94.400 C 353.067 94.498,353.247 94.577,353.467 94.575 C 353.790 94.573,353.816 94.539,353.600 94.400 C 353.370 94.251,353.375 94.227,353.634 94.225 C 353.799 94.223,353.989 94.311,354.056 94.419 C 354.133 94.543,354.535 94.582,355.147 94.524 C 356.123 94.431,356.666 94.222,355.930 94.222 C 355.713 94.222,355.576 94.156,355.626 94.074 C 355.676 93.993,355.601 93.926,355.459 93.926 C 355.316 93.926,355.197 94.053,355.194 94.207 C 355.190 94.393,355.101 94.337,354.933 94.044 C 354.794 93.800,354.676 93.672,354.673 93.759 C 354.670 93.847,354.458 93.693,354.202 93.418 C 353.822 93.007,353.681 92.952,353.422 93.115 C 353.247 93.223,353.054 93.261,352.992 93.199 C 352.821 93.029,352.341 92.988,352.443 93.153 C 352.491 93.231,352.405 93.344,352.251 93.404 C 351.848 93.558,351.644 93.536,351.644 93.339 C 351.644 93.244,351.747 93.206,351.873 93.254 C 352.011 93.307,352.102 93.205,352.102 92.995 C 352.102 92.804,352.019 92.662,351.917 92.679 
C 351.816 92.697,351.753 92.611,351.778 92.489 C 351.802 92.367,351.802 92.272,351.778 92.279 C 351.753 92.286,351.587 92.349,351.409 92.419 M372.526 92.611 C 372.583 92.702,372.782 92.766,372.969 92.752 C 373.412 92.718,373.243 92.444,372.779 92.444 C 372.583 92.444,372.470 92.519,372.526 92.611 M319.644 92.905 C 319.644 93.054,319.716 93.132,319.804 93.078 C 319.891 93.024,320.011 93.106,320.070 93.260 C 320.222 93.656,320.210 93.689,319.911 93.689 C 319.764 93.689,319.644 93.764,319.644 93.856 C 319.644 93.949,319.480 93.992,319.280 93.954 C 318.822 93.866,318.634 94.304,318.958 94.703 C 319.125 94.909,319.131 94.960,318.978 94.868 C 318.819 94.773,318.756 94.893,318.756 95.289 C 318.756 95.637,318.826 95.799,318.945 95.726 C 319.050 95.661,319.092 95.497,319.040 95.360 C 318.987 95.223,319.047 95.111,319.172 95.111 C 319.297 95.111,319.554 94.985,319.744 94.831 C 319.934 94.677,319.977 94.610,319.839 94.681 C 319.675 94.766,319.633 94.740,319.716 94.605 C 319.786 94.492,319.758 94.390,319.655 94.378 C 319.200 94.324,319.134 94.304,319.015 94.186 C 318.946 94.117,319.100 94.098,319.356 94.145 C 319.623 94.193,319.822 94.147,319.822 94.037 C 319.822 93.932,319.911 93.900,320.019 93.967 C 320.156 94.052,320.143 94.135,319.975 94.242 C 319.784 94.362,319.797 94.395,320.032 94.397 C 320.393 94.401,320.519 93.762,320.267 93.203 C 320.051 92.723,319.644 92.528,319.644 92.905 M348.232 93.203 C 348.606 93.654,348.518 105.235,348.138 105.546 C 347.964 105.688,347.918 105.756,348.036 105.697 C 348.153 105.638,348.336 105.752,348.442 105.951 C 348.607 106.259,348.581 106.311,348.262 106.311 C 348.058 106.311,347.939 106.232,347.999 106.135 C 348.062 106.033,347.987 106.006,347.817 106.071 C 347.584 106.161,347.573 106.213,347.763 106.333 C 347.987 106.475,347.964 106.502,347.570 106.556 C 347.481 106.568,347.353 106.632,347.287 106.698 C 347.127 106.858,347.584 106.783,347.874 106.602 C 348.005 106.521,348.217 106.551,348.363 106.673 C 348.560 106.836,348.622 
106.837,348.622 106.678 C 348.622 106.562,348.704 106.518,348.803 106.580 C 348.902 106.641,348.936 106.768,348.879 106.861 C 348.744 107.079,349.172 106.992,349.439 106.747 C 349.635 106.568,349.946 105.782,349.893 105.600 C 349.879 105.551,349.629 105.506,349.338 105.501 C 348.587 105.486,348.631 105.870,348.618 99.249 C 348.606 93.875,348.573 93.175,348.322 93.078 C 348.104 92.995,348.083 93.023,348.232 93.203 M372.403 93.289 C 372.058 93.680,372.299 94.494,372.712 94.335 C 372.858 94.279,372.978 94.311,372.978 94.406 C 372.978 94.500,373.216 94.578,373.508 94.578 L 374.038 94.578 373.697 93.970 C 373.397 93.436,373.389 93.350,373.631 93.257 C 373.823 93.184,373.876 93.231,373.805 93.415 C 373.637 93.852,374.048 93.929,374.421 93.530 C 374.783 93.141,374.633 92.993,374.258 93.369 C 374.092 93.535,374.044 93.515,374.044 93.280 C 374.044 93.114,373.984 92.981,373.911 92.984 C 373.673 92.995,372.882 93.498,373.092 93.505 C 373.512 93.518,373.515 94.044,373.095 94.044 C 372.756 94.044,372.697 93.958,372.728 93.511 C 372.771 92.887,372.762 92.881,372.403 93.289 M356.176 93.336 C 356.115 93.436,355.974 93.461,355.864 93.393 C 355.754 93.325,355.848 93.529,356.074 93.846 C 356.315 94.185,356.586 94.384,356.731 94.328 C 357.113 94.181,357.022 94.364,356.601 94.589 C 356.296 94.752,356.250 94.745,356.360 94.551 C 356.465 94.367,356.445 94.359,356.276 94.514 C 355.906 94.856,356.135 95.367,356.703 95.465 C 356.990 95.515,357.209 95.616,357.190 95.689 C 357.100 96.037,357.182 96.178,357.472 96.178 C 357.663 96.178,357.871 96.397,357.996 96.727 C 358.110 97.030,358.283 97.228,358.380 97.168 C 358.477 97.108,358.638 97.201,358.739 97.374 C 358.882 97.620,358.924 97.631,358.931 97.422 C 358.936 97.249,358.984 97.224,359.069 97.351 C 359.457 97.931,360.356 97.956,360.356 97.387 C 360.356 97.198,359.946 97.286,359.781 97.511 C 359.701 97.620,359.649 97.598,359.647 97.456 C 359.646 97.328,359.564 97.273,359.467 97.333 C 359.369 97.394,359.289 97.365,359.289 97.270 C 359.289 
97.174,359.373 97.068,359.477 97.034 C 359.580 96.999,359.614 96.838,359.551 96.675 C 359.488 96.511,359.324 96.421,359.185 96.475 C 359.047 96.528,358.933 96.480,358.933 96.368 C 358.933 96.219,358.828 96.216,358.533 96.359 C 358.169 96.536,358.161 96.530,358.450 96.297 C 358.724 96.076,358.730 96.028,358.494 95.938 C 358.345 95.880,358.222 95.924,358.222 96.035 C 358.222 96.146,358.184 96.198,358.136 96.151 C 358.089 96.104,358.126 95.803,358.217 95.483 C 358.355 95.005,358.339 94.919,358.128 95.000 C 357.985 95.055,357.794 94.953,357.695 94.767 C 357.565 94.523,357.582 94.411,357.761 94.342 C 357.933 94.276,357.981 94.361,357.923 94.636 C 357.851 94.981,357.864 94.989,358.042 94.710 C 358.390 94.165,357.966 93.915,357.348 94.301 C 357.157 94.420,357.123 94.386,357.209 94.162 C 357.314 93.888,357.058 93.764,356.564 93.850 C 356.484 93.864,356.469 93.714,356.532 93.516 C 356.649 93.147,356.378 93.010,356.176 93.336 M347.200 99.556 C 347.200 103.348,347.263 105.422,347.378 105.422 C 347.493 105.422,347.556 103.348,347.556 99.556 C 347.556 95.763,347.493 93.689,347.378 93.689 C 347.263 93.689,347.200 95.763,347.200 99.556 M374.400 94.017 C 374.400 94.128,374.267 94.161,374.089 94.093 C 373.811 93.988,373.806 94.004,374.047 94.247 C 374.393 94.597,374.889 94.595,375.024 94.242 C 375.082 94.091,375.073 94.023,375.004 94.093 C 374.934 94.162,374.770 94.130,374.639 94.021 C 374.469 93.880,374.400 93.879,374.400 94.017 M375.383 94.010 C 375.270 94.123,375.450 94.400,375.636 94.400 C 375.717 94.400,375.825 94.293,375.875 94.163 C 375.960 93.943,375.571 93.822,375.383 94.010 M377.067 94.233 C 377.067 94.336,376.987 94.372,376.889 94.311 C 376.791 94.251,376.711 94.296,376.711 94.411 C 376.711 94.565,376.638 94.561,376.437 94.394 C 376.229 94.221,376.113 94.216,375.957 94.372 C 375.801 94.528,375.810 94.578,375.994 94.578 C 376.128 94.578,376.195 94.620,376.143 94.672 C 376.092 94.723,375.844 94.688,375.594 94.592 C 375.074 94.395,374.641 94.733,374.819 95.198 C 374.981 
95.620,375.527 95.540,375.415 95.111 C 375.364 94.915,375.395 94.800,375.483 94.855 C 375.572 94.909,375.644 95.070,375.644 95.210 C 375.644 95.351,375.773 95.467,375.930 95.467 C 376.125 95.467,376.181 95.368,376.107 95.156 C 376.017 94.896,376.044 94.902,376.266 95.190 C 376.413 95.379,376.535 95.619,376.536 95.723 C 376.538 95.826,376.606 95.804,376.689 95.674 C 376.810 95.483,376.860 95.491,376.947 95.716 C 377.007 95.873,376.969 95.977,376.860 95.953 C 376.754 95.930,376.630 96.011,376.585 96.133 C 376.532 96.279,376.684 96.356,377.031 96.356 C 377.420 96.356,377.585 96.455,377.658 96.734 C 377.719 96.971,377.852 97.076,378.011 97.015 C 378.164 96.956,378.358 97.089,378.496 97.348 C 378.623 97.584,378.833 97.778,378.963 97.778 C 379.108 97.778,379.200 97.986,379.200 98.316 C 379.200 98.930,380.014 99.432,380.288 98.987 C 380.380 98.839,380.418 98.898,380.399 99.158 C 380.382 99.377,380.321 99.556,380.262 99.556 C 380.036 99.556,380.072 101.213,380.301 101.359 C 380.483 101.474,380.476 101.507,380.267 101.511 C 380.058 101.515,380.051 101.549,380.231 101.663 C 380.412 101.778,380.395 101.863,380.152 102.060 C 379.953 102.222,379.890 102.232,379.976 102.089 C 380.161 101.782,379.654 101.801,379.397 102.111 C 379.285 102.245,379.215 102.625,379.242 102.955 C 379.284 103.492,379.237 103.562,378.800 103.612 C 378.270 103.673,378.155 104.088,378.493 104.719 C 378.640 104.993,378.623 105.039,378.404 104.955 C 378.255 104.898,378.133 104.788,378.133 104.712 C 378.133 104.378,376.603 104.611,376.578 104.949 C 376.554 105.257,376.576 105.261,376.869 104.996 C 377.132 104.758,377.185 104.753,377.185 104.967 C 377.185 105.109,377.139 105.212,377.081 105.197 C 377.024 105.181,376.818 105.252,376.622 105.353 C 376.427 105.454,375.907 105.523,375.467 105.505 C 375.027 105.488,374.607 105.562,374.533 105.670 C 374.437 105.813,374.363 105.813,374.267 105.671 C 374.065 105.376,373.512 105.443,373.505 105.763 C 373.501 106.006,373.467 106.001,373.264 105.733 C 372.997 
105.381,372.145 105.374,372.012 105.723 C 371.964 105.846,372.063 106.026,372.230 106.123 C 372.403 106.224,372.438 106.303,372.311 106.306 C 371.964 106.314,372.050 106.666,372.400 106.669 C 372.666 106.672,372.672 106.697,372.444 106.844 C 372.242 106.975,372.235 107.017,372.417 107.019 C 372.548 107.021,372.700 106.852,372.754 106.645 C 372.852 106.271,372.855 106.272,373.087 106.689 C 373.215 106.921,373.324 107.016,373.327 106.901 C 373.331 106.778,373.507 106.722,373.748 106.765 C 374.005 106.812,374.163 106.754,374.163 106.614 C 374.163 106.489,374.022 106.387,373.850 106.387 C 373.678 106.387,373.584 106.463,373.641 106.554 C 373.697 106.646,373.651 106.665,373.539 106.595 C 373.161 106.362,373.328 106.106,373.822 106.158 C 374.091 106.186,374.424 106.140,374.561 106.055 C 374.724 105.954,374.770 105.966,374.693 106.091 C 374.628 106.195,374.676 106.321,374.799 106.371 C 374.922 106.420,374.867 106.467,374.677 106.475 C 374.336 106.489,374.336 106.492,374.677 106.750 C 374.867 106.893,375.142 107.007,375.289 107.003 C 375.456 106.997,375.476 106.962,375.342 106.907 C 375.189 106.845,375.178 106.692,375.301 106.368 C 375.443 105.994,375.426 105.933,375.203 106.019 C 375.047 106.079,374.933 106.025,374.933 105.891 C 374.933 105.658,375.537 105.611,375.753 105.827 C 375.817 105.891,376.024 105.860,376.213 105.759 C 376.458 105.628,376.630 105.637,376.816 105.791 C 376.958 105.909,377.126 105.956,377.188 105.894 C 377.250 105.832,377.408 105.870,377.539 105.979 C 377.715 106.125,377.778 106.124,377.778 105.977 C 377.778 105.868,377.714 105.778,377.636 105.778 C 377.377 105.778,377.507 104.831,377.778 104.748 C 377.984 104.685,377.998 104.717,377.836 104.888 C 377.591 105.150,377.730 105.650,378.002 105.483 C 378.111 105.415,378.135 105.453,378.061 105.573 C 377.982 105.701,378.064 105.778,378.281 105.778 C 378.647 105.778,378.948 105.056,378.748 104.658 C 378.689 104.541,378.746 104.566,378.876 104.716 C 379.158 105.041,379.636 105.066,379.667 104.756 C 379.679 
104.633,379.699 104.404,379.711 104.245 C 379.729 104.010,379.783 103.997,379.998 104.176 C 380.615 104.688,380.832 104.251,380.642 102.878 C 380.631 102.799,380.530 102.792,380.417 102.861 C 380.289 102.941,380.260 102.910,380.340 102.780 C 380.411 102.666,380.523 102.605,380.590 102.645 C 380.657 102.685,380.827 102.593,380.968 102.440 C 381.197 102.194,381.182 102.174,380.835 102.265 C 380.620 102.321,380.444 102.294,380.444 102.206 C 380.444 102.117,380.534 102.044,380.644 102.044 C 380.783 102.044,380.776 101.964,380.624 101.780 C 380.446 101.566,380.459 101.494,380.691 101.405 C 380.849 101.345,380.978 101.219,380.978 101.126 C 380.978 101.033,380.898 101.006,380.800 101.067 C 380.702 101.127,380.622 101.087,380.622 100.978 C 380.622 100.868,380.522 100.837,380.400 100.907 C 380.271 100.981,380.240 100.967,380.326 100.873 C 380.549 100.629,380.863 100.670,381.105 100.973 C 381.225 101.122,381.330 101.164,381.340 101.067 C 381.350 100.969,381.427 101.049,381.511 101.244 C 381.657 101.584,381.664 101.582,381.676 101.200 C 381.683 100.980,381.611 100.800,381.517 100.800 C 381.422 100.800,381.396 100.667,381.458 100.504 C 381.534 100.305,381.503 100.250,381.364 100.337 C 381.249 100.408,381.156 100.386,381.156 100.289 C 381.156 100.192,380.983 100.058,380.771 99.991 C 380.430 99.883,380.420 99.850,380.682 99.703 C 380.845 99.612,380.978 99.457,380.978 99.358 C 380.978 99.260,380.898 99.228,380.800 99.289 C 380.702 99.349,380.622 99.279,380.622 99.132 C 380.622 98.959,380.750 98.889,380.983 98.934 C 381.181 98.972,381.296 98.928,381.239 98.835 C 381.181 98.742,380.959 98.661,380.745 98.654 C 380.424 98.644,380.409 98.621,380.660 98.521 C 381.054 98.364,380.823 97.974,380.274 97.868 C 379.760 97.769,379.665 97.676,379.821 97.424 C 379.909 97.282,380.058 97.300,380.342 97.486 C 380.671 97.702,380.741 97.705,380.741 97.504 C 380.741 97.370,380.666 97.307,380.574 97.363 C 380.475 97.424,380.458 97.303,380.533 97.068 C 380.627 96.773,380.597 96.692,380.419 96.757 C 
380.286 96.805,380.200 96.940,380.227 97.058 C 380.253 97.175,380.190 97.218,380.086 97.153 C 379.981 97.089,379.777 97.134,379.633 97.254 C 379.416 97.434,379.389 97.421,379.482 97.181 C 379.543 97.020,379.660 96.889,379.741 96.889 C 379.823 96.889,379.877 96.729,379.862 96.533 C 379.847 96.338,379.763 96.178,379.674 96.178 C 379.585 96.178,379.612 96.058,379.733 95.911 C 379.867 95.750,379.881 95.644,379.770 95.644 C 379.668 95.644,379.545 95.744,379.496 95.867 C 379.427 96.038,379.403 96.035,379.392 95.856 C 379.384 95.728,379.298 95.673,379.200 95.733 C 379.102 95.794,379.022 95.698,379.021 95.522 C 379.019 95.179,378.196 94.688,378.042 94.938 C 377.991 95.020,377.751 95.050,377.508 95.006 C 377.229 94.956,377.066 95.009,377.064 95.152 C 377.062 95.312,377.013 95.302,376.898 95.120 C 376.725 94.846,376.826 94.518,377.042 94.651 C 377.113 94.695,377.182 94.576,377.196 94.388 C 377.211 94.199,377.187 94.044,377.145 94.044 C 377.102 94.044,377.067 94.129,377.067 94.233 M320.065 94.978 C 319.990 95.198,319.923 95.418,319.915 95.467 C 319.844 95.941,319.476 96.229,319.461 95.822 L 319.444 95.378 319.279 95.789 C 319.180 96.037,319.032 96.150,318.907 96.073 C 318.782 95.995,318.748 96.022,318.823 96.143 C 318.890 96.251,319.054 96.298,319.188 96.247 C 319.321 96.195,319.485 96.241,319.551 96.348 C 319.617 96.455,319.605 96.531,319.524 96.517 C 319.128 96.448,318.746 96.552,318.840 96.703 C 318.977 96.926,319.733 96.647,319.733 96.373 C 319.733 96.259,319.822 96.108,319.930 96.038 C 320.039 95.968,320.106 95.771,320.079 95.600 C 320.053 95.429,320.087 95.289,320.156 95.289 C 320.224 95.289,320.292 95.129,320.307 94.933 C 320.342 94.463,320.234 94.482,320.065 94.978 M337.942 94.618 C 337.832 95.014,337.840 95.272,337.959 95.198 C 338.137 95.088,338.305 95.327,338.401 95.826 C 338.439 96.025,338.394 96.140,338.302 96.083 C 338.209 96.026,338.133 96.064,338.133 96.167 C 338.133 96.271,337.973 96.356,337.778 96.356 C 337.552 96.356,337.427 96.469,337.436 96.667 C 337.445 
96.853,337.486 96.888,337.540 96.756 C 337.589 96.633,337.700 96.533,337.787 96.533 C 337.874 96.533,337.892 96.671,337.827 96.839 C 337.723 97.109,337.742 97.114,337.992 96.883 C 338.147 96.740,338.237 96.562,338.193 96.489 C 338.149 96.416,338.237 96.356,338.390 96.356 C 338.542 96.356,338.667 96.436,338.667 96.533 C 338.667 96.631,338.667 96.771,338.667 96.844 C 338.667 96.918,338.593 96.978,338.504 96.978 C 338.414 96.978,338.274 97.050,338.193 97.137 C 338.107 97.230,338.216 97.255,338.455 97.196 C 338.708 97.133,338.840 97.168,338.801 97.287 C 338.713 97.551,338.138 97.615,337.746 97.405 C 337.568 97.310,337.422 97.302,337.422 97.387 C 337.422 97.570,338.325 98.036,338.563 97.977 C 338.653 97.954,338.618 98.067,338.484 98.228 C 338.259 98.499,338.224 98.497,338.009 98.204 C 337.882 98.030,337.778 97.964,337.778 98.057 C 337.778 98.218,338.610 99.200,338.747 99.200 C 338.849 99.200,338.761 98.780,338.607 98.533 C 338.530 98.411,338.546 98.311,338.641 98.311 C 338.737 98.311,338.841 98.383,338.874 98.472 C 339.006 98.830,339.822 98.768,339.830 98.400 C 339.840 97.939,339.531 97.422,339.246 97.422 C 339.119 97.422,339.022 97.206,339.022 96.924 C 339.022 96.651,338.922 96.319,338.800 96.187 C 338.622 95.996,338.622 95.973,338.800 96.073 C 338.950 96.158,339.021 96.065,339.019 95.788 C 339.017 95.458,338.983 95.430,338.844 95.644 C 338.750 95.791,338.671 95.827,338.669 95.724 C 338.668 95.622,338.756 95.449,338.865 95.340 C 339.011 95.194,338.917 95.066,338.509 94.855 C 338.205 94.698,337.949 94.591,337.942 94.618 M336.583 95.120 C 336.600 95.349,336.671 95.396,336.815 95.277 C 336.928 95.182,337.172 95.152,337.355 95.210 C 337.652 95.304,337.650 95.291,337.333 95.087 C 336.799 94.743,336.555 94.754,336.583 95.120 M358.578 95.447 C 358.724 95.553,358.775 95.640,358.689 95.642 C 358.468 95.646,358.938 96.178,359.163 96.178 C 359.265 96.178,359.296 96.125,359.231 96.061 C 359.167 95.996,359.203 95.836,359.312 95.705 C 359.463 95.523,359.445 95.467,359.236 95.467 C 
359.086 95.467,358.816 95.419,358.637 95.361 C 358.366 95.273,358.356 95.288,358.578 95.447 M379.556 96.356 C 379.714 96.651,379.691 96.711,379.420 96.711 C 379.240 96.711,378.997 96.807,378.880 96.924 C 378.714 97.090,378.667 97.059,378.667 96.782 C 378.667 96.069,379.252 95.788,379.556 96.356 M326.800 96.311 C 327.949 96.342,329.829 96.342,330.978 96.311 C 332.127 96.280,331.187 96.254,328.889 96.254 C 326.591 96.254,325.651 96.280,326.800 96.311 M319.805 96.698 C 319.250 97.312,319.243 97.503,319.795 96.989 C 320.103 96.702,320.356 96.442,320.356 96.411 C 320.356 96.263,320.059 96.418,319.805 96.698 M317.960 96.745 C 317.775 97.198,317.906 97.619,318.149 97.354 C 318.257 97.237,318.285 96.988,318.213 96.794 C 318.096 96.480,318.070 96.475,317.960 96.745 M363.250 97.819 C 363.266 98.109,363.342 98.240,363.438 98.144 C 363.627 97.956,364.089 98.095,364.089 98.341 C 364.089 98.434,364.026 98.471,363.949 98.424 C 363.872 98.376,363.810 98.482,363.810 98.660 C 363.810 98.849,363.904 98.946,364.038 98.894 C 364.164 98.846,364.267 98.748,364.267 98.677 C 364.267 98.427,364.786 98.552,364.904 98.830 C 365.112 99.320,365.087 99.924,364.851 100.121 C 364.601 100.328,364.544 100.626,364.756 100.616 C 365.198 100.597,365.759 99.875,365.461 99.709 C 365.293 99.615,365.157 99.442,365.158 99.324 C 365.160 99.177,365.207 99.184,365.311 99.348 C 365.432 99.538,365.484 99.527,365.573 99.295 C 365.642 99.115,365.613 99.048,365.497 99.120 C 365.386 99.188,365.333 99.103,365.366 98.911 C 365.397 98.732,365.342 98.583,365.244 98.579 C 365.147 98.575,364.967 98.509,364.844 98.431 C 364.722 98.354,364.622 98.345,364.622 98.411 C 364.622 98.478,364.503 98.434,364.358 98.313 C 364.212 98.192,364.128 98.036,364.171 97.966 C 364.215 97.896,364.074 97.852,363.858 97.870 C 363.643 97.887,363.412 97.773,363.345 97.617 C 363.266 97.433,363.233 97.503,363.250 97.819 M317.925 98.476 C 317.947 98.859,317.922 99.170,317.871 99.166 C 317.025 99.096,316.973 99.167,316.998 100.336 C 317.026 
101.611,316.932 101.867,316.434 101.867 C 316.150 101.867,316.089 101.992,316.089 102.578 C 316.089 102.979,316.175 103.289,316.286 103.289 C 316.396 103.289,316.435 103.152,316.374 102.978 C 316.284 102.718,316.310 102.724,316.533 103.012 C 316.680 103.202,316.800 103.262,316.800 103.145 C 316.800 103.029,316.980 102.939,317.200 102.946 C 317.568 102.957,317.572 102.969,317.256 103.093 C 317.048 103.174,316.988 103.274,317.103 103.345 C 317.539 103.615,317.683 103.007,317.337 102.360 L 316.994 101.721 317.386 101.455 C 317.734 101.219,317.751 101.146,317.541 100.797 C 317.183 100.201,317.131 99.735,317.436 99.852 C 317.616 99.921,317.660 99.865,317.583 99.664 C 317.500 99.448,317.582 99.378,317.918 99.378 C 318.400 99.378,318.658 98.996,318.371 98.708 C 318.250 98.588,318.255 98.490,318.386 98.409 C 318.520 98.326,318.501 98.209,318.325 98.033 C 317.941 97.649,317.883 97.713,317.925 98.476 M358.784 98.333 C 358.823 98.716,359.030 99.094,359.366 99.394 L 359.887 99.861 359.366 100.109 C 358.931 100.317,358.838 100.475,358.807 101.068 C 358.786 101.458,358.813 101.678,358.866 101.556 C 358.919 101.433,359.121 101.333,359.314 101.333 C 359.518 101.333,359.616 101.413,359.548 101.523 C 359.484 101.627,359.387 101.668,359.333 101.615 C 359.212 101.494,358.769 102.056,358.761 102.340 C 358.752 102.663,359.357 102.495,359.560 102.117 C 359.833 101.610,360.231 101.585,360.091 102.084 C 360.002 102.398,360.018 102.429,360.161 102.222 C 360.265 102.072,360.300 101.581,360.241 101.094 C 360.140 100.259,360.149 100.239,360.513 100.433 C 360.720 100.544,360.895 100.612,360.903 100.584 C 360.989 100.282,360.959 99.601,360.862 99.661 C 360.793 99.704,360.685 99.658,360.624 99.558 C 360.563 99.459,360.352 99.378,360.157 99.378 C 359.961 99.378,359.851 99.298,359.911 99.200 C 359.972 99.102,359.936 99.022,359.833 99.022 C 359.729 99.022,359.644 99.107,359.644 99.210 C 359.644 99.314,359.556 99.344,359.448 99.277 C 359.305 99.189,359.306 99.122,359.452 99.031 C 359.588 
98.947,359.539 98.804,359.297 98.585 C 359.102 98.408,358.981 98.200,359.029 98.122 C 359.077 98.044,359.029 97.926,358.921 97.859 C 358.789 97.777,358.744 97.932,358.784 98.333 M359.181 98.164 C 359.097 98.300,359.147 98.335,359.327 98.266 C 359.509 98.196,359.593 98.260,359.578 98.459 C 359.552 98.809,360.213 98.875,360.283 98.529 C 360.308 98.404,360.288 98.384,360.239 98.484 C 360.135 98.694,359.812 98.734,359.843 98.533 C 359.931 97.978,359.459 97.714,359.181 98.164 M319.192 98.362 C 318.812 98.803,318.790 98.946,319.102 98.946 C 319.317 98.946,319.422 98.813,319.422 98.540 C 319.422 98.316,319.412 98.136,319.400 98.140 C 319.388 98.144,319.294 98.244,319.192 98.362 M322.190 98.437 C 322.296 98.608,321.750 99.253,321.583 99.156 C 321.540 99.131,321.563 99.184,321.634 99.272 C 321.709 99.366,321.867 99.347,322.013 99.226 C 322.294 98.993,322.785 99.398,322.595 99.705 C 322.539 99.796,322.627 99.980,322.792 100.113 C 322.984 100.270,323.044 100.278,322.960 100.138 C 322.750 99.788,333.912 99.729,334.238 100.078 C 334.441 100.296,334.431 100.317,334.172 100.217 C 333.931 100.125,333.867 100.199,333.867 100.569 C 333.867 101.116,334.331 101.704,334.663 101.577 C 334.840 101.509,334.872 101.735,334.805 102.576 C 334.718 103.660,335.068 104.513,335.490 104.252 C 335.575 104.200,335.646 104.462,335.647 104.834 C 335.649 105.337,335.694 105.442,335.822 105.244 C 335.917 105.098,335.996 105.048,335.997 105.133 C 335.999 105.218,336.107 105.199,336.239 105.090 C 336.370 104.981,336.524 104.939,336.581 104.996 C 336.638 105.053,336.830 105.010,337.009 104.901 C 337.243 104.758,337.259 104.709,337.067 104.728 C 336.382 104.793,336.081 104.711,336.356 104.533 C 336.574 104.392,336.553 104.361,336.244 104.358 C 335.819 104.355,335.400 103.952,335.602 103.739 C 335.674 103.663,335.553 103.656,335.333 103.723 C 335.092 103.798,334.933 103.771,334.933 103.656 C 334.933 103.552,334.993 103.476,335.067 103.488 C 335.678 103.584,336.031 103.451,335.824 103.202 C 335.703 
103.056,335.521 102.989,335.419 103.052 C 335.311 103.119,335.287 103.080,335.363 102.958 C 335.433 102.843,335.554 102.788,335.631 102.836 C 335.707 102.883,335.922 102.804,336.107 102.661 C 336.397 102.436,336.401 102.414,336.133 102.507 C 335.947 102.572,335.822 102.528,335.822 102.397 C 335.822 102.226,335.765 102.226,335.556 102.400 C 335.402 102.528,335.289 102.545,335.289 102.441 C 335.289 102.341,335.405 102.216,335.547 102.161 C 335.695 102.104,335.775 101.905,335.734 101.693 C 335.695 101.490,335.739 101.371,335.832 101.428 C 335.924 101.485,336.000 101.407,336.000 101.255 C 336.000 101.102,336.095 100.978,336.212 100.978 C 336.328 100.978,336.268 100.852,336.078 100.698 C 335.889 100.544,335.846 100.476,335.983 100.548 C 336.149 100.634,336.189 100.607,336.104 100.469 C 335.910 100.155,335.600 100.435,335.704 100.830 C 335.777 101.111,335.747 101.137,335.544 100.968 C 335.347 100.804,335.268 100.825,335.176 101.065 C 335.108 101.243,335.162 101.434,335.307 101.526 C 335.444 101.613,335.476 101.682,335.378 101.681 C 335.076 101.676,334.576 101.217,334.714 101.071 C 334.786 100.996,334.664 100.989,334.444 101.057 C 334.203 101.131,334.044 101.104,334.044 100.990 C 334.044 100.885,334.104 100.809,334.178 100.821 C 334.710 100.905,335.294 100.774,335.183 100.595 C 335.104 100.468,335.134 100.438,335.261 100.516 C 335.381 100.590,335.467 100.534,335.467 100.381 C 335.467 99.894,335.261 99.577,335.009 99.674 C 334.871 99.727,334.803 99.844,334.858 99.933 C 334.913 100.022,335.038 100.045,335.137 99.984 C 335.235 99.924,335.210 100.042,335.081 100.248 C 334.953 100.454,334.727 100.620,334.579 100.617 C 334.365 100.612,334.376 100.562,334.633 100.367 C 334.856 100.198,334.892 100.084,334.751 99.997 C 334.639 99.928,334.586 99.720,334.634 99.536 C 334.683 99.351,334.650 99.156,334.561 99.101 C 334.472 99.046,334.394 99.146,334.386 99.323 C 334.377 99.519,334.336 99.556,334.280 99.416 C 334.230 99.291,334.286 99.071,334.405 98.927 C 334.668 98.610,334.383 
98.578,334.069 98.889 C 333.890 99.067,333.869 99.049,333.964 98.800 C 334.075 98.508,333.756 98.489,328.775 98.487 C 325.855 98.486,323.154 98.433,322.772 98.370 C 322.318 98.295,322.117 98.318,322.190 98.437 M323.257 99.044 C 322.858 99.443,322.436 99.368,322.605 98.928 C 322.660 98.784,322.914 98.667,323.170 98.667 L 323.635 98.667 323.257 99.044 M333.867 99.378 C 333.867 99.492,332.092 99.556,328.899 99.556 C 325.780 99.556,323.891 99.489,323.822 99.378 C 323.750 99.261,325.450 99.200,328.790 99.200 C 332.056 99.200,333.867 99.263,333.867 99.378 M338.756 99.641 C 338.302 100.015,338.262 100.110,338.489 100.276 C 338.669 100.408,338.690 100.499,338.553 100.555 C 338.429 100.606,338.532 100.827,338.820 101.127 C 339.175 101.497,339.351 101.570,339.544 101.430 C 339.744 101.285,339.776 101.308,339.691 101.535 C 339.608 101.756,339.664 101.805,339.922 101.737 C 340.110 101.689,340.221 101.582,340.171 101.500 C 340.121 101.419,340.230 101.381,340.413 101.416 C 340.678 101.467,340.740 101.383,340.712 101.007 C 340.651 100.160,340.627 100.114,340.222 100.057 C 339.955 100.019,339.840 99.897,339.874 99.689 C 339.903 99.518,339.848 99.378,339.751 99.378 C 339.655 99.378,339.471 99.518,339.343 99.689 C 339.216 99.860,338.991 100.027,338.844 100.059 C 338.673 100.097,338.731 99.955,339.009 99.659 C 339.552 99.080,339.446 99.072,338.756 99.641 M385.829 99.908 C 385.960 100.410,385.719 100.493,385.290 100.093 C 385.090 99.907,384.878 99.803,384.819 99.863 C 384.760 99.922,384.711 99.868,384.711 99.744 C 384.711 99.589,384.856 99.553,385.170 99.632 C 385.502 99.716,385.593 99.689,385.499 99.535 C 385.421 99.410,385.443 99.369,385.551 99.436 C 385.652 99.499,385.777 99.711,385.829 99.908 M317.845 99.871 C 317.950 100.066,317.951 100.299,317.849 100.482 C 317.746 100.666,317.749 100.816,317.857 100.883 C 317.951 100.941,317.984 101.107,317.929 101.250 C 317.860 101.429,317.947 101.511,318.205 101.511 C 318.531 101.511,318.553 101.466,318.368 101.170 C 318.198 100.898,318.207 
100.765,318.411 100.514 C 318.570 100.318,318.583 100.249,318.444 100.332 C 318.290 100.425,318.222 100.327,318.222 100.010 C 318.222 99.720,318.124 99.556,317.949 99.556 C 317.743 99.556,317.718 99.633,317.845 99.871 M321.589 99.723 C 321.773 99.839,321.792 99.950,321.657 100.120 C 321.436 100.399,321.418 100.978,321.630 100.978 C 321.711 100.978,321.777 101.158,321.775 101.378 C 321.773 101.692,321.737 101.722,321.608 101.518 C 321.467 101.296,321.392 101.306,321.082 101.586 C 320.725 101.909,320.732 102.222,321.096 102.222 C 321.199 102.222,321.237 102.105,321.182 101.961 C 321.127 101.818,321.161 101.652,321.257 101.592 C 321.353 101.533,321.409 101.591,321.382 101.720 C 321.352 101.865,321.488 101.947,321.736 101.934 C 322.031 101.918,322.091 101.970,321.962 102.126 C 321.864 102.244,321.503 102.330,321.159 102.319 C 320.815 102.308,320.533 102.361,320.533 102.438 C 320.533 102.515,320.863 102.578,321.266 102.578 C 321.859 102.578,321.961 102.623,321.801 102.816 C 321.603 103.055,321.698 103.289,321.993 103.289 C 322.079 103.289,322.136 103.189,322.120 103.067 C 322.062 102.618,322.137 102.166,322.250 102.280 C 322.411 102.441,322.864 102.042,322.743 101.847 C 322.690 101.760,322.730 101.689,322.834 101.689 C 323.112 101.689,323.063 101.055,322.766 100.809 C 322.625 100.692,322.484 100.422,322.451 100.209 C 322.418 99.996,322.293 99.842,322.174 99.867 C 322.054 99.891,321.956 99.831,321.956 99.733 C 321.956 99.636,321.816 99.557,321.644 99.558 C 321.389 99.561,321.379 99.590,321.589 99.723 M360.622 99.837 C 360.622 99.924,360.482 100.022,360.311 100.055 C 360.128 100.090,360.000 100.007,360.000 99.851 C 360.000 99.701,360.134 99.607,360.311 99.633 C 360.482 99.658,360.622 99.750,360.622 99.837 M339.197 100.963 C 339.201 101.340,338.962 101.028,338.899 100.573 C 338.855 100.259,338.880 100.233,339.019 100.444 C 339.116 100.591,339.196 100.824,339.197 100.963 M365.217 100.589 C 365.052 100.851,364.974 100.858,364.572 100.643 C 364.162 100.424,364.065 
100.438,363.633 100.777 C 363.369 100.985,363.251 101.156,363.373 101.156 C 363.494 101.156,363.553 101.262,363.503 101.393 C 363.453 101.523,363.508 101.630,363.626 101.630 C 363.743 101.630,363.836 101.430,363.831 101.186 C 363.826 100.941,363.935 100.704,364.073 100.658 C 364.251 100.598,364.288 100.665,364.202 100.890 C 364.098 101.161,364.120 101.173,364.363 100.972 C 364.611 100.766,364.631 100.793,364.530 101.194 C 364.459 101.477,364.499 101.702,364.636 101.786 C 364.794 101.884,364.828 101.814,364.755 101.535 C 364.678 101.243,364.725 101.159,364.945 101.196 C 365.105 101.223,365.238 101.142,365.240 101.016 C 365.243 100.890,365.353 100.720,365.486 100.638 C 365.618 100.556,365.655 100.444,365.566 100.390 C 365.478 100.335,365.321 100.425,365.217 100.589 M322.618 100.870 C 322.663 100.915,322.643 101.128,322.575 101.343 C 322.477 101.650,322.416 101.682,322.294 101.490 C 322.163 101.283,322.138 101.284,322.136 101.496 C 322.135 101.635,322.087 101.702,322.030 101.645 C 321.973 101.588,322.015 101.377,322.123 101.175 C 322.285 100.872,322.272 100.826,322.049 100.912 C 321.900 100.969,321.778 100.932,321.778 100.829 C 321.778 100.656,322.436 100.688,322.618 100.870 M384.625 102.012 C 384.474 102.163,384.002 101.652,384.118 101.464 C 384.188 101.351,384.324 101.405,384.470 101.605 C 384.600 101.783,384.670 101.967,384.625 102.012 M340.133 101.872 C 339.913 101.961,339.733 102.080,339.733 102.138 C 339.733 102.196,339.808 102.197,339.899 102.141 C 340.150 101.986,340.335 102.403,340.113 102.625 C 339.974 102.764,339.976 102.886,340.121 103.061 C 340.375 103.367,340.439 104.201,340.230 104.473 C 340.132 104.600,340.154 104.730,340.286 104.812 C 340.422 104.896,340.453 104.869,340.372 104.737 C 340.300 104.622,340.366 104.484,340.521 104.425 C 340.673 104.366,340.757 104.252,340.707 104.171 C 340.657 104.091,340.710 103.967,340.825 103.896 C 340.964 103.810,340.985 103.845,340.887 104.003 C 340.782 104.173,340.822 104.207,341.032 104.127 C 341.191 
104.066,341.377 104.104,341.444 104.213 C 341.523 104.341,341.484 104.360,341.333 104.267 C 341.179 104.171,341.144 104.192,341.227 104.328 C 341.297 104.441,341.436 104.533,341.535 104.533 C 341.770 104.533,341.677 103.064,341.431 102.892 C 341.145 102.692,340.834 102.731,340.750 102.978 C 340.708 103.100,340.704 103.020,340.740 102.800 C 340.777 102.580,340.725 102.400,340.626 102.400 C 340.526 102.400,340.444 102.315,340.444 102.212 C 340.444 102.108,340.524 102.072,340.620 102.132 C 340.721 102.194,340.751 102.123,340.690 101.965 C 340.632 101.813,340.573 101.694,340.559 101.700 C 340.545 101.706,340.353 101.784,340.133 101.872 M363.276 102.876 C 363.290 103.392,363.230 103.752,363.138 103.695 C 363.049 103.640,363.016 103.706,363.066 103.842 C 363.115 103.978,363.243 104.070,363.349 104.047 C 363.456 104.023,363.497 104.123,363.441 104.269 C 363.385 104.414,363.255 104.533,363.151 104.533 C 363.048 104.533,363.011 104.582,363.071 104.641 C 363.130 104.700,363.021 104.918,362.828 105.126 C 362.496 105.481,362.494 105.509,362.794 105.628 C 362.982 105.702,363.013 105.758,362.870 105.765 C 362.730 105.773,362.672 105.890,362.731 106.044 C 362.787 106.191,362.759 106.311,362.667 106.311 C 362.576 106.311,362.416 106.471,362.311 106.667 C 362.144 106.979,362.176 107.022,362.572 107.022 C 362.928 107.022,363.022 106.934,363.022 106.599 C 363.022 106.325,363.148 106.142,363.377 106.082 C 363.890 105.948,364.022 105.453,363.623 105.162 C 363.439 105.028,363.349 104.858,363.423 104.785 C 363.496 104.712,363.556 104.745,363.556 104.859 C 363.556 104.973,363.636 105.067,363.733 105.067 C 363.831 105.067,363.911 104.902,363.911 104.701 C 363.911 104.469,363.836 104.381,363.706 104.461 C 363.591 104.532,363.548 104.511,363.609 104.413 C 363.669 104.316,363.961 104.273,364.259 104.316 C 364.857 104.404,365.060 103.640,364.578 103.121 C 364.400 102.929,364.400 102.906,364.578 103.007 C 364.716 103.084,364.800 103.023,364.800 102.844 C 364.800 102.686,364.728 102.601,364.641 
102.655 C 364.553 102.709,364.436 102.634,364.380 102.488 C 364.324 102.342,364.135 102.221,363.961 102.219 C 363.708 102.217,363.694 102.185,363.894 102.059 C 364.075 101.944,364.020 101.908,363.698 101.928 C 363.279 101.954,363.253 102.012,363.276 102.876 M339.586 102.773 C 339.249 103.313,339.452 103.385,339.802 102.850 C 339.964 102.602,340.034 102.400,339.958 102.400 C 339.881 102.400,339.714 102.568,339.586 102.773 M363.729 103.009 C 363.490 103.248,363.312 103.006,363.454 102.635 C 363.556 102.369,363.599 102.361,363.735 102.581 C 363.832 102.738,363.830 102.908,363.729 103.009 M357.959 103.188 C 357.981 103.390,358.009 103.733,358.022 103.950 C 358.038 104.233,358.124 104.314,358.326 104.236 C 358.553 104.149,358.561 104.099,358.370 103.978 C 358.180 103.857,358.186 103.827,358.400 103.822 C 358.636 103.817,358.636 103.797,358.401 103.649 C 358.186 103.513,358.179 103.439,358.364 103.253 C 358.550 103.068,358.620 103.070,358.741 103.265 C 358.844 103.432,358.926 103.445,359.010 103.308 C 359.077 103.200,359.043 103.111,358.933 103.111 C 358.824 103.111,358.775 103.046,358.824 102.966 C 358.874 102.886,358.690 102.820,358.416 102.820 C 358.018 102.820,357.926 102.894,357.959 103.188 M320.713 103.116 C 320.890 103.228,320.904 103.284,320.756 103.286 C 320.633 103.288,320.533 103.369,320.533 103.467 C 320.533 103.809,321.232 103.452,321.242 103.104 C 321.243 103.052,321.065 102.996,320.847 102.979 C 320.534 102.955,320.506 102.985,320.713 103.116 M380.159 103.319 C 380.077 103.452,380.122 103.492,380.282 103.431 C 380.453 103.365,380.542 103.473,380.560 103.768 C 380.580 104.104,380.520 104.176,380.293 104.086 C 380.132 104.022,379.849 103.939,379.665 103.901 C 379.135 103.794,379.090 103.640,379.554 103.519 C 379.951 103.415,379.957 103.397,379.635 103.272 C 379.345 103.158,379.370 103.134,379.788 103.124 C 380.126 103.115,380.246 103.178,380.159 103.319 M384.863 103.346 C 385.072 103.599,384.818 104.003,384.453 103.997 C 384.351 103.996,384.387 
103.910,384.533 103.806 C 384.734 103.664,384.745 103.595,384.578 103.527 C 384.325 103.425,384.275 103.111,384.512 103.111 C 384.597 103.111,384.755 103.217,384.863 103.346 M358.945 103.719 C 358.733 103.975,359.011 104.337,359.241 104.107 C 359.300 104.048,359.235 103.994,359.096 103.988 C 358.947 103.980,358.985 103.920,359.188 103.840 C 359.396 103.759,359.457 103.659,359.342 103.588 C 359.237 103.524,359.058 103.583,358.945 103.719 M321.222 103.867 C 321.210 103.940,321.190 104.073,321.178 104.163 C 321.126 104.543,320.889 104.557,320.889 104.180 C 320.889 103.780,320.878 103.777,320.538 104.085 C 320.344 104.260,320.225 104.466,320.272 104.542 C 320.319 104.618,320.297 104.714,320.223 104.756 C 320.149 104.797,320.196 104.803,320.328 104.770 C 320.459 104.737,320.608 104.869,320.659 105.064 C 320.710 105.259,320.835 105.367,320.937 105.304 C 321.048 105.235,321.068 105.276,320.987 105.407 C 320.913 105.526,320.742 105.582,320.606 105.530 C 320.471 105.478,320.372 105.492,320.386 105.562 C 320.443 105.839,320.125 106.311,319.882 106.311 C 319.740 106.311,319.666 106.380,319.719 106.465 C 319.771 106.550,320.015 106.597,320.260 106.570 C 320.650 106.527,320.683 106.564,320.525 106.860 C 320.303 107.276,320.097 107.312,320.252 106.908 C 320.340 106.678,320.313 106.659,320.121 106.818 C 319.988 106.929,319.813 106.960,319.733 106.887 C 319.653 106.815,319.645 106.856,319.715 106.978 C 319.786 107.100,319.758 107.200,319.655 107.200 C 319.551 107.200,319.467 107.277,319.467 107.372 C 319.467 107.467,319.570 107.505,319.695 107.457 C 319.835 107.403,319.924 107.509,319.924 107.729 C 319.924 107.956,319.807 108.089,319.606 108.089 C 319.432 108.089,319.289 108.174,319.289 108.277 C 319.289 108.381,319.381 108.408,319.494 108.339 C 319.617 108.263,319.651 108.290,319.579 108.406 C 319.513 108.513,319.301 108.570,319.107 108.533 C 318.895 108.492,318.756 108.561,318.756 108.708 C 318.756 109.051,315.841 109.171,315.138 108.857 C 314.720 108.671,314.665 108.673,314.825 
108.866 C 315.065 109.155,314.903 109.345,314.536 109.204 C 314.353 109.134,314.290 109.182,314.346 109.349 C 314.499 109.808,320.356 109.777,320.356 109.318 C 320.356 109.119,320.181 108.896,319.956 108.806 C 319.736 108.717,319.660 108.640,319.788 108.633 C 319.920 108.627,319.965 108.532,319.892 108.414 C 319.810 108.281,319.856 108.242,320.022 108.305 C 320.188 108.369,320.279 108.290,320.279 108.082 C 320.279 107.904,320.217 107.798,320.140 107.846 C 320.063 107.893,320.000 107.852,320.000 107.754 C 320.000 107.446,320.314 107.368,320.422 107.649 C 320.645 108.231,321.174 107.353,321.221 106.322 C 321.256 105.559,321.357 105.270,321.699 104.952 C 322.291 104.404,322.286 103.733,321.689 103.733 C 321.444 103.733,321.234 103.793,321.222 103.867 M356.978 104.089 C 356.978 104.414,356.883 104.533,356.623 104.533 C 356.318 104.533,356.282 104.619,356.367 105.156 C 356.438 105.599,356.400 105.779,356.233 105.783 C 356.065 105.787,356.074 105.832,356.267 105.945 C 356.773 106.242,357.511 105.960,357.511 105.470 C 357.511 105.113,357.581 105.044,357.866 105.118 C 358.062 105.169,358.274 105.126,358.338 105.023 C 358.402 104.919,358.379 104.881,358.286 104.939 C 358.193 104.996,358.000 104.970,357.858 104.880 C 357.650 104.748,357.669 104.713,357.956 104.700 C 358.151 104.691,358.211 104.643,358.089 104.593 C 357.967 104.544,357.867 104.310,357.867 104.074 C 357.867 103.734,357.774 103.644,357.422 103.644 C 357.067 103.644,356.978 103.733,356.978 104.089 M365.042 105.035 C 365.060 105.984,365.136 106.394,365.308 106.451 C 365.488 106.511,365.515 106.425,365.415 106.111 C 365.306 105.767,365.327 105.727,365.530 105.891 C 365.678 106.009,365.921 106.032,366.133 105.946 C 366.329 105.867,366.409 105.796,366.311 105.788 C 366.213 105.779,366.253 105.694,366.400 105.597 C 366.592 105.472,366.480 105.445,366.005 105.502 C 365.338 105.583,364.973 105.379,365.358 105.140 C 365.488 105.060,365.518 105.089,365.439 105.217 C 365.369 105.330,365.389 105.422,365.482 105.422 C 
365.575 105.422,365.700 105.293,365.761 105.136 C 365.847 104.912,365.926 104.894,366.120 105.055 C 366.441 105.322,366.692 104.974,366.417 104.643 C 366.312 104.516,366.278 104.359,366.342 104.295 C 366.566 104.071,366.756 104.180,366.756 104.533 C 366.756 104.729,366.870 104.889,367.009 104.889 C 367.149 104.889,367.420 105.089,367.613 105.333 C 368.082 105.930,368.356 105.893,368.356 105.234 C 368.356 104.894,368.427 104.734,368.545 104.807 C 368.650 104.872,368.695 105.030,368.646 105.159 C 368.596 105.288,368.667 105.435,368.802 105.487 C 368.971 105.552,369.014 105.492,368.939 105.295 C 368.859 105.089,368.894 105.049,369.064 105.154 C 369.221 105.251,369.257 105.231,369.173 105.094 C 369.103 104.981,369.130 104.889,369.234 104.889 C 369.338 104.889,369.422 104.809,369.422 104.711 C 369.422 104.450,368.890 104.494,368.883 104.756 C 368.875 105.066,368.178 104.198,368.178 103.878 C 368.178 103.573,366.933 103.596,366.714 103.905 C 366.630 104.023,366.582 104.009,366.581 103.865 C 366.579 103.715,366.320 103.651,365.798 103.671 L 365.018 103.701 365.042 105.035 M336.615 104.012 C 336.555 104.108,336.613 104.165,336.742 104.138 C 336.872 104.111,336.956 104.195,336.929 104.324 C 336.902 104.454,336.962 104.509,337.062 104.447 C 337.162 104.385,337.244 104.268,337.244 104.186 C 337.244 103.957,336.736 103.816,336.615 104.012 M317.015 104.760 C 316.666 105.131,316.661 105.168,316.954 105.168 C 317.367 105.168,317.691 104.824,317.526 104.558 C 317.439 104.416,317.279 104.479,317.015 104.760 M384.512 104.505 C 384.501 104.569,384.504 104.692,384.520 104.777 C 384.535 104.863,384.420 104.884,384.265 104.824 C 384.109 104.764,384.025 104.786,384.079 104.872 C 384.132 104.959,384.038 105.082,383.871 105.146 C 383.609 105.247,383.603 105.287,383.827 105.429 C 384.020 105.551,384.028 105.595,383.856 105.597 C 383.728 105.599,383.672 105.679,383.732 105.776 C 383.794 105.876,383.723 105.906,383.566 105.846 C 383.415 105.788,383.332 105.674,383.382 105.594 C 383.541 
105.336,383.209 105.273,382.982 105.518 C 382.808 105.706,382.826 105.770,383.071 105.834 C 383.303 105.895,383.282 105.919,382.988 105.935 C 382.774 105.946,382.554 106.028,382.499 106.117 C 382.444 106.205,382.554 106.238,382.743 106.188 C 383.015 106.117,383.068 106.171,382.996 106.447 C 382.939 106.664,383.011 106.836,383.186 106.903 C 383.363 106.971,383.467 106.915,383.467 106.750 C 383.467 106.606,383.389 106.489,383.294 106.489 C 383.200 106.489,383.168 106.369,383.225 106.222 C 383.281 106.076,383.403 105.956,383.496 105.956 C 383.589 105.956,383.623 106.024,383.571 106.108 C 383.520 106.191,383.670 106.223,383.905 106.178 C 384.139 106.133,384.376 105.981,384.430 105.839 C 384.486 105.695,384.452 105.630,384.354 105.690 C 384.257 105.750,384.178 105.728,384.178 105.641 C 384.178 105.554,384.378 105.375,384.622 105.244 C 384.867 105.114,385.067 104.940,385.067 104.859 C 385.067 104.778,385.087 104.659,385.111 104.595 C 385.136 104.531,385.016 104.458,384.844 104.433 C 384.673 104.408,384.524 104.440,384.512 104.505 M355.245 104.667 C 355.451 105.080,354.524 105.511,353.431 105.511 C 352.490 105.511,352.293 105.563,352.255 105.822 C 352.228 106.002,352.323 106.133,352.479 106.133 C 352.643 106.133,352.710 106.235,352.649 106.394 C 352.569 106.602,352.662 106.632,353.102 106.544 C 353.531 106.458,353.623 106.486,353.511 106.666 C 353.415 106.822,353.437 106.857,353.575 106.771 C 353.690 106.700,353.738 106.568,353.681 106.477 C 353.625 106.386,353.701 106.311,353.851 106.311 C 354.033 106.311,354.086 106.413,354.011 106.622 C 353.949 106.793,353.964 106.874,354.044 106.802 C 354.124 106.729,354.300 106.762,354.436 106.874 C 354.623 107.029,354.733 107.023,354.896 106.846 C 355.067 106.662,355.060 106.644,354.861 106.757 C 354.694 106.852,354.654 106.832,354.739 106.694 C 354.808 106.581,354.972 106.489,355.102 106.489 C 355.341 106.489,355.654 105.639,355.480 105.465 C 355.428 105.414,355.261 105.476,355.107 105.603 C 354.889 105.784,354.877 105.866,355.053 
105.975 C 355.177 106.051,355.239 106.154,355.190 106.203 C 355.141 106.252,355.000 106.229,354.877 106.153 C 354.540 105.945,354.920 105.039,355.285 105.179 C 355.567 105.287,356.089 104.969,356.089 104.689 C 356.089 104.604,355.884 104.533,355.634 104.533 C 355.384 104.533,355.209 104.593,355.245 104.667 M369.653 104.646 C 369.715 104.707,369.680 104.917,369.576 105.111 C 369.440 105.366,369.441 105.431,369.582 105.344 C 369.757 105.236,369.857 105.456,369.799 105.822 C 369.787 105.896,369.866 105.956,369.975 105.956 C 370.083 105.956,370.127 105.840,370.073 105.698 C 370.007 105.526,370.059 105.473,370.230 105.539 C 370.637 105.695,370.759 105.086,370.378 104.797 C 370.054 104.552,369.428 104.421,369.653 104.646 M361.942 105.231 C 361.656 105.517,361.422 105.828,361.422 105.923 C 361.422 106.172,362.026 106.367,362.101 106.142 C 362.135 106.038,362.249 106.006,362.354 106.071 C 362.460 106.137,362.492 106.105,362.427 106.000 C 362.363 105.896,362.131 105.853,361.911 105.904 L 361.511 105.996 361.914 105.697 C 362.136 105.532,362.263 105.310,362.196 105.202 C 362.119 105.078,362.178 105.046,362.356 105.114 C 362.518 105.176,362.679 105.114,362.735 104.967 C 362.898 104.544,362.495 104.678,361.942 105.231 M336.412 105.187 C 336.486 105.308,336.407 105.429,336.224 105.477 C 336.052 105.522,336.163 105.568,336.470 105.579 C 336.817 105.592,337.067 105.499,337.131 105.333 C 337.187 105.187,337.156 105.067,337.061 105.067 C 336.966 105.067,336.889 105.156,336.889 105.266 C 336.889 105.396,336.784 105.381,336.588 105.222 C 336.420 105.085,336.343 105.070,336.412 105.187 M302.109 105.663 L 301.730 106.044 302.154 106.044 C 302.475 106.044,302.578 105.947,302.578 105.644 C 302.578 105.176,302.595 105.175,302.109 105.663 M350.800 105.545 C 350.971 105.589,351.251 105.589,351.422 105.545 C 351.593 105.500,351.453 105.463,351.111 105.463 C 350.769 105.463,350.629 105.500,350.800 105.545 M351.787 106.055 C 351.747 106.403,351.762 106.719,351.823 106.756 C 351.883 
106.793,351.950 106.508,351.973 106.123 C 352.025 105.249,351.887 105.200,351.787 106.055 M367.030 105.588 C 366.974 105.679,367.021 105.811,367.136 105.882 C 367.274 105.967,367.300 105.926,367.214 105.761 C 367.119 105.578,367.148 105.582,367.321 105.778 C 367.705 106.211,368.052 106.212,367.781 105.778 C 367.546 105.401,367.200 105.313,367.030 105.588 M370.854 105.988 C 371.011 106.535,371.364 106.918,371.545 106.736 C 371.594 106.687,371.516 106.573,371.373 106.482 C 371.229 106.391,371.181 106.315,371.266 106.314 C 371.352 106.312,371.315 106.171,371.186 106.000 C 370.986 105.735,370.988 105.722,371.200 105.911 C 371.552 106.225,371.822 106.188,371.552 105.863 C 371.364 105.636,371.375 105.610,371.619 105.704 C 371.782 105.766,371.911 105.728,371.911 105.619 C 371.911 105.511,371.637 105.422,371.301 105.422 C 370.701 105.422,370.694 105.431,370.854 105.988 M319.679 105.791 C 319.714 105.896,319.818 105.982,319.911 105.982 C 320.004 105.982,320.109 105.896,320.144 105.791 C 320.179 105.686,320.074 105.600,319.911 105.600 C 319.748 105.600,319.643 105.686,319.679 105.791 M350.800 105.900 C 350.971 105.945,351.251 105.945,351.422 105.900 C 351.593 105.856,351.453 105.819,351.111 105.819 C 350.769 105.819,350.629 105.856,350.800 105.900 M335.771 106.189 C 335.845 106.305,335.907 106.460,335.908 106.533 C 335.910 106.607,335.971 106.667,336.044 106.667 C 336.818 106.667,336.818 106.666,336.485 106.473 C 336.312 106.372,336.206 106.231,336.251 106.159 C 336.295 106.088,336.175 106.018,335.984 106.003 C 335.755 105.986,335.683 106.049,335.771 106.189 M365.904 106.436 C 365.562 106.674,365.429 106.683,365.072 106.492 C 364.348 106.104,364.089 106.357,364.089 107.451 C 364.089 108.602,364.210 108.731,365.333 108.778 C 365.673 108.792,365.867 108.895,365.867 109.060 C 365.867 109.488,366.534 109.716,367.548 109.634 C 368.431 109.562,368.527 109.594,368.878 110.067 C 369.221 110.532,370.327 110.857,370.052 110.412 C 369.996 110.321,370.033 110.195,370.135 110.132 C 
370.237 110.069,370.298 110.124,370.271 110.253 C 370.236 110.424,370.467 110.489,371.111 110.489 C 371.988 110.489,372.207 110.298,371.928 109.778 C 371.736 109.420,371.771 109.156,372.010 109.156 C 372.145 109.156,372.215 109.262,372.165 109.393 C 372.112 109.529,372.194 109.630,372.356 109.630 C 372.526 109.630,372.599 109.530,372.541 109.378 C 372.488 109.240,372.444 109.093,372.444 109.052 C 372.444 109.011,372.244 108.978,372.000 108.978 C 371.756 108.978,371.554 109.078,371.553 109.200 C 371.551 109.363,371.509 109.357,371.397 109.180 C 371.281 108.998,371.197 109.065,371.058 109.447 C 370.887 109.915,370.727 109.669,370.829 109.095 C 370.843 109.013,370.633 109.100,370.361 109.288 C 369.539 109.857,369.410 109.911,369.462 109.658 C 369.491 109.520,369.349 109.420,369.121 109.417 C 368.906 109.414,368.666 109.341,368.587 109.255 C 368.508 109.169,368.538 109.152,368.654 109.219 C 368.770 109.285,368.915 109.258,368.976 109.158 C 369.037 109.059,368.968 108.978,368.821 108.978 C 368.674 108.978,368.507 109.054,368.449 109.148 C 368.391 109.241,368.214 109.268,368.054 109.206 C 367.845 109.126,367.804 109.161,367.909 109.331 C 368.009 109.492,367.987 109.524,367.838 109.432 C 367.718 109.358,367.665 109.253,367.720 109.199 C 367.775 109.144,367.720 108.977,367.598 108.827 C 367.422 108.609,367.423 108.581,367.600 108.687 C 367.744 108.774,367.826 108.708,367.833 108.499 C 367.843 108.240,367.876 108.256,368.006 108.578 C 368.205 109.075,368.355 109.103,368.229 108.619 C 368.092 108.096,367.888 108.037,367.377 108.372 C 366.920 108.672,366.623 108.625,366.832 108.286 C 366.894 108.186,367.042 108.128,367.161 108.156 C 367.280 108.185,367.317 108.175,367.243 108.133 C 367.169 108.092,367.155 107.983,367.212 107.892 C 367.269 107.800,367.209 107.747,367.080 107.773 C 366.950 107.800,366.866 107.716,366.893 107.587 C 366.920 107.457,366.864 107.400,366.768 107.459 C 366.672 107.518,366.638 107.684,366.693 107.828 C 366.764 108.012,366.671 108.088,366.375 108.086 C 
366.090 108.084,366.029 108.037,366.184 107.939 C 366.309 107.859,366.361 107.661,366.299 107.498 C 366.236 107.335,366.119 107.243,366.038 107.293 C 365.957 107.343,365.841 107.302,365.780 107.203 C 365.718 107.104,365.813 107.017,365.990 107.010 C 366.277 106.999,366.273 106.981,365.956 106.844 C 365.629 106.704,365.680 106.661,366.252 106.600 C 366.486 106.575,366.689 106.128,366.459 106.143 C 366.378 106.148,366.128 106.280,365.904 106.436 M370.419 106.416 C 370.272 106.798,370.269 106.792,370.644 106.890 C 370.901 106.957,370.957 106.900,370.889 106.641 C 370.769 106.183,370.549 106.077,370.419 106.416 M350.065 106.476 C 349.887 106.586,349.897 106.640,350.113 106.723 C 350.264 106.781,350.432 106.758,350.486 106.671 C 350.626 106.444,350.335 106.309,350.065 106.476 M355.479 106.653 C 355.185 106.978,355.185 106.993,355.481 106.950 C 355.652 106.924,355.812 106.770,355.836 106.607 C 355.891 106.231,355.857 106.236,355.479 106.653 M359.860 106.595 C 359.734 106.732,359.698 106.844,359.780 106.844 C 359.862 106.844,359.929 107.044,359.929 107.289 C 359.929 107.533,359.852 107.733,359.759 107.733 C 359.666 107.733,359.735 107.894,359.912 108.090 C 360.090 108.286,360.182 108.499,360.117 108.564 C 360.053 108.629,359.999 108.568,359.997 108.430 C 359.995 108.222,359.964 108.225,359.819 108.444 C 359.674 108.666,359.618 108.670,359.488 108.469 C 359.356 108.264,359.318 108.264,359.240 108.469 C 359.183 108.617,359.064 108.486,358.934 108.133 C 358.713 107.535,358.509 107.413,358.299 107.753 C 358.232 107.862,358.287 108.025,358.422 108.116 C 358.623 108.252,358.620 108.273,358.400 108.231 C 358.253 108.203,358.133 108.260,358.133 108.357 C 358.133 108.454,358.278 108.513,358.455 108.489 C 358.632 108.465,358.727 108.525,358.667 108.622 C 358.503 108.887,357.941 108.837,357.691 108.535 C 357.570 108.390,357.400 108.314,357.313 108.368 C 357.226 108.422,357.156 108.394,357.156 108.307 C 357.156 108.220,357.027 108.148,356.870 108.148 C 356.658 108.148,356.691 
108.228,357.001 108.462 C 357.230 108.634,357.451 108.742,357.492 108.700 C 357.599 108.594,358.044 108.988,358.044 109.188 C 358.044 109.279,357.978 109.313,357.896 109.263 C 357.815 109.212,357.748 109.288,357.748 109.430 C 357.748 109.877,358.545 109.747,358.610 109.289 C 358.657 108.960,358.793 108.880,359.378 108.842 C 360.572 108.762,360.872 108.638,361.145 108.109 C 361.290 107.829,361.379 107.570,361.344 107.534 C 361.309 107.497,361.388 107.367,361.520 107.246 C 361.670 107.107,361.799 107.091,361.868 107.202 C 361.928 107.299,361.900 107.378,361.807 107.378 C 361.714 107.378,361.592 107.498,361.536 107.644 C 361.466 107.826,361.545 107.911,361.783 107.911 C 362.206 107.911,362.259 107.347,361.867 107.022 C 361.720 106.900,361.600 106.691,361.600 106.556 C 361.600 106.421,361.525 106.311,361.432 106.311 C 361.340 106.311,361.289 106.451,361.320 106.622 C 361.350 106.793,361.219 107.173,361.029 107.467 C 360.816 107.796,360.722 107.864,360.782 107.644 C 360.837 107.449,360.807 107.172,360.717 107.028 C 360.612 106.860,360.629 106.676,360.766 106.510 C 360.947 106.291,360.921 106.283,360.592 106.458 C 360.380 106.571,360.180 106.592,360.148 106.505 C 360.115 106.418,359.986 106.458,359.860 106.595 M382.767 107.275 C 382.498 107.599,382.776 107.770,383.065 107.457 C 383.186 107.326,383.215 107.175,383.129 107.122 C 383.044 107.069,382.881 107.138,382.767 107.275 M380.673 107.512 C 380.700 107.683,380.777 107.911,380.845 108.020 C 381.048 108.345,380.764 108.463,380.534 108.148 C 380.365 107.917,380.291 107.905,380.177 108.089 C 380.077 108.251,380.103 108.279,380.262 108.181 C 380.415 108.086,380.463 108.122,380.407 108.286 C 380.361 108.422,380.090 108.533,379.806 108.533 C 379.511 108.533,379.306 108.622,379.328 108.739 C 379.349 108.852,379.245 108.991,379.096 109.048 C 378.923 109.114,378.871 109.079,378.952 108.948 C 379.029 108.823,378.998 108.794,378.872 108.872 C 378.598 109.041,378.610 109.504,378.889 109.538 C 379.696 109.638,380.208 109.572,380.457 
109.337 C 380.765 109.047,380.724 108.716,380.405 108.913 C 380.299 108.979,380.262 108.951,380.324 108.851 C 380.385 108.752,380.720 108.702,381.067 108.740 C 381.664 108.806,381.686 108.790,381.471 108.444 C 381.283 108.140,381.289 108.050,381.511 107.908 C 381.688 107.796,381.703 107.738,381.556 107.736 C 381.433 107.735,381.353 107.644,381.378 107.535 C 381.402 107.427,381.243 107.307,381.024 107.270 C 380.725 107.220,380.637 107.280,380.673 107.512 M341.430 107.865 C 341.343 108.092,341.371 108.101,341.596 107.914 C 341.819 107.729,341.867 107.740,341.867 107.978 C 341.867 108.148,341.721 108.267,341.513 108.267 C 341.247 108.267,341.168 108.159,341.195 107.835 C 341.215 107.598,341.299 107.446,341.383 107.497 C 341.467 107.549,341.488 107.715,341.430 107.865 M379.769 107.769 C 379.434 108.104,379.520 108.353,379.867 108.051 C 380.038 107.902,380.298 107.736,380.444 107.682 C 380.632 107.613,380.603 107.579,380.347 107.570 C 380.146 107.562,379.886 107.652,379.769 107.769 M355.336 108.341 C 355.580 108.730,355.540 109.124,355.246 109.236 C 354.830 109.396,355.288 109.691,355.911 109.666 C 356.252 109.652,356.277 109.628,356.019 109.561 C 355.760 109.495,355.737 109.441,355.918 109.329 C 356.107 109.212,356.111 109.129,355.935 108.917 C 355.753 108.698,355.781 108.631,356.097 108.531 C 356.384 108.440,356.429 108.358,356.277 108.206 C 356.125 108.054,356.032 108.069,355.906 108.268 C 355.814 108.414,355.738 108.447,355.736 108.341 C 355.735 108.235,355.617 108.148,355.474 108.148 C 355.332 108.148,355.270 108.235,355.336 108.341 M356.367 108.604 C 356.131 108.697,356.089 108.779,356.234 108.869 C 356.350 108.940,356.444 108.945,356.444 108.879 C 356.444 108.813,356.644 108.890,356.889 109.050 C 357.382 109.373,357.490 109.160,357.054 108.724 C 356.900 108.570,356.760 108.450,356.743 108.457 C 356.725 108.464,356.556 108.530,356.367 108.604 M366.933 109.073 C 366.933 109.284,366.426 109.125,366.334 108.885 C 366.249 108.664,366.295 108.649,366.582 108.802 C 
366.775 108.906,366.933 109.027,366.933 109.073 M377.338 108.970 C 377.280 109.064,377.096 109.097,376.928 109.044 C 376.661 108.961,376.664 108.983,376.951 109.218 C 377.132 109.366,377.225 109.576,377.158 109.685 C 377.091 109.793,377.143 110.012,377.274 110.170 C 377.489 110.431,377.474 110.434,377.109 110.207 C 376.888 110.069,376.743 109.862,376.787 109.747 C 376.905 109.440,376.417 109.102,375.805 109.066 C 375.450 109.045,375.300 109.106,375.355 109.250 C 375.401 109.370,375.345 109.457,375.230 109.445 C 375.116 109.432,375.022 109.502,375.022 109.599 C 375.022 109.696,375.122 109.752,375.244 109.724 C 375.368 109.696,375.467 109.839,375.467 110.047 C 375.467 110.255,375.386 110.371,375.286 110.309 C 375.187 110.248,375.160 110.103,375.228 109.988 C 375.295 109.872,375.176 109.930,374.964 110.116 C 374.752 110.303,374.578 110.472,374.578 110.492 C 374.578 110.902,377.596 110.516,378.009 110.053 C 378.370 109.648,378.421 109.511,378.213 109.511 C 378.060 109.511,377.981 109.437,378.037 109.346 C 378.093 109.255,378.058 109.130,377.958 109.068 C 377.859 109.007,377.778 109.072,377.778 109.212 C 377.778 109.353,377.642 109.577,377.477 109.712 C 377.274 109.876,377.220 109.883,377.309 109.734 C 377.465 109.475,377.611 108.800,377.511 108.800 C 377.474 108.800,377.396 108.877,377.338 108.970 M344.762 109.318 C 344.588 109.597,344.603 109.677,344.848 109.772 C 345.104 109.870,345.109 109.909,344.884 110.054 C 344.365 110.388,345.081 110.533,347.308 110.545 C 349.725 110.558,349.563 110.636,349.559 109.467 C 349.558 109.221,349.710 109.156,350.281 109.156 C 351.051 109.156,351.012 109.423,350.222 109.562 C 349.890 109.621,349.916 109.634,350.325 109.615 C 350.736 109.596,350.921 109.691,351.064 109.995 C 351.170 110.218,351.183 110.356,351.094 110.301 C 351.006 110.246,350.933 110.278,350.933 110.372 C 350.933 110.470,351.776 110.552,352.902 110.565 C 354.225 110.579,354.833 110.526,354.756 110.401 C 354.694 110.300,354.555 110.270,354.449 110.336 C 354.325 
110.412,354.308 110.371,354.402 110.219 C 354.507 110.049,354.467 110.015,354.257 110.095 C 354.098 110.157,353.924 110.138,353.872 110.053 C 353.820 109.969,353.598 109.948,353.378 110.007 C 352.979 110.113,352.979 110.114,353.422 110.239 C 353.838 110.356,353.830 110.366,353.299 110.382 C 352.466 110.408,352.452 110.153,353.263 109.742 C 353.779 109.479,353.900 109.350,353.699 109.273 C 353.547 109.215,353.422 109.249,353.422 109.349 C 353.422 109.450,353.330 109.475,353.217 109.405 C 353.100 109.333,353.060 109.356,353.123 109.458 C 353.183 109.556,353.031 109.729,352.784 109.841 C 352.523 109.960,352.387 110.130,352.459 110.246 C 352.534 110.367,352.485 110.414,352.336 110.364 C 352.200 110.319,352.085 110.168,352.081 110.030 C 352.076 109.891,351.984 109.795,351.875 109.817 C 351.767 109.838,351.633 109.738,351.578 109.594 C 351.523 109.451,351.555 109.333,351.650 109.333 C 351.745 109.333,351.822 109.253,351.822 109.156 C 351.822 108.918,348.501 108.921,348.354 109.158 C 348.292 109.258,348.155 109.285,348.049 109.220 C 347.934 109.149,347.907 109.183,347.983 109.306 C 348.053 109.419,348.020 109.511,347.911 109.511 C 347.802 109.511,347.763 109.593,347.825 109.693 C 347.887 109.793,347.857 109.853,347.759 109.827 C 347.661 109.800,347.449 109.938,347.288 110.133 C 347.127 110.329,347.051 110.381,347.119 110.249 C 347.220 110.054,347.170 110.048,346.851 110.219 C 346.635 110.334,346.412 110.382,346.355 110.326 C 346.298 110.269,346.342 110.222,346.453 110.222 C 346.779 110.222,347.090 109.507,346.863 109.281 C 346.580 108.997,345.244 108.990,345.244 109.272 C 345.244 109.402,345.359 109.456,345.511 109.398 C 345.682 109.332,345.778 109.401,345.778 109.591 C 345.778 109.754,345.857 109.839,345.954 109.779 C 346.059 109.714,346.081 109.796,346.010 109.982 C 345.932 110.184,345.865 110.216,345.817 110.072 C 345.758 109.897,345.682 109.898,345.449 110.076 C 345.288 110.199,345.212 110.233,345.280 110.151 C 345.349 110.069,345.292 109.889,345.155 109.751 C 345.017 
109.614,344.949 109.383,345.004 109.239 C 345.145 108.872,345.014 108.914,344.762 109.318 M347.556 109.136 C 347.556 109.256,347.476 109.305,347.378 109.244 C 347.280 109.184,347.200 109.276,347.200 109.450 C 347.200 109.746,347.218 109.747,347.501 109.465 C 347.666 109.299,347.746 109.109,347.678 109.041 C 347.611 108.974,347.556 109.017,347.556 109.136 M354.025 109.556 C 354.011 109.911,354.054 109.999,354.161 109.837 C 354.287 109.647,354.354 109.662,354.501 109.911 C 354.728 110.297,355.022 110.316,355.022 109.945 C 355.022 109.779,354.939 109.719,354.814 109.796 C 354.673 109.884,354.644 109.827,354.723 109.620 C 354.793 109.439,354.769 109.359,354.665 109.423 C 354.568 109.483,354.489 109.421,354.489 109.285 C 354.489 108.791,354.044 109.051,354.025 109.556 M356.334 109.191 C 356.254 109.319,356.294 109.338,356.447 109.243 C 356.613 109.140,356.652 109.176,356.580 109.363 C 356.520 109.522,356.579 109.630,356.728 109.630 C 356.865 109.630,356.978 109.532,356.978 109.412 C 356.978 109.115,356.485 108.946,356.334 109.191 M369.540 109.256 C 369.662 109.574,369.889 109.585,370.006 109.281 C 370.057 109.148,369.957 109.040,369.768 109.026 C 369.553 109.010,369.476 109.088,369.540 109.256 M372.829 109.097 C 373.295 109.221,373.345 109.626,372.955 110.122 L 372.596 110.578 373.417 110.578 C 374.014 110.578,374.344 110.474,374.630 110.198 L 375.022 109.818 374.599 109.921 C 374.182 110.022,374.183 110.016,374.677 109.501 C 374.965 109.199,375.078 108.978,374.942 108.978 C 374.812 108.978,374.609 109.110,374.491 109.271 C 374.321 109.504,374.247 109.517,374.133 109.333 C 374.038 109.179,374.059 109.144,374.195 109.227 C 374.308 109.297,374.400 109.270,374.400 109.166 C 374.400 109.062,374.271 108.978,374.113 108.978 C 373.921 108.978,373.854 109.087,373.910 109.304 C 373.962 109.502,373.901 109.630,373.753 109.630 C 373.620 109.630,373.511 109.483,373.511 109.304 C 373.511 109.062,373.385 108.983,373.022 108.998 C 372.751 109.010,372.665 109.054,372.829 109.097 
M319.111 109.310 C 319.111 109.395,318.391 109.479,317.511 109.497 C 316.491 109.517,315.911 109.461,315.911 109.342 C 315.911 109.226,316.512 109.156,317.511 109.156 C 318.391 109.156,319.111 109.225,319.111 109.310 M349.156 109.363 C 349.156 109.477,349.103 109.518,349.039 109.453 C 348.974 109.389,348.814 109.426,348.683 109.534 C 348.526 109.664,348.444 109.669,348.444 109.548 C 348.444 109.376,348.710 109.202,349.022 109.170 C 349.096 109.162,349.156 109.249,349.156 109.363 M351.420 109.540 C 351.436 109.751,351.553 109.990,351.680 110.070 C 351.849 110.177,351.853 110.218,351.695 110.219 C 351.473 110.222,351.111 109.681,351.111 109.345 C 351.111 108.982,351.391 109.158,351.420 109.540 M72.912 110.105 C 72.803 110.236,72.767 110.396,72.831 110.461 C 72.896 110.525,72.863 110.578,72.760 110.578 C 72.537 110.578,72.333 110.126,72.486 109.973 C 72.722 109.737,73.116 109.860,72.912 110.105 M349.689 110.133 C 349.689 110.311,349.867 110.400,350.222 110.400 C 350.578 110.400,350.756 110.311,350.756 110.133 C 350.756 109.956,350.578 109.867,350.222 109.867 C 349.867 109.867,349.689 109.956,349.689 110.133 M373.494 110.374 C 373.232 110.464,373.098 110.281,373.249 110.037 C 373.344 109.883,373.417 109.891,373.528 110.070 C 373.616 110.212,373.602 110.337,373.494 110.374 M71.684 112.787 C 71.704 113.025,71.788 113.176,71.872 113.125 C 71.956 113.073,71.983 112.924,71.933 112.794 C 71.883 112.663,71.938 112.517,72.054 112.470 C 72.171 112.422,72.128 112.377,71.958 112.370 C 71.736 112.360,71.659 112.478,71.684 112.787 M28.161 112.916 C 28.082 113.044,28.112 113.073,28.239 112.995 C 28.352 112.925,28.444 112.948,28.444 113.046 C 28.444 113.143,28.364 113.273,28.267 113.333 C 28.032 113.479,28.039 113.778,28.277 113.778 C 28.381 113.778,28.416 113.698,28.356 113.600 C 28.295 113.502,28.330 113.422,28.434 113.422 C 28.689 113.422,28.671 113.644,28.381 114.039 C 28.236 114.238,28.026 114.325,27.854 114.259 C 27.696 114.198,27.518 114.227,27.459 114.323 C 27.400 
114.419,27.457 114.476,27.587 114.449 C 27.716 114.422,27.800 114.506,27.773 114.635 C 27.747 114.765,27.807 114.820,27.907 114.758 C 28.081 114.651,28.163 114.986,28.103 115.556 C 28.078 115.781,28.100 115.784,28.243 115.571 C 28.337 115.432,28.388 115.189,28.357 115.029 C 28.324 114.857,28.426 114.730,28.610 114.716 C 28.780 114.704,28.919 114.804,28.919 114.939 C 28.919 115.074,28.832 115.133,28.726 115.069 C 28.620 115.006,28.647 115.094,28.785 115.264 C 29.369 115.984,29.605 115.931,29.643 115.075 C 29.685 114.137,29.482 113.671,28.962 113.506 C 28.705 113.425,28.617 113.283,28.678 113.050 C 28.776 112.676,28.373 112.572,28.161 112.916 M59.240 113.054 C 59.267 113.159,59.089 113.347,58.844 113.472 C 58.217 113.791,58.076 116.040,58.661 116.402 C 58.860 116.525,59.022 116.549,59.022 116.455 C 59.022 116.361,58.882 116.203,58.711 116.103 C 58.409 115.927,58.712 115.806,59.244 115.890 C 59.318 115.902,59.378 115.794,59.378 115.650 C 59.378 115.480,59.275 115.428,59.082 115.502 C 58.896 115.574,58.830 115.545,58.903 115.427 C 58.968 115.322,58.901 115.191,58.755 115.135 C 58.608 115.079,58.490 114.931,58.492 114.806 C 58.494 114.646,58.546 114.658,58.667 114.844 C 58.829 115.095,58.916 114.977,58.863 114.578 C 58.850 114.480,58.961 114.556,59.110 114.747 C 59.369 115.079,59.395 115.081,59.691 114.781 C 59.985 114.483,59.984 114.455,59.684 114.232 C 59.410 114.030,59.404 113.983,59.639 113.893 C 59.789 113.836,59.911 113.871,59.911 113.972 C 59.911 114.072,60.001 114.099,60.112 114.030 C 60.249 113.946,60.220 113.855,60.021 113.744 C 59.861 113.654,59.655 113.627,59.563 113.684 C 59.472 113.741,59.427 113.627,59.465 113.431 C 59.503 113.235,59.457 113.027,59.362 112.968 C 59.268 112.910,59.213 112.949,59.240 113.054 M72.724 113.649 C 72.702 113.861,72.459 113.820,72.382 113.591 C 72.343 113.474,72.407 113.410,72.524 113.449 C 72.642 113.488,72.732 113.578,72.724 113.649 M15.741 116.076 C 15.656 116.213,15.698 116.369,15.844 116.462 C 15.992 116.556,16.008 
116.618,15.885 116.619 C 15.752 116.621,15.682 117.013,15.686 117.733 C 15.692 119.078,15.762 119.289,16.197 119.289 C 16.383 119.289,16.533 119.413,16.533 119.566 C 16.533 119.740,16.616 119.794,16.756 119.710 C 16.903 119.621,16.894 119.679,16.730 119.881 C 16.594 120.049,16.545 120.244,16.621 120.315 C 16.697 120.386,16.796 120.344,16.840 120.221 C 16.883 120.098,16.992 120.042,17.080 120.097 C 17.168 120.152,17.193 120.072,17.134 119.920 C 17.076 119.769,17.109 119.644,17.208 119.644 C 17.307 119.644,17.473 119.890,17.578 120.191 C 17.731 120.630,18.133 120.798,18.133 120.423 C 18.133 120.375,18.062 120.378,17.976 120.432 C 17.889 120.486,17.770 120.350,17.712 120.131 C 17.562 119.575,17.575 119.252,17.741 119.355 C 17.818 119.403,17.892 119.302,17.905 119.131 C 17.924 118.886,17.800 118.827,17.320 118.851 C 16.985 118.868,16.627 118.779,16.523 118.655 C 16.379 118.480,16.419 118.450,16.701 118.524 C 16.917 118.580,17.067 118.534,17.067 118.410 C 17.067 118.295,17.007 118.236,16.933 118.278 C 16.860 118.321,16.640 118.291,16.444 118.212 C 16.103 118.075,16.104 118.069,16.478 118.057 C 16.930 118.042,17.233 117.689,16.794 117.689 C 16.632 117.689,16.458 117.529,16.407 117.333 C 16.356 117.138,16.235 116.978,16.138 116.978 C 16.041 116.978,15.999 116.878,16.045 116.756 C 16.180 116.396,15.913 115.799,15.741 116.076 M28.356 116.089 C 28.014 116.235,28.016 116.242,28.394 116.254 C 28.687 116.263,28.763 116.347,28.689 116.578 C 28.610 116.824,28.626 116.837,28.767 116.638 C 28.902 116.448,28.877 115.854,28.737 115.924 C 28.723 115.930,28.551 116.005,28.356 116.089 M29.430 116.254 C 29.374 116.345,29.420 116.477,29.532 116.546 C 29.670 116.632,29.598 116.778,29.314 116.992 C 28.931 117.281,28.921 117.323,29.206 117.432 C 29.508 117.548,29.508 117.564,29.205 117.793 C 28.915 118.013,28.929 118.034,29.378 118.039 C 29.848 118.044,29.940 118.190,29.914 118.889 C 29.910 119.011,30.017 119.111,30.153 119.111 C 30.289 119.111,30.400 119.018,30.400 118.904 C 30.400 
118.790,30.451 118.748,30.514 118.811 C 30.577 118.873,30.526 119.049,30.400 119.200 C 30.208 119.431,30.218 119.457,30.464 119.363 C 30.624 119.301,30.761 119.159,30.768 119.048 C 30.775 118.936,30.849 119.004,30.933 119.200 C 31.079 119.539,31.087 119.537,31.099 119.156 C 31.105 118.936,31.197 118.756,31.301 118.756 C 31.406 118.756,31.466 118.576,31.435 118.356 C 31.398 118.101,31.255 117.955,31.040 117.955 C 30.708 117.954,30.365 117.382,30.528 117.101 C 30.875 116.504,29.792 115.669,29.430 116.254 M57.586 116.163 C 57.323 116.450,57.510 116.776,57.791 116.521 C 57.979 116.351,58.133 116.197,58.133 116.180 C 58.133 116.149,57.614 116.133,57.586 116.163 M71.114 116.449 C 71.277 116.754,71.644 116.798,71.644 116.512 C 71.644 116.463,71.564 116.473,71.467 116.533 C 71.369 116.594,71.289 116.518,71.289 116.366 C 71.289 116.214,71.206 116.089,71.105 116.089 C 70.981 116.089,70.984 116.207,71.114 116.449 M71.674 116.991 C 71.496 117.474,71.357 117.495,71.058 117.085 C 70.768 116.689,70.721 117.202,71.005 117.661 C 71.257 118.070,71.252 118.946,70.998 118.735 C 70.876 118.634,70.824 118.428,70.882 118.277 C 70.970 118.047,70.926 118.036,70.605 118.208 C 70.394 118.320,70.222 118.451,70.222 118.498 C 70.222 118.629,70.638 119.342,70.916 119.689 C 71.085 119.898,71.099 120.000,70.961 120.000 C 70.848 120.000,70.754 119.900,70.753 119.778 C 70.751 119.627,70.689 119.641,70.558 119.822 C 70.396 120.047,70.382 120.040,70.467 119.778 C 70.522 119.607,70.489 119.467,70.394 119.467 C 70.300 119.467,70.222 119.631,70.222 119.833 C 70.222 120.037,70.142 120.149,70.040 120.086 C 69.815 119.947,69.892 120.398,70.121 120.564 C 70.455 120.805,70.357 121.422,69.984 121.422 C 69.646 121.422,69.650 121.446,70.058 121.881 C 70.295 122.134,70.409 122.314,70.311 122.281 C 70.213 122.249,70.133 122.302,70.133 122.400 C 70.133 122.498,70.207 122.553,70.297 122.523 C 70.387 122.493,70.368 122.580,70.255 122.716 C 70.097 122.906,70.105 122.963,70.290 122.963 C 70.464 122.963,70.527 
122.681,70.522 121.928 C 70.517 121.359,70.568 120.839,70.635 120.773 C 70.701 120.706,70.756 120.785,70.756 120.948 C 70.756 121.126,70.898 121.244,71.111 121.244 C 71.348 121.244,71.467 121.126,71.467 120.889 C 71.467 120.685,71.348 120.533,71.190 120.533 C 71.037 120.533,70.963 120.452,71.024 120.353 C 71.085 120.253,71.190 120.205,71.257 120.246 C 71.323 120.286,71.393 120.039,71.411 119.695 C 71.458 118.813,71.581 118.578,71.994 118.578 C 72.416 118.578,72.509 117.748,72.133 117.343 C 71.980 117.177,71.977 117.140,72.123 117.222 C 72.258 117.298,72.324 117.212,72.307 116.982 C 72.272 116.512,71.849 116.519,71.674 116.991 M16.651 117.233 C 16.586 117.403,16.666 117.479,16.890 117.462 C 17.144 117.443,17.180 117.380,17.036 117.207 C 16.797 116.919,16.771 116.921,16.651 117.233 M57.772 117.289 C 57.764 117.720,57.262 118.354,57.104 118.132 C 56.808 117.718,56.491 117.853,56.566 118.362 C 56.631 118.805,56.590 118.861,56.252 118.796 C 55.811 118.712,55.659 118.884,55.606 119.527 C 55.586 119.773,55.515 119.941,55.449 119.900 C 55.268 119.789,54.916 120.279,55.068 120.431 C 55.141 120.504,55.120 120.547,55.022 120.527 C 54.646 120.451,54.384 120.541,54.488 120.709 C 54.552 120.814,54.472 120.837,54.289 120.767 C 54.052 120.676,54.009 120.718,54.100 120.956 C 54.165 121.125,54.146 121.218,54.058 121.164 C 53.969 121.109,53.864 121.165,53.823 121.287 C 53.782 121.409,53.595 121.510,53.408 121.510 C 53.135 121.511,53.073 121.626,53.100 122.089 C 53.119 122.407,53.054 122.667,52.956 122.667 C 52.859 122.667,52.828 122.587,52.889 122.489 C 52.950 122.390,52.815 122.311,52.584 122.311 C 52.239 122.311,52.139 122.445,51.984 123.111 C 51.813 123.847,51.926 124.236,52.145 123.666 C 52.205 123.511,52.437 123.444,52.782 123.484 C 53.177 123.530,53.375 123.455,53.507 123.208 C 53.726 122.798,53.745 122.433,53.541 122.559 C 53.459 122.610,53.393 122.541,53.393 122.407 C 53.393 122.204,53.456 122.206,53.778 122.417 C 54.128 122.646,54.212 122.626,54.682 122.203 C 54.967 
121.946,55.117 121.780,55.016 121.833 C 54.915 121.886,54.832 121.780,54.832 121.597 C 54.832 121.334,54.883 121.307,55.078 121.468 C 55.382 121.721,56.000 121.828,56.000 121.629 C 56.000 121.340,55.762 121.251,55.523 121.450 C 55.324 121.614,55.287 121.547,55.322 121.088 C 55.352 120.695,55.439 120.553,55.608 120.618 C 55.742 120.669,55.945 120.712,56.059 120.714 C 56.192 120.716,56.180 120.771,56.025 120.869 C 55.857 120.976,55.844 121.059,55.981 121.144 C 56.089 121.211,56.183 121.201,56.190 121.122 C 56.197 121.042,56.232 120.749,56.269 120.469 C 56.327 120.029,56.306 120.000,56.117 120.258 C 55.915 120.535,55.898 120.533,55.898 120.231 C 55.898 119.972,56.037 119.905,56.571 119.905 C 57.237 119.905,57.453 120.064,57.022 120.238 C 56.900 120.287,56.985 120.334,57.210 120.342 C 57.436 120.349,57.577 120.285,57.524 120.198 C 57.403 120.002,57.856 119.604,58.016 119.765 C 58.081 119.829,58.133 119.779,58.133 119.653 C 58.133 119.507,57.977 119.455,57.694 119.509 C 57.453 119.555,57.298 119.525,57.349 119.441 C 57.401 119.357,57.371 119.289,57.283 119.289 C 57.196 119.289,57.030 119.084,56.916 118.833 C 56.802 118.582,56.786 118.425,56.880 118.484 C 56.975 118.542,57.226 118.487,57.438 118.362 C 57.745 118.180,57.841 118.179,57.905 118.356 C 58.042 118.735,58.465 118.614,58.363 118.224 C 58.312 118.030,58.205 117.911,58.125 117.960 C 58.046 118.010,57.922 117.957,57.851 117.842 C 57.778 117.723,57.799 117.681,57.902 117.745 C 58.012 117.813,58.049 117.685,57.998 117.417 C 57.901 116.907,57.780 116.839,57.772 117.289 M58.535 117.206 C 58.559 117.325,58.481 117.403,58.362 117.380 C 58.006 117.310,58.307 117.824,58.679 117.921 C 59.103 118.032,59.177 117.253,58.758 117.092 C 58.605 117.033,58.511 117.082,58.535 117.206 M31.451 118.947 C 31.258 119.069,31.254 119.106,31.433 119.108 C 31.561 119.110,31.619 119.186,31.563 119.277 C 31.507 119.368,31.542 119.492,31.642 119.554 C 31.741 119.615,31.825 119.561,31.828 119.433 C 31.831 119.298,31.915 119.340,32.026 119.532 C 
32.167 119.776,32.164 119.899,32.012 119.992 C 31.863 120.085,31.881 120.183,32.079 120.347 C 32.229 120.472,32.308 120.644,32.255 120.730 C 32.202 120.816,32.240 121.027,32.341 121.199 C 32.441 121.370,32.525 121.424,32.528 121.319 C 32.531 121.213,32.657 121.126,32.808 121.126 C 32.971 121.126,33.043 121.228,32.985 121.378 C 32.932 121.516,32.889 121.655,32.889 121.687 C 32.889 121.719,32.729 121.703,32.533 121.652 C 32.321 121.596,32.178 121.646,32.178 121.776 C 32.178 121.896,32.295 121.949,32.439 121.893 C 32.582 121.838,32.747 121.870,32.805 121.963 C 32.863 122.057,32.812 122.133,32.692 122.133 C 32.572 122.133,32.528 122.187,32.593 122.253 C 32.659 122.318,32.893 122.202,33.113 121.995 C 33.694 121.449,34.198 121.586,34.332 122.327 C 34.393 122.661,34.453 122.791,34.465 122.616 C 34.484 122.363,34.556 122.335,34.821 122.476 C 35.004 122.574,35.217 122.597,35.294 122.527 C 35.371 122.457,35.381 122.549,35.315 122.731 C 35.230 122.969,35.265 123.035,35.441 122.968 C 35.639 122.892,35.648 122.954,35.489 123.291 C 35.297 123.698,35.111 123.719,35.111 123.333 C 35.111 123.227,35.011 123.107,34.889 123.067 C 34.578 122.963,34.612 123.297,34.946 123.632 C 35.268 123.953,35.556 123.996,35.556 123.723 C 35.556 123.619,35.656 123.595,35.778 123.668 C 35.929 123.759,35.922 123.705,35.756 123.501 C 35.609 123.320,35.588 123.200,35.703 123.200 C 35.808 123.200,35.973 123.340,36.070 123.511 C 36.222 123.778,36.301 123.789,36.623 123.592 C 36.979 123.374,36.988 123.383,36.782 123.770 C 36.577 124.153,36.584 124.161,36.891 123.911 C 37.195 123.663,37.209 123.675,37.100 124.089 C 37.035 124.333,36.982 124.633,36.980 124.756 C 36.979 124.878,36.900 124.978,36.806 124.978 C 36.711 124.978,36.676 124.866,36.729 124.729 C 36.781 124.592,36.719 124.419,36.590 124.345 C 36.461 124.271,36.413 124.273,36.483 124.350 C 36.553 124.428,36.529 124.642,36.431 124.826 C 36.297 125.076,36.309 125.149,36.481 125.114 C 36.608 125.088,36.689 125.173,36.662 125.302 C 36.635 125.432,36.690 
125.490,36.783 125.433 C 36.877 125.375,37.001 125.405,37.060 125.500 C 37.119 125.596,37.286 125.628,37.432 125.571 C 37.637 125.493,37.651 125.413,37.494 125.224 C 37.382 125.088,37.356 124.978,37.436 124.978 C 37.516 124.978,37.488 124.884,37.373 124.769 C 37.214 124.609,37.213 124.500,37.372 124.309 C 37.940 123.625,37.462 123.084,36.419 123.230 C 36.078 123.278,35.990 123.200,35.945 122.811 C 35.902 122.441,35.781 122.320,35.411 122.277 C 35.130 122.245,34.933 122.113,34.933 121.956 C 34.933 121.808,34.675 121.613,34.356 121.519 C 34.038 121.425,33.778 121.259,33.778 121.149 C 33.778 121.039,33.578 120.842,33.333 120.711 C 33.089 120.580,32.889 120.386,32.889 120.279 C 32.889 120.173,32.729 119.986,32.533 119.863 C 32.338 119.741,32.178 119.556,32.178 119.452 C 32.178 119.101,31.716 118.779,31.451 118.947 M57.230 118.978 C 57.480 119.308,57.956 119.402,57.956 119.122 C 57.956 119.030,57.863 118.897,57.750 118.827 C 57.623 118.749,57.593 118.779,57.672 118.906 C 57.841 119.179,57.557 119.168,57.244 118.889 C 57.032 118.700,57.030 118.713,57.230 118.978 M30.756 120.125 C 30.756 120.506,30.822 120.562,31.191 120.492 C 31.464 120.439,31.663 120.502,31.723 120.660 C 31.776 120.798,31.912 120.854,32.026 120.784 C 32.326 120.599,31.686 120.174,31.316 120.312 C 31.061 120.408,31.054 120.383,31.266 120.122 C 31.400 119.957,31.480 119.816,31.444 119.809 C 30.800 119.676,30.756 119.697,30.756 120.125 M16.730 120.518 C 16.402 120.726,16.586 121.170,17.051 121.292 C 17.528 121.417,17.679 121.171,17.337 120.829 C 17.135 120.627,17.124 120.540,17.290 120.473 C 17.439 120.413,17.424 120.380,17.244 120.372 C 17.098 120.366,16.866 120.432,16.730 120.518 M56.639 120.683 C 56.578 120.783,56.614 120.918,56.720 120.983 C 56.993 121.152,57.170 120.921,56.942 120.693 C 56.814 120.565,56.714 120.562,56.639 120.683 M30.977 120.933 C 31.017 121.056,31.243 121.163,31.480 121.173 C 31.881 121.189,31.883 121.185,31.508 121.116 C 31.286 121.076,31.151 120.968,31.207 120.877 C 31.264 
120.786,31.218 120.711,31.107 120.711 C 30.995 120.711,30.937 120.811,30.977 120.933 M69.270 120.860 C 69.234 120.925,69.220 121.238,69.238 121.556 C 69.256 121.873,69.218 122.133,69.154 122.133 C 69.090 122.133,69.037 122.236,69.037 122.362 C 69.037 122.576,69.673 122.703,69.702 122.495 C 69.710 122.443,69.742 122.266,69.774 122.101 C 69.808 121.922,69.744 121.836,69.612 121.887 C 69.483 121.936,69.401 121.838,69.415 121.649 C 69.451 121.167,69.687 120.868,69.923 121.003 C 70.039 121.070,70.071 121.055,69.995 120.972 C 69.818 120.778,69.355 120.706,69.270 120.860 M17.700 121.084 C 17.746 121.159,17.702 121.270,17.603 121.332 C 17.347 121.490,17.371 123.022,17.630 123.022 C 17.744 123.022,17.789 123.071,17.729 123.130 C 17.593 123.266,18.001 123.722,18.169 123.621 C 18.237 123.579,18.378 123.703,18.480 123.895 C 18.605 124.127,18.608 124.282,18.489 124.355 C 18.198 124.535,18.286 124.780,18.633 124.754 C 18.872 124.736,18.951 124.590,18.939 124.187 C 18.929 123.861,18.836 123.662,18.706 123.689 C 18.586 123.713,18.489 123.616,18.489 123.472 C 18.489 123.312,18.385 123.251,18.220 123.314 C 18.017 123.392,17.970 123.316,18.029 122.998 C 18.072 122.767,18.113 122.518,18.120 122.444 C 18.127 122.371,18.273 122.312,18.444 122.314 C 18.683 122.316,18.700 122.352,18.518 122.467 C 18.330 122.586,18.345 122.642,18.588 122.735 C 18.757 122.800,18.937 122.811,18.988 122.760 C 19.040 122.709,18.978 122.667,18.852 122.667 C 18.671 122.667,18.674 122.604,18.867 122.366 C 19.006 122.195,19.021 122.119,18.901 122.191 C 18.786 122.260,18.641 122.236,18.580 122.136 C 18.518 122.037,18.561 121.956,18.675 121.956 C 18.789 121.956,18.838 121.841,18.785 121.700 C 18.731 121.560,18.782 121.406,18.899 121.359 C 19.054 121.296,19.050 121.269,18.884 121.258 C 18.760 121.251,18.579 121.451,18.483 121.703 C 18.293 122.205,17.903 122.202,17.805 121.698 C 17.771 121.523,17.814 121.424,17.900 121.477 C 17.987 121.530,18.057 121.433,18.057 121.261 C 18.057 121.089,17.958 120.948,17.836 120.948 C 
17.715 120.948,17.653 121.009,17.700 121.084 M54.853 122.480 C 54.489 122.860,54.415 123.022,54.606 123.022 C 54.944 123.022,55.526 122.401,55.438 122.135 C 55.402 122.027,55.138 122.182,54.853 122.480 M68.116 122.480 C 68.155 122.597,68.245 122.687,68.316 122.680 C 68.528 122.658,68.487 122.414,68.258 122.338 C 68.140 122.299,68.076 122.363,68.116 122.480 M53.384 123.147 C 53.311 123.337,53.242 123.370,53.197 123.234 C 53.157 123.116,53.040 123.073,52.935 123.137 C 52.823 123.207,52.797 123.171,52.872 123.050 C 53.078 122.717,53.517 122.800,53.384 123.147 M50.430 123.319 C 50.186 123.562,50.305 124.129,50.578 124.025 C 50.724 123.968,50.844 124.005,50.844 124.105 C 50.844 124.209,50.611 124.251,50.305 124.203 C 49.773 124.119,49.585 125.235,50.059 125.661 C 50.131 125.725,50.140 125.639,50.080 125.470 C 49.963 125.145,50.634 125.079,50.864 125.392 C 50.990 125.565,51.911 124.762,51.911 124.479 C 51.911 124.374,51.990 124.239,52.087 124.179 C 52.185 124.118,52.219 124.184,52.164 124.328 C 52.025 124.690,52.309 124.813,52.587 124.513 C 52.778 124.306,52.770 124.244,52.543 124.157 C 52.392 124.099,52.320 123.960,52.383 123.848 C 52.452 123.725,52.402 123.732,52.257 123.867 C 51.843 124.251,51.537 124.137,51.660 123.644 C 51.771 123.204,50.801 122.947,50.430 123.319 M51.212 124.148 C 51.064 124.296,51.033 124.233,51.093 123.907 C 51.208 123.278,51.231 123.250,51.326 123.619 C 51.373 123.800,51.321 124.039,51.212 124.148 M69.511 123.930 C 69.511 124.038,69.394 124.082,69.250 124.027 C 69.107 123.972,68.939 124.007,68.879 124.105 C 68.807 124.221,68.679 124.209,68.513 124.072 C 68.373 123.956,68.209 123.910,68.148 123.970 C 68.088 124.031,68.166 124.130,68.323 124.190 C 68.579 124.288,68.581 124.342,68.345 124.703 C 68.200 124.924,68.134 125.245,68.199 125.414 C 68.282 125.629,68.160 125.856,67.803 126.154 L 67.289 126.585 67.986 126.530 C 68.691 126.474,69.122 125.731,68.627 125.425 C 68.526 125.363,68.450 125.137,68.457 124.923 C 68.469 124.547,68.475 124.546,68.622 
124.889 C 68.706 125.084,68.781 125.144,68.788 125.022 C 68.794 124.900,69.000 124.800,69.244 124.800 C 69.620 124.800,69.689 124.717,69.689 124.267 C 69.689 123.973,69.649 123.733,69.600 123.733 C 69.551 123.733,69.511 123.822,69.511 123.930 M37.524 124.340 C 37.379 124.514,37.374 124.626,37.505 124.708 C 37.609 124.772,37.660 124.880,37.619 124.948 C 37.577 125.015,37.664 125.230,37.813 125.424 C 38.069 125.760,38.079 125.748,38.005 125.198 C 37.940 124.713,37.978 124.634,38.232 124.714 C 38.420 124.774,38.581 124.693,38.655 124.499 C 38.750 124.251,38.805 124.237,38.923 124.428 C 39.005 124.560,39.162 124.612,39.272 124.544 C 39.577 124.355,39.321 124.089,38.834 124.089 C 38.571 124.089,38.400 124.192,38.400 124.350 C 38.400 124.515,38.297 124.571,38.118 124.503 C 37.892 124.416,37.883 124.366,38.074 124.245 C 38.259 124.127,38.248 124.094,38.022 124.092 C 37.863 124.090,37.639 124.202,37.524 124.340 M48.501 124.489 C 48.465 124.746,48.302 124.909,48.044 124.946 C 47.824 124.977,47.644 125.069,47.644 125.151 C 47.644 125.233,47.824 125.261,48.044 125.212 C 48.272 125.163,48.368 125.189,48.267 125.273 C 47.963 125.528,48.016 126.044,48.347 126.044 C 48.649 126.044,49.171 125.201,48.984 125.014 C 48.932 124.962,48.889 125.017,48.889 125.136 C 48.889 125.256,48.800 125.299,48.692 125.232 C 48.568 125.156,48.717 124.921,49.092 124.600 L 49.689 124.090 49.123 124.089 C 48.672 124.089,48.547 124.170,48.501 124.489 M51.556 124.444 C 51.409 124.539,51.359 124.618,51.444 124.619 C 51.649 124.623,51.204 125.156,50.996 125.156 C 50.908 125.156,50.967 124.956,51.127 124.711 C 51.288 124.467,51.509 124.268,51.620 124.269 C 51.731 124.271,51.702 124.350,51.556 124.444 M50.933 124.622 C 50.994 124.720,50.914 124.800,50.756 124.800 C 50.597 124.800,50.517 124.880,50.578 124.978 C 50.763 125.278,50.370 125.174,50.111 124.855 C 49.972 124.683,49.957 124.608,50.077 124.680 C 50.192 124.749,50.337 124.724,50.398 124.625 C 50.541 124.395,50.792 124.393,50.933 124.622 M19.733 124.807 
C 19.733 124.913,19.813 124.949,19.911 124.889 C 20.009 124.828,20.089 124.899,20.089 125.046 C 20.089 125.192,20.168 125.361,20.265 125.421 C 20.365 125.483,20.395 125.412,20.334 125.254 C 20.265 125.072,20.330 124.978,20.525 124.978 C 20.688 124.978,20.772 125.058,20.711 125.156 C 20.651 125.253,20.686 125.333,20.790 125.333 C 21.005 125.333,21.045 125.036,20.844 124.924 C 20.465 124.711,19.733 124.634,19.733 124.807 M67.289 124.800 C 67.349 124.898,67.316 124.978,67.215 124.978 C 67.094 124.978,67.087 125.124,67.193 125.404 C 67.282 125.638,67.307 125.878,67.248 125.937 C 67.189 125.996,67.243 126.044,67.367 126.044 C 67.503 126.044,67.549 125.929,67.483 125.755 C 67.417 125.584,67.499 125.366,67.686 125.221 C 67.985 124.989,67.985 124.982,67.687 125.086 C 67.451 125.168,67.401 125.124,67.484 124.909 C 67.546 124.746,67.504 124.622,67.386 124.622 C 67.272 124.622,67.228 124.702,67.289 124.800 M19.268 125.012 C 19.196 125.128,19.231 125.255,19.346 125.293 C 19.461 125.332,19.556 125.441,19.556 125.536 C 19.556 125.632,19.475 125.660,19.377 125.600 C 19.272 125.535,19.223 125.659,19.258 125.900 C 19.291 126.126,19.491 126.390,19.703 126.487 C 20.045 126.642,20.176 126.915,20.110 127.333 C 20.098 127.407,20.289 127.467,20.533 127.467 C 20.910 127.467,20.978 127.549,20.978 128.008 C 20.978 128.358,21.135 128.696,21.422 128.962 C 21.667 129.189,21.867 129.456,21.867 129.557 C 21.867 129.734,22.374 130.034,22.601 129.991 C 22.663 129.979,22.760 130.200,22.816 130.481 C 22.898 130.889,22.992 130.974,23.280 130.898 C 23.478 130.846,23.602 130.741,23.555 130.665 C 23.507 130.589,23.588 130.480,23.734 130.424 C 24.030 130.311,24.102 129.778,23.822 129.778 C 23.724 129.778,23.644 129.862,23.644 129.964 C 23.644 130.262,23.055 129.861,23.007 129.530 C 22.984 129.372,23.038 129.288,23.127 129.343 C 23.314 129.459,23.349 129.008,23.167 128.826 C 23.038 128.697,22.946 128.000,23.058 128.000 C 23.093 128.000,23.195 128.180,23.283 128.400 C 23.388 128.660,23.448 128.701,23.455 
128.516 C 23.469 128.166,22.840 127.571,22.608 127.715 C 22.514 127.773,22.388 127.692,22.328 127.534 C 22.254 127.342,22.074 127.276,21.780 127.332 C 21.411 127.403,21.347 127.351,21.375 127.010 C 21.394 126.786,21.473 126.643,21.552 126.692 C 21.630 126.740,21.640 126.868,21.574 126.976 C 21.494 127.105,21.562 127.130,21.771 127.049 C 21.946 126.982,22.199 127.018,22.334 127.130 C 22.487 127.257,22.578 127.265,22.578 127.150 C 22.578 127.050,22.358 126.878,22.089 126.767 C 21.208 126.406,20.710 126.049,20.770 125.823 C 20.803 125.700,20.635 125.783,20.397 126.006 C 19.998 126.380,19.289 126.193,19.668 125.814 C 19.728 125.753,19.715 125.561,19.638 125.385 C 19.562 125.210,19.512 125.007,19.527 124.933 C 19.571 124.728,19.414 124.775,19.268 125.012 M38.377 125.378 C 38.191 125.726,38.198 125.749,38.432 125.557 C 38.579 125.436,38.756 125.393,38.826 125.463 C 38.895 125.532,38.899 125.452,38.835 125.283 C 38.683 124.887,38.633 124.897,38.377 125.378 M39.945 125.092 C 39.879 125.158,39.911 125.315,40.016 125.442 C 40.300 125.784,40.037 126.452,39.673 126.313 C 39.429 126.221,39.418 126.242,39.612 126.434 C 39.805 126.626,39.886 126.612,40.076 126.356 C 40.356 125.976,40.533 125.957,40.533 126.306 C 40.533 126.467,40.637 126.527,40.805 126.463 C 41.016 126.381,41.030 126.303,40.868 126.109 C 40.700 125.905,40.724 125.878,40.997 125.965 C 41.308 126.063,41.310 126.056,41.021 125.870 C 40.849 125.760,40.636 125.714,40.549 125.768 C 40.461 125.823,40.435 125.689,40.492 125.471 C 40.549 125.254,40.628 125.174,40.668 125.293 C 40.708 125.413,40.819 125.511,40.914 125.511 C 41.010 125.511,41.028 125.411,40.954 125.289 C 40.876 125.159,40.960 125.178,41.156 125.333 C 41.340 125.480,41.435 125.507,41.366 125.393 C 41.298 125.279,41.343 125.145,41.466 125.096 C 41.588 125.046,41.389 125.000,41.022 124.992 C 40.601 124.983,40.355 125.060,40.353 125.200 C 40.351 125.376,40.320 125.375,40.207 125.197 C 40.129 125.074,40.011 125.026,39.945 125.092 M42.155 125.137 C 42.020 
125.223,41.962 125.378,42.025 125.480 C 42.089 125.583,41.999 125.750,41.826 125.850 C 41.541 126.016,41.540 126.034,41.810 126.039 C 41.975 126.042,42.158 125.892,42.217 125.705 C 42.276 125.519,42.438 125.323,42.575 125.270 C 42.741 125.206,42.793 125.261,42.727 125.431 C 42.673 125.573,42.545 125.689,42.442 125.689 C 42.340 125.689,42.389 125.835,42.551 126.014 C 42.713 126.193,42.798 126.387,42.740 126.446 C 42.682 126.504,42.759 126.533,42.912 126.510 C 43.313 126.451,43.435 126.279,43.184 126.124 C 43.037 126.032,43.013 126.064,43.111 126.222 C 43.206 126.376,43.186 126.412,43.049 126.328 C 42.776 126.159,43.376 125.344,43.687 125.463 C 43.809 125.510,43.869 125.613,43.820 125.692 C 43.771 125.772,43.832 125.884,43.954 125.941 C 44.077 125.999,43.956 126.004,43.685 125.952 C 43.226 125.864,43.207 125.883,43.417 126.220 C 43.542 126.418,43.670 126.500,43.703 126.402 C 43.736 126.303,43.876 126.222,44.015 126.222 C 44.153 126.222,44.267 126.142,44.267 126.044 C 44.267 125.947,44.359 125.867,44.472 125.867 C 44.593 125.867,44.553 126.004,44.376 126.200 C 44.088 126.517,44.095 126.533,44.515 126.533 C 44.758 126.533,45.002 126.414,45.058 126.269 C 45.145 126.044,45.205 126.041,45.458 126.247 C 45.711 126.452,45.734 126.451,45.609 126.244 C 45.507 126.073,45.537 126.026,45.709 126.088 C 45.844 126.137,45.936 126.268,45.911 126.378 C 45.887 126.488,45.991 126.578,46.144 126.578 C 46.310 126.578,46.370 126.495,46.292 126.370 C 46.205 126.229,46.262 126.199,46.469 126.279 C 46.638 126.344,46.729 126.323,46.673 126.232 C 46.617 126.143,46.648 126.022,46.740 125.965 C 46.833 125.908,46.960 125.943,47.022 126.044 C 47.085 126.145,47.211 126.181,47.304 126.124 C 47.397 126.067,47.426 125.944,47.368 125.851 C 47.311 125.759,47.190 125.729,47.099 125.785 C 47.008 125.842,46.933 125.803,46.933 125.699 C 46.933 125.596,47.011 125.511,47.106 125.511 C 47.200 125.511,47.235 125.400,47.183 125.265 C 47.116 125.090,46.762 125.021,45.952 125.027 C 45.326 125.031,44.854 
125.099,44.902 125.176 C 44.950 125.254,45.127 125.269,45.295 125.208 C 45.463 125.148,45.551 125.154,45.491 125.222 C 45.287 125.451,44.622 125.494,44.622 125.279 C 44.622 125.023,42.523 124.902,42.155 125.137 M46.222 125.541 C 46.222 125.634,46.142 125.660,46.044 125.600 C 45.947 125.540,45.867 125.530,45.867 125.579 C 45.867 125.887,46.251 125.820,46.373 125.491 C 46.515 125.107,46.523 125.105,46.614 125.452 C 46.742 125.940,46.228 126.263,45.810 125.957 C 45.457 125.699,45.590 125.129,45.970 125.275 C 46.109 125.328,46.222 125.448,46.222 125.541 M66.360 126.044 C 66.345 126.240,66.408 126.400,66.500 126.400 C 66.592 126.400,66.660 126.540,66.653 126.711 C 66.645 126.882,66.602 126.934,66.558 126.826 C 66.513 126.718,66.280 126.667,66.040 126.713 C 65.800 126.759,65.648 126.867,65.701 126.954 C 65.755 127.040,65.694 127.114,65.566 127.117 C 65.424 127.120,65.456 127.193,65.648 127.305 C 66.049 127.539,65.859 127.872,65.407 127.729 C 65.156 127.649,65.067 127.706,65.069 127.944 C 65.072 128.217,65.098 128.226,65.244 128.000 C 65.348 127.840,65.418 127.818,65.419 127.945 C 65.421 128.062,65.341 128.207,65.242 128.268 C 65.142 128.330,65.104 128.450,65.156 128.535 C 65.209 128.620,65.110 128.772,64.937 128.873 C 64.639 129.046,64.639 129.056,64.948 129.061 C 65.132 129.064,65.239 129.163,65.193 129.289 C 65.148 129.411,65.021 129.491,64.911 129.467 C 64.718 129.424,64.641 129.640,64.695 130.075 C 64.709 130.190,64.638 130.335,64.538 130.397 C 64.438 130.459,64.356 130.625,64.356 130.766 C 64.356 131.039,64.660 131.040,64.980 130.769 C 65.210 130.573,65.292 129.905,65.068 130.044 C 64.983 130.096,64.868 130.065,64.811 129.974 C 64.705 129.802,65.026 129.804,65.333 129.977 C 65.588 130.120,66.083 129.540,66.027 129.163 C 65.953 128.658,66.320 128.216,66.739 128.306 C 67.044 128.372,67.063 128.352,66.844 128.193 C 66.631 128.039,66.624 127.970,66.807 127.852 C 67.075 127.679,67.009 126.684,66.690 126.087 C 66.428 125.599,66.394 125.594,66.360 126.044 M68.622 125.765 C 
68.622 125.918,68.058 126.400,67.879 126.400 C 67.777 126.400,67.883 126.243,68.114 126.050 C 68.559 125.679,68.622 125.644,68.622 125.765 M48.812 126.209 C 48.518 126.534,48.518 126.549,48.814 126.505 C 48.986 126.480,49.146 126.326,49.169 126.163 C 49.225 125.787,49.190 125.791,48.812 126.209 M20.913 126.454 C 20.981 126.522,20.890 126.578,20.711 126.578 C 20.532 126.578,20.445 126.638,20.519 126.711 C 20.592 126.784,20.744 126.844,20.857 126.844 C 20.970 126.844,21.088 126.978,21.119 127.142 C 21.162 127.363,21.084 127.415,20.812 127.344 C 20.612 127.291,20.487 127.187,20.533 127.111 C 20.580 127.035,20.500 126.875,20.356 126.756 C 20.211 126.636,20.132 126.474,20.180 126.396 C 20.280 126.234,20.729 126.270,20.913 126.454 M67.413 126.969 C 67.108 127.274,67.150 127.448,67.520 127.420 C 67.697 127.407,67.852 127.252,67.865 127.076 C 67.893 126.705,67.718 126.664,67.413 126.969 M22.652 128.400 C 22.709 128.376,22.762 128.416,22.771 128.489 C 22.779 128.562,22.869 128.722,22.970 128.844 C 23.260 129.193,22.971 129.104,22.595 128.729 C 22.409 128.543,22.206 128.203,22.143 127.973 C 22.032 127.564,22.035 127.565,22.289 128.000 C 22.432 128.244,22.595 128.424,22.652 128.400 M22.390 128.667 C 22.612 128.859,22.609 128.889,22.374 128.889 C 22.224 128.889,22.003 128.669,21.882 128.400 C 21.698 127.992,21.701 127.955,21.898 128.178 C 22.027 128.324,22.249 128.544,22.390 128.667 M64.459 128.470 C 64.398 128.631,64.389 128.804,64.440 128.855 C 64.491 128.906,64.533 128.857,64.533 128.746 C 64.533 128.634,64.672 128.589,64.844 128.645 C 65.083 128.721,65.099 128.704,64.913 128.572 C 64.709 128.426,64.709 128.384,64.913 128.302 C 65.091 128.232,65.077 128.202,64.863 128.192 C 64.703 128.184,64.521 128.309,64.459 128.470 M63.467 129.689 C 63.374 129.933,63.186 130.133,63.048 130.133 C 62.911 130.133,62.705 130.247,62.590 130.386 C 62.397 130.617,62.646 130.663,63.111 130.481 C 63.160 130.462,63.260 130.456,63.333 130.468 C 63.407 130.479,63.447 130.389,63.422 130.267 C 63.398 
130.144,63.475 130.063,63.594 130.087 C 63.724 130.112,63.771 130.006,63.712 129.820 C 63.630 129.557,63.657 129.562,63.895 129.854 C 64.048 130.042,64.234 130.136,64.308 130.063 C 64.381 129.989,64.300 129.850,64.128 129.754 C 63.956 129.657,63.861 129.503,63.918 129.411 C 63.975 129.320,63.934 129.244,63.828 129.244 C 63.722 129.244,63.560 129.444,63.467 129.689 M24.132 129.627 C 24.253 129.777,24.293 129.965,24.221 130.045 C 24.148 130.125,24.189 130.133,24.311 130.062 C 24.575 129.911,24.596 130.116,24.356 130.489 C 24.230 130.683,24.182 130.692,24.181 130.523 C 24.179 130.395,24.094 130.342,23.991 130.405 C 23.864 130.484,23.871 130.600,24.013 130.771 C 24.178 130.970,24.161 131.022,23.933 131.022 C 23.569 131.022,23.554 131.483,23.913 131.621 C 24.061 131.677,24.208 131.706,24.241 131.684 C 24.372 131.598,24.879 132.139,24.795 132.275 C 24.660 132.494,24.962 132.619,25.230 132.454 C 25.405 132.345,25.326 132.206,24.917 131.904 C 24.616 131.681,24.413 131.428,24.467 131.341 C 24.521 131.253,24.670 131.270,24.800 131.378 C 24.972 131.520,25.070 131.515,25.167 131.359 C 25.251 131.223,25.224 131.191,25.094 131.272 C 24.981 131.342,24.889 131.314,24.889 131.210 C 24.889 131.107,24.961 131.022,25.050 131.022 C 25.139 131.022,25.170 130.863,25.119 130.669 C 25.068 130.474,24.954 130.360,24.866 130.414 C 24.778 130.469,24.748 130.582,24.800 130.667 C 24.852 130.751,24.801 130.878,24.686 130.949 C 24.537 131.041,24.514 130.956,24.606 130.650 C 24.809 129.972,24.814 129.698,24.622 129.690 C 24.524 129.687,24.324 129.609,24.178 129.519 C 23.956 129.382,23.948 129.400,24.132 129.627 M62.857 130.946 C 62.476 131.327,62.502 131.500,62.889 131.157 C 63.060 131.005,63.360 130.842,63.556 130.795 C 63.886 130.714,63.884 130.706,63.524 130.687 C 63.311 130.676,63.011 130.792,62.857 130.946 M61.844 131.145 C 61.642 131.394,61.641 131.421,61.841 131.301 C 62.030 131.187,62.053 131.248,61.949 131.576 C 61.839 131.924,61.859 131.961,62.063 131.792 C 62.265 131.625,62.318 
131.684,62.348 132.104 C 62.369 132.389,62.304 132.622,62.205 132.622 C 62.105 132.622,62.081 132.530,62.150 132.417 C 62.229 132.290,62.199 132.260,62.072 132.339 C 61.959 132.408,61.867 132.390,61.867 132.298 C 61.867 132.206,61.669 132.093,61.428 132.046 C 61.186 132.000,61.027 132.025,61.075 132.102 C 61.122 132.179,61.080 132.292,60.981 132.354 C 60.881 132.415,60.800 132.394,60.800 132.307 C 60.800 132.220,60.680 132.148,60.533 132.148 C 60.295 132.148,60.295 132.181,60.533 132.444 C 60.914 132.866,60.865 133.245,60.356 133.795 C 60.111 134.058,59.911 134.348,59.911 134.439 C 59.911 134.757,60.535 134.483,60.709 134.088 C 60.817 133.841,61.040 133.689,61.292 133.689 C 61.648 133.689,61.695 133.609,61.658 133.067 C 61.617 132.456,61.451 132.267,61.273 132.627 C 61.223 132.727,61.203 132.707,61.228 132.582 C 61.283 132.313,61.792 132.272,61.746 132.540 C 61.728 132.642,61.908 132.741,62.146 132.760 C 62.426 132.783,62.578 132.703,62.578 132.531 C 62.578 132.386,62.674 132.287,62.792 132.311 C 62.909 132.336,63.009 132.211,63.014 132.035 C 63.018 131.859,63.211 131.571,63.443 131.396 C 63.851 131.088,63.853 131.079,63.484 131.107 C 63.276 131.122,63.015 131.225,62.903 131.337 C 62.664 131.576,61.972 131.283,62.139 131.013 C 62.313 130.731,62.104 130.825,61.844 131.145 M64.000 131.184 C 64.000 131.284,63.780 131.484,63.511 131.627 C 63.064 131.866,63.057 131.886,63.422 131.862 C 63.642 131.847,63.822 131.768,63.822 131.685 C 63.822 131.602,63.915 131.592,64.027 131.661 C 64.149 131.736,64.185 131.711,64.115 131.598 C 64.051 131.494,64.094 131.376,64.212 131.337 C 64.379 131.281,64.379 131.236,64.213 131.133 C 64.096 131.061,64.000 131.083,64.000 131.184 M26.271 131.300 C 26.213 131.357,26.023 131.425,25.848 131.451 C 25.673 131.477,25.500 131.611,25.463 131.749 C 25.422 131.901,25.453 131.930,25.542 131.822 C 25.762 131.556,26.534 131.608,26.631 131.895 C 26.688 132.066,26.625 132.111,26.430 132.036 C 26.273 131.976,26.097 132.004,26.038 132.100 C 25.979 
132.195,25.837 132.216,25.721 132.147 C 25.606 132.077,25.631 132.171,25.778 132.356 C 25.924 132.540,25.961 132.640,25.858 132.579 C 25.756 132.518,25.557 132.539,25.416 132.626 C 25.228 132.742,25.277 132.776,25.602 132.755 C 25.845 132.738,26.132 132.611,26.238 132.471 C 26.368 132.300,26.524 132.269,26.714 132.375 C 27.078 132.579,27.115 132.808,26.815 133.006 C 26.626 133.131,26.650 133.225,26.933 133.470 C 27.129 133.639,27.289 133.937,27.289 134.131 C 27.289 134.336,27.430 134.511,27.625 134.549 C 27.894 134.600,27.942 134.536,27.862 134.230 C 27.778 133.911,27.805 133.882,28.018 134.060 C 28.160 134.177,28.243 134.305,28.204 134.344 C 28.074 134.474,29.177 135.467,29.452 135.467 C 29.600 135.467,29.861 135.640,30.033 135.852 C 30.403 136.309,30.933 136.433,30.933 136.063 C 30.933 135.910,30.848 135.854,30.728 135.928 C 30.595 136.011,30.571 135.976,30.661 135.831 C 30.763 135.666,30.700 135.638,30.422 135.726 C 30.198 135.797,30.044 135.769,30.044 135.656 C 30.044 135.552,30.137 135.487,30.250 135.511 C 30.363 135.536,30.471 135.336,30.489 135.067 C 30.508 134.798,30.458 134.578,30.378 134.578 C 30.299 134.578,30.276 134.466,30.329 134.329 C 30.381 134.192,30.340 134.028,30.236 133.964 C 30.125 133.895,30.089 133.953,30.147 134.106 C 30.202 134.248,30.170 134.411,30.077 134.469 C 29.983 134.527,29.865 134.415,29.815 134.220 C 29.693 133.755,28.799 133.717,28.803 134.178 C 28.807 134.636,29.082 135.018,29.233 134.775 C 29.300 134.666,29.445 134.578,29.556 134.578 C 29.666 134.578,29.602 134.693,29.412 134.834 C 29.114 135.054,29.031 135.055,28.810 134.838 C 28.507 134.540,28.381 133.451,28.670 133.630 C 28.796 133.708,28.813 133.633,28.720 133.407 C 28.643 133.220,28.576 132.971,28.571 132.854 C 28.567 132.738,28.476 132.658,28.370 132.677 C 28.264 132.696,28.204 132.591,28.237 132.444 C 28.270 132.298,28.235 132.239,28.159 132.315 C 27.890 132.584,27.960 133.124,28.253 133.036 C 28.461 132.973,28.453 133.003,28.221 133.152 C 27.787 133.431,27.584 
133.250,27.628 132.623 C 27.664 132.099,27.502 131.922,27.294 132.259 C 27.236 132.352,27.069 132.383,26.923 132.327 C 26.719 132.249,26.705 132.169,26.862 131.979 C 27.017 131.792,27.013 131.733,26.844 131.733 C 26.668 131.733,26.668 131.679,26.844 131.467 C 26.997 131.282,27.004 131.200,26.866 131.200 C 26.756 131.200,26.665 131.300,26.664 131.422 C 26.662 131.598,26.631 131.597,26.518 131.420 C 26.440 131.296,26.329 131.242,26.271 131.300 M59.892 132.736 C 59.700 132.969,59.694 133.052,59.867 133.110 C 59.989 133.152,60.089 133.299,60.089 133.437 C 60.089 133.576,60.178 133.689,60.287 133.689 C 60.410 133.689,60.378 133.524,60.203 133.258 C 59.992 132.936,59.976 132.807,60.138 132.749 C 60.258 132.706,60.307 132.618,60.248 132.553 C 60.188 132.489,60.028 132.571,59.892 132.736 M29.096 133.274 C 28.926 133.444,28.950 133.689,29.137 133.689 C 29.224 133.689,29.341 133.569,29.398 133.422 C 29.502 133.151,29.312 133.058,29.096 133.274 M58.133 133.867 C 57.940 133.992,57.930 134.040,58.099 134.042 C 58.230 134.043,58.276 134.135,58.205 134.250 C 58.130 134.371,58.156 134.407,58.268 134.337 C 58.373 134.273,58.494 134.321,58.538 134.443 C 58.582 134.566,58.673 134.616,58.740 134.554 C 58.807 134.493,58.738 134.319,58.588 134.169 C 58.382 133.963,58.369 133.873,58.535 133.806 C 58.656 133.757,58.676 133.712,58.578 133.706 C 58.480 133.699,58.280 133.772,58.133 133.867 M59.200 133.905 C 59.200 133.982,59.325 134.044,59.477 134.044 C 59.649 134.044,59.706 134.129,59.626 134.267 C 59.556 134.389,59.567 134.427,59.651 134.350 C 59.935 134.092,59.870 133.765,59.534 133.765 C 59.350 133.765,59.200 133.828,59.200 133.905 M56.985 134.392 C 56.690 134.700,56.358 134.879,56.153 134.840 C 55.966 134.804,55.856 134.844,55.909 134.930 C 55.962 135.016,55.930 135.133,55.837 135.191 C 55.745 135.248,55.619 135.214,55.558 135.116 C 55.448 134.937,54.917 135.282,55.020 135.467 C 55.164 135.726,54.736 136.383,54.532 136.216 C 54.378 136.091,54.241 136.112,54.082 136.284 C 53.956 
136.421,53.906 136.533,53.970 136.533 C 54.035 136.533,53.992 136.649,53.875 136.789 C 53.759 136.930,53.529 137.003,53.365 136.950 C 53.110 136.869,53.105 136.883,53.333 137.047 C 53.591 137.233,53.567 137.264,53.128 137.311 C 52.898 137.336,52.762 137.911,52.958 138.032 C 53.168 138.162,53.531 137.852,53.591 137.493 C 53.622 137.308,53.745 137.178,53.865 137.206 C 53.984 137.234,54.209 137.154,54.364 137.028 C 54.579 136.854,54.614 136.853,54.513 137.022 C 54.417 137.181,54.535 137.244,54.923 137.244 C 55.343 137.244,55.467 137.166,55.467 136.899 C 55.467 136.447,55.675 136.335,56.542 136.323 C 56.998 136.317,57.233 136.240,57.186 136.112 C 57.106 135.898,57.007 135.897,56.444 136.105 C 56.158 136.211,56.176 136.164,56.533 135.865 C 56.930 135.533,56.945 135.480,56.668 135.372 C 56.497 135.306,56.405 135.175,56.463 135.081 C 56.523 134.983,56.732 135.046,56.951 135.227 C 57.324 135.536,57.326 135.536,57.026 135.204 C 56.539 134.666,57.102 134.080,57.614 134.592 C 57.802 134.780,57.956 135.013,57.956 135.111 C 57.956 135.209,58.056 135.295,58.178 135.302 C 58.300 135.310,58.540 135.343,58.711 135.376 C 58.882 135.408,59.022 135.330,59.022 135.201 C 59.022 135.050,58.879 134.998,58.622 135.054 C 58.338 135.117,58.290 135.094,58.459 134.974 C 58.634 134.849,58.555 134.778,58.151 134.697 C 57.777 134.622,57.638 134.509,57.705 134.334 C 57.891 133.849,57.474 133.882,56.985 134.392 M30.808 134.660 C 30.804 134.762,30.780 134.924,30.755 135.022 C 30.699 135.241,31.149 136.356,31.294 136.356 C 31.352 136.356,31.575 136.192,31.789 135.993 C 32.137 135.669,32.218 135.700,32.160 136.133 C 32.150 136.207,32.064 136.245,31.969 136.218 C 31.874 136.191,31.822 136.351,31.854 136.573 C 31.890 136.831,32.024 136.969,32.222 136.953 C 32.393 136.940,32.533 137.005,32.533 137.099 C 32.533 137.196,32.951 137.236,33.506 137.192 C 34.042 137.150,34.437 137.185,34.384 137.271 C 34.094 137.740,34.502 138.044,35.420 138.044 C 36.346 138.044,36.356 138.038,36.411 137.467 C 36.451 
137.050,36.394 136.889,36.204 136.889 C 36.059 136.889,35.905 136.789,35.861 136.667 C 35.818 136.544,35.740 136.484,35.690 136.533 C 35.639 136.582,35.680 136.822,35.780 137.067 C 36.065 137.755,36.068 137.896,35.802 137.896 C 35.667 137.896,35.556 137.821,35.556 137.729 C 35.556 137.637,35.436 137.608,35.289 137.664 C 35.142 137.721,35.022 137.727,35.022 137.678 C 35.022 137.629,35.179 137.531,35.371 137.460 C 35.629 137.364,35.675 137.262,35.548 137.065 C 35.397 136.830,35.356 136.830,35.183 137.067 C 35.015 137.298,35.001 137.295,35.080 137.044 C 35.225 136.587,34.405 136.287,34.031 136.661 C 33.761 136.931,33.744 136.929,33.803 136.622 C 33.838 136.441,33.907 136.320,33.956 136.354 C 34.094 136.450,34.887 135.984,34.775 135.871 C 34.720 135.816,34.553 135.873,34.404 135.996 C 34.190 136.174,34.133 136.175,34.133 136.000 C 34.133 135.823,34.079 135.823,33.867 136.000 C 33.647 136.182,33.600 136.167,33.600 135.914 C 33.600 135.732,33.504 135.643,33.365 135.697 C 33.120 135.791,32.606 135.154,32.676 134.844 C 32.698 134.747,32.557 134.853,32.363 135.080 C 31.995 135.512,31.587 135.469,31.760 135.017 C 31.823 134.851,31.751 134.756,31.564 134.756 C 31.401 134.756,31.307 134.693,31.354 134.616 C 31.402 134.539,31.300 134.476,31.128 134.476 C 30.956 134.476,30.812 134.559,30.808 134.660 M56.249 135.613 C 56.112 135.645,55.998 135.805,55.995 135.969 C 55.990 136.237,55.971 136.236,55.806 135.952 C 55.705 135.779,55.528 135.686,55.411 135.745 C 55.295 135.804,55.394 135.696,55.630 135.503 C 55.954 135.239,56.114 135.203,56.279 135.354 C 56.452 135.513,56.445 135.568,56.249 135.613 M57.483 135.564 C 57.259 136.040,57.396 136.266,57.705 135.931 C 57.811 135.815,57.851 135.587,57.794 135.424 C 57.706 135.177,57.655 135.200,57.483 135.564 M32.889 135.841 C 32.889 136.026,32.800 136.178,32.692 136.178 C 32.584 136.178,32.538 136.068,32.589 135.933 C 32.641 135.798,32.590 135.593,32.475 135.478 C 32.315 135.317,32.339 135.295,32.578 135.386 C 32.749 135.451,32.889 
135.656,32.889 135.841 M53.308 135.995 C 53.036 136.296,53.045 136.329,53.453 136.515 C 53.693 136.624,53.842 136.638,53.785 136.545 C 53.728 136.453,53.858 136.212,54.074 136.011 L 54.467 135.644 54.034 135.659 C 53.795 135.667,53.469 135.819,53.308 135.995 M32.800 136.533 C 32.740 136.631,32.570 136.711,32.423 136.711 C 32.277 136.711,32.206 136.631,32.267 136.533 C 32.327 136.436,32.497 136.356,32.643 136.356 C 32.790 136.356,32.860 136.436,32.800 136.533 M37.067 136.890 C 37.144 137.133,37.098 137.244,36.922 137.244 C 36.375 137.244,36.459 138.080,37.007 138.095 C 37.284 138.103,37.511 138.035,37.511 137.944 C 37.511 137.787,37.245 137.705,36.920 137.761 C 36.840 137.775,36.822 137.708,36.881 137.612 C 36.941 137.516,37.094 137.478,37.222 137.527 C 37.350 137.576,37.556 137.493,37.681 137.343 C 37.805 137.193,37.998 137.116,38.109 137.171 C 38.220 137.226,38.172 137.157,38.003 137.018 C 37.833 136.879,37.729 136.730,37.772 136.687 C 37.815 136.644,37.648 136.593,37.402 136.572 C 37.029 136.542,36.973 136.595,37.067 136.890 M50.089 136.871 C 49.918 137.044,49.778 137.239,49.778 137.304 C 49.778 137.515,50.278 137.429,50.382 137.200 C 50.437 137.078,50.459 137.138,50.431 137.333 C 50.402 137.529,50.291 137.671,50.184 137.650 C 50.077 137.629,49.896 137.724,49.782 137.862 C 49.539 138.155,49.067 138.028,49.067 137.670 C 49.067 137.336,48.108 137.345,47.829 137.681 C 47.707 137.828,47.676 138.040,47.757 138.170 C 47.968 138.507,47.992 138.844,47.806 138.844 C 47.717 138.844,47.644 138.720,47.644 138.567 C 47.644 138.403,47.561 138.342,47.439 138.417 C 47.306 138.500,47.282 138.465,47.373 138.319 C 47.454 138.188,47.410 138.028,47.267 137.936 C 47.062 137.803,47.065 137.750,47.289 137.603 C 47.509 137.458,47.491 137.427,47.185 137.425 C 46.973 137.423,46.815 137.536,46.815 137.689 C 46.815 137.836,46.881 137.956,46.963 137.956 C 47.044 137.956,47.111 138.036,47.111 138.133 C 47.111 138.506,46.757 138.290,46.647 137.850 C 46.572 137.551,46.484 137.464,46.399 137.602 
C 46.298 137.764,46.236 137.766,46.138 137.607 C 46.045 137.456,45.917 137.482,45.675 137.701 C 45.492 137.867,45.376 138.059,45.419 138.128 C 45.461 138.197,45.263 138.239,44.978 138.222 C 44.693 138.205,44.497 138.253,44.544 138.328 C 44.590 138.403,44.545 138.515,44.444 138.578 C 44.029 138.834,44.524 138.998,45.728 139.001 C 46.465 139.004,47.118 138.915,47.270 138.792 C 47.479 138.623,47.506 138.625,47.401 138.800 C 47.312 138.948,47.386 139.022,47.623 139.022 C 47.819 139.022,48.031 138.938,48.094 138.836 C 48.162 138.726,48.294 138.713,48.416 138.804 C 49.052 139.282,50.311 138.852,50.075 138.237 C 50.006 138.058,50.078 138.000,50.311 138.044 C 50.536 138.088,50.616 138.029,50.553 137.867 C 50.487 137.694,50.569 137.654,50.845 137.727 C 51.137 137.803,51.173 137.864,50.993 137.979 C 50.842 138.076,50.992 138.116,51.405 138.088 C 51.774 138.063,52.012 137.968,51.957 137.867 C 51.903 137.769,51.967 137.789,52.098 137.911 C 52.230 138.033,52.442 138.133,52.569 138.133 C 52.940 138.133,52.754 137.722,52.188 137.289 C 51.715 136.927,51.781 136.767,52.356 136.883 C 52.453 136.902,52.472 136.857,52.396 136.782 C 52.156 136.541,51.556 136.583,51.556 136.840 C 51.556 136.976,51.474 137.037,51.375 136.976 C 51.276 136.915,51.248 136.777,51.313 136.671 C 51.383 136.559,51.350 136.529,51.235 136.600 C 50.991 136.751,51.257 137.531,51.532 137.469 C 51.636 137.446,51.679 137.536,51.627 137.669 C 51.459 138.109,50.649 137.592,50.707 137.082 C 50.767 136.549,50.498 136.456,50.089 136.871 M37.558 136.817 C 37.709 136.968,37.509 137.422,37.293 137.422 C 37.183 137.422,37.193 137.324,37.319 137.173 C 37.478 136.981,37.473 136.902,37.296 136.831 C 37.170 136.781,37.153 136.733,37.259 136.725 C 37.365 136.717,37.500 136.759,37.558 136.817 M38.223 138.136 C 38.331 138.728,38.312 138.815,38.110 138.647 C 37.959 138.522,37.867 138.514,37.867 138.626 C 37.867 139.095,39.520 139.172,39.968 138.724 C 40.248 138.444,40.270 138.444,40.324 138.724 C 40.393 139.079,40.555 139.102,40.879 
138.800 C 41.072 138.621,41.093 138.624,40.986 138.812 C 40.885 138.991,40.994 139.029,41.449 138.974 C 41.916 138.917,42.033 138.829,41.991 138.562 C 41.962 138.375,41.928 138.133,41.917 138.024 C 41.905 137.914,41.732 137.869,41.526 137.922 C 41.218 138.001,41.191 137.973,41.369 137.757 C 41.547 137.539,41.493 137.490,41.041 137.455 C 40.552 137.418,40.526 137.441,40.776 137.690 C 40.996 137.911,41.015 138.489,40.801 138.489 C 40.490 138.489,39.888 137.968,39.924 137.729 C 39.975 137.379,39.562 137.327,38.856 137.596 C 38.560 137.708,38.400 137.708,38.400 137.596 C 38.400 137.500,38.331 137.422,38.246 137.422 C 38.161 137.422,38.151 137.743,38.223 138.136 M42.553 137.688 C 42.645 137.928,42.602 137.969,42.358 137.876 C 42.119 137.785,42.094 137.806,42.251 137.965 C 42.373 138.088,42.555 138.112,42.696 138.025 C 42.826 137.943,43.073 137.899,43.244 137.927 C 43.416 137.955,43.556 137.888,43.556 137.778 C 43.556 137.669,43.477 137.627,43.381 137.687 C 43.286 137.746,43.034 137.701,42.821 137.588 C 42.485 137.408,42.450 137.421,42.553 137.688 M45.172 137.586 C 45.034 137.674,44.814 137.704,44.683 137.653 C 44.552 137.603,44.444 137.653,44.444 137.765 C 44.444 138.029,45.379 137.907,45.471 137.631 C 45.554 137.383,45.505 137.375,45.172 137.586 M48.430 137.708 C 48.869 137.876,48.760 138.133,48.267 138.093 C 47.821 138.056,47.720 137.940,47.941 137.719 C 48.085 137.574,48.082 137.574,48.430 137.708 M42.609 138.276 C 42.406 138.317,42.323 138.442,42.387 138.608 C 42.443 138.754,42.489 138.907,42.489 138.948 C 42.489 139.084,43.463 139.019,43.704 138.866 C 43.898 138.744,43.894 138.698,43.681 138.616 C 43.538 138.562,43.313 138.610,43.182 138.725 C 43.050 138.840,43.089 138.758,43.268 138.543 C 43.620 138.120,43.537 138.086,42.609 138.276 M49.067 138.508 C 49.292 138.670,49.285 138.685,49.022 138.600 C 48.851 138.545,48.711 138.587,48.711 138.694 C 48.711 138.832,48.642 138.831,48.473 138.690 C 48.341 138.581,48.168 138.551,48.088 138.624 C 48.008 138.696,48.001 
138.656,48.071 138.533 C 48.227 138.262,48.707 138.250,49.067 138.508 " stroke="none" fill="#0494dc" fill-rule="evenodd"></path><path id="path4" d="M36.622 39.378 C 36.441 39.596,36.453 39.644,36.689 39.644 C 36.848 39.644,36.978 39.524,36.978 39.378 C 36.978 39.231,36.948 39.111,36.911 39.111 C 36.874 39.111,36.744 39.231,36.622 39.378 M29.511 58.044 C 29.330 58.263,29.342 58.311,29.578 58.311 C 29.737 58.311,29.867 58.191,29.867 58.044 C 29.867 57.898,29.836 57.778,29.800 57.778 C 29.763 57.778,29.633 57.898,29.511 58.044 M29.333 73.312 C 29.333 73.348,29.453 73.478,29.600 73.600 C 29.818 73.781,29.867 73.769,29.867 73.533 C 29.867 73.374,29.747 73.244,29.600 73.244 C 29.453 73.244,29.333 73.275,29.333 73.312 M352.711 79.623 C 352.711 79.847,353.233 80.249,353.360 80.122 C 353.542 79.940,353.270 79.467,352.983 79.467 C 352.834 79.467,352.711 79.537,352.711 79.623 M374.639 79.741 C 374.581 79.892,374.582 80.063,374.640 80.122 C 374.767 80.249,375.289 79.847,375.289 79.623 C 375.289 79.354,374.750 79.453,374.639 79.741 M377.571 79.623 C 377.701 79.705,377.821 79.844,377.837 79.931 C 377.853 80.018,377.927 80.069,378.000 80.044 C 378.073 80.020,378.133 80.093,378.133 80.207 C 378.133 80.321,378.186 80.362,378.251 80.297 C 378.396 80.153,377.796 79.466,377.528 79.469 C 377.419 79.471,377.438 79.539,377.571 79.623 M363.022 80.094 C 363.022 80.381,363.495 80.653,363.677 80.471 C 363.804 80.344,363.402 79.822,363.178 79.822 C 363.092 79.822,363.022 79.945,363.022 80.094 M365.330 80.982 C 365.206 81.131,365.153 81.301,365.211 81.360 C 365.393 81.542,365.867 81.270,365.867 80.983 C 365.867 80.635,365.618 80.635,365.330 80.982 M194.133 82.422 C 194.133 82.658,194.182 82.670,194.400 82.489 C 194.547 82.367,194.667 82.237,194.667 82.200 C 194.667 82.164,194.547 82.133,194.400 82.133 C 194.253 82.133,194.133 82.263,194.133 82.422 M58.133 83.311 C 58.133 83.547,58.182 83.559,58.400 83.378 C 58.547 83.256,58.667 83.126,58.667 83.089 C 58.667 83.052,58.547 83.022,58.400 83.022 C 
58.253 83.022,58.133 83.152,58.133 83.311 M227.911 88.267 C 227.911 88.413,228.041 88.533,228.200 88.533 C 228.436 88.533,228.448 88.485,228.267 88.267 C 228.145 88.120,228.015 88.000,227.978 88.000 C 227.941 88.000,227.911 88.120,227.911 88.267 M366.044 89.400 C 366.044 89.625,366.566 90.026,366.693 89.900 C 366.875 89.718,366.603 89.244,366.317 89.244 C 366.167 89.244,366.044 89.315,366.044 89.400 M239.467 91.311 C 239.467 91.547,239.515 91.559,239.733 91.378 C 239.880 91.256,240.000 91.126,240.000 91.089 C 240.000 91.052,239.880 91.022,239.733 91.022 C 239.587 91.022,239.467 91.152,239.467 91.311 M172.801 92.667 C 172.802 92.789,173.014 92.987,173.273 93.107 L 173.744 93.326 173.330 92.885 C 172.857 92.382,172.799 92.358,172.801 92.667 M139.556 93.756 C 139.556 93.793,139.676 93.923,139.822 94.044 C 140.041 94.226,140.089 94.214,140.089 93.977 C 140.089 93.819,139.969 93.689,139.822 93.689 C 139.676 93.689,139.556 93.719,139.556 93.756 M237.689 93.977 C 237.689 94.214,237.737 94.226,237.956 94.044 C 238.102 93.923,238.222 93.793,238.222 93.756 C 238.222 93.719,238.102 93.689,237.956 93.689 C 237.809 93.689,237.689 93.819,237.689 93.977 M157.689 94.866 C 157.689 95.102,157.737 95.115,157.956 94.933 C 158.102 94.812,158.222 94.682,158.222 94.645 C 158.222 94.608,158.102 94.578,157.956 94.578 C 157.809 94.578,157.689 94.708,157.689 94.866 M384.533 95.267 C 384.533 95.484,384.647 95.644,384.800 95.644 C 384.947 95.644,385.067 95.574,385.067 95.488 C 385.067 95.403,384.947 95.233,384.800 95.111 C 384.577 94.926,384.533 94.952,384.533 95.267 M317.330 98.760 C 317.206 98.909,317.153 99.079,317.211 99.137 C 317.393 99.319,317.867 99.048,317.867 98.761 C 317.867 98.413,317.618 98.412,317.330 98.760 M116.622 98.933 C 116.441 99.152,116.453 99.200,116.689 99.200 C 116.848 99.200,116.978 99.080,116.978 98.933 C 116.978 98.787,116.948 98.667,116.911 98.667 C 116.874 98.667,116.744 98.787,116.622 98.933 M358.904 99.178 C 359.035 99.261,359.155 99.400,359.171 99.487 C 359.187 
99.573,359.260 99.624,359.333 99.600 C 359.407 99.576,359.467 99.649,359.467 99.763 C 359.467 99.877,359.520 99.918,359.584 99.853 C 359.729 99.708,359.129 99.021,358.861 99.025 C 358.753 99.026,358.772 99.094,358.904 99.178 M379.378 99.178 C 379.378 99.402,379.900 99.804,380.026 99.677 C 380.208 99.495,379.937 99.022,379.650 99.022 C 379.500 99.022,379.378 99.092,379.378 99.178 M316.441 101.427 C 316.317 101.576,316.264 101.745,316.323 101.804 C 316.505 101.986,316.978 101.714,316.978 101.428 C 316.978 101.080,316.729 101.079,316.441 101.427 M315.552 104.093 C 315.428 104.242,315.375 104.412,315.434 104.471 C 315.616 104.653,316.089 104.381,316.089 104.094 C 316.089 103.746,315.840 103.746,315.552 104.093 M355.022 104.681 C 355.022 104.796,354.962 104.869,354.889 104.844 C 354.816 104.820,354.742 104.871,354.726 104.958 C 354.710 105.045,354.590 105.184,354.460 105.266 C 354.327 105.350,354.308 105.418,354.417 105.419 C 354.685 105.423,355.284 104.736,355.140 104.592 C 355.075 104.527,355.022 104.567,355.022 104.681 M231.111 106.200 C 231.111 106.237,231.231 106.367,231.378 106.489 C 231.596 106.670,231.644 106.658,231.644 106.422 C 231.644 106.263,231.524 106.133,231.378 106.133 C 231.231 106.133,231.111 106.164,231.111 106.200 M383.528 106.408 C 383.470 106.559,383.471 106.730,383.529 106.789 C 383.656 106.915,384.178 106.513,384.178 106.289 C 384.178 106.021,383.639 106.119,383.528 106.408 M96.356 108.200 C 96.356 108.436,96.404 108.448,96.622 108.267 C 96.769 108.145,96.889 108.015,96.889 107.978 C 96.889 107.941,96.769 107.911,96.622 107.911 C 96.476 107.911,96.356 108.041,96.356 108.200 M272.000 107.978 C 272.000 108.015,272.120 108.145,272.267 108.267 C 272.485 108.448,272.533 108.436,272.533 108.200 C 272.533 108.041,272.413 107.911,272.267 107.911 C 272.120 107.911,272.000 107.941,272.000 107.978 M245.689 109.977 C 245.689 110.214,245.737 110.226,245.956 110.044 C 246.102 109.923,246.222 109.793,246.222 109.756 C 246.222 109.719,246.102 109.689,245.956 
109.689 C 245.809 109.689,245.689 109.819,245.689 109.977 M71.528 118.852 C 71.470 119.003,71.471 119.174,71.529 119.233 C 71.656 119.360,72.178 118.958,72.178 118.734 C 72.178 118.466,71.639 118.564,71.528 118.852 M52.622 119.644 C 52.622 119.742,52.562 119.802,52.489 119.778 C 52.416 119.753,52.342 119.804,52.326 119.891 C 52.310 119.978,52.190 120.117,52.060 120.200 C 51.901 120.300,51.892 120.351,52.032 120.353 C 52.280 120.356,52.800 119.868,52.800 119.632 C 52.800 119.541,52.760 119.467,52.711 119.467 C 52.662 119.467,52.622 119.547,52.622 119.644 M16.711 121.400 C 16.711 121.625,17.233 122.026,17.360 121.900 C 17.542 121.718,17.270 121.244,16.983 121.244 C 16.834 121.244,16.711 121.315,16.711 121.400 M46.080 122.524 C 45.820 122.784,45.803 123.022,46.044 123.022 C 46.142 123.022,46.202 122.922,46.178 122.800 C 46.153 122.678,46.233 122.598,46.356 122.622 C 46.478 122.647,46.578 122.587,46.578 122.489 C 46.578 122.248,46.340 122.265,46.080 122.524 M47.996 124.538 C 47.873 124.687,47.820 124.857,47.878 124.915 C 48.060 125.097,48.533 124.826,48.533 124.539 C 48.533 124.191,48.285 124.190,47.996 124.538 M28.237 134.734 C 28.368 134.816,28.488 134.955,28.504 135.042 C 28.520 135.129,28.593 135.180,28.667 135.156 C 28.740 135.131,28.800 135.204,28.800 135.319 C 28.800 135.433,28.853 135.473,28.917 135.408 C 29.062 135.264,28.463 134.577,28.195 134.581 C 28.086 134.582,28.105 134.650,28.237 134.734 " stroke="none" fill="#6c50c8" fill-rule="evenodd"></path></g></svg>""" def get_svg(svg: str, style: str = "", wrap: bool = True): """Convert an SVG to a base64-encoded image.""" b64 = base64.b64encode(svg.encode("utf-8")).decode("utf-8") html = f'<img src="data:image/svg+xml;base64,{b64}" style="{style}"/>' return get_html(html) if wrap else html def get_html(html: str): """Convert HTML so it can be rendered.""" WRAPPER = """<div style="overflow-x: auto; border: 1px solid #e6e9ef; border-radius: 0.25rem; padding: 1rem; margin-bottom: 2.5rem">{}</div>""" # Newlines seem 
to mess with the rendering html = html.replace("\n", " ") return WRAPPER.format(html) def display_LOGO(where:str='sidebar'): if where =='sidebar': st.sidebar.markdown(LOGO, unsafe_allow_html=True) elif where =='main': st.markdown(LOGO, unsafe_allow_html=True) LOGO = get_svg(SVG, wrap=False, style="max-width: 100%; margin-bottom: 25px")
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/viz/streamlit_viz/logo.py
logo.py
import streamlit as st
from sparknlp.annotator import *
import nlu
from nlu.pipe.utils.resolution.storage_ref_utils import StorageRefUtils
from nlu.universe.feature_node_ids import NLP_NODE_IDS


class StreamlitUtilsOS():
    """Static helpers shared by the open-source Streamlit visualization components.

    Responsibilities:
      * locate classifier / embedding / NER components inside an NLU pipeline
      * resolve human-readable names (nlu_ref / storage_ref / nlp_ref)
      * construct sklearn manifold & matrix-decomposition algorithms for embedding viz
      * load and cache NLU pipelines across Streamlit reruns
    """

    # Spark NLP annotator classes treated as "classifiers" by the viz layer:
    # their output columns are surfaced as predictions.
    classifers_OS = [ClassifierDLModel, LanguageDetectorDL, MultiClassifierDLModel, NerDLModel, NerCrfModel,
                     YakeKeywordExtraction, PerceptronModel, SentimentDLModel, SentimentDetectorModel,
                     ViveknSentimentModel, DependencyParserModel, TypedDependencyParserModel, T5Transformer,
                     MarianTransformer, NerConverter]

    @staticmethod
    def get_classifier_cols(pipe):
        """Return every final output column produced by classifier components of `pipe`."""
        classifier_cols = []
        for c in pipe.components:
            if type(c.model) in StreamlitUtilsOS.classifers_OS:
                classifier_cols += pipe.anno2final_cols[c.model]
        return classifier_cols

    @staticmethod
    def get_embed_cols(pipe):
        """Return every final output column produced by embedding components of `pipe`."""
        embed_cols = []
        embedders = StreamlitUtilsOS.find_all_embed_components(pipe)
        for c in embedders:
            embed_cols += pipe.anno2final_cols[c.model]
        return embed_cols

    @staticmethod
    def find_embed_col(df, search_multi=False):
        """Find col that contains embed.

        Returns the first matching column name (or None when there is no match);
        with `search_multi=True` returns the list of all matching columns.
        """
        if not search_multi:
            for c in df.columns:
                if 'embed' in c:
                    return c
            return None  # explicit: no embedding column present
        return [c for c in df.columns if 'embed' in c]

    @staticmethod
    def find_embed_component(p):
        """Find first embed component_to_resolve in component_list; None (with a UI warning) if absent."""
        for c in p.components:
            if 'embed' in c.out_types[0]:
                return c
        st.warning("No Embed model_anno_obj in component_list")
        return None

    @staticmethod
    def find_all_classifier_components(pipe):
        """Find ALL classifier component_to_resolve in component_list."""
        return [c for c in pipe.components if type(c.model) in StreamlitUtilsOS.classifers_OS]

    @staticmethod
    def find_all_embed_components(p):
        """Find ALL (non-chunk) embed component_to_resolve in component_list."""
        cs = [c for c in p.components
              if 'embed' in c.out_types[0] and 'chunk' not in c.out_types[0]]
        if len(cs) == 0:
            st.warning("No Embed model_anno_obj in component_list")
        return cs

    @staticmethod
    def extract_name(component_or_pipe):
        """Resolve a display name: nlu_ref, else storage_ref, else nlp_ref; '' when nothing is found."""
        name = ''
        if hasattr(component_or_pipe, 'info'):
            if hasattr(component_or_pipe, 'nlu_ref'):
                name = component_or_pipe.nlu_ref
            elif hasattr(component_or_pipe, 'storage_ref'):
                name = component_or_pipe.storage_ref
            elif hasattr(component_or_pipe, 'nlp_ref'):
                name = component_or_pipe.nlp_ref
        elif hasattr(component_or_pipe, 'nlu_ref'):
            name = component_or_pipe.nlu_ref
        return name

    @staticmethod
    def find_ner_model(p):
        """Find NER component_to_resolve in component_list; returns the model or None (with a UI warning)."""
        from sparknlp.annotator import NerDLModel, NerCrfModel
        for c in p.components:
            if isinstance(c.model, (NerDLModel, NerCrfModel)):
                return c.model
        st.warning("No NER model_anno_obj in component_list")
        return None

    @staticmethod
    def get_NER_tags_in_pipe(p):
        """Get NER tags in component_list, used for showing visualizable tags."""
        n = StreamlitUtilsOS.find_ner_model(p)
        if n is None:
            return []  # no NER model loaded -> nothing to visualize
        classes_predicted_by_ner_model = n.getClasses()
        # Strip IOB prefixes ('B-PER' -> 'PER'); non-IOB tags (e.g. 'O') map to ''.
        split_iob_tags = lambda s: s.split('-')[1] if '-' in s else ''
        classes_predicted_by_ner_model = list(map(split_iob_tags, classes_predicted_by_ner_model))
        while '' in classes_predicted_by_ner_model:
            classes_predicted_by_ner_model.remove('')
        # Deduplicate tag names.
        classes_predicted_by_ner_model = list(set(classes_predicted_by_ner_model))
        return classes_predicted_by_ner_model

    @staticmethod
    def get_manifold_algo(algo, dim, n_jobs=None):
        """Construct the sklearn dimensionality-reduction algorithm named `algo`
        with `dim` output components. Returns None for an unknown algorithm name."""
        from sklearn.manifold import TSNE, Isomap, LocallyLinearEmbedding, MDS, SpectralEmbedding
        from sklearn.decomposition import TruncatedSVD, DictionaryLearning, FactorAnalysis, FastICA, KernelPCA, PCA, \
            LatentDirichletAllocation
        # manifold
        if algo == 'TSNE': return TSNE(n_components=dim, n_jobs=n_jobs)
        if algo == 'ISOMAP': return Isomap(n_components=dim, n_jobs=n_jobs)
        if algo == 'LLE': return LocallyLinearEmbedding(n_components=dim, n_jobs=n_jobs)
        if algo == 'Spectral Embedding': return SpectralEmbedding(n_components=dim, n_jobs=n_jobs)
        if algo == 'MDS': return MDS(n_components=dim, n_jobs=n_jobs)
        # Matrix Decomposition
        if algo == 'PCA': return PCA(n_components=dim)  # No hyper
        if algo == 'SVD aka LSA': return TruncatedSVD(n_components=dim)  # No hyper
        if algo == 'DictionaryLearning': return DictionaryLearning(n_components=dim, n_jobs=n_jobs)
        if algo == 'FactorAnalysis': return FactorAnalysis(n_components=dim)  # no hyper
        if algo == 'FastICA': return FastICA(n_components=dim)  # no hyper
        if algo == 'KernelPCA': return KernelPCA(n_components=dim, n_jobs=n_jobs)
        # not applicable because negative values, todo we could just take absolute values of all embeds..
        if algo == 'LatentDirichletAllocation': return LatentDirichletAllocation(n_components=dim)
        # if algo =='NMF': return NMF(n_components=dim)
        return None  # unknown algorithm name

    @staticmethod
    @st.cache(allow_output_mutation=True, hash_funcs={"_thread.RLock": lambda _: None})
    def get_pipe(model='ner'):
        """Load (and cache across Streamlit reruns) an NLU pipeline for `model`."""
        return nlu.load(model)

    @staticmethod
    def merge_token_classifiers_with_embed_pipe(embed_pipe, token_pipe):
        """Merge token feature generators into embed component_list.
        i.e. Pos/Dep_depdency/Untyped_dep if not already present in component_list."""
        for c in token_pipe.components:
            if c.name == NLP_NODE_IDS.POS:
                for emb_c in embed_pipe.components:
                    if emb_c.name == NLP_NODE_IDS.POS:
                        # POS already present: just refit and return unchanged.
                        embed_pipe.is_fitted = False
                        embed_pipe.fit()
                        return embed_pipe
                # only merge if pos not already in component_list
                embed_pipe.components.append(c)
        embed_pipe.is_fitted = False
        embed_pipe.fit()
        return embed_pipe

    @staticmethod
    def extract_all_sentence_storage_refs_or_nlu_refs(e_coms):
        """extract either NLU_ref or storage_ref as fallback for a list of embedding components"""
        loaded_storage_refs = []
        loaded_embed_nlu_refs = []
        for c in e_coms:
            if not hasattr(c, 'nlu_ref'):
                continue
            r = c.nlu_ref
            if 'en.' not in r and 'embed_sentence.' not in r and 'ner' not in r:
                loaded_embed_nlu_refs.append('en.embed_sentence.' + r)
            elif 'en.' in r and 'embed_sentence.' not in r and 'ner' not in r:
                # BUGFIX: was r.split('en.')[0], which is always '' (splitting
                # 'en.bert' on 'en.' yields ['', 'bert']) and produced the bogus
                # ref 'en.embed_sentence.'. Take the part AFTER the first 'en.'.
                r = r.split('en.', 1)[-1]
                loaded_embed_nlu_refs.append('en.embed_sentence.' + r)
            else:
                loaded_embed_nlu_refs.append(StorageRefUtils.extract_storage_ref(c))
            # Storage ref is collected for every component with an nlu_ref.
            loaded_storage_refs.append(StorageRefUtils.extract_storage_ref(c))
        return loaded_embed_nlu_refs, loaded_storage_refs
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/viz/streamlit_viz/streamlit_utils_OS.py
streamlit_utils_OS.py
import streamlit as st
from nlu.utils.modelhub.modelhub_utils import ModelHubUtils
from nlu.pipe.viz.streamlit_viz.streamlit_utils_OS import StreamlitUtilsOS
from nlu.pipe.viz.streamlit_viz.styles import _set_block_container_style


class StreamlitVizTracker():
    """Track the status of the visualizations and models loaded in the Streamlit Web View.
    This is the Model part of the MVC pattern"""
    # Wide-layout CSS is applied once, at class-definition (module import) time.
    _set_block_container_style()

    # Session-level registries of pipelines loaded so far.
    loaded_ner_word_embeding_pipes = []
    loaded_word_embeding_pipes = []
    loaded_sentence_embeding_pipes = []
    loaded_document_classifier_pipes = []
    loaded_token_pipes = []
    loaded_token_level_classifiers = []
    footer_displayed = False  # guards against rendering the footer more than once

    @staticmethod
    def pad_duplicate_tokens(tokens):
        """For every duplicate token in input list, ads N whitespaces for the Nth duplicate"""
        # Group positions by token value (insertion-ordered), then pad the
        # N-th occurrence of each token with N trailing spaces (0-based).
        positions_by_token = {}
        for idx, tok in enumerate(tokens):
            positions_by_token.setdefault(tok, []).append(idx)
        for tok, positions in positions_by_token.items():
            for nth, idx in enumerate(positions):
                tokens[idx] = tok + ' ' * nth
        return tokens

    @staticmethod
    def RAW_HTML_link(text, url, CSS_class):
        """Minimal, unstyled HTML link paragraph."""
        return f"""<p class="{CSS_class}" style="padding:0px;" > <a href="{url}">{text}</a> </p>"""

    @staticmethod
    def style_link(text, url, CSS_class):
        """Styled HTML link paragraph (JSL blue, Roboto)."""
        return f"""
<p class="{CSS_class}" style="
    width: fit-content;
    padding:0px;
    color : #1E77B7;
    font-family: 'Roboto', sans-serif;
    font-weight: bold;
    font-size: 14px;
    line-height: 17px;
    box-sizing: content-box;
    overflow: hidden;
    display: block;
    color: #0098da !important;
    word-wrap: break-word;
    " >
    <a href="{url}">{text}</a>
</p>"""

    @staticmethod
    def style_model_link(model, text, url, CSS_class):
        """Styled HTML link paragraph with the model name highlighted."""
        # BUGFIX: the inner div used invalid markup `style:"color=rgb(...)"`
        # (wrong attribute syntax AND wrong CSS syntax), so the model name was
        # never colored. Corrected to a valid style attribute.
        return f"""
<p class="{CSS_class}" style="
    width: fit-content;
    padding:0px;
    color : #1E77B7;
    font-family: 'Roboto', sans-serif;
    font-weight: bold;
    font-size: 14px;
    line-height: 17px;
    box-sizing: content-box;
    overflow: hidden;
    display: block;
    color: #0098da !important;
    word-wrap: break-word;
    " >
    <a href="{url}">{text}<div style="color:rgb(246, 51, 102);">{model}</div></a>
</p>"""

    @staticmethod
    def display_infos():
        """Render usage hints (searchable model fields, NLU ref naming scheme) in the sidebar."""
        FOOTER = """<span style="font-size: 0.75em">{}</span>"""
        field_info = """**INFO:** You can type in the model_anno_obj selection fields to search and filter."""
        iso_info = """**INFO:** NLU model_anno_obj references have the structure.md: `<iso_language_code>.<model_name>.<dataset>` . [Based on the `ISO Language Codes`](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes). If no language defined, `en.` will be assumed as default ' ,"""
        # NOTE: the original bound field_info to ISO_FOOTER and iso_info to
        # FIELD_FOOTER (swapped names); locals renamed here, rendered content
        # and order are unchanged (field hint first, then ISO hint).
        FIELD_FOOTER = FOOTER.format(field_info)
        ISO_FOOTER = FOOTER.format(iso_info)
        st.sidebar.markdown(FIELD_FOOTER, unsafe_allow_html=True)
        st.sidebar.markdown(ISO_FOOTER, unsafe_allow_html=True)

    @staticmethod
    def display_footer():
        """Render the 'powered by' footer plus usage infos, at most once per session."""
        if not StreamlitVizTracker.footer_displayed:
            StreamlitVizTracker.display_infos()
            nlu_link = 'https://nlu.johnsnowlabs.com/'
            nlp_link = 'http://nlp.johnsnowlabs.com/'
            jsl_link = 'https://johnsnowlabs.com/'
            doc_link = 'TODO'  # TODO(review): placeholder URL is shipped to users — fill in real docs link
            powerd_by = f"""Powerd by [`NLU`]({nlu_link}) & [`Spark NLP`]({nlp_link}) from [`John Snow Labs `]({jsl_link}) Checkout [`The Docs`]({doc_link}) for more infos"""
            FOOTER = """<span style="font-size: 0.75em">{}</span>"""
            POWER_FOOTER = FOOTER.format(powerd_by)
            st.sidebar.markdown(POWER_FOOTER, unsafe_allow_html=True)
            StreamlitVizTracker.footer_displayed = True

    @staticmethod
    def show_logo(sidebar=True):
        """Render the John Snow Labs logo, in the sidebar by default."""
        HTML_logo = """
<div>
  <a href="https://www.johnsnowlabs.com/">
     <img src="https://nlp.johnsnowlabs.com/assets/images/logo.png" width="300" height="100" >
   </a>
</div>
"""
        if sidebar:
            st.sidebar.markdown(HTML_logo, unsafe_allow_html=True)
        else:
            st.markdown(HTML_logo, unsafe_allow_html=True)

    @staticmethod
    def display_embed_vetor_information(embed_component, embed_mat):
        """Render an expander with dimension / count / name info for an embedding matrix."""
        name = StreamlitUtilsOS.extract_name(embed_component)
        if name == '':
            name = 'See modelshub for more details'
        exp = st.expander("Vector information")
        # BUGFIX: 'Num Vectors' was embed_mat.shape[0] + embed_mat.shape[0],
        # which double-counted the rows; one row == one vector.
        exp.code({"Vector Dimension ": embed_mat.shape[1],
                  "Num Vectors": embed_mat.shape[0],
                  'Vector Name': name})

    @staticmethod
    def display_model_info(model2viz=' ', pipes=None, apply_style=True, display_component_wise_info=True,
                           display_component_summary=True):
        """Display Links to Modelhub for every NLU Ref loaded and also every component_to_resolve in component_list"""
        # FIX: mutable default argument [] replaced with None sentinel.
        if pipes is None:
            pipes = []
        default_modelhub_link = 'https://nlp.johnsnowlabs.com/models'
        nlu_refs = model2viz.split(' ')
        for p in StreamlitVizTracker.loaded_word_embeding_pipes \
                + StreamlitVizTracker.loaded_document_classifier_pipes \
                + StreamlitVizTracker.loaded_token_pipes:
            nlu_refs.append(p.nlu_ref)
        nlu_refs = set(nlu_refs)
        st.sidebar.subheader("NLU pipeline components info")
        nlu_ref_infos = []
        nlu_ref_infos.append(StreamlitVizTracker.style_link(
            "Search over 1000+ scalable SOTA models in John Snow Labs Modelhub",
            default_modelhub_link, CSS_class='nlu_model_info'))
        for nlu_ref in nlu_refs:
            model_hub_link = ModelHubUtils.get_url_by_nlu_refrence(nlu_ref)
            link_text = f'JSL Modelhub page for '
            # Skip refs that have no dedicated Modelhub page.
            if model_hub_link is None or model_hub_link == default_modelhub_link:
                continue
            if apply_style:
                nlu_ref_infos.append(StreamlitVizTracker.style_model_link(
                    nlu_ref, link_text, model_hub_link, CSS_class='nlu_model_info'))
            else:
                nlu_ref_infos.append(StreamlitVizTracker.RAW_HTML_link(
                    link_text, model_hub_link, CSS_class='nlu_model_info'))
        n = '\n'
        HTML_INFO = f"<p>{n.join(nlu_ref_infos)}</p>"
        st.sidebar.markdown(HTML_INFO, unsafe_allow_html=True)
        c_names = []
        if display_component_wise_info:
            for pipe in pipes:
                if pipe is None:
                    continue
                for c in pipe.components:
                    c_names.append(f"`{type(c.model).__name__}`")
            # FIX: guard against empty string before indexing [-1].
            if model2viz and model2viz[-1] == ' ':
                model2viz = model2viz[:-1]
            FOOTER = """<span style="font-size: 0.75em">{}</span>"""
            component_info = f"**Info:** You can load all models active in 1 line via `nlu.load('{' '.join(nlu_refs)}')` which provides this with a optimized CPU build and components: {', '.join(set(c_names))}"
            component_info = FOOTER.format(component_info)
            st.sidebar.markdown(component_info, unsafe_allow_html=True)
        if display_component_summary:
            parameter_infos = {}
            for p in pipes:
                if p is None:
                    continue
                parameter_infos[p.nlu_ref] = StreamlitVizTracker.get_pipe_param_dict(p)
            exp = st.expander("NLU Pipeline components and parameters information")
            exp.write(parameter_infos)

    @staticmethod
    def get_pipe_param_dict(pipe):
        """Return {stage: {param_name: value}} for every stage of the underlying Spark pipeline."""
        # loop over every model_anno_obj in pipeline stages and then loop over the models params
        all_params = {}
        from sparknlp.base import LightPipeline
        stages = pipe.vanilla_transformer_pipe.pipeline_model.stages \
            if isinstance(pipe.vanilla_transformer_pipe, (LightPipeline)) \
            else pipe.vanilla_transformer_pipe.stages
        for stage in stages:
            all_params[str(stage)] = {}
            params = stage.extractParamMap()
            for param_name, param_value in params.items():
                all_params[str(stage)][param_name.name] = param_value
        return all_params

    # NOTE(review): in the original file, a very large block of commented-out
    # legacy viz code (viz_streamlit, visualize_classes, visualize_tokens_information,
    # visualize_dep_tree, visualize_ner, display_word_similarity) followed here.
show_logo:bool=True, # set_wide_layout_CSS:bool=True, # show_code_snippets:bool=False, # model_select_position:str = 'side' , # main or side # display_infos:bool=True, # key:str = "NLU_streamlit", # display_footer :bool = True , # num_similarity_cols:int=2, # # # NEW PARAMS # # MANIfold # num_manifold_cols:int=3, # manifold_algos:List[str]=('TSNE'), # # # SIMY # similarity_algos:List[str]=('COSINE'), # )-> None: # """Visualize either individual building blocks for streamlit or a full UI to experiment and explore models with""" # StreamlitVizTracker.footer_displayed = not display_footer # if set_wide_layout_CSS : _set_block_container_style() # if title: st.title(title) # if sub_title: st.subheader(sub_title) # if show_logo :StreamlitVizTracker.show_logo() # if side_info : st.sidebar.markdown(side_info) # text = st.text_area("Enter text you want to visualize below", text, key=key) # ner_model_2_viz = component_list.nlu_ref # if show_model_select : # show_code_snippets = st.sidebar.checkbox('Generate code snippets', value=show_code_snippets) # if model_selection == [] : model_selection = Discoverer.get_components('ner',include_pipes=True) # model_selection.sort() # if model_select_position == 'side':ner_model_2_viz = st.sidebar.selectbox("Select a NER model_anno_obj.",model_selection,index=model_selection.index(component_list.nlu_ref.split(' ')[0])) # else : ner_model_2_viz = st.selectbox("Select a NER model_anno_obj",model_selection,index=model_selection.index(component_list.nlu_ref.split(' ')[0])) # # active_visualizers = visualizers # if show_viz_selection: active_visualizers = st.sidebar.multiselect("Visualizers",options=visualizers,default=visualizers,key=key) # # all_models = ner_model_2_viz + ' en.dep.typed ' if 'dependency_tree' in active_visualizers else ner_model_2_viz # ner_pipe, tree_pipe = None,None # if 'ner' in active_visualizers : # ner_pipe = component_list if component_list.nlu_ref == ner_model_2_viz else StreamlitUtilsOS.get_pipe(ner_model_2_viz) # 
StreamlitVizTracker.visualize_ner(ner_pipe, text, generate_code_sample=show_code_snippets, key=key, show_model_select=False, show_text_input=True, show_logo=False, show_infos=False) # if 'dependency_tree' in active_visualizers : # tree_pipe = StreamlitUtilsOS.get_pipe('en.dep.typed') # if not ValidateVizPipe.viz_tree_satisfied(component_list) else component_list # StreamlitVizTracker.visualize_dep_tree(tree_pipe, text, generate_code_sample=show_code_snippets, key=key, show_infos=False, show_logo=False) # if 'token_features' in active_visualizers: # ner_pipe = component_list if component_list.nlu_ref == ner_model_2_viz else StreamlitUtilsOS.get_pipe(ner_model_2_viz) # StreamlitVizTracker.visualize_tokens_information(ner_pipe, text, generate_code_sample=show_code_snippets, key=key, model_select_position=model_select_position, show_infos=False, show_logo=False, ) # if 'classification' in active_visualizers: # ner_pipe = component_list if component_list.nlu_ref == ner_model_2_viz else StreamlitUtilsOS.get_pipe(ner_model_2_viz) # StreamlitVizTracker.visualize_classes(ner_pipe, text, generate_code_sample=show_code_snippets, key=key, model_select_position=model_select_position, show_infos=False, show_logo=False) # if 'similarity' in active_visualizers: # ner_pipe = component_list if component_list.nlu_ref == ner_model_2_viz else StreamlitUtilsOS.get_pipe(ner_model_2_viz) # StreamlitVizTracker.display_word_similarity(ner_pipe, similarity_texts, generate_code_sample=show_code_snippets, model_select_position=model_select_position, show_infos=False, show_logo=False, num_cols=num_similarity_cols) # if 'manifold' in active_visualizers : # ner_pipe = component_list if ner_model_2_viz in component_list.nlu_ref.split(' ') else StreamlitUtilsOS.get_pipe(ner_model_2_viz) # StreamlitVizTracker.display_low_dim_embed_viz_token(ner_pipe, similarity_texts, generate_code_sample=show_code_snippets, model_select_position=model_select_position, show_infos=False, show_logo=False, 
num_cols=num_manifold_cols) # # StreamlitVizTracker.display_model_info(all_models, [ner_pipe, tree_pipe]) # if show_models_info : # pass # if display_infos : StreamlitVizTracker.display_footer() # # # @staticmethod # def visualize_classes( # component_list, # nlu component_list # text:Union[str,list,pd.DataFrame, pd.Series, List[str]]=('I love NLU and Streamlit and sunny days!', 'I hate rainy daiys','CALL NOW AND WIN 1000$M'), # pipe_prediction_output_level:Optional[str]='document', # title: Optional[str] = "Text Classification", # sub_title: Optional[str] = 'View predicted `classes` and `confidences` for `hundreds of text classifiers` in `over 200 languages`', # metadata : bool = False, # positions : bool = False, # set_wide_layout_CSS:bool=True, # generate_code_sample:bool = False, # key:str = "NLU_streamlit", # show_model_selector : bool = True , # model_select_position:str = 'side' , # show_infos:bool = True, # show_logo:bool = True, # )->None: # if show_logo :StreamlitVizTracker.show_logo() # if set_wide_layout_CSS : _set_block_container_style() # if title:st.header(title) # if sub_title:st.subheader(sub_title) # # # if generate_code_sample: st.code(get_code_for_viz('CLASSES',StreamlitUtilsOS.extract_name(component_list),text)) # if not isinstance(text, (pd.DataFrame, pd.Series)): # text = st.text_area('Enter N texts, seperated by new lines to view classification results for','\n'.join(text) if isinstance(text,list) else text, key=key) # text = text.split("\n") # while '' in text : text.remove('') # classifier_pipes = [component_list] # classifier_components_usable = [e for e in Discoverer.get_components('classify',True, include_aliases=True)] # classifier_components = StreamlitUtilsOS.find_all_classifier_components(component_list) # loaded_classifier_nlu_refs = [os_components.info.nlu_ref for os_components in classifier_components] # # for l in loaded_classifier_nlu_refs: # if 'converter' in l : # loaded_classifier_nlu_refs.remove(l) # continue # if l not in 
classifier_components_usable : classifier_components_usable.append(l) # classifier_components_usable.sort() # loaded_classifier_nlu_refs.sort() # if show_model_selector : # if model_select_position =='side':classifier_components_selection = st.sidebar.multiselect("Pick additional Classifiers",options=classifier_components_usable,default=loaded_classifier_nlu_refs,key = key) # else:classifier_components_selection = st.multiselect("Pick additional Classifiers",options=classifier_components_usable,default=loaded_classifier_nlu_refs,key = key) # # else : ValueError("Please define model_select_position as main or side") # classifier_algos_to_load = list(set(classifier_components_selection) - set(loaded_classifier_nlu_refs)) # for classifier in classifier_algos_to_load:classifier_pipes.append(nlu.load(classifier)) # StreamlitVizTracker.loaded_document_classifier_pipes+= classifier_pipes # if generate_code_sample:st.code(get_code_for_viz('CLASSES',[StreamlitUtilsOS.extract_name(p) for p in classifier_pipes],text)) # # dfs = [] # all_classifier_cols=[] # for p in classifier_pipes : # df = p.predict(text, pipe_prediction_output_level=pipe_prediction_output_level, metadata=metadata, positions=positions) # classifier_cols = StreamlitUtilsOS.get_classifier_cols(p) # for os_components in classifier_cols : # if os_components not in df.columns : classifier_cols.remove(os_components) # # if 'text' in df.columns: classifier_cols += ['text'] # elif 'document' in df.columns: classifier_cols += ['document'] # all_classifier_cols+= classifier_cols # dfs.append(df) # df = pd.concat(dfs, axis=1) # df = df.loc[:,~df.columns.duplicated()] # for os_components in all_classifier_cols : # if os_components not in df.columns : all_classifier_cols.remove(os_components) # all_classifier_cols = list(set(all_classifier_cols)) # # if len(all_classifier_cols) == 0: st.error('No classes detected') # else :st.write(df[all_classifier_cols],key=key) # if show_infos : # # 
VizUtilsStreamlitOS.display_infos() # StreamlitVizTracker.display_model_info(component_list.nlu_ref, pipes = [component_list]) # StreamlitVizTracker.display_footer() # # # # @staticmethod # def visualize_tokens_information( # component_list, # nlu component_list # text:str, # title: Optional[str] = "Token Features", # sub_title: Optional[str] ='Pick from `over 1000+ models` on the left and `view the generated features`', # show_feature_select:bool =True, # features:Optional[List[str]] = None, # full_metadata: bool = True, # pipe_prediction_output_level:str = 'token', # positions:bool = False, # set_wide_layout_CSS:bool=True, # generate_code_sample:bool = False, # key = "NLU_streamlit", # show_model_select = True, # model_select_position:str = 'side' , # main or side # show_infos:bool = True, # show_logo:bool = True, # show_text_input:bool = True, # ) -> None: # """Visualizer for token features.""" # StreamlitVizTracker.footer_displayed=False # if show_logo :StreamlitVizTracker.show_logo() # if set_wide_layout_CSS : _set_block_container_style() # if title:st.header(title) # # if generate_code_sample: st.code(get_code_for_viz('TOKEN',StreamlitUtilsOS.extract_name(component_list),text)) # if sub_title:st.subheader(sub_title) # token_pipes = [component_list] # if show_text_input : text = st.text_area("Enter text you want to view token features for", text, key=key) # if show_model_select : # token_pipes_components_usable = [e for e in Discoverer.get_components(get_all=True)] # loaded_nlu_refs = [os_components.info.nlu_ref for os_components in component_list.components] # # for l in loaded_nlu_refs: # if 'converter' in l : # loaded_nlu_refs.remove(l) # continue # if l not in token_pipes_components_usable : token_pipes_components_usable.append(l) # token_pipes_components_usable = list(set(token_pipes_components_usable)) # loaded_nlu_refs = list(set(loaded_nlu_refs)) # if '' in loaded_nlu_refs : loaded_nlu_refs.remove('') # if ' ' in loaded_nlu_refs : 
loaded_nlu_refs.remove(' ') # token_pipes_components_usable.sort() # loaded_nlu_refs.sort() # if model_select_position =='side':model_selection = st.sidebar.multiselect("Pick any additional models for token features",options=token_pipes_components_usable,default=loaded_nlu_refs,key = key) # else:model_selection = st.multiselect("Pick any additional models for token features",options=token_pipes_components_usable,default=loaded_nlu_refs,key = key) # # else : ValueError("Please define model_select_position as main or side") # models_to_load = list(set(model_selection) - set(loaded_nlu_refs)) # for model_anno_obj in models_to_load:token_pipes.append(nlu.load(model_anno_obj)) # StreamlitVizTracker.loaded_token_pipes+= token_pipes # if generate_code_sample:st.code(get_code_for_viz('TOKEN',[StreamlitUtilsOS.extract_name(p) for p in token_pipes],text)) # dfs = [] # for p in token_pipes: # df = p.predict(text, pipe_prediction_output_level=pipe_prediction_output_level, metadata=full_metadata,positions=positions) # dfs.append(df) # # # df = pd.concat(dfs,axis=1) # df = df.loc[:,~df.columns.duplicated()] # if show_feature_select : # exp = st.expander("Select token features to display") # features = exp.multiselect( # "Token features", # options=list(df.columns), # default=list(df.columns) # ) # st.dataframe(df[features]) # if show_infos : # # VizUtilsStreamlitOS.display_infos() # StreamlitVizTracker.display_model_info(component_list.nlu_ref, pipes = [component_list]) # StreamlitVizTracker.display_footer() # # # @staticmethod # def visualize_dep_tree( # component_list, #nlu component_list # text:str = 'Billy likes to swim', # title: Optional[str] = "Dependency Parse & Part-of-speech tags", # sub_title: Optional[str] = 'POS tags define a `grammatical label` for `each token` and the `Dependency Tree` classifies `Relations between the tokens` ', # set_wide_layout_CSS:bool=True, # generate_code_sample:bool = False, # key = "NLU_streamlit", # show_infos:bool = True, # 
show_logo:bool = True, # show_text_input:bool = True, # ): # StreamlitVizTracker.footer_displayed=False # if show_logo :StreamlitVizTracker.show_logo() # if set_wide_layout_CSS : _set_block_container_style() # if title:st.header(title) # if show_text_input : text = st.text_area("Enter text you want to visualize dependency tree for ", text, key=key) # if sub_title:st.subheader(sub_title) # if generate_code_sample: st.code(get_code_for_viz('TREE',StreamlitUtilsOS.extract_name(component_list),text)) # component_list.viz(text,write_to_streamlit=True,viz_type='dep', streamlit_key=key) # if show_infos : # # VizUtilsStreamlitOS.display_infos() # StreamlitVizTracker.display_model_info(component_list.nlu_ref, pipes = [component_list]) # StreamlitVizTracker.display_footer() # # # # @staticmethod # def visualize_ner( # component_list, # Nlu component_list # text:str, # ner_tags: Optional[List[str]] = None, # show_label_select: bool = True, # show_table: bool = False, # title: Optional[str] = "Named Entities", # sub_title: Optional[str] = "Recognize various `Named Entities (NER)` in text entered and filter them. 
You can select from over `100 languages` in the dropdown.", # colors: Dict[str, str] = {}, # show_color_selector: bool = False, # set_wide_layout_CSS:bool=True, # generate_code_sample:bool = False, # key = "NLU_streamlit", # model_select_position:str = 'side', # show_model_select : bool = True, # show_text_input:bool = True, # show_infos:bool = True, # show_logo:bool = True, # # ): # StreamlitVizTracker.footer_displayed=False # if set_wide_layout_CSS : _set_block_container_style() # if show_logo :StreamlitVizTracker.show_logo() # if show_model_select : # model_selection = Discoverer.get_components('ner',include_pipes=True) # model_selection.sort() # if model_select_position == 'side':ner_model_2_viz = st.sidebar.selectbox("Select a NER model_anno_obj",model_selection,index=model_selection.index(component_list.nlu_ref.split(' ')[0])) # else : ner_model_2_viz = st.selectbox("Select a NER model_anno_obj",model_selection,index=model_selection.index(component_list.nlu_ref.split(' ')[0])) # component_list = component_list if component_list.nlu_ref == ner_model_2_viz else StreamlitUtilsOS.get_pipe(ner_model_2_viz) # if title: st.header(title) # if show_text_input : text = st.text_area("Enter text you want to visualize NER classes for below", text, key=key) # if sub_title : st.subheader(sub_title) # if generate_code_sample: st.code(get_code_for_viz('NER',StreamlitUtilsOS.extract_name(component_list),text)) # if ner_tags is None: ner_tags = StreamlitUtilsOS.get_NER_tags_in_pipe(component_list) # # if not show_color_selector : # if show_label_select: # exp = st.expander("Select entity labels to highlight") # label_select = exp.multiselect( # "These labels are predicted by the NER model_anno_obj. 
Select which ones you want to display", # options=ner_tags,default=list(ner_tags)) # else : label_select = ner_tags # component_list.viz(text,write_to_streamlit=True, viz_type='ner',labels_to_viz=label_select,viz_colors=colors, streamlit_key=key) # else : # TODO WIP color select # cols = st.columns(3) # exp = cols[0].beta_expander("Select entity labels to display") # color = st.color_picker('Pick A Color', '#00f900',key = key) # color = cols[2].color_picker('Pick A Color for a specific entity label', '#00f900',key = key) # tag2color = cols[1].selectbox('Pick a ner tag to color', ner_tags,key = key) # colors[tag2color]=color # if show_table : st.write(component_list.predict(text, pipe_prediction_output_level='chunk'),key = key) # # if show_infos : # # VizUtilsStreamlitOS.display_infos() # StreamlitVizTracker.display_model_info(component_list.nlu_ref, pipes = [component_list]) # StreamlitVizTracker.display_footer() # # @staticmethod # def display_word_similarity( # component_list, #nlu component_list # default_texts: Tuple[str, str] = ("Donald Trump likes to party!", "Angela Merkel likes to party!"), # threshold: float = 0.5, # title: Optional[str] = "Embeddings Similarity Matrix & Visualizations ", # sub_tile :Optional[str]="Visualize `word-wise similarity matrix` and calculate `similarity scores` for `2 texts` and every `word embedding` loaded", # write_raw_pandas : bool = False, # display_embed_information:bool = True, # similarity_matrix = True, # show_algo_select : bool = True, # dist_metrics:List[str] =('cosine'), # set_wide_layout_CSS:bool=True, # generate_code_sample:bool = False, # key:str = "NLU_streamlit", # num_cols:int=2, # display_scalar_similarities : bool = False , # display_similarity_summary:bool = False, # model_select_position:str = 'side' , # main or side # show_infos:bool = True, # show_logo:bool = True, # ): # # """We visualize the following cases : # 1. Simmilarity between 2 words - > sim (word_emb1, word_emb2) # 2. 
Simmilarity between 2 sentences -> let weTW stand word word_emb of token T and sentence S # 2.1. Raw token level with merged embeddings -> sim([we11,we21,weT1], [we12,we22,weT2]) # 2.2 Autogenerate sentemb, basically does 2.1 in the Spark NLP backend # 2.3 Already using sentence_embedder model_anno_obj -> sim(se1,se2) # 3. Simmilarity between token and sentence -> sim([we11,w21,wT1], se2) # 4. Mirrored 3 # """ # # https://scikit-learn.org/stable/modules/classes.html#module-sklearn.metrics.pairwise # StreamlitVizTracker.footer_displayed=False # try : # import plotly.express as px # from sklearn.metrics.pairwise import distance_metrics # except :st.error("You need the sklearn and plotly package in your Python environment installed for similarity visualizations. Run <pip install sklearn plotly>") # if set_wide_layout_CSS : _set_block_container_style() # if title:st.header(title) # if show_logo :StreamlitVizTracker.show_logo() # if sub_tile : st.subheader(sub_tile) # # StreamlitVizTracker.loaded_word_embeding_pipes = [] # dist_metric_algos =distance_metrics() # dist_algos = list(dist_metric_algos.keys()) # # TODO NORMALIZE DISTANCES TO [0,1] for non cosine # if 'haversine' in dist_algos : dist_algos.remove('haversine') # not applicable in >2D # if 'precomputed' in dist_algos : dist_algos.remove('precomputed') # Not a dist # cols = st.columns(2) # text1 = cols[0].text_input("Text or word1",default_texts[0],key = key) # text2 = cols[1].text_input("Text or word2",default_texts[1], key=key) if len(default_texts) >1 else cols[1].text_input("Text or word2",'Please enter second string',key = key) # # exp = st.sidebar.beta_expander("Select additional Embedding Models and distance metric to compare ") # e_coms = StreamlitUtilsOS.find_all_embed_components(component_list) # embed_algos_to_load = [] # embed_pipes = [component_list] # dist_algo_selection = dist_metrics # if show_algo_select : # # emb_components_usable = Discoverer.get_components('embed') # emb_components_usable = 
[e for e in Discoverer.get_components('embed',True, include_aliases=True) if 'chunk' not in e and 'sentence' not in e] # loaded_embed_nlu_refs = [] # loaded_storage_refs = [] # for os_components in e_coms : # if not hasattr(os_components.info,'nlu_ref'): continue # r = os_components.info.nlu_ref # if 'en.' not in r and 'embed.' not in r and 'ner' not in r : loaded_embed_nlu_refs.append('en.embed.' + r) # elif 'en.' in r and 'embed.' not in r and 'ner' not in r: # r = r.split('en.')[0] # loaded_embed_nlu_refs.append('en.embed.' + r) # else : # loaded_embed_nlu_refs.append(StorageRefUtils.extract_storage_ref(os_components)) # loaded_storage_refs.append(StorageRefUtils.extract_storage_ref(os_components)) # # for l in loaded_embed_nlu_refs: # if l not in emb_components_usable : emb_components_usable.append(l) # # embed_algo_selection = exp.multiselect("Click to pick additional Embedding Algorithm",options=emb_components_usable,default=loaded_embed_nlu_refs,key = key) # # dist_algo_selection = exp.multiselect("Click to pick additional Distance Metric", options=dist_algos, default=dist_metrics, key = key) # emb_components_usable.sort() # loaded_embed_nlu_refs.sort() # dist_algos.sort() # # dist_metrics.sort() # if model_select_position =='side': # embed_algo_selection = st.sidebar.multiselect("Pick additional Word Embeddings for the Similarity Matrix",options=emb_components_usable,default=loaded_embed_nlu_refs,key = key) # dist_algo_selection = st.sidebar.multiselect("Pick additional Similarity Metrics ", options=dist_algos, default=dist_metrics, key = key) # else : # exp = st.expander("Pick additional Word Embeddings and Similarity Metrics") # embed_algo_selection = exp.multiselect("Pick additional Word Embeddings for the Similarity Matrix",options=emb_components_usable,default=loaded_embed_nlu_refs,key = key) # dist_algo_selection = exp.multiselect("Pick additional Similarity Metrics ", options=dist_algos, default=dist_metrics, key = key) # embed_algos_to_load = 
list(set(embed_algo_selection) - set(loaded_embed_nlu_refs)) # # for embedder in embed_algos_to_load:embed_pipes.append(nlu.load(embedder)) # # if generate_code_sample:st.code(get_code_for_viz('SIMILARITY',[StreamlitUtilsOS.extract_name(p) for p in embed_pipes],default_texts)) # # StreamlitVizTracker.loaded_word_embeding_pipes+=embed_pipes # similarity_metrics = {} # embed_vector_info = {} # cols_full = True # col_index=0 # for p in embed_pipes : # data1 = p.predict(text1,pipe_prediction_output_level='token').dropna() # data2 = p.predict(text2,pipe_prediction_output_level='token').dropna() # e_coms = StreamlitUtilsOS.find_all_embed_components(p) # modelhub_links = [ModelHubUtils.get_url_by_nlu_refrence(os_components.info.nlu_ref) if hasattr(os_components.info,'nlu_ref') else ModelHubUtils.get_url_by_nlu_refrence('') for os_components in e_coms] # e_cols = StreamlitUtilsOS.get_embed_cols(p) # for num_emb,e_col in enumerate(e_cols): # if col_index == num_cols-1 :cols_full=True # if cols_full : # cols = st.columns(num_cols) # col_index = 0 # cols_full = False # else:col_index+=1 # tok1 = data1['token'] # tok2 = data2['token'] # emb1 = data1[e_col] # emb2 = data2[e_col] # embed_mat1 = np.array([x for x in emb1]) # embed_mat2 = np.array([x for x in emb2]) # # e_name = e_col.split('word_embedding_')[-1] # e_name = e_coms[num_emb].info.nlu_ref if hasattr(e_coms[num_emb].info,'nlu_ref') else e_col.split('word_embedding_')[-1] if 'en.' in e_col else e_col # e_name = e_name.split('embed.')[-1] if 'en.' 
in e_name else e_name # if 'ner' in e_name : e_name = loaded_storage_refs[num_emb] # # embed_vector_info[e_name]= {"Vector Dimension ":embed_mat1.shape[1], # "Num Vectors":embed_mat1.shape[0] + embed_mat1.shape[0], # "NLU_reference":e_coms[num_emb].info.nlu_ref if hasattr(e_coms[num_emb].info,'nlu_ref') else ' ', # "Spark_NLP_reference":ModelHubUtils.NLU_ref_to_NLP_ref(e_coms[num_emb].info.nlu_ref if hasattr(e_coms[num_emb].info,'nlu_ref') else ' '), # "Storage Reference":loaded_storage_refs[num_emb], # 'Modelhub info': modelhub_links[num_emb]} # for dist_algo in dist_algo_selection: # # scalar_similarities[e_col][dist_algo]={} # sim_score = dist_metric_algos[dist_algo](embed_mat1,embed_mat2) # sim_score = pd.DataFrame(sim_score) # sim_score.index = tok1.values # sim_score.columns = tok2.values # sim_score.columns = StreamlitVizTracker.pad_duplicate_tokens(list(sim_score.columns)) # sim_score.index = StreamlitVizTracker.pad_duplicate_tokens(list(sim_score.index)) # if write_raw_pandas :st.write(sim_score,key = key) # if sim_score.shape == (1,1) : # sim_score = sim_score.iloc[0][0] # sim_score = round(sim_score,2) # if sim_score > threshold: # st.success(sim_score) # st.success(f'Scalar Similarity={sim_score} for distance metric={dist_algo}') # st.error('No similarity matrix for only 2 tokens. 
Try entering at least 1 sentences in a field') # else: # st.error(f'Scalar Similarity={sim_score} for distance metric={dist_algo}') # else : # ploty_avaiable = True # # for tok emb, sum rows and norm by rows, then sum cols and norm by cols to generate a scalar from matrix # scalar_sim_score = np.sum((np.sum(sim_score,axis=0) / sim_score.shape[0])) / sim_score.shape[1] # scalar_sim_score = round(scalar_sim_score,2) # # if display_scalar_similarities: # if scalar_sim_score > threshold:st.success(f'Scalar Similarity :{scalar_sim_score} for distance metric={dist_algo}') # else: st.error(f'Scalar Similarity :{scalar_sim_score} for embedder={e_col} distance metric={dist_algo}') # if similarity_matrix: # if ploty_avaiable : # fig = px.imshow(sim_score, labels=dict(color="similarity"))#, title=f'Simmilarity Matrix for embedding_model={e_name} distance metric={dist_algo}') # # st.write(fig,key =key) # similarity_metrics[f'{e_name}_{dist_algo}_similarity']={ # 'scalar_similarity' : scalar_sim_score, # 'dist_metric' : dist_algo, # 'embedding_model': e_name, # 'modelhub_info' : modelhub_links[num_emb], # } # subh = f"""Embedding-Model=`{e_name}`, Similarity-Score=`{scalar_sim_score}`, distance metric=`{dist_algo}`""" # cols[col_index].markdown(subh) # cols[col_index].write(fig, key=key) # else : pass # todo fallback plots # # if display_similarity_summary: # exp = st.expander("Similarity summary") # exp.write(similarity_metrics) # if display_embed_information: # exp = st.expander("Embedding vector information") # exp.write(embed_vector_info) # # if show_infos : # # VizUtilsStreamlitOS.display_infos() # StreamlitVizTracker.display_model_info(component_list.nlu_ref, pipes = [component_list]) # StreamlitVizTracker.display_footer() # # # # # @staticmethod # def display_low_dim_embed_viz_token( # component_list, # nlu component_list # default_texts: List[str] = ("Donald Trump likes to party!", "Angela Merkel likes to party!", 'Peter HATES TO PARTTY!!!! 
:('), # title: Optional[str] = "Lower dimensional Manifold visualization for word embeddings", # sub_title: Optional[str] = "Apply any of the 11 `Manifold` or `Matrix Decomposition` algorithms to reduce the dimensionality of `Word Embeddings` to `1-D`, `2-D` and `3-D` ", # write_raw_pandas : bool = False , # default_applicable_algos : List[str] = ('TSNE','PCA',), # applicable_algos : List[str] = ("TSNE", "PCA"),#,'LLE','Spectral Embedding','MDS','ISOMAP','SVD aka LSA','DictionaryLearning','FactorAnalysis','FastICA','KernelPCA',), # LatentDirichletAllocation 'NMF', # target_dimensions : List[int] = (1,2,3), # show_algo_select : bool = True, # show_embed_select : bool = True, # show_color_select: bool = True, # MAX_DISPLAY_NUM:int=100, # display_embed_information:bool=True, # set_wide_layout_CSS:bool=True, # num_cols: int = 3, # model_select_position:str = 'side', # side or main # key:str = "NLU_streamlit", # additional_classifiers_for_coloring:List[str]=['pos', 'sentiment'], # extra_NLU_models_for_hueing: List[str] = ('pos','sentiment'), # generate_code_sample:bool = False, # show_infos:bool = True, # show_logo:bool = True, # ): # # TODO dynamic columns infer for mouse over, TOKEN LEVEL FEATURS APPLICABLE!!!!! # # NIOT CRASH [1], [a b], [ab] # # todo dynamic deduct Tok vs Sent vs Doc vs Chunk embeds # # todo selectable color features # # todo selectable mouseover features # from nlu.component_list.viz.streamlit_viz.streamlit_utils_OS import StreamlitUtilsOS # # # VizUtilsStreamlitOS.footer_displayed=False # try : # import plotly.express as px # from sklearn.metrics.pairwise import distance_metrics # except :st.error("You need the sklearn and plotly package in your Python environment installed for similarity visualizations. 
Run <pip install sklearn plotly>") # if len(default_texts) > MAX_DISPLAY_NUM : default_texts = default_texts[:MAX_DISPLAY_NUM] # if set_wide_layout_CSS : _set_block_container_style() # if title:st.header(title) # if sub_title:st.subheader(sub_title) # # if show_logo :VizUtilsStreamlitOS.show_logo() # # # VizUtilsStreamlitOS.loaded_word_embeding_pipes = [] # loaded_word_embeding_pipes = [] # # # data = st.text_area('Enter N texts, seperated by new lines to visualize Word Embeddings for ','\n'.join(default_texts)) # data = data.split("\n") # while '' in data : data.remove('') # if len(data)<=1: # st.error("Please enter more than 2 lines of text, seperated by new lines (hit <ENTER>)") # return # else : algos = default_applicable_algos # # TODO dynamic color inference for plotting?? # if show_color_select: feature_to_color_by = st.selectbox('Feature to color plots by ',['pos','sentiment',],0) # text_col = 'token' # embed_algos_to_load = [] # embed_pipes = [component_list] # e_coms = StreamlitUtilsOS.find_all_embed_components(component_list) # # if show_algo_select : # exp = st.expander("Select dimension reduction technique to apply") # algos = exp.multiselect( # "Reduce embedding dimensionality to something visualizable", # options=("TSNE", "ISOMAP",'LLE','Spectral Embedding','MDS','PCA','SVD aka LSA','DictionaryLearning','FactorAnalysis','FastICA','KernelPCA',),default=applicable_algos,) # # emb_components_usable = [e for e in Discoverer.get_components('embed',True, include_aliases=True) if 'chunk' not in e and 'sentence' not in e] # loaded_embed_nlu_refs = [] # loaded_classifier_nlu_refs = [] # loaded_storage_refs = [] # for os_components in e_coms : # if not hasattr(os_components.info,'nlu_ref'): continue # r = os_components.info.nlu_ref # if 'en.' not in r and 'embed.' not in r and 'ner' not in r : loaded_embed_nlu_refs.append('en.embed.' + r) # elif 'en.' in r and 'embed.' 
not in r and 'ner' not in r: # r = r.split('en.')[0] # loaded_embed_nlu_refs.append('en.embed.' + r) # else : # loaded_embed_nlu_refs.append(StorageRefUtils.extract_storage_ref(os_components)) # loaded_storage_refs.append(StorageRefUtils.extract_storage_ref(os_components)) # # for p in StreamlitVizTracker.loaded_word_embeding_pipes : loaded_embed_nlu_refs.append(p.nlu_ref) # loaded_embed_nlu_refs = list(set(loaded_embed_nlu_refs)) # for l in loaded_embed_nlu_refs: # if l not in emb_components_usable : emb_components_usable.append(l) # emb_components_usable.sort() # loaded_embed_nlu_refs.sort() # if model_select_position =='side': # embed_algo_selection = st.sidebar.multiselect("Pick additional Word Embeddings for the Dimension Reduction",options=emb_components_usable,default=loaded_embed_nlu_refs,key = key) # else : # exp = st.expander("Pick additional Word Embeddings") # embed_algo_selection = exp.multiselect("Pick additional Word Embeddings for the Dimension Reduction",options=emb_components_usable,default=loaded_embed_nlu_refs,key = key) # embed_algos_to_load = list(set(embed_algo_selection) - set(loaded_embed_nlu_refs)) # # for embedder in embed_algos_to_load:embed_pipes.append(nlu.load(embedder + f' {" ".join(additional_classifiers_for_coloring)}')) # StreamlitVizTracker.loaded_word_embeding_pipes+=embed_pipes # # # TODO load/update classifier pipes # for nlu_ref in additional_classifiers_for_coloring : # already_loaded=False # if 'pos' in nlu_ref : continue # # for p in VizUtilsStreamlitOS.loaded_document_classifier_pipes: # # if p.nlu_ref == nlu_ref : already_loaded = True # # if not already_loaded : VizUtilsStreamlitOS.loaded_token_level_classifiers.append(nlu.load(nlu_ref)) # else : # for p in StreamlitVizTracker.loaded_document_classifier_pipes: # if p.nlu_ref == nlu_ref : already_loaded = True # if not already_loaded : StreamlitVizTracker.loaded_document_classifier_pipes.append(nlu.load(nlu_ref)) # # col_index = 0 # cols = st.columns(num_cols) # def 
are_cols_full(): return col_index == num_cols # token_feature_pipe = StreamlitUtilsOS.get_pipe('en.dep.typed') # ## TODO , not all pipes have sentiment/pos etc.. models for hueing loaded.... # ## Lets FIRST predict with the classifiers/Token level feature generators and THEN apply embed component_list?? # for p in StreamlitVizTracker.loaded_word_embeding_pipes : # # TODO, run all classifiers pipes. FOr Sentence/Doc level stuff, we can only use Senc/Doc/Input dependent level annotators # # TODO token features TYPED DEP/ UNTYPED DEP/ POS ---> LOAD DEP/UNTYPED DEP/ POS and then APPEN NLU_COMPONENTS!!!!! TO EXISTING PIPE # classifier_cols = [] # # for class_p in StreamlitVizTracker.loaded_document_classifier_pipes: # data = class_p.predict(data, pipe_prediction_output_level='document').dropna() # classifier_cols.append(StreamlitUtilsOS.get_classifier_cols(class_p)) # # p = StreamlitUtilsOS.merge_token_classifiers_with_embed_pipe(p, token_feature_pipe) # predictions = p.predict(data,pipe_prediction_output_level='token').dropna() # e_col = StreamlitUtilsOS.find_embed_col(predictions) # e_com = StreamlitUtilsOS.find_embed_component(p) # embedder_name = StreamlitUtilsOS.extract_name(e_com) # emb = predictions[e_col] # mat = np.array([x for x in emb]) # for algo in algos : # if len(mat.shape)>2 : mat =mat.reshape(len(emb),mat.shape[-1]) # # # calc reduced dimensionality with every algo # #todo try/catch block for embed failures? 
# if 1 in target_dimensions: # low_dim_data = StreamlitUtilsOS.get_manifold_algo(algo,1).fit_transform(mat) # x = low_dim_data[:,0] # y = np.zeros(low_dim_data[:,0].shape) # tsne_df = pd.DataFrame({'x':x,'y':y, 'text':predictions[text_col], 'pos':predictions.pos, 'sentiment' : predictions.sentiment}) # fig = px.scatter(tsne_df, x="x", y="y",color=feature_to_color_by, hover_data=['token','text','sentiment', 'pos']) # subh = f"""Word-Embeddings =`{embedder_name}`, Manifold-Algo =`{algo}` for `D=1`""" # cols[col_index].markdown(subh) # cols[col_index].write(fig,key=key) # col_index+=1 # if are_cols_full() : # cols = st.columns(num_cols) # col_index = 0 # if 2 in target_dimensions: # low_dim_data = StreamlitUtilsOS.get_manifold_algo(algo,2).fit_transform(mat) # x = low_dim_data[:,0] # y = low_dim_data[:,1] # tsne_df = pd.DataFrame({'x':x,'y':y, 'text':predictions[text_col], 'pos':predictions.pos, 'sentiment':predictions.sentiment, }) # fig = px.scatter(tsne_df, x="x", y="y",color=feature_to_color_by, hover_data=['text']) # subh = f"""Word-Embeddings =`{embedder_name}`, Manifold-Algo =`{algo}` for `D=2`""" # cols[col_index].markdown(subh) # cols[col_index].write(fig,key=key) # # st.write(fig) # col_index+=1 # if are_cols_full() : # cols = st.columns(num_cols) # col_index = 0 # if 3 in target_dimensions: # low_dim_data = StreamlitUtilsOS.get_manifold_algo(algo,3).fit_transform(mat) # x = low_dim_data[:,0] # y = low_dim_data[:,1] # z = low_dim_data[:,2] # tsne_df = pd.DataFrame({'x':x,'y':y,'z':z, 'text':predictions[text_col], 'pos':predictions.pos, 'sentiment':predictions.sentiment, }) # fig = px.scatter_3d(tsne_df, x="x", y="y", z='z',color=feature_to_color_by, hover_data=['text']) # subh = f"""Word-Embeddings =`{embedder_name}`, Manifold-Algo =`{algo}` for `D=3`""" # cols[col_index].markdown(subh) # cols[col_index].write(fig,key=key) # # # st.write(fig) # col_index+=1 # if are_cols_full() : # cols = st.columns(num_cols) # col_index = 0 # # Todo fancy embed infos etc # 
# if display_embed_information: display_embed_vetor_information(e_com,mat) # #
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/viz/streamlit_viz/streamlit_viz_tracker.py
streamlit_viz_tracker.py
import nlu
from nlu.discovery import Discoverer
from typing import List, Optional, Union
import streamlit as st
import pandas as pd
from nlu.pipe.viz.streamlit_viz.streamlit_utils_OS import StreamlitUtilsOS
from nlu.pipe.viz.streamlit_viz.gen_streamlit_code import get_code_for_viz
from nlu.pipe.viz.streamlit_viz.styles import _set_block_container_style
from nlu.pipe.viz.streamlit_viz.streamlit_viz_tracker import StreamlitVizTracker


class ClassifierStreamlitBlock():
    """Streamlit building block that renders text-classification results for one or more nlu pipelines."""

    @staticmethod
    def visualize_classes(
            pipe,  # nlu component_list
            text: Union[str, list, pd.DataFrame, pd.Series, List[str]] = (
                    'I love NLU and Streamlit and sunny days!', 'I hate rainy daiys', 'CALL NOW AND WIN 1000$M'),
            output_level: Optional[str] = 'document',
            title: Optional[str] = "Text Classification",
            sub_title: Optional[str] = 'View predicted `classes` and `confidences` for `hundreds of text classifiers` in `over 200 languages`',
            metadata: bool = False,
            positions: bool = False,
            set_wide_layout_CSS: bool = True,
            generate_code_sample: bool = False,
            key: str = "NLU_streamlit",
            show_model_selector: bool = True,
            model_select_position: str = 'side',
            show_infos: bool = True,
            show_logo: bool = True,
    ) -> None:
        """Render predicted classes and confidences for every selected classifier pipeline.

        The user may add extra classifiers via a multiselect; each selected model is
        loaded with ``nlu.load`` and its predictions are concatenated column-wise into
        one DataFrame that is displayed with Streamlit.

        :param pipe: the already-loaded nlu pipeline to visualize.
        :param text: default input text(s); replaced by a text_area unless a
            DataFrame/Series is passed.
        :param output_level: nlu prediction output level (e.g. 'document').
        :param metadata: forward prediction metadata columns.
        :param positions: forward positional columns.
        :param key: Streamlit widget key prefix.
        :returns: None — output is written to the Streamlit app.
        """
        if show_logo: StreamlitVizTracker.show_logo()
        if set_wide_layout_CSS: _set_block_container_style()
        if title: st.header(title)
        if sub_title: st.subheader(sub_title)
        # Offer a free-text input unless the caller already supplied tabular data.
        if not isinstance(text, (pd.DataFrame, pd.Series)):
            text = st.text_area('Enter N texts, seperated by new lines to view classification results for',
                                '\n'.join(text) if isinstance(text, list) else text, key=key)
            # Drop empty lines (original used remove-in-a-while loop; same result).
            text = [t for t in text.split("\n") if t != '']
        classifier_pipes = [pipe]
        classifier_components_usable = [e for e in Discoverer.get_components('classify', True, include_aliases=True)]
        classifier_components = StreamlitUtilsOS.find_all_classifier_components(pipe)
        # BUGFIX: the original removed 'converter' refs from the list it was iterating,
        # which silently skips the element following every removal. Filter instead.
        loaded_classifier_nlu_refs = [c.nlu_ref for c in classifier_components if 'converter' not in c.nlu_ref]
        for ref in loaded_classifier_nlu_refs:
            if ref not in classifier_components_usable:
                classifier_components_usable.append(ref)
        classifier_components_usable.sort()
        # BUGFIX: same remove-while-iterating pattern; keep only refs that are selectable.
        loaded_classifier_nlu_refs = sorted(r for r in loaded_classifier_nlu_refs
                                            if r in classifier_components_usable)
        # BUGFIX: was unbound (NameError) when show_model_selector=False.
        classifier_components_selection = []
        if show_model_selector:
            if model_select_position == 'side':
                classifier_components_selection = st.sidebar.multiselect(
                    "Pick additional Classifiers", options=classifier_components_usable,
                    default=loaded_classifier_nlu_refs, key=key)
            else:
                classifier_components_selection = st.multiselect(
                    "Pick additional Classifiers", options=classifier_components_usable,
                    default=loaded_classifier_nlu_refs, key=key)
        # Load every newly selected classifier that is not already part of the pipeline.
        classifier_algos_to_load = list(set(classifier_components_selection) - set(loaded_classifier_nlu_refs))
        for classifier in classifier_algos_to_load:
            classifier_pipes.append(nlu.load(classifier))
        StreamlitVizTracker.loaded_document_classifier_pipes += classifier_pipes
        if generate_code_sample:
            st.code(get_code_for_viz('CLASSES', [StreamlitUtilsOS.extract_name(p) for p in classifier_pipes], text))
        dfs = []
        all_classifier_cols = []
        for p in classifier_pipes:
            df = p.predict(text, output_level=output_level, metadata=metadata, positions=positions)
            # BUGFIX: filter instead of removing columns while iterating them.
            classifier_cols = [c for c in StreamlitUtilsOS.get_classifier_cols(p) if c in df.columns]
            if 'text' in df.columns:
                classifier_cols += ['text']
            elif 'document' in df.columns:
                classifier_cols += ['document']
            all_classifier_cols += classifier_cols
            dfs.append(df)
        df = pd.concat(dfs, axis=1)
        df = df.loc[:, ~df.columns.duplicated()]
        # Deduplicate and drop any column that did not survive the concat
        # (BUGFIX: original mutated all_classifier_cols while iterating it).
        all_classifier_cols = list(set(c for c in all_classifier_cols if c in df.columns))
        if len(all_classifier_cols) == 0:
            st.error('No classes detected')
        else:
            st.write(df[all_classifier_cols], key=key)
        if show_infos:
            # VizUtilsStreamlitOS.display_infos()
            StreamlitVizTracker.display_model_info(pipe.nlu_ref, pipes=[pipe])
            StreamlitVizTracker.display_footer()
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/viz/streamlit_viz/viz_building_blocks/classifier.py
classifier.py
import nlu
from nlu.discovery import Discoverer
from typing import List, Optional
import streamlit as st
import pandas as pd
from nlu.pipe.viz.streamlit_viz.streamlit_utils_OS import StreamlitUtilsOS
from nlu.pipe.viz.streamlit_viz.gen_streamlit_code import get_code_for_viz
from nlu.pipe.viz.streamlit_viz.styles import _set_block_container_style
from nlu.pipe.viz.streamlit_viz.streamlit_viz_tracker import StreamlitVizTracker


class TokenFeaturesStreamlitBlock():
    """Streamlit building block that shows token-level features generated by nlu pipelines."""

    @staticmethod
    def visualize_tokens_information(
            pipe,  # nlu component_list
            text: str,
            title: Optional[str] = "Token Features",
            sub_title: Optional[str] = 'Pick from `over 1000+ models` on the left and `view the generated features`',
            show_feature_select: bool = True,
            features: Optional[List[str]] = None,
            full_metadata: bool = True,
            output_level: str = 'token',
            positions: bool = False,
            set_wide_layout_CSS: bool = True,
            generate_code_sample: bool = False,
            key="NLU_streamlit",
            show_model_select=True,
            model_select_position: str = 'side',  # main or side
            show_infos: bool = True,
            show_logo: bool = True,
            show_text_input: bool = True,
    ) -> None:
        """Visualizer for token features.

        Predicts token-level features with the given pipeline (plus any models the
        user adds via the multiselect), concatenates the results column-wise and
        renders the chosen feature columns as a Streamlit dataframe.

        :param pipe: the already-loaded nlu pipeline.
        :param text: default input text, editable in the app when show_text_input.
        :param features: optional explicit list of columns to display; defaults to
            all generated columns minus entity/embedding columns.
        :returns: None — output is written to the Streamlit app.
        """
        StreamlitVizTracker.footer_displayed = False
        if show_logo: StreamlitVizTracker.show_logo()
        if set_wide_layout_CSS: _set_block_container_style()
        if title: st.header(title)
        if sub_title: st.subheader(sub_title)
        token_pipes = [pipe]
        if show_text_input:
            text = st.text_area("Enter text you want to view token features for", text, key=key)
        # BUGFIX: both were unbound (NameError) below when show_model_select=False.
        model_selection = []
        loaded_nlu_refs = []
        if show_model_select:
            token_pipes_components_usable = [e for e in Discoverer.get_components(get_all=True)]
            # BUGFIX: the original removed 'converter' refs from the list it was
            # iterating, skipping the next element after each removal. Filter instead.
            loaded_nlu_refs = [c.nlu_ref for c in pipe.components if 'converter' not in c.nlu_ref]
            for ref in loaded_nlu_refs:
                if ref not in token_pipes_components_usable:
                    token_pipes_components_usable.append(ref)
            token_pipes_components_usable = list(set(token_pipes_components_usable))
            # Deduplicate and drop empty placeholder refs.
            loaded_nlu_refs = [r for r in set(loaded_nlu_refs) if r not in ('', ' ')]
            token_pipes_components_usable.sort()
            loaded_nlu_refs.sort()
            if model_select_position == 'side':
                model_selection = st.sidebar.multiselect(
                    "Pick any additional models for token features",
                    options=token_pipes_components_usable, default=loaded_nlu_refs, key=key)
            else:
                model_selection = st.multiselect(
                    "Pick any additional models for token features",
                    options=token_pipes_components_usable, default=loaded_nlu_refs, key=key)
        # Load every newly selected model that is not already part of the pipeline.
        models_to_load = list(set(model_selection) - set(loaded_nlu_refs))
        for model in models_to_load:
            token_pipes.append(nlu.load(model))
        StreamlitVizTracker.loaded_token_pipes += token_pipes
        if generate_code_sample:
            st.code(get_code_for_viz('TOKEN', [StreamlitUtilsOS.extract_name(p) for p in token_pipes], text))
        dfs = []
        for p in token_pipes:
            df = p.predict(text, output_level=output_level, metadata=full_metadata, positions=positions)
            dfs.append(df)
        df = pd.concat(dfs, axis=1)
        df = df.loc[:, ~df.columns.duplicated()]
        if show_feature_select:
            exp = st.expander("Select token features to display")
            features = exp.multiselect(
                "Token features",
                options=list(df.columns),
                default=list(df.columns)
            )
        # Robustness: fall back to every column when no explicit selection exists.
        if features is None:
            features = list(df.columns)
        # BUGFIX: the original `if 'entities' and 'embedding' in f` only tested
        # 'embedding' in f (operator precedence) and removed items while iterating,
        # skipping entries. Hide both entity and embedding columns from the table.
        features = [f for f in features if 'entities' not in f and 'embedding' not in f]
        st.dataframe(df[features])
        if show_infos:
            # VizUtilsStreamlitOS.display_infos()
            StreamlitVizTracker.display_model_info(pipe.nlu_ref, pipes=[pipe])
            StreamlitVizTracker.display_footer()
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/viz/streamlit_viz/viz_building_blocks/token_features.py
token_features.py
import nlu
from nlu.discovery import Discoverer
from nlu.pipe.utils.resolution.storage_ref_utils import StorageRefUtils
from typing import List, Tuple, Optional
import streamlit as st
from nlu.utils.modelhub.modelhub_utils import ModelHubUtils
import numpy as np
import pandas as pd
from nlu.pipe.viz.streamlit_viz.streamlit_utils_OS import StreamlitUtilsOS
from nlu.pipe.viz.streamlit_viz.gen_streamlit_code import get_code_for_viz
from nlu.pipe.viz.streamlit_viz.styles import _set_block_container_style
from nlu.pipe.viz.streamlit_viz.streamlit_viz_tracker import StreamlitVizTracker


class WordSimilarityStreamlitBlock():
    """Streamlit building block that renders word-embedding similarity matrices for two input texts."""

    @staticmethod
    def display_word_similarity(
            pipe,  # nlu component_list
            default_texts: Tuple[str, str] = ("Donald Trump likes to party!", "Angela Merkel likes to party!"),
            threshold: float = 0.5,
            title: Optional[str] = "Embeddings Similarity Matrix & Visualizations ",
            sub_tile: Optional[
                str] = "Visualize `word-wise similarity matrix` and calculate `similarity scores` for `2 texts` and every `word embedding` loaded",
            write_raw_pandas: bool = False,
            display_embed_information: bool = True,
            similarity_matrix=True,
            show_algo_select: bool = True,
            # BUGFIX: was ('cosine') — a plain str, not a tuple; iterating it yielded
            # single characters and crashed the metric lookup when show_algo_select=False.
            dist_metrics: List[str] = ('cosine',),
            set_wide_layout_CSS: bool = True,
            generate_code_sample: bool = False,
            key: str = "NLU_streamlit",
            num_cols: int = 2,
            display_scalar_similarities: bool = False,
            display_similarity_summary: bool = False,
            model_select_position: str = 'side',  # main or side
            show_infos: bool = True,
            show_logo: bool = True,
    ):
        """We visualize the following cases :
        1. Simmilarity between 2 words - > sim (word_emb1, word_emb2)
        2. Simmilarity between 2 sentences -> let weTW stand word word_emb of token T and sentence S
            2.1. Raw token level with merged embeddings -> sim([we11,we21,weT1], [we12,we22,weT2])
            2.2 Autogenerate sentemb, basically does 2.1 in the Spark NLP backend
            2.3 Already using sentence_embedder model_anno_obj -> sim(se1,se2)
        3. Simmilarity between token and sentence -> sim([we11,w21,wT1], se2)
        4. Mirrored 3
        """
        # https://scikit-learn.org/stable/modules/classes.html#module-sklearn.metrics.pairwise
        StreamlitVizTracker.footer_displayed = False
        try:
            import plotly.express as px
            from sklearn.metrics.pairwise import distance_metrics
        except ImportError:
            # BUGFIX: the original bare except fell through and then crashed with a
            # NameError on `px`/`distance_metrics`; bail out after reporting instead.
            st.error(
                "You need the sklearn and plotly package in your Python environment installed for similarity visualizations. Run <pip install sklearn plotly>")
            return
        if set_wide_layout_CSS: _set_block_container_style()
        if title: st.header(title)
        if show_logo: StreamlitVizTracker.show_logo()
        if sub_tile: st.subheader(sub_tile)
        StreamlitVizTracker.loaded_word_embeding_pipes = []
        dist_metric_algos = distance_metrics()
        dist_algos = list(dist_metric_algos.keys())
        if 'haversine' in dist_algos: dist_algos.remove('haversine')  # not applicable in >2D
        if 'precomputed' in dist_algos: dist_algos.remove('precomputed')  # Not a dist
        cols = st.columns(2)
        text1 = cols[0].text_input("Text or word1", default_texts[0], key=key + 'field_1')
        text2 = cols[1].text_input("Text or word2", default_texts[1], key=key + 'field_2') if len(
            default_texts) > 1 else cols[1].text_input("Text or word2", 'Please enter second string', key=key)
        e_coms = StreamlitUtilsOS.find_all_embed_components(pipe)
        embed_algos_to_load = []
        embed_pipes = [pipe]
        dist_algo_selection = dist_metrics
        # BUGFIX: both lists were unbound below when show_algo_select=False.
        loaded_embed_nlu_refs = []
        loaded_storage_refs = []
        if show_algo_select:
            # emb_components_usable = Discoverer.get_components('embed')
            emb_components_usable = [e for e in Discoverer.get_components('embed', True, include_aliases=True)
                                     if 'chunk' not in e and 'sentence' not in e]
            for c in e_coms:
                r = c.nlu_ref
                if 'en.' not in r and 'embed.' not in r and 'ner' not in r:
                    loaded_embed_nlu_refs.append('en.embed.' + r)
                elif 'en.' in r and 'embed.' not in r and 'ner' not in r:
                    # BUGFIX: split('en.')[0] is the (empty) prefix, which produced
                    # the bogus ref 'en.embed.'; take the suffix after 'en.' instead.
                    r = r.split('en.')[-1]
                    loaded_embed_nlu_refs.append('en.embed.' + r)
                else:
                    loaded_embed_nlu_refs.append(StorageRefUtils.extract_storage_ref(c))
                loaded_storage_refs.append(StorageRefUtils.extract_storage_ref(c))
            # Include refs of embedding pipes loaded in earlier runs of this block.
            for p in StreamlitVizTracker.loaded_word_embeding_pipes:
                if p != pipe: loaded_embed_nlu_refs.append(p.nlu_ref)
            for l in loaded_embed_nlu_refs:
                if l not in emb_components_usable: emb_components_usable.append(l)
            emb_components_usable.sort()
            loaded_embed_nlu_refs.sort()
            dist_algos.sort()
            if model_select_position == 'side':
                embed_algo_selection = st.sidebar.multiselect(
                    "Pick additional Word Embeddings for the Similarity Matrix",
                    options=emb_components_usable, default=loaded_embed_nlu_refs, key=key)
                dist_algo_selection = st.sidebar.multiselect(
                    "Pick additional Similarity Metrics ", options=dist_algos, default=dist_metrics, key=key)
            else:
                exp = st.expander("Pick additional Word Embeddings and Similarity Metrics")
                embed_algo_selection = exp.multiselect(
                    "Pick additional Word Embeddings for the Similarity Matrix",
                    options=emb_components_usable, default=loaded_embed_nlu_refs, key=key)
                dist_algo_selection = exp.multiselect(
                    "Pick additional Similarity Metrics ", options=dist_algos, default=dist_metrics, key=key)
            embed_algos_to_load = list(set(embed_algo_selection) - set(loaded_embed_nlu_refs))
        for embedder in embed_algos_to_load:
            embed_pipes.append(nlu.load(embedder))
        if generate_code_sample:
            st.code(get_code_for_viz('SIMILARITY', [StreamlitUtilsOS.extract_name(p) for p in embed_pipes],
                                     default_texts))
        StreamlitVizTracker.loaded_word_embeding_pipes += embed_pipes
        similarity_metrics = {}
        embed_vector_info = {}
        cols_full = True
        col_index = 0

        def _normalize_matrix(m):
            # Row-normalize so the (1 - distance) transform yields values in a
            # similarity-like range; nan_to_num guards zero-norm rows.
            return np.nan_to_num(m / np.linalg.norm(m, axis=1, keepdims=True))

        # for p in embed_pipes :
        for p in StreamlitVizTracker.loaded_word_embeding_pipes:
            data1 = p.predict(text1, output_level='token', get_embeddings=True).dropna()
            data2 = p.predict(text2, output_level='token', get_embeddings=True).dropna()
            e_coms = StreamlitUtilsOS.find_all_embed_components(p)
            modelhub_links = [ModelHubUtils.get_url_by_nlu_refrence(c.nlu_ref) for c in e_coms]
            e_cols = StreamlitUtilsOS.get_embed_cols(p)
            for num_emb, e_col in enumerate(e_cols):
                # Lay plots out in a grid of num_cols Streamlit columns.
                if col_index == num_cols - 1: cols_full = True
                if cols_full:
                    cols = st.columns(num_cols)
                    col_index = 0
                    cols_full = False
                else:
                    col_index += 1
                tok1 = data1['token']
                tok2 = data2['token']
                emb1 = data1[e_col]
                emb2 = data2[e_col]
                embed_mat1 = _normalize_matrix(np.array([x for x in emb1]))
                embed_mat2 = _normalize_matrix(np.array([x for x in emb2]))
                # e_name = e_col.split('word_embedding_')[-1]
                e_name = e_coms[num_emb].nlu_ref
                e_name = e_name.split('embed.')[-1] if 'en.' in e_name else e_name
                if 'ner' in e_name: e_name = loaded_storage_refs[num_emb]
                embed_vector_info[e_name] = {
                    "Vector Dimension ": embed_mat1.shape[1],
                    # BUGFIX: original added embed_mat1.shape[0] to itself.
                    "Num Vectors": embed_mat1.shape[0] + embed_mat2.shape[0],
                    "NLU_reference": e_coms[num_emb].nlu_ref,
                    "Spark_NLP_reference": ModelHubUtils.NLU_ref_to_NLP_ref(e_coms[num_emb].nlu_ref),
                    "Storage Reference": loaded_storage_refs[num_emb],
                    'Modelhub info': modelhub_links[num_emb]}
                for dist_algo in dist_algo_selection:
                    # scalar_similarities[e_col][dist_algo]={}
                    # Convert a distance matrix into a similarity matrix via 1 - d.
                    sim_score = ((dist_metric_algos[dist_algo](embed_mat1, embed_mat2) - 1) * -1)
                    sim_score = pd.DataFrame(sim_score)
                    sim_score.index = tok1.values
                    sim_score.columns = tok2.values
                    sim_score.columns = StreamlitVizTracker.pad_duplicate_tokens(list(sim_score.columns))
                    sim_score.index = StreamlitVizTracker.pad_duplicate_tokens(list(sim_score.index))
                    if write_raw_pandas: st.write(sim_score, key=key)
                    if sim_score.shape == (1, 1):
                        # Only one token per text -> a single scalar, no matrix plot.
                        sim_score = sim_score.iloc[0, 0]
                        sim_score = round(sim_score, 2)
                        if sim_score > threshold:
                            st.success(sim_score)
                            st.success(f'Scalar Similarity={sim_score} for distance metric={dist_algo}')
                            st.error(
                                'No similarity matrix for only 2 tokens. Try entering at least 1 sentences in a field')
                        else:
                            st.error(f'Scalar Similarity={sim_score} for distance metric={dist_algo}')
                    else:
                        ploty_avaiable = True
                        # for tok emb, sum rows and norm by rows, then sum cols and norm by cols to generate a scalar from matrix
                        scalar_sim_score = np.sum(
                            (np.sum(sim_score, axis=0) / sim_score.shape[0])) / sim_score.shape[1]
                        scalar_sim_score = round(scalar_sim_score, 2)
                        if display_scalar_similarities:
                            if scalar_sim_score > threshold:
                                st.success(f'Scalar Similarity :{scalar_sim_score} for distance metric={dist_algo}')
                            else:
                                st.error(
                                    f'Scalar Similarity :{scalar_sim_score} for embedder={e_col} distance metric={dist_algo}')
                        if similarity_matrix:
                            if ploty_avaiable:
                                fig = px.imshow(sim_score, labels=dict(
                                    color="similarity"))  # , title=f'Simmilarity Matrix for embedding_model={e_name} distance metric={dist_algo}')
                                # st.write(fig,key =key)
                                similarity_metrics[f'{e_name}_{dist_algo}_similarity'] = {
                                    'scalar_similarity': scalar_sim_score,
                                    'dist_metric': dist_algo,
                                    'embedding_model': e_name,
                                    'modelhub_info': modelhub_links[num_emb],
                                }
                                subh = f"""Embedding-Model=`{e_name}`, Similarity-Score=`{scalar_sim_score}`, distance metric=`{dist_algo}`"""
                                cols[col_index].markdown(subh)
                                cols[col_index].plotly_chart(fig, key=key, use_container_width=True)
                            else:
                                pass  # todo fallback plots
        if display_similarity_summary:
            exp = st.expander("Similarity summary")
            exp.write(similarity_metrics)
        if display_embed_information:
            exp = st.expander("Embedding vector information")
            exp.write(embed_vector_info)
        if show_infos:
            # VizUtilsStreamlitOS.display_infos()
            StreamlitVizTracker.display_model_info(pipe.nlu_ref, pipes=[pipe])
            StreamlitVizTracker.display_footer()
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/viz/streamlit_viz/viz_building_blocks/word_similarity.py
word_similarity.py
import nlu
from nlu.discovery import Discoverer
from nlu.pipe.utils.resolution.storage_ref_utils import StorageRefUtils
from typing import List, Optional
import streamlit as st
import numpy as np
import pandas as pd
from nlu.pipe.viz.streamlit_viz.styles import _set_block_container_style
from nlu.pipe.viz.streamlit_viz.streamlit_viz_tracker import StreamlitVizTracker
from nlu.pipe.viz.streamlit_viz.viz_building_blocks.block_utils.entity_manifold_utils import EntityManifoldUtils


class EntityEmbeddingManifoldStreamlitBlock():
    """Streamlit building block that reduces entity (chunk) embeddings to
    1-D/2-D/3-D with manifold / matrix-decomposition algorithms and renders
    one plotly scatter plot per (pipe, algorithm, dimension) combination."""

    @staticmethod
    def viz_streamlit_entity_embed_manifold(
            pipe,  # nlu component_list
            default_texts: List[str] = ("Donald Trump likes to visit New York",
                                        "Angela Merkel likes to visit Berlin!",
                                        'Peter hates visiting Paris'),
            title: Optional[str] = "Lower dimensional Manifold visualization for Entity embeddings",
            sub_title: Optional[str] = "Apply any of the 10+ `Manifold` or `Matrix Decomposition` algorithms to reduce the dimensionality of `Entity Embeddings` to `1-D`, `2-D` and `3-D` ",
            default_algos_to_apply: List[str] = ("TSNE", "PCA"),
            target_dimensions: List[int] = (1, 2, 3),
            show_algo_select: bool = True,
            set_wide_layout_CSS: bool = True,
            num_cols: int = 3,
            model_select_position: str = 'side',  # side or main
            key: str = "NLU_streamlit",
            show_infos: bool = True,
            show_logo: bool = True,
            n_jobs: Optional[int] = 3,  # False
    ):
        """Render the entity-embedding manifold page.

        Reads N texts (one per line) from a text area, ensures every tracked
        NER + word-embedding pipe has a chunk embedder, reduces the resulting
        chunk embeddings with each selected algorithm, and draws the plots in
        a ``num_cols``-wide streamlit grid.

        :param pipe: NLU pipeline; registered in the global
            ``StreamlitVizTracker`` pipe lists so repeated reruns reuse it.
        :param key: widget key shared by the streamlit widgets created here.
        :param n_jobs: forwarded to the sklearn manifold algorithm factory.
        """
        from nlu.pipe.viz.streamlit_viz.streamlit_utils_OS import StreamlitUtilsOS
        StreamlitVizTracker.footer_displayed = False
        # plotly / sklearn are optional; show an install hint instead of crashing.
        try:
            import plotly.express as px
            from sklearn.metrics.pairwise import distance_metrics
        except:
            st.error(
                "You need the sklearn and plotly package in your Python environment installed for similarity visualizations. Run <pip install sklearn plotly>")
        if show_logo:
            StreamlitVizTracker.show_logo()
        if set_wide_layout_CSS:
            _set_block_container_style()
        if title:
            st.header(title)
        if sub_title:
            st.subheader(sub_title)
        # if show_logo :VizUtilsStreamlitOS.show_logo()
        # VizUtilsStreamlitOS.loaded_word_embeding_pipes = []
        if isinstance(default_texts, list):
            default_texts = '\n'.join(default_texts)
        # One input text per line of the text area.
        data = st.text_area('Enter N texts, seperated by new lines to visualize Sentence Embeddings for ',
                            default_texts).split('\n')
        output_level = 'chunk'
        ner_emebed_pipe_algo_selection = []
        loaded_ner_embed_nlu_refs = []
        algos = ['TSNE']  # fallback algo list when the selector is hidden
        # A component_list should have a NER and a Word Embedding
        if pipe not in StreamlitVizTracker.loaded_ner_word_embeding_pipes:
            StreamlitVizTracker.loaded_ner_word_embeding_pipes.append(pipe)
        if pipe not in StreamlitVizTracker.loaded_word_embeding_pipes:
            StreamlitVizTracker.loaded_word_embeding_pipes.append(pipe)
        if show_algo_select:
            # Manifold Selection
            exp = st.expander("Select additional manifold and dimension reduction techniques to apply")
            algos = exp.multiselect(
                "Reduce embedding dimensionality to something visualizable",
                options=("TSNE", "ISOMAP", 'LLE', 'Spectral Embedding', 'MDS', 'PCA', 'SVD aka LSA',
                         'DictionaryLearning', 'FactorAnalysis', 'FastICA', 'KernelPCA',
                         'LatentDirichletAllocation'),
                default=default_algos_to_apply, )
            # Offerable NER models; embedders / sentence-level models are filtered out.
            ner_emb_components_usable = [e for e in Discoverer.get_components('ner', True, include_aliases=True)
                                         if 'embed' not in e and 'sentence' not in e]
            # Find nlu_ref of currenlty loaded component_list
            for p in StreamlitVizTracker.loaded_ner_word_embeding_pipes:
                loaded_ner_embed_nlu_refs.append(p.nlu_ref)
            # NER Selection
            if model_select_position == 'side':
                ner_emebed_pipe_algo_selection = st.sidebar.multiselect(
                    "Pick additional NER Models for the Dimension Reduction",
                    options=ner_emb_components_usable,
                    default=loaded_ner_embed_nlu_refs, key=key)
            else:
                ner_emebed_pipe_algo_selection = exp.multiselect(
                    "Pick additional NER Models for the Dimension Reduction",
                    options=ner_emb_components_usable,
                    default=loaded_ner_embed_nlu_refs, key=key)
        # Load each newly selected NER model exactly once and register it.
        for ner_nlu_ref in ner_emebed_pipe_algo_selection:
            load = True
            for ner_p in StreamlitVizTracker.loaded_ner_word_embeding_pipes:
                if ner_p.nlu_ref == ner_nlu_ref:
                    load = False
                    break
            if not load:
                continue
            p = nlu.load(ner_nlu_ref)
            if p not in StreamlitVizTracker.loaded_ner_word_embeding_pipes:
                StreamlitVizTracker.loaded_ner_word_embeding_pipes.append(p)
            if p not in StreamlitVizTracker.loaded_word_embeding_pipes:
                StreamlitVizTracker.loaded_word_embeding_pipes.append(p)
        col_index = 0
        cols = st.columns(num_cols)

        def are_cols_full():
            # True once the current row of plot columns is used up.
            return col_index == num_cols

        for p in StreamlitVizTracker.loaded_ner_word_embeding_pipes:
            p = EntityManifoldUtils.insert_chunk_embedder_to_pipe_if_missing(p)
            predictions = p.predict(data, metadata=True, output_level=output_level, multithread=False).dropna()
            entity_cols = EntityManifoldUtils.get_ner_cols(predictions)
            chunk_embed_col = EntityManifoldUtils.find_chunk_embed_col(predictions)
            # TODO get cols for non default NER? or multi ner setups?
            # features = predictions[EntityManifoldUtils.get_ner_cols(predictions)]
            # e_col = StreamlitUtilsOS.find_embed_col(predictions)
            e_com = StreamlitUtilsOS.find_embed_component(p)
            e_com_storage_ref = StorageRefUtils.extract_storage_ref(e_com)
            emb = predictions[chunk_embed_col]
            mat = np.array([x for x in emb])
            # for ner_emb_p in ps:
            for algo in algos:
                # Only pos values for latent Dirchlet
                if algo == 'LatentDirichletAllocation':
                    mat = np.square(mat)
                if len(mat.shape) > 2:
                    mat = mat.reshape(len(emb), mat.shape[-1])
                hover_data = entity_cols + ['text']
                # calc reduced dimensionality with every algo
                feature_to_color_by = entity_cols[0]
                if 1 in target_dimensions:
                    low_dim_data = StreamlitUtilsOS.get_manifold_algo(algo, 1, n_jobs).fit_transform(mat)
                    x = low_dim_data[:, 0]
                    y = np.zeros(low_dim_data[:, 0].shape)  # 1-D result drawn along a flat line
                    # predictions['text'] = original_text
                    tsne_df = pd.DataFrame({**{'x': x, 'y': y},
                                            **{k: predictions[k] for k in entity_cols},
                                            **{'text': predictions[entity_cols[-1]]}
                                            })
                    fig = px.scatter(tsne_df, x="x", y="y", color=feature_to_color_by, hover_data=hover_data)
                    subh = f"""Word-Embeddings =`{e_com_storage_ref}`, NER-Model =`{p.nlu_ref}`, Manifold-Algo =`{algo}` for `D=1`"""
                    cols[col_index].markdown(subh)
                    cols[col_index].write(fig, key=key)
                    col_index += 1
                    if are_cols_full():
                        cols = st.columns(num_cols)
                        col_index = 0
                if 2 in target_dimensions:
                    low_dim_data = StreamlitUtilsOS.get_manifold_algo(algo, 2, n_jobs).fit_transform(mat)
                    x = low_dim_data[:, 0]
                    y = low_dim_data[:, 1]
                    tsne_df = pd.DataFrame({**{'x': x, 'y': y},
                                            **{k: predictions[k] for k in entity_cols},
                                            **{'text': predictions[entity_cols[-1]]}
                                            })
                    fig = px.scatter(tsne_df, x="x", y="y", color=feature_to_color_by, hover_data=hover_data)
                    subh = f"""Word-Embeddings =`{e_com_storage_ref}`, NER-Model =`{p.nlu_ref}`, Manifold-Algo =`{algo}` for `D=2`"""
                    cols[col_index].markdown(subh)
                    cols[col_index].write(fig, key=key)
                    col_index += 1
                    if are_cols_full():
                        cols = st.columns(num_cols)
                        col_index = 0
                if 3 in target_dimensions:
                    low_dim_data = StreamlitUtilsOS.get_manifold_algo(algo, 3, n_jobs).fit_transform(mat)
                    x = low_dim_data[:, 0]
                    y = low_dim_data[:, 1]
                    z = low_dim_data[:, 2]
                    tsne_df = pd.DataFrame({**{'x': x, 'y': y, 'z': z},
                                            **{k: predictions[k] for k in entity_cols},
                                            **{'text': predictions[entity_cols[-1]]}
                                            })
                    fig = px.scatter_3d(tsne_df, x="x", y="y", z='z', color=feature_to_color_by,
                                        hover_data=hover_data)
                    subh = f"""Word-Embeddings =`{e_com_storage_ref}`, NER-Model =`{p.nlu_ref}`, Manifold-Algo =`{algo}` for `D=3`"""
                    cols[col_index].markdown(subh)
                    cols[col_index].write(fig, key=key)
                    col_index += 1
                    if are_cols_full():
                        cols = st.columns(num_cols)
                        col_index = 0
        # Todo fancy embed infos etc
        # if display_embed_information: display_embed_vetor_information(e_com,mat)
        # if display_embed_information:
        # exp = st.expander("Embedding vector information")
        # exp.write(embed_vector_info)
        if show_infos:
            # VizUtilsStreamlitOS.display_infos()
            StreamlitVizTracker.display_model_info(pipe.nlu_ref, pipes=[pipe])
            StreamlitVizTracker.display_footer()
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/viz/streamlit_viz/viz_building_blocks/entity_embedding_manifold.py
entity_embedding_manifold.py
from nlu.discovery import Discoverer
from typing import List, Optional, Dict
import streamlit as st
from nlu.pipe.viz.streamlit_viz.streamlit_utils_OS import StreamlitUtilsOS
from nlu.pipe.viz.streamlit_viz.gen_streamlit_code import get_code_for_viz
from nlu.pipe.viz.streamlit_viz.styles import _set_block_container_style
from nlu.pipe.viz.streamlit_viz.streamlit_viz_tracker import StreamlitVizTracker


class NERStreamlitBlock():
    """Streamlit building block that highlights Named Entities in a text and
    lets the user pick the NER model, the labels to show and (WIP) colors."""

    @staticmethod
    def visualize_ner(
            pipe,  # Nlu component_list
            text: str,
            ner_tags: Optional[List[str]] = None,
            show_label_select: bool = True,
            show_table: bool = False,
            title: Optional[str] = "Named Entities",
            sub_title: Optional[str] = "Recognize various `Named Entities (NER)` in text entered and filter them. You can select from over `100 languages` in the dropdown.",
            colors: Dict[str, str] = {},
            show_color_selector: bool = False,
            set_wide_layout_CSS: bool = True,
            generate_code_sample: bool = False,
            key="NLU_streamlit",
            model_select_position: str = 'side',  # side or main
            show_model_select: bool = True,
            show_text_input: bool = True,
            show_infos: bool = True,
            show_logo: bool = True,
    ):
        """Render the NER visualization page.

        :param pipe: NLU pipeline; swapped out for a freshly loaded one when
            the user selects a different model in the dropdown.
        :param ner_tags: labels offered in the filter; deduced from the pipe
            when ``None``.
        :param colors: entity-label -> color mapping forwarded to ``pipe.viz``;
            mutated in place by the (WIP) color selector branch.
        :param key: widget key shared by the streamlit widgets created here.
        """
        StreamlitVizTracker.footer_displayed = False
        if set_wide_layout_CSS:
            _set_block_container_style()
        if show_logo:
            StreamlitVizTracker.show_logo()
        if show_model_select:
            model_selection = Discoverer.get_components('ner', include_pipes=True)
            model_selection.sort()
            # Pre-select the currently loaded model (first token of the nlu_ref).
            if model_select_position == 'side':
                ner_model_2_viz = st.sidebar.selectbox("Select a NER model_anno_obj", model_selection,
                                                       index=model_selection.index(pipe.nlu_ref.split(' ')[0]))
            else:
                ner_model_2_viz = st.selectbox("Select a NER model_anno_obj", model_selection,
                                               index=model_selection.index(pipe.nlu_ref.split(' ')[0]))
            pipe = pipe if pipe.nlu_ref == ner_model_2_viz else StreamlitUtilsOS.get_pipe(ner_model_2_viz)
        if title:
            st.header(title)
        if show_text_input:
            text = st.text_area("Enter text you want to visualize NER classes for below", text, key=key)
        if sub_title:
            st.subheader(sub_title)
        if generate_code_sample:
            st.code(get_code_for_viz('NER', StreamlitUtilsOS.extract_name(pipe), text))
        if ner_tags is None:
            ner_tags = StreamlitUtilsOS.get_NER_tags_in_pipe(pipe)
        if not show_color_selector:
            if show_label_select:
                exp = st.expander("Select entity labels to highlight")
                label_select = exp.multiselect(
                    "These labels are predicted by the NER model_anno_obj. Select which ones you want to display",
                    options=ner_tags, default=list(ner_tags))
            else:
                label_select = ner_tags
            pipe.viz(text, write_to_streamlit=True, viz_type='ner', labels_to_viz=label_select,
                     viz_colors=colors, streamlit_key=key)
        else:
            # TODO WIP color select
            cols = st.columns(3)
            # Fixed: st.beta_expander was removed from streamlit; use the
            # stable st.expander API used elsewhere in this function.
            exp = cols[0].expander("Select entity labels to display")
            # NOTE(review): the first picker is immediately shadowed by the
            # column-level picker below; both still render (kept for parity
            # with the original WIP behavior).
            color = st.color_picker('Pick A Color', '#00f900', key=key)
            color = cols[2].color_picker('Pick A Color for a specific entity label', '#00f900', key=key)
            tag2color = cols[1].selectbox('Pick a ner tag to color', ner_tags, key=key)
            colors[tag2color] = color
        if show_table:
            st.write(pipe.predict(text, output_level='chunk'), key=key)
        if show_infos:
            # VizUtilsStreamlitOS.display_infos()
            StreamlitVizTracker.display_model_info(pipe.nlu_ref, pipes=[pipe])
            StreamlitVizTracker.display_footer()
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/viz/streamlit_viz/viz_building_blocks/ner.py
ner.py
import nlu
from nlu.discovery import Discoverer
from nlu.pipe.utils.resolution.storage_ref_utils import StorageRefUtils
from typing import List, Optional
import streamlit as st
import numpy as np
import pandas as pd
from nlu.pipe.viz.streamlit_viz.styles import _set_block_container_style
from nlu.pipe.viz.streamlit_viz.streamlit_viz_tracker import StreamlitVizTracker


class SentenceEmbeddingManifoldStreamlitBlock():
    """Streamlit building block that reduces sentence embeddings to 1-D/2-D/3-D
    with manifold / matrix-decomposition algorithms and colors the points by
    the predictions of additional document classifiers."""

    @staticmethod
    def viz_streamlit_sentence_embed_manifold(
            pipe,  # nlu component_list
            default_texts: List[str] = (
                    "Donald Trump likes to party!", "Angela Merkel likes to party!", 'Peter HATES TO PARTTY!!!! :('),
            title: Optional[str] = "Lower dimensional Manifold visualization for Sentence embeddings",
            sub_title: Optional[
                str] = "Apply any of the 11 `Manifold` or `Matrix Decomposition` algorithms to reduce the dimensionality of `Sentence Embeddings` to `1-D`, `2-D` and `3-D` ",
            write_raw_pandas: bool = False,
            default_algos_to_apply: List[str] = ("TSNE", "PCA"),
            # ,'LLE','Spectral Embedding','MDS','ISOMAP','SVD aka LSA','DictionaryLearning','FactorAnalysis','FastICA','KernelPCA',), # LatentDirichletAllocation 'NMF',
            target_dimensions: List[int] = (1, 2, 3),
            show_algo_select: bool = True,
            show_embed_select: bool = True,
            show_color_select: bool = True,
            MAX_DISPLAY_NUM: int = 200000,
            display_embed_information: bool = True,
            set_wide_layout_CSS: bool = True,
            num_cols: int = 3,
            model_select_position: str = 'side',  # side or main
            key: str = "NLU_streamlit",
            additional_classifiers_for_coloring: List[str] = ['sentiment.imdb'],
            generate_code_sample: bool = False,
            show_infos: bool = True,
            show_logo: bool = True,
            n_jobs: Optional[int] = 3,  # False
    ):
        """Render the sentence-embedding manifold page.

        Tokenizes the entered texts, runs every loaded document classifier
        (for point coloring) and every loaded sentence-embedding pipe, reduces
        the embeddings with each selected algorithm and draws 1-D/2-D/3-D
        plotly scatter plots in a ``num_cols``-wide grid.

        Note: ``write_raw_pandas``, ``show_embed_select``, ``MAX_DISPLAY_NUM``,
        ``display_embed_information`` and ``generate_code_sample`` are accepted
        for interface parity with the sibling blocks but currently unused.

        :param additional_classifiers_for_coloring: NLU refs of classifiers
            whose prediction columns can color the points. Never mutated.
        :param n_jobs: forwarded to the sklearn manifold algorithm factory.
        """
        from nlu.pipe.viz.streamlit_viz.streamlit_utils_OS import StreamlitUtilsOS
        StreamlitVizTracker.footer_displayed = False
        # plotly / sklearn are optional; show an install hint instead of crashing.
        try:
            import plotly.express as px
            from sklearn.metrics.pairwise import distance_metrics
        except ImportError:
            st.error(
                "You need the sklearn and plotly package in your Python environment installed for similarity visualizations. Run <pip install sklearn plotly>")
        # if len(default_texts) > MAX_DISPLAY_NUM : default_texts = default_texts[:MAX_DISPLAY_NUM]
        if show_logo:
            StreamlitVizTracker.show_logo()
        if set_wide_layout_CSS:
            _set_block_container_style()
        if title:
            st.header(title)
        if sub_title:
            st.subheader(sub_title)
        data = st.text_area('Enter N texts, seperated by new lines to visualize Sentence Embeddings for ',
                            default_texts)
        # detect_sentence = False # TODO ITNEGRATE PARAM
        output_level = 'document'  # if not detect_sentence else 'sentence'
        classifier_cols = []
        # Tokenize once to get one clean document per input line; drop empties and NaNs.
        original_text = nlu.load('tokenize').predict(data.split("\n"), output_level=output_level)[output_level].values
        original_text = original_text[original_text != '']
        original_text = original_text[~pd.isna(original_text)]
        text_col = output_level
        embed_algos_to_load = []
        class_algos_to_load = []
        new_embed_pipes = []
        new_class_pipes = []
        e_coms = StreamlitUtilsOS.find_all_embed_components(pipe)
        if show_algo_select:
            exp = st.expander("Select additional manifold and dimension reduction techniques to apply")
            algos = exp.multiselect(
                "Reduce embedding dimensionality to something visualizable",
                options=("TSNE", "ISOMAP", 'LLE', 'Spectral Embedding', 'MDS', 'PCA', 'SVD aka LSA',
                         'DictionaryLearning', 'FactorAnalysis', 'FastICA', 'KernelPCA',
                         'LatentDirichletAllocation'),
                default=default_algos_to_apply, )
            emb_components_usable = [e for e in Discoverer.get_components('embed', True, include_aliases=True)
                                     if 'chunk' not in e and 'sentence' in e]
            # Todo, multi-classifiers excluded
            classifier_components_usable = [e for e in Discoverer.get_components('classify', True, include_aliases=True)
                                            if 'xx' not in e and 'toxic' not in e and 'e2e' not in e]
            # Storage Ref extraction
            loaded_embed_nlu_refs, loaded_storage_refs = StreamlitUtilsOS.extract_all_sentence_storage_refs_or_nlu_refs(
                e_coms)
            # Fixed: copy the default list instead of aliasing it -- the appends
            # below previously mutated the mutable default argument across calls.
            loaded_classifier_nlu_refs = list(additional_classifiers_for_coloring)  # + all classifier NLU_refs?
            # Get loaded Embed NLU Refs
            for embed_pipe in StreamlitVizTracker.loaded_sentence_embeding_pipes:
                if embed_pipe != pipe:
                    loaded_embed_nlu_refs.append(embed_pipe.nlu_ref)
            loaded_embed_nlu_refs = list(set(loaded_embed_nlu_refs))
            # Get loaded Classifier NLU Refs
            for embed_pipe in StreamlitVizTracker.loaded_document_classifier_pipes:
                if embed_pipe != pipe:
                    loaded_classifier_nlu_refs.append(embed_pipe.nlu_ref)
            loaded_classifier_nlu_refs = list(set(loaded_classifier_nlu_refs))
            # fix default selector
            for l in loaded_embed_nlu_refs:
                if l not in emb_components_usable:
                    emb_components_usable.append(l)
            # fix default selector
            for l in loaded_classifier_nlu_refs:
                if l not in classifier_components_usable:
                    classifier_components_usable.append(l)
            emb_components_usable.sort()
            loaded_embed_nlu_refs.sort()
            classifier_components_usable.sort()
            loaded_classifier_nlu_refs.sort()
            if model_select_position == 'side':
                embed_algo_selection = st.sidebar.multiselect(
                    "Pick additional Sentence Embeddings for the Dimension Reduction",
                    options=emb_components_usable, default=loaded_embed_nlu_refs, key=key)
                # Only the most recently picked embedder is used.
                embed_algo_selection = [embed_algo_selection[-1]]
                exp = st.expander("Pick additional Classifiers")
                class_algo_selection = exp.multiselect("Pick additional Classifiers to load for coloring points",
                                                       options=classifier_components_usable,
                                                       default=loaded_classifier_nlu_refs, key=key)
                class_algo_selection = [class_algo_selection[-1]]
            else:
                exp = st.expander("Pick additional Sentence Embeddings")
                embed_algo_selection = exp.multiselect(
                    "Pick additional Sentence Embeddings for the Dimension Reduction",
                    options=emb_components_usable, default=loaded_embed_nlu_refs, key=key)
                embed_algo_selection = [embed_algo_selection[-1]]
                exp = st.expander("Pick additional Classifiers")
                class_algo_selection = exp.multiselect("Pick additional Classifiers to load for coloring points",
                                                       options=classifier_components_usable,
                                                       default=loaded_classifier_nlu_refs, key=key)
                class_algo_selection = [class_algo_selection[-1]]
            embed_algos_to_load = list(set(embed_algo_selection) - set(loaded_embed_nlu_refs))
            class_algos_to_load = list(set(class_algo_selection) - set(loaded_classifier_nlu_refs))
        for embedder in embed_algos_to_load:
            new_embed_pipes.append(nlu.load(embedder))
        for classifier in class_algos_to_load:
            new_class_pipes.append(nlu.load(classifier))
        StreamlitVizTracker.loaded_sentence_embeding_pipes += new_embed_pipes
        StreamlitVizTracker.loaded_document_classifier_pipes += new_class_pipes
        if pipe not in StreamlitVizTracker.loaded_sentence_embeding_pipes:
            StreamlitVizTracker.loaded_sentence_embeding_pipes.append(pipe)
        for nlu_ref in additional_classifiers_for_coloring:  # TODO REMVOVE< INTEGRATE INTO THE AUT LOAD THING REDUNDAND
            already_loaded = False
            for embed_pipe in StreamlitVizTracker.loaded_document_classifier_pipes:
                if embed_pipe.nlu_ref == nlu_ref:
                    already_loaded = True
            if not already_loaded:
                already_loaded = True
                StreamlitVizTracker.loaded_document_classifier_pipes.append(nlu.load(nlu_ref))
        col_index = 0
        cols = st.columns(num_cols)
        data = original_text.copy()
        # Get classifier predictions
        classifier_cols = []
        for class_pipe in StreamlitVizTracker.loaded_document_classifier_pipes:
            data = class_pipe.predict(data, output_level=output_level, multithread=False)
            classifier_cols += StreamlitUtilsOS.get_classifier_cols(class_pipe)
        data['text'] = original_text
        # drop embeds of classifiers because bad conversion
        for c in data.columns:
            if 'embedding' in c:
                data.drop(c, inplace=True, axis=1)
        data['text'] = original_text
        if show_color_select:
            if model_select_position == 'side':
                feature_to_color_by = st.sidebar.selectbox('Pick a feature to color points in manifold by ',
                                                           classifier_cols, 0)
            else:
                feature_to_color_by = st.selectbox('Feature to color plots by ', classifier_cols, 0)

        def are_cols_full():
            # True once the current row of plot columns is used up.
            return col_index == num_cols

        for embed_pipe in StreamlitVizTracker.loaded_sentence_embeding_pipes:
            predictions = embed_pipe.predict(data, output_level=output_level, multithread=False).dropna()
            e_col = StreamlitUtilsOS.find_embed_col(predictions)
            e_com = StreamlitUtilsOS.find_embed_component(embed_pipe)
            e_com_storage_ref = StorageRefUtils.extract_storage_ref(e_com)
            emb = predictions[e_col]
            mat = np.array([x for x in emb])
            for algo in algos:
                # Only pos values for latent Dirchlet
                if algo == 'LatentDirichletAllocation':
                    mat = np.square(mat)
                if len(mat.shape) > 2:
                    mat = mat.reshape(len(emb), mat.shape[-1])
                hover_data = classifier_cols + ['text']
                # calc reduced dimensionality with every algo
                if 1 in target_dimensions:
                    low_dim_data = StreamlitUtilsOS.get_manifold_algo(algo, 1, n_jobs).fit_transform(mat)
                    x = low_dim_data[:, 0]
                    y = np.zeros(low_dim_data[:, 0].shape)  # 1-D result drawn along a flat line
                    predictions['text'] = original_text
                    tsne_df = pd.DataFrame({**{'x': x, 'y': y},
                                            **{k: predictions[k] for k in classifier_cols},
                                            **{'text': original_text}
                                            })
                    fig = px.scatter(tsne_df, x="x", y="y", color=feature_to_color_by, hover_data=hover_data)
                    subh = f"""Sentence-Embeddings =`{e_com_storage_ref}`, Manifold-Algo =`{algo}` for `D=1`"""
                    cols[col_index].markdown(subh)
                    cols[col_index].write(fig, key=key)
                    col_index += 1
                    if are_cols_full():
                        cols = st.columns(num_cols)
                        col_index = 0
                if 2 in target_dimensions:
                    low_dim_data = StreamlitUtilsOS.get_manifold_algo(algo, 2, n_jobs).fit_transform(mat)
                    x = low_dim_data[:, 0]
                    y = low_dim_data[:, 1]
                    tsne_df = pd.DataFrame({**{'x': x, 'y': y},
                                            **{k: predictions[k] for k in classifier_cols},
                                            **{'text': original_text}
                                            })
                    fig = px.scatter(tsne_df, x="x", y="y", color=feature_to_color_by, hover_data=hover_data)
                    subh = f"""Sentence-Embeddings =`{e_com_storage_ref}`, Manifold-Algo =`{algo}` for `D=2`"""
                    cols[col_index].markdown(subh)
                    cols[col_index].write(fig, key=key)
                    col_index += 1
                    if are_cols_full():
                        cols = st.columns(num_cols)
                        col_index = 0
                if 3 in target_dimensions:
                    low_dim_data = StreamlitUtilsOS.get_manifold_algo(algo, 3, n_jobs).fit_transform(mat)
                    x = low_dim_data[:, 0]
                    y = low_dim_data[:, 1]
                    z = low_dim_data[:, 2]
                    tsne_df = pd.DataFrame({**{'x': x, 'y': y, 'z': z},
                                            **{k: predictions[k] for k in classifier_cols},
                                            **{'text': original_text}
                                            })
                    fig = px.scatter_3d(tsne_df, x="x", y="y", z='z', color=feature_to_color_by,
                                        hover_data=hover_data)
                    subh = f"""Sentence-Embeddings =`{e_com_storage_ref}`, Manifold-Algo =`{algo}` for `D=3`"""
                    cols[col_index].markdown(subh)
                    cols[col_index].write(fig, key=key)
                    col_index += 1
                    if are_cols_full():
                        cols = st.columns(num_cols)
                        col_index = 0
        # Todo fancy embed infos etc
        # if display_embed_information: display_embed_vetor_information(e_com,mat)
        # if display_embed_information:
        # exp = st.expander("Embedding vector information")
        # exp.write(embed_vector_info)
        if show_infos:
            # VizUtilsStreamlitOS.display_infos()
            StreamlitVizTracker.display_model_info(pipe.nlu_ref, pipes=[pipe])
            StreamlitVizTracker.display_footer()
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/viz/streamlit_viz/viz_building_blocks/sentence_embedding_manifold.py
sentence_embedding_manifold.py
import nlu
from nlu.discovery import Discoverer
from nlu.pipe.utils.resolution.storage_ref_utils import StorageRefUtils
from typing import List, Optional
import streamlit as st
import numpy as np
import pandas as pd
from nlu.pipe.viz.streamlit_viz.styles import _set_block_container_style
from nlu.pipe.viz.streamlit_viz.streamlit_viz_tracker import StreamlitVizTracker


class WordEmbeddingManifoldStreamlitBlock():
    """Streamlit building block that reduces token-level word embeddings to
    1-D/2-D/3-D with manifold / matrix-decomposition algorithms and colors the
    points by token-level features (pos / sentiment)."""

    @staticmethod
    def viz_streamlit_word_embed_manifold(
            pipe,  # nlu component_list
            default_texts: List[str] = ("Donald Trump likes to party!", "Angela Merkel likes to party!",
                                        'Peter HATES TO PARTTY!!!! :('),
            title: Optional[str] = "Lower dimensional Manifold visualization for word embeddings",
            sub_title: Optional[str] = "Apply any of the 11 `Manifold` or `Matrix Decomposition` algorithms to reduce the dimensionality of `Word Embeddings` to `1-D`, `2-D` and `3-D` ",
            write_raw_pandas: bool = False,
            default_algos_to_apply: List[str] = ("TSNE", "PCA"),  # ,'LLE','Spectral Embedding','MDS','ISOMAP','SVD aka LSA','DictionaryLearning','FactorAnalysis','FastICA','KernelPCA',), # LatentDirichletAllocation 'NMF',
            target_dimensions: List[int] = (1, 2, 3),
            show_algo_select: bool = True,
            show_embed_select: bool = True,
            show_color_select: bool = True,
            MAX_DISPLAY_NUM: int = 200000,
            display_embed_information: bool = True,
            set_wide_layout_CSS: bool = True,
            num_cols: int = 3,
            model_select_position: str = 'side',  # side or main
            key: str = "NLU_streamlit",
            additional_classifiers_for_coloring: List[str] = ['pos', 'sentiment.imdb'],
            generate_code_sample: bool = False,
            show_infos: bool = True,
            show_logo: bool = True,
            n_jobs: Optional[int] = 3,  # False
    ):
        """Render the word-embedding manifold page.

        Tokenizes the entered texts, merges a POS pipe plus any loaded document
        classifiers into each word-embedding pipe, reduces the token embeddings
        with every selected algorithm and draws 1-D/2-D/3-D plotly scatter
        plots in a ``num_cols``-wide grid.

        NOTE(review): this file was recovered from a whitespace-collapsed dump;
        the exact nesting of a few statements (see the classifier-prediction
        loop) mirrors the sibling sentence-embedding block and should be
        confirmed against the original source.

        :param n_jobs: forwarded to the sklearn manifold algorithm factory.
        """
        from nlu.pipe.viz.streamlit_viz.streamlit_utils_OS import StreamlitUtilsOS
        StreamlitVizTracker.footer_displayed = False
        # plotly / sklearn are optional; show an install hint instead of crashing.
        try:
            import plotly.express as px
            from sklearn.metrics.pairwise import distance_metrics
        except:
            st.error("You need the sklearn and plotly package in your Python environment installed for similarity visualizations. Run <pip install sklearn plotly>")
        if len(default_texts) > MAX_DISPLAY_NUM:
            default_texts = default_texts[:MAX_DISPLAY_NUM]
        if show_logo:
            StreamlitVizTracker.show_logo()
        if set_wide_layout_CSS:
            _set_block_container_style()
        if title:
            st.header(title)
        if sub_title:
            st.subheader(sub_title)
        # if show_logo :VizUtilsStreamlitOS.show_logo()
        # VizUtilsStreamlitOS.loaded_word_embeding_pipes = []
        data = st.text_area('Enter N texts, seperated by new lines to visualize Word Embeddings for ',
                            '\n'.join(default_texts))
        if len(data) > MAX_DISPLAY_NUM:
            data = data[:MAX_DISPLAY_NUM]
        # Tokenize once to get one clean document per input line.
        original_text = nlu.load('tokenize').predict(data.split("\n"), output_level='document')['document'].values
        if show_color_select:
            if model_select_position == 'side':
                feature_to_color_by = st.sidebar.selectbox('Pick a feature to color points in manifold by ',
                                                           ['pos', 'sentiment', ], 0)
            else:
                feature_to_color_by = st.selectbox('Feature to color plots by ', ['pos', 'sentiment', ], 0)
        text_col = 'token'
        embed_algos_to_load = []
        new_embed_pipes = []
        e_coms = StreamlitUtilsOS.find_all_embed_components(pipe)
        if show_algo_select:
            exp = st.expander("Select additional manifold and dimension reduction techniques to apply")
            algos = exp.multiselect(
                "Reduce embedding dimensionality to something visualizable",
                options=("TSNE", "ISOMAP", 'LLE', 'Spectral Embedding', 'MDS', 'PCA', 'SVD aka LSA',
                         'DictionaryLearning', 'FactorAnalysis', 'FastICA', 'KernelPCA',),
                default=default_algos_to_apply, )
            emb_components_usable = [e for e in Discoverer.get_components('embed', True, include_aliases=True)
                                     if 'chunk' not in e and 'sentence' not in e]
            loaded_embed_nlu_refs = []
            loaded_classifier_nlu_refs = []
            loaded_storage_refs = []
            # Normalize each loaded embedder to a selectable nlu_ref or fall
            # back to its storage ref.
            for c in e_coms:
                r = c.nlu_ref
                if 'en.' not in r and 'embed.' not in r and 'ner' not in r:
                    loaded_embed_nlu_refs.append('en.embed.' + r)
                elif 'en.' in r and 'embed.' not in r and 'ner' not in r:
                    r = r.split('en.')[0]
                    loaded_embed_nlu_refs.append('en.embed.' + r)
                else:
                    loaded_embed_nlu_refs.append(StorageRefUtils.extract_storage_ref(c))
                loaded_storage_refs.append(StorageRefUtils.extract_storage_ref(c))
            for p in StreamlitVizTracker.loaded_word_embeding_pipes:
                if p != pipe:
                    loaded_embed_nlu_refs.append(p.nlu_ref)
            loaded_embed_nlu_refs = list(set(loaded_embed_nlu_refs))
            for l in loaded_embed_nlu_refs:
                if l not in emb_components_usable:
                    emb_components_usable.append(l)
            emb_components_usable.sort()
            loaded_embed_nlu_refs.sort()
            if model_select_position == 'side':
                embed_algo_selection = st.sidebar.multiselect(
                    "Pick additional Word Embeddings for the Dimension Reduction",
                    options=emb_components_usable, default=loaded_embed_nlu_refs, key=key)
                # Only the most recently picked embedder is used.
                embed_algo_selection = [embed_algo_selection[-1]]
            else:
                exp = st.expander("Pick additional Word Embeddings")
                embed_algo_selection = exp.multiselect(
                    "Pick additional Word Embeddings for the Dimension Reduction",
                    options=emb_components_usable, default=loaded_embed_nlu_refs, key=key)
                embed_algo_selection = [embed_algo_selection[-1]]
            embed_algos_to_load = list(set(embed_algo_selection) - set(loaded_embed_nlu_refs))
        for embedder in embed_algos_to_load:
            new_embed_pipes.append(nlu.load(embedder))  # + f' {" ".join(additional_classifiers_for_coloring)}'))
        StreamlitVizTracker.loaded_word_embeding_pipes += new_embed_pipes
        if pipe not in StreamlitVizTracker.loaded_word_embeding_pipes:
            StreamlitVizTracker.loaded_word_embeding_pipes.append(pipe)
        for nlu_ref in additional_classifiers_for_coloring:
            already_loaded = False
            if 'pos' in nlu_ref:
                continue
                # for p in VizUtilsStreamlitOS.loaded_document_classifier_pipes:
                #     if p.nlu_ref == nlu_ref : already_loaded = True
                # if not already_loaded : VizUtilsStreamlitOS.loaded_token_level_classifiers.append(nlu.load(nlu_ref))
            else:
                for p in StreamlitVizTracker.loaded_document_classifier_pipes:
                    if p.nlu_ref == nlu_ref:
                        already_loaded = True
                if not already_loaded:
                    already_loaded = True
                    StreamlitVizTracker.loaded_document_classifier_pipes.append(nlu.load(nlu_ref))
        col_index = 0
        cols = st.columns(num_cols)

        def are_cols_full():
            # True once the current row of plot columns is used up.
            return col_index == num_cols

        # not all pipes have sentiment/pos etc.. models for hueing loaded....
        token_feature_pipe = StreamlitUtilsOS.get_pipe('pos')
        ## Lets FIRST predict with the classifiers/Token level feature generators and THEN apply embed component_list
        data = original_text.copy()
        classifier_cols = []
        for class_p in StreamlitVizTracker.loaded_document_classifier_pipes:
            data = class_p.predict(data, output_level='document', multithread=False)  # .dropna()
            classifier_cols.append(StreamlitUtilsOS.get_classifier_cols(class_p))
        data['text'] = original_text
        # drop embeds of classifiers because bad conversion
        for c in data.columns:
            if 'embedding' in c:
                data.drop(c, inplace=True, axis=1)
        # data['text'] # =data['document']
        data['text'] = original_text
        for c in data.columns:
            if 'sentence_embedding' in c:
                data.drop(c, inplace=True, axis=1)
        if 'document' in data.columns:
            data.drop('document', inplace=True, axis=1)
        if 'pos' in data.columns:
            data.drop('pos', inplace=True, axis=1)
        for p in StreamlitVizTracker.loaded_word_embeding_pipes:
            p = StreamlitUtilsOS.merge_token_classifiers_with_embed_pipe(p, token_feature_pipe)
            predictions = p.predict(data, output_level='token', multithread=False).dropna()
            e_col = StreamlitUtilsOS.find_embed_col(predictions)
            e_com = StreamlitUtilsOS.find_embed_component(p)
            e_com_storage_ref = StorageRefUtils.extract_storage_ref(e_com)
            emb = predictions[e_col]
            mat = np.array([x for x in emb])
            for algo in algos:
                # Only pos values for latent Dirchlet
                if algo == 'LatentDirichletAllocation':
                    mat = np.square(mat)
                if len(mat.shape) > 2:
                    mat = mat.reshape(len(emb), mat.shape[-1])
                hover_data = ['token', 'text', 'sentiment', 'pos']  # TODO DEDUCT
                # calc reduced dimensionality with every algo
                if 1 in target_dimensions:
                    low_dim_data = StreamlitUtilsOS.get_manifold_algo(algo, 1, n_jobs).fit_transform(mat)
                    x = low_dim_data[:, 0]
                    y = np.zeros(low_dim_data[:, 0].shape)  # 1-D result drawn along a flat line
                    tsne_df = pd.DataFrame({'x': x, 'y': y,
                                            'text': predictions[text_col],
                                            'pos': predictions.pos,
                                            'sentiment': predictions.sentiment,
                                            'token': predictions.token})
                    fig = px.scatter(tsne_df, x="x", y="y", color=feature_to_color_by, hover_data=hover_data)
                    subh = f"""Word-Embeddings =`{e_com_storage_ref}`, Manifold-Algo =`{algo}` for `D=1`"""
                    cols[col_index].markdown(subh)
                    cols[col_index].plotly_chart(fig, key=key, use_container_width=True)
                    col_index += 1
                    if are_cols_full():
                        cols = st.columns(num_cols)
                        col_index = 0
                if 2 in target_dimensions:
                    low_dim_data = StreamlitUtilsOS.get_manifold_algo(algo, 2, n_jobs).fit_transform(mat)
                    x = low_dim_data[:, 0]
                    y = low_dim_data[:, 1]
                    tsne_df = pd.DataFrame({'x': x, 'y': y,
                                            'text': predictions[text_col],
                                            'pos': predictions.pos,
                                            'sentiment': predictions.sentiment,
                                            'token': predictions.token})
                    fig = px.scatter(tsne_df, x="x", y="y", color=feature_to_color_by, hover_data=hover_data)
                    subh = f"""Word-Embeddings =`{e_com_storage_ref}`, Manifold-Algo =`{algo}` for `D=2`"""
                    cols[col_index].markdown(subh)
                    cols[col_index].plotly_chart(fig, key=key, use_container_width=True)
                    col_index += 1
                    if are_cols_full():
                        cols = st.columns(num_cols)
                        col_index = 0
                if 3 in target_dimensions:
                    low_dim_data = StreamlitUtilsOS.get_manifold_algo(algo, 3, n_jobs).fit_transform(mat)
                    x = low_dim_data[:, 0]
                    y = low_dim_data[:, 1]
                    z = low_dim_data[:, 2]
                    tsne_df = pd.DataFrame({'x': x, 'y': y, 'z': z,
                                            'text': predictions[text_col],
                                            'pos': predictions.pos,
                                            'sentiment': predictions.sentiment,
                                            'token': predictions.token
                                            })
                    fig = px.scatter_3d(tsne_df, x="x", y="y", z='z', color=feature_to_color_by,
                                        hover_data=hover_data)
                    subh = f"""Word-Embeddings =`{e_com_storage_ref}`, Manifold-Algo =`{algo}` for `D=3`"""
                    cols[col_index].markdown(subh)
                    cols[col_index].plotly_chart(fig, key=key, use_container_width=True)
                    col_index += 1
                    if are_cols_full():
                        cols = st.columns(num_cols)
                        col_index = 0
        # Todo fancy embed infos etc
        # if display_embed_information: display_embed_vetor_information(e_com,mat)
        # if display_embed_information:
        # exp = st.expander("Embedding vector information")
        # exp.write(embed_vector_info)
        if show_infos:
            # VizUtilsStreamlitOS.display_infos()
            StreamlitVizTracker.display_model_info(pipe.nlu_ref, pipes=[pipe])
            StreamlitVizTracker.display_footer()
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/viz/streamlit_viz/viz_building_blocks/word_embedding_manifold.py
word_embedding_manifold.py
import streamlit as st
from sparknlp.annotator import *
from nlu.universe.component_universes import jsl_id_to_empty_component
from nlu.universe.feature_node_ids import NLP_NODE_IDS, NLP_HC_NODE_IDS
from nlu.universe.logic_universes import AnnoTypes
from nlu.universe.universes import Licenses


class EntityManifoldUtils:
    """Helpers for the Streamlit entity-embedding-manifold visualization.

    Provides utilities to (1) ensure a NLU pipeline contains a chunk-embedding
    converter fed by a NER converter plus word embeddings, and (2) locate the
    entity prediction / embedding columns in the prediction dataframe.
    """
    # Open-source Spark NLP classifier-style annotator classes.
    # NOTE(review): attribute name has a typo ("classifers") — kept as-is since
    # external code may reference it by this exact name.
    classifers_OS = [ClassifierDLModel,
                     LanguageDetectorDL,
                     MultiClassifierDLModel,
                     NerDLModel,
                     NerCrfModel,
                     YakeKeywordExtraction,
                     PerceptronModel,
                     SentimentDLModel,
                     SentimentDetectorModel,
                     ViveknSentimentModel,
                     DependencyParserModel,
                     TypedDependencyParserModel,
                     T5Transformer,
                     MarianTransformer,
                     NerConverter]

    @staticmethod
    def insert_chunk_embedder_to_pipe_if_missing(pipe):
        """Scan component_list for chunk_embeddings. If missing, add new.
        Validate NER model_anno_obj is loaded.

        Returns the (possibly refit) pipeline. Raises ValueError when no NER
        component is present, since chunk embeddings require NER chunks.
        """
        # component_list.predict('Donald Trump and Angela Merkel love Berlin')
        classifier_cols = []  # NOTE(review): never used below — presumably leftover
        has_ner = False
        has_chunk_embeds = True  # NOTE(review): never read below — presumably leftover
        ner_component_names = ['named_entity_recognizer_dl', 'named_entity_recognizer_dl_healthcare']
        # First pass: detect a NER producer; bail out early if a chunk-embedding
        # converter is already in the pipe (nothing to insert).
        for c in pipe.components:
            if c.name == NLP_NODE_IDS.NER_DL or c.name == NLP_HC_NODE_IDS.MEDICAL_NER or c.type == AnnoTypes.TRANSFORMER_TOKEN_CLASSIFIER:
                has_ner = True
            if c.name == NLP_NODE_IDS.CHUNK_EMBEDDINGS_CONVERTER:
                return pipe
        if not has_ner:
            raise ValueError(
                "You Need to load a NER model_anno_obj or this visualization. Try nlu.load('ner').viz_streamlit_entity_embed_manifold(text)")
        ner_conveter_c, word_embed_c = None, None
        # Second pass: find the word-embedding component and the NER converter
        # (open-source or healthcare variant) that will feed the new chunker.
        for c in pipe.components:
            if c.type == AnnoTypes.TOKEN_EMBEDDING:
                word_embed_c = c
            if c.name == NLP_NODE_IDS.NER_CONVERTER:
                ner_conveter_c = c
            if c.name == NLP_HC_NODE_IDS.NER_CONVERTER_INTERNAL:
                ner_conveter_c = c

        # Build a fresh chunk-embedding converter and wire its input columns to
        # the NER-converter output + word-embedding output.
        # NOTE(review): if either ner_conveter_c or word_embed_c is still None
        # here, the attribute access below raises AttributeError — TODO confirm
        # callers guarantee both components exist.
        chunker = jsl_id_to_empty_component(NLP_NODE_IDS.CHUNK_EMBEDDINGS_CONVERTER)
        chunker.set_metadata(
            chunker.get_default_model(), 'chunker', 'chunker', 'xx', False, Licenses.open_source)
        # chunker = embeddings_chunker.EmbeddingsChunker(nlu_ref='chunk_embeddings')
        chunker.model.setInputCols(ner_conveter_c.spark_output_column_names + word_embed_c.spark_output_column_names)
        chunker.model.setOutputCol('chunk_embedding')
        chunker.spark_input_column_names = ner_conveter_c.spark_output_column_names + word_embed_c.spark_output_column_names
        chunker.spark_output_column_names = ['chunk_embedding']
        # chunker.inputs = ner_conveter_c.spark_output_column_names + word_embed_c.spark_output_column_names
        # chunker.out_types = ['chunk_embedding']
        pipe.components.append(chunker)
        # Refit so the newly appended chunker becomes part of the Spark pipeline.
        pipe.is_fitted = False
        pipe.fit()
        return pipe

    @staticmethod
    def find_chunk_embed_col(df):
        """Return the name of the chunk-embedding column in df.

        NOTE(review): the function immediately returns the hard-coded column
        name 'chunk_embedding_chunker'; everything after the first return is
        unreachable dead code (a heuristic column search). Presumably the
        hard-coded return was a quick fix — confirm before removing either part.
        """
        return 'chunk_embedding_chunker'
        for c in df.columns:
            ss = set(c.split('_'))
            if 'chunk' in ss:
                ss.remove('chunk')
            if 'embedding' in ss:
                ss.remove('embedding')
            if 'embeddings' in ss:
                ss.remove('embeddings')
            if len(ss) == 0:
                return c
        raise ValueError('Could not find chunk embed col')

    @staticmethod
    def get_ner_cols(df):
        """Find NER prediction, confidence and chunk columns in df.

        Returns [entity_class_col, entity_confidence_col, entity_chunk_col];
        entries may be None if a particular column was not found. Raises
        ValueError only when *none* of the three could be resolved.
        """
        entity_class_col, entity_confidence_col, entity_chunk_col = None, None, None
        for c in df.columns:
            if 'entities' in c and 'class' in c:
                entity_class_col = c
            if 'entities' in c and 'confidence' in c:
                entity_confidence_col = c
            # NOTE(review): `'confidence' not in c` is tested twice — one of the
            # two was presumably meant to be `'class' not in c`; kept as-is to
            # preserve behavior. TODO confirm intent.
            if 'entities' in c and 'confidence' not in c and 'confidence' not in c and 'origin' not in c:
                entity_chunk_col = c
        cols = [entity_class_col, entity_confidence_col, entity_chunk_col]
        if not any(cols):
            raise ValueError(f'Failure to resolve entities col for df cols = {df.columns}')
        return cols

    @staticmethod
    def find_entity_embed_col_pd(df, search_multi=False):
        """Find col that contains embed in pandas df.

        With search_multi=False, returns the first column containing
        'embed_entitiy' (sic — typo preserved, the produced columns use this
        spelling); with search_multi=True, returns the list of all columns
        containing 'embed'. NOTE(review): in single-search mode with no match
        this falls through and returns None implicitly — callers should check.
        """
        if not search_multi:
            for c in df.columns:
                if 'embed_entitiy' in c:
                    return c
        else:
            e_cols = []
            for c in df.columns:
                if 'embed' in c:
                    e_cols.append(c)
            return e_cols

    @staticmethod
    def find_embed_component(p):
        """Find first embed component_to_resolve in component_list.

        Returns the first component whose primary output type contains 'embed',
        or None (after emitting a Streamlit warning) when none exists.
        """
        for c in p.components:
            if 'embed' in c.out_types[0]:
                return c
        st.warning("No Embed model_anno_obj in component_list")
        return None
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/viz/streamlit_viz/viz_building_blocks/block_utils/entity_manifold_utils.py
entity_manifold_utils.py
import logging
from typing import List

from nlu.pipe.nlu_component import NluComponent
from nlu.pipe.utils.resolution.storage_ref_utils import StorageRefUtils
from nlu.universe.feature_node_ids import NLP_NODE_IDS, NLP_HC_NODE_IDS
from nlu.universe.feature_universes import NLP_FEATURES, OCR_FEATURES
from nlu.universe.logic_universes import AnnoTypes

logger = logging.getLogger('nlu')


class ComponentUtils:
    """Component and Column Level logic operations and utils"""

    @staticmethod
    def config_chunk_embed_converter(converter: NluComponent) -> NluComponent:
        '''For a Chunk converter being added to a pipeline, configure its
        input/output columns and rewrite the embedding input column to the
        `<col>@<storage_ref>` notation, so the resolver can later infer which
        component should feed this consumer.

        :param converter: chunk-embedding converter component to configure
        :return: the same converter, mutated in place
        '''
        storage_ref = StorageRefUtils.extract_storage_ref(converter)
        input_embed_col = ComponentUtils.extract_embed_col(converter)
        new_embed_col_with_AT_notation = input_embed_col + "@" + storage_ref
        # Swap the plain embed column for its @storage_ref-annotated variant in
        # both the logical inputs and the Spark-level input columns.
        converter.info.inputs.remove(input_embed_col)
        converter.info.inputs.append(new_embed_col_with_AT_notation)
        converter.info.spark_input_column_names.remove(input_embed_col)
        converter.info.spark_input_column_names.append(new_embed_col_with_AT_notation)
        converter.model.setInputCols(converter.info.inputs)
        return converter

    @staticmethod
    def clean_irrelevant_features(feature_list, remove_AT_notation=False, remove_text=True):
        '''Remove irrelevant features from a list of component features.
        Also optionally strips the @storage_ref notation from names, since it is
        irrelevant for ordering.

        Mutates feature_list in place (unless remove_AT_notation, which returns
        a fresh list).

        :param feature_list: list of feature names
        :param remove_AT_notation: strip "@..." suffixes if True. Used for sorting
        :param remove_text: also drop the 'text' feature if True
        :return: list with only relevant feature names
        '''
        # Remove irrelevant missing features for pretrained models.
        # Most of these should be provided externally by the user and cannot be resolved.
        if 'text' in feature_list and remove_text: feature_list.remove('text')
        if 'raw_text' in feature_list: feature_list.remove('raw_text')
        if 'raw_texts' in feature_list: feature_list.remove('raw_texts')
        if 'label' in feature_list: feature_list.remove('label')
        if 'sentiment_label' in feature_list: feature_list.remove('sentiment_label')
        if '%%%feature_elements%%%' in feature_list: feature_list.remove('%%%feature_elements%%%')
        if NLP_FEATURES.RAW_AUDIO in feature_list: feature_list.remove(NLP_FEATURES.RAW_AUDIO)
        if NLP_FEATURES.ANY in feature_list: feature_list.remove(NLP_FEATURES.ANY)
        if NLP_FEATURES.RAW_QUESTION in feature_list: feature_list.remove(NLP_FEATURES.RAW_QUESTION)
        if NLP_FEATURES.RAW_QUESTION_CONTEXT in feature_list: feature_list.remove(NLP_FEATURES.RAW_QUESTION_CONTEXT)
        if NLP_FEATURES.SPARK_NLP_FILE_PATH in feature_list: feature_list.remove(NLP_FEATURES.SPARK_NLP_FILE_PATH)
        if NLP_FEATURES.SPARK_NLP_IMAGE in feature_list: feature_list.remove(NLP_FEATURES.SPARK_NLP_IMAGE)
        if OCR_FEATURES.BINARY_IMG in feature_list: feature_list.remove(OCR_FEATURES.BINARY_IMG)
        # NOTE(review): duplicated BINARY_IMG check below — the second test is
        # always False after the first removal, so it is a harmless no-op.
        if OCR_FEATURES.BINARY_IMG in feature_list: feature_list.remove(OCR_FEATURES.BINARY_IMG)
        if OCR_FEATURES.FILE_PATH in feature_list: feature_list.remove(OCR_FEATURES.FILE_PATH)
        if OCR_FEATURES.BINARY_DOCX in feature_list: feature_list.remove(OCR_FEATURES.BINARY_DOCX)
        if OCR_FEATURES.BINARY_PDF in feature_list: feature_list.remove(OCR_FEATURES.BINARY_PDF)
        if remove_AT_notation:
            new_cs = []
            for c in feature_list: new_cs.append(c.split("@")[0])
            return new_cs
        return feature_list

    @staticmethod
    def component_has_embeddings_requirement(component: NluComponent):
        '''Check whether the input component depends on some embedding.

        :param component: the component to check
        :return: True if the component needs a specific embedding
                 (i.e. glove, bert, elmo etc.), otherwise False
        '''
        return component.is_storage_ref_consumer

    @staticmethod
    def extract_storage_ref_AT_notation_for_embeds(component: NluComponent, col='input'):
        '''Extract <col>_embed_col@storage_ref notation from a component that
        has a storage ref.

        :param component: component to extract notation from
        :param col: whether to extract for the 'input' or 'output' col
        :return: <embed_col>@<storage_ref>. NOTE(review): if no column contains
                 'embed', next() raises StopIteration — presumably callers only
                 invoke this on embedding components; confirm.
        '''
        if col == 'input':
            e_col = next(filter(lambda s: 'embed' in s, component.spark_input_column_names))
        elif col == 'output':
            e_col = next(filter(lambda s: 'embed' in s, component.spark_output_column_names))
        stor_ref = StorageRefUtils.extract_storage_ref(component)
        return e_col + '@' + stor_ref

    @staticmethod
    def is_embedding_provider(component: NluComponent) -> bool:
        """Check if a NLU Component returns/generates embeddings """
        return component.is_storage_ref_producer

    @staticmethod
    def is_embedding_consumer(component: NluComponent) -> bool:
        """Check if a NLU Component consumes embeddings """
        return component.is_storage_ref_consumer

    @staticmethod
    def is_embedding_converter(component: NluComponent) -> bool:
        """Check if NLU component_to_resolve is embedding converter.

        NOTE(review): the membership list contains SENTENCE_EMBEDDINGS_CONVERTER
        twice — presumably one entry was meant to be
        CHUNK_EMBEDDINGS_CONVERTER; kept as-is to preserve behavior. TODO confirm.
        """
        return component.name in [NLP_NODE_IDS.SENTENCE_EMBEDDINGS_CONVERTER,
                                  NLP_NODE_IDS.SENTENCE_EMBEDDINGS_CONVERTER]

    @staticmethod
    def is_NER_provider(component: NluComponent) -> bool:
        """Check if a NLU Component wraps a NER/NER-Medical model_anno_obj.

        NOTE(review): falls off the end and returns None (falsy) instead of
        False when neither condition holds — kept as-is.
        """
        if component.name in [NLP_HC_NODE_IDS.MEDICAL_NER, NLP_HC_NODE_IDS.TRAINABLE_MEDICAL_NER,
                              NLP_NODE_IDS.NER_DL, NLP_NODE_IDS.TRAINABLE_NER_DL,
                              NLP_NODE_IDS.TRAINABLE_NER_CRF, NLP_NODE_IDS.NER_CRF,
                              NLP_HC_NODE_IDS.ZERO_SHOT_NER]:
            return True
        if component.type == AnnoTypes.TRANSFORMER_TOKEN_CLASSIFIER:
            return True

    @staticmethod
    def is_NER_IOB_token_classifier(component: NluComponent) -> bool:
        """Check if a Token Classifier uses IOB PRediction format.

        Heuristic: any predicted class label containing '-' (e.g. 'B-PER')
        indicates IOB-style labels.
        """
        if not hasattr(component.model, 'getClasses'):
            return False
        return any(['-' in label for label in component.model.getClasses()])

    @staticmethod
    def is_NER_converter(component: NluComponent) -> bool:
        """Check if a NLU Component wraps a NER-IOB to NER-Pretty converter """
        return component.name in [NLP_HC_NODE_IDS.NER_CONVERTER_INTERNAL, NLP_NODE_IDS.NER_CONVERTER]

    @staticmethod
    def extract_NER_col(component: NluComponent, column='input') -> str:
        """Extract the exact name of the NER IOB column in the component_to_resolve"""
        if column == 'input':
            for f in component.in_types:
                if f in [NLP_FEATURES.NAMED_ENTITY_IOB, NLP_FEATURES.TOKEN_CLASSIFICATION]:
                    return f
        if column == 'output':
            for f in component.out_types:
                if f in [NLP_FEATURES.NAMED_ENTITY_IOB, NLP_FEATURES.TOKEN_CLASSIFICATION]:
                    return f
        raise ValueError(f"Could not find NER col for component_to_resolve ={component}")

    @staticmethod
    def extract_NER_converter_col(component: NluComponent, column='input') -> str:
        """Extract the exact name of the NER-converter column in the component_to_resolve"""
        if column == 'input':
            for f in component.in_types:
                if f == NLP_FEATURES.NAMED_ENTITY_IOB:
                    return f
        if column == 'output':
            for f in component.out_types:
                if f == NLP_FEATURES.NAMED_ENTITY_CONVERTED:
                    return f
        raise ValueError(f"Could not find NER Converter col for component_to_resolve ={component}")

    @staticmethod
    def extract_embed_col(component: NluComponent, column='input') -> str:
        """Extract the exact name of the embed column in the component_to_resolve"""
        if column == 'input':
            for c in component.spark_input_column_names:
                if 'embed' in c:
                    return c
        if column == 'output':
            for c in component.spark_output_column_names:
                if 'embed' in c:
                    return c
        raise ValueError(f"Could not find Embed col for component_to_resolve ={component}")

    @staticmethod
    def set_storage_ref_attribute_of_embedding_converters(pipe_list: List[NluComponent]):
        """For every embedding converter, we set storage ref attr on it, based
        on what the storage ref from it's provider is.

        :param pipe_list: all components of the pipeline
        :return: the same list, converters mutated in place
        """
        for converter in pipe_list:
            if ComponentUtils.is_embedding_provider(converter) and ComponentUtils.is_embedding_converter(converter):
                # First find the embed col of the converter
                embed_col = ComponentUtils.extract_embed_col(converter)
                for provider in pipe_list:
                    # Now find the Embedding generator that is feeding the converter
                    if embed_col in provider.spark_input_column_names:
                        converter.storage_ref = StorageRefUtils.nlp_extract_storage_ref_nlp_model(provider.model)
                        # converter.storage_ref = StorageRefUtils.extract_storage_ref(provider)
        return pipe_list

    @staticmethod
    def extract_embed_level_identity(component: NluComponent, col='input'):
        """Figure out if component_to_resolve feeds on chunk/sentence/document/token
        embeddings, for either input or output cols.

        NOTE(review): the input branch inspects component.info.inputs while the
        output branch inspects component.out_types — confirm this asymmetry is
        intentional. Returns None implicitly when nothing matches.
        """
        if col == 'input':
            if any(filter(lambda s: 'document_embed' in s, component.info.inputs)):
                return 'document_embeddings'
            if any(filter(lambda s: 'sentence_embed' in s, component.info.inputs)):
                return 'sentence_embeddings'
            if any(filter(lambda s: 'chunk_embed' in s, component.info.inputs)):
                return 'chunk_embeddings'
            if any(filter(lambda s: 'token_embed' in s, component.info.inputs)):
                return 'token_embeddings'
        elif col == 'output':
            if any(filter(lambda s: 'document_embed' in s, component.out_types)):
                return 'document_embeddings'
            if any(filter(lambda s: 'sentence_embed' in s, component.out_types)):
                return 'sentence_embeddings'
            if any(filter(lambda s: 'chunk_embed' in s, component.out_types)):
                return 'chunk_embeddings'
            if any(filter(lambda s: 'token_embed' in s, component.out_types)):
                return 'token_embeddings'

    @staticmethod
    def are_producer_consumer_matches(e_consumer: NluComponent, e_provider: NluComponent) -> bool:
        """Check for embedding_consumer and embedding_producer if they match
        storage_ref and output level wise """
        if StorageRefUtils.extract_storage_ref(e_consumer) == StorageRefUtils.extract_storage_ref(e_provider):
            if ComponentUtils.extract_embed_level_identity(e_consumer, 'input') == ComponentUtils.extract_embed_level_identity(
                    e_provider, 'output'):
                return True
        ## TODO FALL BACK FOR BAD MATCHES WHICH ACTUALLY MATCH-> consult name space
        return False

    @staticmethod
    def get_nlu_ref_identifier(component: NluComponent) -> str:
        """The tail of a NLU ref after splitting on '.' gives a unique identifier
        for NON-Aliased components.
        If result is '' , model_anno_obj UID will be used as identifier
        """
        tail = component.nlu_ref.split('.')[-1].split('@')[-1]
        if tail == '':
            logger.warning(
                f"Could not deduct tail from component_to_resolve={component}. This is intended for CustomModelComponents used in offline mode")
            tail = str(component.model)
        return tail

    @staticmethod
    def remove_storage_ref_from_features(features: List[str]):
        """Clean storage ref from every str in list """
        return [f.split('@')[0] for f in features]
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/utils/component_utils.py
component_utils.py
import logging
from io import StringIO
from typing import Iterable

from nlu.universe.feature_universes import NLP_FEATURES

logger = logging.getLogger('nlu')
import pyspark
from pyspark.sql.functions import monotonically_increasing_id
import numpy as np
import pandas as pd
from pyspark.sql.types import StringType, StructType, StructField


class DataConversionUtils:
    """Convert user-supplied data (Spark/pandas/Modin DataFrames, Series,
    numpy arrays, strings, lists, question tuples) into the Spark or pandas
    DataFrames NLU pipelines consume.

    Every ``*_to_sdf`` / ``*_to_pdf`` helper returns a 3-tuple of
    ``(dataframe, stranger_features, output_datatype)`` where
    ``stranger_features`` are extra non-text columns to carry through
    prediction and ``output_datatype`` records the original input type so the
    result can be cast back.
    """
    # Modin as well, but optional, so we don't import the type yet.
    supported_types = [pyspark.sql.DataFrame, pd.DataFrame, pd.Series, np.ndarray]

    @staticmethod
    def except_text_col_not_found(cols):
        """Raise a uniform error when no 'text' column can be located."""
        raise ValueError(
            f'Could not find column named "text" in input Pandas Dataframe. Please ensure one column named such exists. Columns in DF are : {cols} ')

    @staticmethod
    def except_invalid_question_data_format(cols):
        """Raise a uniform error for malformed span-question input."""
        raise ValueError(
            f'You input data format is invalid for question answering with span classification.'
            f'Make sure you have at least 2 columns in you dataset, named context/question for pandas Dataframes'
            f'For Strings/Iterables/Tuples make sure to use the format `question|||context` or (question,context) ')

    @staticmethod
    def sdf_to_sdf(data, spark_sess, raw_text_column='text'):
        """No casting, Spark to Spark. Just add index col"""
        logger.info(f"Casting Spark DF to Spark DF")
        output_datatype = 'spark'
        data = data.withColumn('origin_index', monotonically_increasing_id().alias('origin_index'))
        stranger_features = []
        if raw_text_column in data.columns:
            # store all stranger features
            if len(data.columns) > 1:
                # FIX: was `set(raw_text_column)`, which subtracts the *characters*
                # {'t','e','x'} instead of the column name, leaving 'text' in
                # stranger_features.
                stranger_features = list(set(data.columns) - {raw_text_column})
        else:
            DataConversionUtils.except_text_col_not_found(data.columns)
        return data, stranger_features, output_datatype

    @staticmethod
    def table_question_str_to_sdf(data, spark_sess):
        """Cast a (table-as-raw-string, question(s)) tuple for table QA.

        data[0] is the table as a raw JSON/CSV string, data[1] the question
        string or iterable of question strings.
        """
        # It's JSON or CSV but as raw string; validate with json.loads or pd.read_csv().
        try:
            # Validate JSON
            # For now no csv validation because we dont have validator.
            # We let Spark-NLP Pipe validate the data.
            # FIX: was `" ".join([[data[1]] if ...])` which joined a list
            # containing a *list* and always raised TypeError.
            return spark_sess.createDataFrame(
                [[data[0], data[1] if isinstance(data[1], str) else " ".join(data[1])]]
            ).toDF(NLP_FEATURES.RAW_QUESTION_CONTEXT, NLP_FEATURES.RAW_QUESTION), [], 'pandas'
        except Exception as err:
            # TODO PARSE FAIL
            raise err

    @staticmethod
    def table_question_pdf_to_sdf(data, spark_sess):
        """Cast a (pandas table, question(s)) tuple for table QA.

        Serializes the table to the {"header": [...], "rows": [...]} JSON
        layout Spark NLP's table QA expects.
        """
        # For now no csv validation because we dont have delimitor. We let Spark-NLP Pipe validate the data.
        # pd.read_csv(StringIO(data[0]))
        header = '"header": [' + ", ".join((list(map(lambda x: f'"{x}"', data[0].columns)))) + ']'
        json_data_str = '{' + f'''{header}, "rows" : {data[0].to_json(orient='values')} ''' + '}'
        # data[0].to_json()
        try:
            return spark_sess.createDataFrame(
                [[json_data_str, data[1] if isinstance(data[1], str) else ' '.join(data[1])]]
            ).toDF(NLP_FEATURES.RAW_QUESTION_CONTEXT, NLP_FEATURES.RAW_QUESTION).withColumn(
                'origin_index', monotonically_increasing_id().alias('origin_index')), [], 'pandas'
        except Exception as err:
            # TODO PARSE FAIL
            raise err

    @staticmethod
    def question_sdf_to_sdf(data, spark_sess):
        """Casting question Spark DF: normalize column names and add index col."""
        logger.info(f"Casting Pandas DF to Spark DF")
        output_datatype = 'spark'
        if NLP_FEATURES.RAW_QUESTION not in data.columns or NLP_FEATURES.RAW_QUESTION_CONTEXT not in data.columns:
            if len(data.columns) < 2:
                DataConversionUtils.except_invalid_question_data_format(data)
            # Assume first column = question, second = context.
            data = data.withColumnRenamed(data.columns[0], NLP_FEATURES.RAW_QUESTION) \
                .withColumnRenamed(data.columns[1], NLP_FEATURES.RAW_QUESTION_CONTEXT)
        data = data.withColumn('origin_index', monotonically_increasing_id().alias('origin_index'))
        stranger_features = list(set(data.columns) - {NLP_FEATURES.RAW_QUESTION, NLP_FEATURES.RAW_QUESTION_CONTEXT})
        return data, stranger_features, output_datatype

    @staticmethod
    def question_str_to_sdf(data, spark_sess):
        """Cast a 'question|||context' string to a Spark DF with an index col.

        Casting follows str -> pd -> spark (inefficient but simple).
        """
        output_datatype = 'string'
        if '|||' not in data:
            DataConversionUtils.except_invalid_question_data_format(data)
        question, context = data.split('|||')
        sdf = spark_sess.createDataFrame(pd.DataFrame({NLP_FEATURES.RAW_QUESTION: question,
                                                       NLP_FEATURES.RAW_QUESTION_CONTEXT: context,
                                                       'origin_index': [0]}, index=[0]))
        return sdf, [], output_datatype

    @staticmethod
    def question_tuple_to_sdf(data, spark_sess):
        """Cast a single (question, context) tuple to a Spark DF with index col."""
        output_datatype = 'string'
        question, context = data[0], data[1]
        sdf = spark_sess.createDataFrame(pd.DataFrame({NLP_FEATURES.RAW_QUESTION: question,
                                                       NLP_FEATURES.RAW_QUESTION_CONTEXT: context,
                                                       'origin_index': [0]}, index=[0]))
        return sdf, [], output_datatype

    @staticmethod
    def question_tuple_iterable_to_sdf(data, spark_sess):
        """Cast an iterable of (question, context) tuples to a Spark DF."""
        output_datatype = 'string'
        if len(data) == 0:
            DataConversionUtils.except_invalid_question_data_format(data)
        if len(data[0]) != 2:
            DataConversionUtils.except_invalid_question_data_format(data)
        question, context = zip(*[(d[0], d[1]) for d in data])
        # FIX: origin_index was the scalar list [0], which raises a length
        # mismatch for any input with more than one tuple; use one index per row
        # (matching question_str_iterable_to_sdf).
        sdf = spark_sess.createDataFrame(pd.DataFrame({NLP_FEATURES.RAW_QUESTION: question,
                                                       NLP_FEATURES.RAW_QUESTION_CONTEXT: context,
                                                       'origin_index': list(range(len(question)))},
                                                      index=list(range(len(question)))))
        return sdf, [], output_datatype

    @staticmethod
    def question_str_iterable_to_sdf(data, spark_sess):
        """Cast an iterable of 'question|||context' strings to a Spark DF."""
        output_datatype = 'string'
        if len(data) == 0:
            DataConversionUtils.except_invalid_question_data_format(data)
        if '|||' not in data[0]:
            DataConversionUtils.except_invalid_question_data_format(data)
        question, context = zip(*[d.split('|||') for d in data])
        sdf = spark_sess.createDataFrame(pd.DataFrame({NLP_FEATURES.RAW_QUESTION: question,
                                                       NLP_FEATURES.RAW_QUESTION_CONTEXT: context,
                                                       'origin_index': list(range(len(question)))}))
        return sdf, [], output_datatype

    @staticmethod
    def pdf_to_sdf(data, spark_sess, raw_text_column='text'):
        """Casting pandas to spark and add index col"""
        logger.info(f"Casting Pandas DF to Spark DF")
        output_datatype = 'pandas'
        stranger_features = []
        sdf = None
        # set first col as text column if there is none
        if raw_text_column not in data.columns:
            data.rename(columns={data.columns[0]: 'text'}, inplace=True)
        data['origin_index'] = data.index
        if raw_text_column in data.columns:
            if len(data.columns) > 1:
                # make Nans to None, or spark will crash
                data = data.where(pd.notnull(data), None)
                data = data.dropna(axis=1, how='all')
                # FIX: was `set(raw_text_column)` (character set), see sdf_to_sdf.
                stranger_features = list(set(data.columns) - {raw_text_column})
            sdf = spark_sess.createDataFrame(data)
        else:
            DataConversionUtils.except_text_col_not_found(data.columns)
        return sdf, stranger_features, output_datatype

    @staticmethod
    def question_pdf_to_sdf(data, spark_sess):
        """Casting question pandas to spark and add index col"""
        logger.info(f"Casting Pandas DF to Spark DF")
        output_datatype = 'pandas'
        if NLP_FEATURES.RAW_QUESTION not in data.columns or NLP_FEATURES.RAW_QUESTION_CONTEXT not in data.columns:
            if len(data.columns) < 2:
                DataConversionUtils.except_invalid_question_data_format(data)
            data = data.rename(columns={
                data.columns[0]: NLP_FEATURES.RAW_QUESTION,
                data.columns[1]: NLP_FEATURES.RAW_QUESTION_CONTEXT,
            })
        data['origin_index'] = data.index
        # make Nans to None, or spark will crash
        data = data.where(pd.notnull(data), None)
        data = data.dropna(axis=1, how='all')
        stranger_features = list(set(data.columns) - {NLP_FEATURES.RAW_QUESTION, NLP_FEATURES.RAW_QUESTION_CONTEXT})
        sdf = spark_sess.createDataFrame(data)
        return sdf, stranger_features, output_datatype

    @staticmethod
    def pds_to_sdf(data, spark_sess, raw_text_column='text'):
        """Casting pandas series to spark and add index col.
        For df['text'] column/series passing, casting follows pseries -> pdf -> spark.
        """
        logger.info(f"Casting Pandas Series to Spark DF")
        output_datatype = 'pandas_series'
        sdf = None
        schema = StructType([StructField(raw_text_column, StringType(), True)])
        data = pd.DataFrame(data).dropna(axis=1, how='all')
        # If series from a column is passed, its column name will be reused.
        if raw_text_column not in data.columns and len(data.columns) == 1:
            data[raw_text_column] = data[data.columns[0]]
        else:
            logger.info(
                f'INFO: NLU will assume {data.columns[0]} as label column since default text column could not be find')
            data[raw_text_column] = data[data.columns[0]]
        data['origin_index'] = data.index
        if raw_text_column in data.columns:
            sdf = spark_sess.createDataFrame(pd.DataFrame(data[raw_text_column]), schema=schema)
        else:
            DataConversionUtils.except_text_col_not_found(data.columns)
        if 'origin_index' not in sdf.columns:
            sdf = sdf.withColumn('origin_index', monotonically_increasing_id().alias('origin_index'))
        return sdf, [], output_datatype

    @staticmethod
    def np_to_sdf(data, spark_sess, raw_text_column='text'):
        """Casting numpy array to spark and add index col.
        Casting follows np -> pd -> spark (we could cut out the first pd step).
        """
        logger.info(f"Casting Numpy Array to Spark DF")
        output_datatype = 'numpy_array'
        if len(data.shape) != 1:
            # FIX: the ValueError was constructed but never raised, so invalid
            # arrays fell through to createDataFrame with a confusing failure.
            raise ValueError(
                f"Exception : Input numpy array must be 1 Dimensional for prediction.. Input data shape is{data.shape}")
        sdf = spark_sess.createDataFrame(pd.DataFrame({raw_text_column: data, 'origin_index': list(range(len(data)))}))
        return sdf, [], output_datatype

    @staticmethod
    def str_to_sdf(data, spark_sess, raw_text_column='text'):
        """Casting str to spark and add index col.
        Casting follows str -> pd -> spark (inefficient but simple).
        """
        logger.info(f"Casting String to Spark DF")
        output_datatype = 'string'
        sdf = spark_sess.createDataFrame(pd.DataFrame({raw_text_column: data, 'origin_index': [0]}, index=[0]))
        return sdf, [], output_datatype

    @staticmethod
    def str_list_to_sdf(data, spark_sess, raw_text_column='text'):
        """Casting str list to spark and add index col.
        Casting follows list -> pd -> spark (inefficient but simple).
        """
        logger.info(f"Casting String List to Spark DF")
        output_datatype = 'string_list'
        if all(type(elem) == str for elem in data):
            sdf = spark_sess.createDataFrame(
                pd.DataFrame({raw_text_column: pd.Series(data), 'origin_index': list(range(len(data)))}))
        else:
            # FIX: the ValueError was constructed but never raised, which then
            # crashed with UnboundLocalError on `sdf` below.
            raise ValueError("Exception: Not all elements in input list are of type string.")
        return sdf, [], output_datatype

    @staticmethod
    def fallback_modin_to_sdf(data, spark_sess, raw_text_column='text'):
        """Casting potential Modin data to spark and add index col.
        Best-effort: this could fail if Modin is not installed, in which case a
        hint is printed and (None, [], '') is returned.
        """
        logger.info(f"Casting Modin DF to Spark DF")
        sdf = None
        output_datatype = ''
        try:
            import modin.pandas as mpd
            if isinstance(data, mpd.DataFrame):
                data = pd.DataFrame(data.to_dict())  # create pandas to support type inference
                output_datatype = 'modin'
                data['origin_index'] = data.index
                if raw_text_column in data.columns:
                    if len(data.columns) > 1:
                        data = data.where(pd.notnull(data), None)  # make Nans to None, or spark will crash
                        data = data.dropna(axis=1, how='all')
                        # FIX: was `set(raw_text_column)` (character set), see sdf_to_sdf.
                        stranger_features = list(set(data.columns) - {raw_text_column})
                    sdf = spark_sess.createDataFrame(data)
                else:
                    DataConversionUtils.except_text_col_not_found(data.columns)
            if isinstance(data, mpd.Series):
                output_datatype = 'modin_series'
                data = pd.Series(data.to_dict())  # create pandas to support type inference
                data = pd.DataFrame(data).dropna(axis=1, how='all')
                data['origin_index'] = data.index
                index_provided = True
                if raw_text_column in data.columns:
                    sdf = spark_sess.createDataFrame(data[['text']])
                else:
                    DataConversionUtils.except_text_col_not_found(data.columns)
        except Exception:
            # Narrowed from a bare `except:`; this is a deliberate best-effort
            # path, so we only hint at the likely cause instead of crashing.
            print(
                "If you use Modin, make sure you have installed 'pip install modin[ray]' or 'pip install modin[dask]' backend for Modin ")
        return sdf, [], output_datatype

    @staticmethod
    def to_spark_df(data, spark_sess, raw_text_column='text', is_span_data=False, is_tabular_qa_data=False):
        """Convert supported datatypes to SparkDF and extract extra data for prediction later on.

        Dispatches on the runtime type of `data`; raises ValueError when a
        recognized type fails to convert and TypeError for unsupported types.
        """
        if is_tabular_qa_data:
            if not isinstance(data, Iterable):
                # TODO invalid Table Data Format Exception
                pass
            if not isinstance(data[0], (pd.DataFrame, str)):
                # TODO invalid Table Data Format Exception
                pass
            if not isinstance(data[1], Iterable):
                # TODO invalid Table Data Format Exception
                pass
            if isinstance(data[0], str):
                return DataConversionUtils.table_question_str_to_sdf(data, spark_sess)
            if isinstance(data[0], pd.DataFrame):
                return DataConversionUtils.table_question_pdf_to_sdf(data, spark_sess)
        if is_span_data:
            try:
                if isinstance(data, pyspark.sql.dataframe.DataFrame):
                    return DataConversionUtils.question_sdf_to_sdf(data, spark_sess)
                elif isinstance(data, pd.DataFrame):
                    return DataConversionUtils.question_pdf_to_sdf(data, spark_sess)
                elif isinstance(data, tuple):
                    return DataConversionUtils.question_tuple_to_sdf(data, spark_sess)
                elif isinstance(data, str):
                    return DataConversionUtils.question_str_to_sdf(data, spark_sess)
                elif isinstance(data, (list, pd.Series, np.ndarray)):
                    if isinstance(data[0], tuple):
                        return DataConversionUtils.question_tuple_iterable_to_sdf(data, spark_sess)
                    elif isinstance(data[0], str):
                        return DataConversionUtils.question_str_iterable_to_sdf(data, spark_sess)
            except Exception as err:
                # FIX: the ValueError was constructed but never raised, so
                # conversion failures were silently swallowed.
                raise ValueError(
                    "Data could not be converted to Spark Dataframe for internal conversion.") from err
        else:
            try:
                if isinstance(data, pyspark.sql.dataframe.DataFrame):
                    return DataConversionUtils.sdf_to_sdf(data, spark_sess, raw_text_column)
                elif isinstance(data, pd.DataFrame):
                    return DataConversionUtils.pdf_to_sdf(data, spark_sess, raw_text_column)
                elif isinstance(data, pd.Series):
                    return DataConversionUtils.pds_to_sdf(data, spark_sess, raw_text_column)
                elif isinstance(data, np.ndarray):
                    return DataConversionUtils.np_to_sdf(data, spark_sess, raw_text_column)
                elif isinstance(data, str):
                    return DataConversionUtils.str_to_sdf(data, spark_sess, raw_text_column)
                elif isinstance(data, list):
                    return DataConversionUtils.str_list_to_sdf(data, spark_sess, raw_text_column)
                else:
                    return DataConversionUtils.fallback_modin_to_sdf(data, spark_sess, raw_text_column)
            except Exception as err:
                # FIX: see above — previously the error object was discarded and
                # execution fell through to the TypeError below.
                raise ValueError(
                    "Data could not be converted to Spark Dataframe for internal conversion.") from err
        raise TypeError(f"Invalid datatype = {type(data)}")

    @staticmethod
    def str_to_pdf(data, raw_text_column):
        """Wrap a single string in a one-row pandas DF with origin_index."""
        logger.info(f"Casting String to Pandas DF")
        return pd.DataFrame({raw_text_column: [data]}).reset_index().rename(
            columns={'index': 'origin_index'}), [], 'string'

    @staticmethod
    def str_list_to_pdf(data, raw_text_column):
        """Wrap a list of strings in a pandas DF with origin_index."""
        logger.info(f"Casting String List to Pandas DF")
        return pd.DataFrame({raw_text_column: data}).reset_index().rename(
            columns={'index': 'origin_index'}), [], 'string_list'

    @staticmethod
    def np_to_pdf(data, raw_text_column):
        """Wrap a 1-D numpy array in a pandas DF with origin_index."""
        logger.info(f"Casting Numpy Array to Pandas DF")
        return pd.DataFrame({raw_text_column: data}).reset_index().rename(
            columns={'index': 'origin_index'}), [], 'string_list'

    @staticmethod
    def pds_to_pdf(data, raw_text_column):
        """Wrap a pandas Series in a pandas DF with origin_index."""
        return pd.DataFrame({raw_text_column: data}).reset_index().rename(
            columns={'index': 'origin_index'}), [], 'string_list'

    @staticmethod
    def pdf_to_pdf(data, raw_text_column):
        """Normalize a pandas DF: add origin_index, guarantee a text column."""
        logger.info(f"Casting Pandas DF to Pandas DF")
        data = data.reset_index().rename(columns={'index': 'origin_index'})
        stranger_features = list(data.columns)
        if raw_text_column not in stranger_features:
            # FIX: previously renamed stranger_features[0] — which is
            # 'origin_index' after reset_index — and then crashed on
            # stranger_features.remove('text'). Pick the first non-index column
            # as the text column and keep the feature list consistent.
            fallback_col = next(c for c in stranger_features if c != 'origin_index')
            print(f"Could not find {raw_text_column} col in df. Using {fallback_col} col istead")
            data = data.rename(columns={fallback_col: raw_text_column})
            stranger_features[stranger_features.index(fallback_col)] = raw_text_column
        stranger_features.remove(raw_text_column)
        stranger_features.remove('origin_index')
        return data, stranger_features, 'pandas'

    @staticmethod
    def sdf_to_pdf(data, raw_text_column):
        """Collect a Spark DF to pandas: add origin_index, guarantee a text column."""
        logger.info(f"Casting Spark DF to Pandas DF")
        data = data.toPandas().reset_index().rename(columns={'index': 'origin_index'})
        stranger_features = list(data.columns)
        if raw_text_column not in stranger_features:
            # FIX: same fallback repair as in pdf_to_pdf (see comment there).
            fallback_col = next(c for c in stranger_features if c != 'origin_index')
            print(f"Could not find {raw_text_column} col in df. Using {fallback_col} col istead")
            data = data.rename(columns={fallback_col: raw_text_column})
            stranger_features[stranger_features.index(fallback_col)] = raw_text_column
        stranger_features.remove(raw_text_column)
        stranger_features.remove('origin_index')
        return data, stranger_features, 'spark'

    @staticmethod
    def to_pandas_df(data, raw_text_column='text'):
        """
        Convert data to LightPipeline compatible format, which is np.array[str],
        list[str] and str, but we need a list anyway later. So we create here a
        pd.DataFrame with a text col if not already given.
        Convert supported datatypes to Pandas and extract extra data for prediction later on.
        """
        try:
            if isinstance(data, pyspark.sql.dataframe.DataFrame):
                # FIX: was pdf_to_pdf, which crashed on Spark DataFrames
                # (no reset_index) and silently returned None via the old
                # swallow-the-error except. Spark input must go through toPandas().
                return DataConversionUtils.sdf_to_pdf(data, raw_text_column)
            elif isinstance(data, pd.DataFrame):
                return DataConversionUtils.pdf_to_pdf(data, raw_text_column)
            elif isinstance(data, pd.Series):
                return DataConversionUtils.pds_to_pdf(data, raw_text_column)
            elif isinstance(data, np.ndarray):
                return DataConversionUtils.np_to_pdf(data, raw_text_column)
            elif isinstance(data, str):
                return DataConversionUtils.str_to_pdf(data, raw_text_column)
            elif isinstance(data, list):
                return DataConversionUtils.str_list_to_pdf(data, raw_text_column)
            else:
                # NOTE(review): fallback_modin_to_pdf is not defined in this
                # module — TODO implement or route through fallback_modin_to_sdf.
                return DataConversionUtils.fallback_modin_to_pdf(data, raw_text_column)
        except Exception as err:
            # FIX: the ValueError was constructed but never raised, making the
            # function silently return None on any conversion failure.
            raise ValueError(
                "Data could not be converted to Spark Dataframe for internal conversion.") from err

    @staticmethod
    def size_of(data):
        """Return the number of rows/elements in any supported input type."""
        if isinstance(data, pyspark.sql.dataframe.DataFrame):
            return data.count()
        elif isinstance(data, pd.DataFrame):
            return data.shape[0]
        elif isinstance(data, pd.Series):
            return data.shape[0]
        elif isinstance(data, np.ndarray):
            return data.shape[0]
        elif isinstance(data, str):
            return 1
        elif isinstance(data, list):
            return len(data)
        else:
            return len(data)
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/utils/data_conversion_utils.py
data_conversion_utils.py
from typing import Dict, Any, List
import logging

from nlu.universe.atoms import NlpLevel
from nlu.universe.feature_node_ids import NLP_HC_NODE_IDS
from nlu.universe.logic_universes import NLP_LEVELS
from nlu.universe.feature_universes import NLP_FEATURES
from nlu.universe.universes import Licenses
from nlu.pipe.col_substitution.col_name_substitution_utils import ColSubstitutionUtils

logger = logging.getLogger('nlu')


class OutputLevelUtils:
    """Resolve output level of pipeline and components"""

    @staticmethod
    def infer_prediction_output_level(pipe) -> NlpLevel:
        """
        This function checks the LAST component_to_resolve of the NLU pipeline and infers
        from that the output level via checking the components' info.
        :param pipe: to infer output level for
        :return returns inferred output level
        """
        # The original reverse loop returned on its very first iteration, i.e. it
        # always resolved the last component -- made explicit here.
        if pipe.components:
            return OutputLevelUtils.resolve_component_to_output_level(pipe, pipe.components[-1])
        # fallback for an empty pipeline
        return NLP_LEVELS.DOCUMENT

    @staticmethod
    def resolve_input_dependent_component_to_output_level(pipe, component_to_resolve) -> NlpLevel:
        """
        For a given NLU component which is input dependent, resolve its output level by checking
        if its inputs stem from document or sentence based annotators.
        :param pipe: the pipeline containing all components
        :param component_to_resolve: Input dependent component for which we want to know the output level
        :return: output-level of component
        """
        # (1.) A classifier, which is using sentence/document. We just check input cols
        if NLP_FEATURES.DOCUMENT in component_to_resolve.spark_input_column_names:
            return NLP_LEVELS.DOCUMENT
        if NLP_FEATURES.SENTENCE in component_to_resolve.spark_input_column_names:
            return NLP_LEVELS.SENTENCE
        if NLP_FEATURES.DOCUMENT_QUESTION_CONTEXT in component_to_resolve.spark_input_column_names:
            return NLP_LEVELS.DOCUMENT
        # (2.) A model_anno_obj which is input dependent and not using document/sentence cols.
        # We iterate over components and see which is feeding this input dependent component_to_resolve
        for c in pipe.components:
            if c.name == component_to_resolve.name:
                continue
            if c.spark_output_column_names[0] in component_to_resolve.spark_input_column_names:
                # We found a component that is feeding the component_to_resolve.
                # Now we need to check if that component is document/sentence level
                if NLP_LEVELS.DOCUMENT in c.spark_input_column_names:
                    return NLP_LEVELS.DOCUMENT
                elif NLP_LEVELS.SENTENCE in c.spark_input_column_names:
                    return NLP_LEVELS.SENTENCE
        # NOTE(review): like the original, this implicitly returns None when no
        # document/sentence based feeder is found -- verify callers rely on truthiness.

    @staticmethod
    def resolve_component_to_output_level(pipe, component) -> NlpLevel:
        """
        For a given NLU component_to_resolve, resolve its output level, by checking annotator_levels
        dicts for approaches and models.
        If output level is input dependent, resolve_input_dependent_component_to_output_level will resolve it.
        :param component: to resolve
        :param pipe: pipe containing the component
        :return: resolved output level of component_to_resolve
        """
        if 'input_dependent' in component.output_level:
            return OutputLevelUtils.resolve_input_dependent_component_to_output_level(pipe, component)
        else:
            return component.output_level

    @staticmethod
    def get_columns_at_same_level_of_pipe(pipe, df, anno_2_ex_config, get_embeddings) -> List[str]:
        """Get List of columns in df that are generated from components in the pipeline which
        are at the same output level as the pipe.
        :param pipe: NLU Pipeline
        :param df: Pandas DataFrame resulting from applying the pipe
        :param anno_2_ex_config: mapping between anno to extractor, from get_annotator_extraction_configs()
        :param get_embeddings: Should embeddings be included
        :return: List of columns which are generated from components at same output level as the pipe.prediction_output_level
        """
        same_output_level_cols = []
        for c in pipe.components:
            if 'embedding' in c.type and get_embeddings is False:
                continue
            output_level = OutputLevelUtils.resolve_component_to_output_level(pipe, c)
            if output_level == pipe.prediction_output_level:
                generated_cols = ColSubstitutionUtils.get_final_output_cols_of_component(c, df, anno_2_ex_config)
                for generated_col in generated_cols:
                    # all _k_ fields of resolver may never be viewed as any common
                    # outputlevel and thus never be zipped.
                    if '_k_' in generated_col and c.jsl_anno_class_id == NLP_HC_NODE_IDS.SENTENCE_ENTITY_RESOLVER:
                        continue
                    same_output_level_cols.append(generated_col)
        return list(set(same_output_level_cols))

    @staticmethod
    def get_output_level_mapping_by_component(pipe) -> Dict[Any, str]:
        """Get a mapping key=NluComponent and value = output level
        :param pipe: NLU pipe for which to get the mapping
        :return: dict where key = NLU_Component and Value = Output level
        """
        nlp_levels = {c: OutputLevelUtils.resolve_component_to_output_level(pipe, c) for c in pipe.components}
        for c in pipe.components:
            if c.license == Licenses.ocr:
                nlp_levels[c] = c.output_level
        # Bugfix: return the mapping that includes the OCR overrides; the original
        # recomputed the comprehension here, silently discarding them.
        return nlp_levels
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/utils/output_level_resolution_utils.py
output_level_resolution_utils.py
import logging
from sparknlp.annotator import *
import nlu
from nlu import Licenses
from nlu.pipe.nlu_component import NluComponent
from nlu.pipe.pipeline import NLUPipeline
from nlu.pipe.utils.resolution.storage_ref_utils import StorageRefUtils
from nlu.universe.atoms import JslAnnoId
from nlu.universe.component_universes import ComponentUniverse, jsl_id_to_empty_component
from nlu.universe.feature_node_ids import NLP_NODE_IDS, NLP_HC_NODE_IDS, OCR_NODE_IDS
from nlu.universe.feature_universes import NLP_FEATURES
from nlu.universe.logic_universes import NLP_LEVELS, AnnoTypes

logger = logging.getLogger('nlu')
from nlu.pipe.utils.component_utils import ComponentUtils
from typing import List, Union
from nlu.universe.annotator_class_universe import AnnoClassRef
from nlu.utils.environment.env_utils import is_running_in_databricks
import os
import glob
import json


class PipeUtils:
    """Pipe Level logic operations and utils"""

    @staticmethod
    def update_bad_storage_refs(pipe: NLUPipeline):
        """
        Some models have bad storage refs. The list of these bad models is defined by
        nlu.spellbook.Spellbook.bad_storage_refs. The correct storage ref is given by the resolving models
        storage ref defined by nlu.Spellbook.licensed_storage_ref_2_nlu_ref[pipe.lang][storage_ref].
        Once the resolving model_anno_obj is loaded in the pipe, this method will take its storage ref and
        write it to the bad_storage_ref model_anno_obj defined by nlu.spellbook.Spellbook.bad_storage_refs.
        If storage ref is already updated, this method will leave the pipe unchanged.
        We only check for healthcare storage refs
        :param pipe: Pipe to update bad storage refs on
        :return: Pipe where each component_to_resolve has storage ref updated, if it was not already updated
        """
        for bad_storage_ref_component in pipe.components:
            # Only freshly resolved components that actually carry a storage ref are candidates.
            if not bad_storage_ref_component.loaded_from_pretrained_pipe and bad_storage_ref_component.has_storage_ref:
                storage_ref = StorageRefUtils.extract_storage_ref(bad_storage_ref_component)
                # After updating the storage ref it will not be in the licensed_storage_ref_2_nlu_ref mapping anymore,
                if storage_ref in nlu.spellbook.Spellbook.bad_storage_refs:
                    # since its a bad storage ref, we can resolve its storage ref by checking licensed_storage_ref_2_nlu_ref
                    if pipe.lang in nlu.Spellbook.licensed_storage_ref_2_nlu_ref.keys():
                        if storage_ref in nlu.Spellbook.licensed_storage_ref_2_nlu_ref[pipe.lang].keys():
                            storage_ref_resolver_nlu_ref = nlu.Spellbook.licensed_storage_ref_2_nlu_ref[pipe.lang][
                                storage_ref]
                            # Find the loaded resolving model and copy its storage ref over.
                            for storage_resolver in pipe.components:
                                if storage_resolver.nlu_ref == storage_ref_resolver_nlu_ref:
                                    # Update the storage ref of the bad_component to the storage ref of the resolving model_anno_obj according to licensed_storage_ref_2_nlu_ref
                                    resolving_storage_ref = StorageRefUtils.extract_storage_ref(storage_resolver)
                                    bad_storage_ref_component.model.set(bad_storage_ref_component.model.storageRef,
                                                                        resolving_storage_ref)
        return pipe

    @staticmethod
    def update_relation_extractor_models_storage_ref(pipe: NLUPipeline):
        """Update RelationExtraction components' storage refs from their resolving models."""
        # if provided, because the sometimes have unresolvable storage refs
        # we can find the actual storage ref only after its mapped is sresolved to an model_anno_obj defined by an nlp ref
        # If RelationExtractor is not loaded from a pretrained pipe we update its storage ref to the resolving models storage ref
        for relation_extractor_component in pipe.components:
            if relation_extractor_component.jsl_anno_class_id == NLP_HC_NODE_IDS.RELATION_EXTRACTION and not relation_extractor_component.loaded_from_pretrained_pipe:
                storage_ref = StorageRefUtils.extract_storage_ref(relation_extractor_component)
                # After updating the storage ref it will not be in the licensed_storage_ref_2_nlu_ref mapping anymore,
                # so we have to check here if it exists in the mapping before accessing it
                if pipe.lang in nlu.Spellbook.licensed_storage_ref_2_nlu_ref.keys():
                    if storage_ref in nlu.Spellbook.licensed_storage_ref_2_nlu_ref[pipe.lang].keys():
                        # We need to find a component_to_resolve in the pipeline which has this storage ref
                        storage_ref_resolver_nlu_ref = nlu.Spellbook.licensed_storage_ref_2_nlu_ref[pipe.lang][
                            storage_ref]
                        for storage_resolver in pipe.components:
                            if storage_resolver.nlu_ref == storage_ref_resolver_nlu_ref:
                                # Update the storage ref of the RL-Extractor to the storage ref of the resolving model_anno_obj according to licensed_storage_ref_2_nlu_ref
                                resolving_storage_ref = StorageRefUtils.extract_storage_ref(storage_resolver)
                                relation_extractor_component.model.set(relation_extractor_component.model.storageRef,
                                                                       resolving_storage_ref)
        return pipe

    @staticmethod
    def get_json_data_for_pipe_model_at_stage_number(pipe_path, stage_number_as_string):
        """Gets the json metadata from a model_anno_obj for a given base path at a specific stage index"""
        c_metadata_path = f'{pipe_path}/stages/{stage_number_as_string}_*/metadata/part-00000'
        # Stage dirs are named <idx>_<annotator-uid>; glob picks the matching one.
        c_metadata_path = glob.glob(f'{c_metadata_path}*')[0]
        with open(c_metadata_path, "r", encoding="utf8") as f:
            data = json.load(f)
        return data

    @staticmethod
    def get_json_data_for_pipe_model_at_stage_number_on_databricks(nlp_ref, lang, digit_str):
        """Gets the json metadata from a model_anno_obj for a given base path at a specific stage index on databricks"""
        import sparknlp
        spark = sparknlp.start()
        # On Databricks local file access is not available; read the metadata via Spark from DBFS.
        pipe_df = spark.read.json(
            f'dbfs:/root/cache_pretrained/{nlp_ref}_{lang}*/stages/{digit_str}_*/metadata/part-00000')
        data = pipe_df.toPandas().to_dict()
        # Each to_dict() value is a single-row mapping; unwrap row 0.
        data = {k: v[0] for k, v in data.items()}
        if 'inputCols' in data['paramMap'].keys():
            data['paramMap']['inputCols'] = 
data['paramMap']['inputCols'].tolist() data return data @staticmethod def set_column_values_on_components_from_pretrained_pipe(component_list: List[NluComponent], nlp_ref, lang, path): """Since output/input cols cannot be fetched from Annotators via get input/output col reliably, we must check annotator data to find them Expects a list of NLU Component objects which all stem from the same pipeline defined by nlp_ref """ if path: pipe_path = path else: pipe_path = os.path.expanduser('~') + '/cache_pretrained/' + f'{nlp_ref}_{lang}' # We do not need to check for Spark Version, since cols should match across versions pipe_path = glob.glob(f'{pipe_path}*') if len(pipe_path) == 0: # try databricks env path if is_running_in_databricks(): pipe_path = [f'dbfs:/root/cache_pretrained/{nlp_ref}_{lang}'] else: raise FileNotFoundError(f"Could not find downloaded Pipeline at path={pipe_path}") pipe_path = pipe_path[0] if not os.path.exists(pipe_path) and not is_running_in_databricks(): raise FileNotFoundError(f"Could not find downloaded Pipeline at path={pipe_path}") # Find HDD location of component_list and read out input/output cols digits_num = len(str(len(component_list))) digit_str = '0' * digits_num digit_cur = 0 for c in component_list: model_name = c.model.uid.split('_')[0] if is_running_in_databricks(): data = PipeUtils.get_json_data_for_pipe_model_at_stage_number_on_databricks(nlp_ref, lang, digit_str) else: data = PipeUtils.get_json_data_for_pipe_model_at_stage_number(pipe_path, digit_str) if 'inputCols' in data['paramMap'].keys(): inp = data['paramMap']['inputCols'] c.model.setInputCols(inp) else: inp = data['paramMap']['inputCol'] c.model.setInputCol(inp) if 'outputCol' in data['paramMap'].keys(): out = data['paramMap']['outputCol'] else: # Sometimes paramMap is missing outputCol, so we have to use this hack if model_name == 'DocumentAssembler': out = 'document' elif model_name == 'Finisher': out = 'finished' else: out = c.model.uid.split('_')[0] + '_out' 
c.spark_input_column_names = inp if isinstance(inp, List) else [inp] c.spark_output_column_names = [out] if model_name != 'Finisher': # finisher dynamically generates cols from input cols 4 c.model.setOutputCol(out) if hasattr(c.model, 'setOutputCol') else c.model.setOutputCols(out) digit_cur += 1 digit_str = str(digit_cur) while len(digit_str) < digits_num: digit_str = '0' + digit_str return component_list @staticmethod def is_trainable_pipe(pipe: NLUPipeline): """Check if component_list is trainable""" for c in pipe.components: if c.trainable: return True return False @staticmethod def enforece_AT_embedding_provider_output_col_name_schema_for_list_of_components(pipe_list: List[NluComponent]): """For every embedding provider, enforce that their output col is named <pipe_prediction_output_level>@storage_ref for output_levels word,chunk,sentence aka document , TODO update the classifier models swell i.e. word_embed@elmo or sentence_embed@elmo etc. """ for c in pipe_list: if ComponentUtils.is_embedding_provider(c): level_AT_ref = ComponentUtils.extract_storage_ref_AT_notation_for_embeds(c, 'output') c.out_types = [level_AT_ref] c.info.spark_output_column_names = [level_AT_ref] c.model.setOutputCol(level_AT_ref[0]) return pipe_list @staticmethod def enforce_AT_schema_on_pipeline_and_add_NER_converter(pipe: NLUPipeline): """Enforces the AT naming schema on all column names and add missing NER converters""" return PipeUtils.enforce_AT_schema_on_NER_processors_and_add_missing_NER_converters( PipeUtils.enforce_AT_schema_on_embedding_processors(pipe)) @staticmethod def enforce_AT_schema_on_NER_processors_and_add_missing_NER_converters(pipe: NLUPipeline): """For every NER provider and consumer, enforce that their output col is named <pipe_prediction_output_level>@storage_ref for output_levels word,chunk,sentence aka document , i.e. word_embed@elmo or sentence_embed@elmo etc. 
        We also add NER converters for every NER model_anno_obj that no Converter converting its inputs
        In addition, returns the pipeline with missing NER converters added, for every NER model_anno_obj.
        The converters transform the IOB schema in a merged and more usable form for downstream tasks
        1. Find a NER model_anno_obj in component_list
        2. Find a NER converter feeding from it, if there is None, create one.
        3. Generate name with Identifier <ner-iob>@<nlu_ref_identifier> and <entities>@<nlu_ref_identifier>
        3.1 Update NER Models output to <ner-iob>@<nlu_ref_identifier>
        3.2 Update NER Converter input to <ner-iob>@<nlu_ref_identifier>
        3.3 Update NER Converter output to <entities>@<nlu_ref_identifier>
        4. Update every Component that feeds from the NER converter (i.e. Resolver etc.)
        includes TOKEN-CLASSIFIER-TRANSFORMER models which usually output NER format
        """
        new_converters = []
        for c in pipe.components:
            if c.loaded_from_pretrained_pipe:
                # Leave pretrained component_list models untouched
                # NOTE(review): pretrained components are appended to new_converters and
                # re-added via pipe.add below -- confirm pipe.add tolerates duplicates.
                new_converters.append(c)
                continue
            # TRANSFORMER_TOKEN_CLASSIFIER might be a NER provider. Regardless, No ner-Conversion will be performed
            # because it will not return NER IOB
            if ComponentUtils.is_NER_provider(c):
                if c.type == AnnoTypes.TRANSFORMER_TOKEN_CLASSIFIER and not ComponentUtils.is_NER_IOB_token_classifier(
                        c):
                    continue
                output_NER_col = ComponentUtils.extract_NER_col(c, 'output')
                converter_to_update = None
                # 2. Find an existing converter already feeding from this NER output.
                for other_c in pipe.components:
                    if output_NER_col in other_c.spark_input_column_names and ComponentUtils.is_NER_converter(other_c):
                        converter_to_update = other_c
                ner_identifier = ComponentUtils.get_nlu_ref_identifier(c)
                if converter_to_update is None:
                    # No converter found -- create one matching the NER model's license.
                    if c.license == Licenses.hc:
                        converter_to_update = jsl_id_to_empty_component(NLP_HC_NODE_IDS.NER_CONVERTER_INTERNAL)
                        converter_to_update.set_metadata(converter_to_update.get_default_model(),
                                                         NLP_HC_NODE_IDS.NER_CONVERTER_INTERNAL,
                                                         NLP_HC_NODE_IDS.NER_CONVERTER_INTERNAL, 'xx', False,
                                                         Licenses.hc)
                    else:
                        converter_to_update = jsl_id_to_empty_component(NLP_NODE_IDS.NER_CONVERTER)
                        converter_to_update.set_metadata(converter_to_update.get_default_model(),
                                                         NLP_NODE_IDS.NER_CONVERTER,
                                                         NLP_NODE_IDS.NER_CONVERTER, 'xx', False,
                                                         Licenses.open_source)
                    new_converters.append(converter_to_update)
                converter_to_update.nlu_ref = f'ner_converter.{c.nlu_ref}'
                # 3. generate new col names
                new_NER_AT_ref = output_NER_col
                if '@' not in output_NER_col:
                    new_NER_AT_ref = output_NER_col + '@' + ner_identifier
                new_NER_converter_AT_ref = 'entities' + '@' + ner_identifier
                # 3.1 upate NER model_anno_obj outputs
                c.spark_output_column_names = [new_NER_AT_ref]
                c.model.setOutputCol(new_NER_AT_ref)
                # 3.2 update converter inputs
                old_ner_input_col = ComponentUtils.extract_NER_converter_col(converter_to_update, 'input')
                if old_ner_input_col in converter_to_update.spark_input_column_names:
                    converter_to_update.spark_input_column_names.remove(old_ner_input_col)
                else:
                    converter_to_update.spark_input_column_names.pop()
                # if old_ner_input_col in converter_to_update.spark_input_column_names:
                #     converter_to_update.spark_input_column_names.remove(old_ner_input_col)
                # else:
                #     converter_to_update.spark_input_column_names.pop()
                converter_to_update.spark_input_column_names.append(new_NER_AT_ref)
                converter_to_update.model.setInputCols(converter_to_update.spark_input_column_names)
                # 3.3 update converter outputs
                converter_to_update.spark_output_column_names = [new_NER_converter_AT_ref]
                converter_to_update.model.setOutputCol(new_NER_converter_AT_ref)
                ## todo improve, this causes the first ner producer to feed to all ner-cosnuners. All other ner-producers will be ignored by ner-consumers,w ithouth special syntax or manual configs --> Chunk merger
                ##4. Update all NER consumers input columns, i.e. Resolver, Relation, etc..
                for conversion_consumer in pipe.components:
                    if NLP_FEATURES.NAMED_ENTITY_CONVERTED in conversion_consumer.in_types:
                        conversion_consumer.spark_input_column_names.remove(NLP_FEATURES.NAMED_ENTITY_CONVERTED)
                        conversion_consumer.spark_input_column_names.append(new_NER_converter_AT_ref)
        # Add new converters to component_list
        for conv in new_converters:
            if conv.license == Licenses.hc:
                pipe.add(conv,
                         name_to_add=f'chunk_converter_licensed@{conv.spark_output_column_names[0].split("@")[0]}')
            else:
                pipe.add(conv, name_to_add=f'chunk_converter@{conv.spark_output_column_names[0].split("@")[0]}')
        return pipe

    @staticmethod
    def enforce_AT_schema_on_embedding_processors(pipe: NLUPipeline):
        """For every embedding provider and consumer, enforce that their output col is named
        <pipe_prediction_output_level>@storage_ref for output_levels word,chunk,sentence aka document ,
        i.e. word_embed@elmo or sentence_embed@elmo etc.
        """
        for c in pipe.components:
            # Leave pretrained component_list models untouched
            if c.loaded_from_pretrained_pipe:
                continue
            if ComponentUtils.is_embedding_provider(c):
                if '@' not in c.spark_output_column_names[0]:
                    new_embed_AT_ref = ComponentUtils.extract_storage_ref_AT_notation_for_embeds(c, 'output')
                    c.spark_output_column_names = [new_embed_AT_ref]
                    c.model.setOutputCol(new_embed_AT_ref)
            if ComponentUtils.is_embedding_consumer(c):
                input_embed_col = ComponentUtils.extract_embed_col(c)
                if '@' not in input_embed_col:
                    # TODO set storage ref for traianble model_anno_obj?
new_embed_AT_ref = ComponentUtils.extract_storage_ref_AT_notation_for_embeds(c, 'input') c.spark_input_column_names.remove(input_embed_col) c.spark_input_column_names.append(new_embed_AT_ref) c.model.setInputCols(c.spark_input_column_names) return pipe @staticmethod def enforce_NLU_columns_to_NLP_columns(pipe: NLUPipeline): """for every component_to_resolve, set its inputs and outputs to the ones configured on the NLU component_to_resolve.""" # These anno have no standardized setInputCol or it should not be configured blacklisted = [NLP_NODE_IDS.DOCUMENT_ASSEMBLER] for c in pipe.components: if c.name == OCR_NODE_IDS.VISUAL_DOCUMENT_CLASSIFIER: c.model.setLabelCol(c.spark_output_column_names[0]) c.model.setConfidenceCol(c.spark_output_column_names[1]) continue if c.loaded_from_pretrained_pipe: continue if c.name in blacklisted: continue if hasattr(c.model, 'setOutputCol'): c.model.setOutputCol(c.spark_output_column_names[0]) else: c.model.setOutputCols(c.spark_output_column_names) if hasattr(c.model, 'setInputCols'): c.model.setInputCols(c.spark_input_column_names) else: # Some OCR Annotators only have one input and thus only setInputCol method but not setInputCols c.model.setInputCol(c.spark_input_column_names[0]) return pipe @staticmethod def is_converter_component_resolution_reference(reference: str) -> bool: if 'chunk_emb' in reference: return True @staticmethod def configure_component_output_levels_to_sentence(pipe: NLUPipeline): ''' Configure component_list components to output level document. 
Substitute every occurrence of <document> to <sentence> for every component_to_resolve that feeds from <document :param pipe: component_list to be configured :return: configured component_list ''' logger.info('Configuring components to sentence level') for c in pipe.components: # update in/out spark cols if c.loaded_from_pretrained_pipe: continue if NLP_FEATURES.DOCUMENT in c.spark_input_column_names and NLP_FEATURES.SENTENCE not in c.spark_input_column_names and NLP_FEATURES.SENTENCE not in c.spark_output_column_names: logger.info(f"Configuring C={c.name} of Type={type(c.model)} to Sentence Level") c.spark_input_column_names.remove(NLP_FEATURES.DOCUMENT) c.spark_input_column_names.append(NLP_FEATURES.SENTENCE) c.model.setInputCols(c.spark_input_column_names) if 'input_dependent' in c.output_level: c.output_level = NLP_LEVELS.SENTENCE # update in/out col types if NLP_FEATURES.DOCUMENT in c.in_types and NLP_FEATURES.SENTENCE not in c.in_types and NLP_FEATURES.SENTENCE not in c.out_types: c.in_types.remove(NLP_FEATURES.DOCUMENT) c.in_types.append(NLP_FEATURES.SENTENCE) return pipe.components @staticmethod def configure_component_output_levels_to_document(pipe: NLUPipeline): ''' Configure component_list components to output level document. 
        Substitute every occurence of <sentence> to <document> for every component_to_resolve that feeds from <sentence>
        :param pipe: component_list to be configured
        :return: configured component_list coonents only
        '''
        logger.info('Configuring components to document level')
        for c in pipe.components:
            if c.loaded_from_pretrained_pipe:
                continue
            # Update in/out spark cols
            if NLP_FEATURES.SENTENCE in c.spark_input_column_names and NLP_FEATURES.DOCUMENT not in c.spark_input_column_names and NLP_FEATURES.DOCUMENT not in c.spark_output_column_names:
                logger.info(f"Configuring C={c.name} to document output level")
                c.spark_input_column_names.remove(NLP_FEATURES.SENTENCE)
                c.spark_input_column_names.append(NLP_FEATURES.DOCUMENT)
                c.model.setInputCols(c.spark_input_column_names)
                if 'input_dependent' in c.output_level:
                    c.output_level = NLP_LEVELS.DOCUMENT
            # Update in/out col types
            if NLP_FEATURES.SENTENCE in c.in_types and NLP_FEATURES.DOCUMENT not in c.in_types and NLP_FEATURES.DOCUMENT not in c.out_types:
                c.in_types.remove(NLP_FEATURES.SENTENCE)
                c.in_types.append(NLP_FEATURES.DOCUMENT)
        return pipe.components

    @staticmethod
    def has_component_with_id(pipe: NLUPipeline, ids: Union[JslAnnoId, List[JslAnnoId]]):
        """Check for NLUPipeline if it contains component with id """
        # Accept a single id or a list of ids.
        ids = ids if isinstance(ids, list) else [ids]
        for c in pipe.components:
            if c.name in ids:
                return True
        return False

    @staticmethod
    def has_sentence_detector(pipe: NLUPipeline):
        """Check for NLUPipeline if it contains sentence detector"""
        for c in pipe.components:
            if isinstance(c.model, (SentenceDetectorDLModel, SentenceDetector, SentenceDetectorDLApproach)):
                return True
        return False

    @staticmethod
    def has_document_assembler(pipe: NLUPipeline):
        # Thin wrapper over has_component_with_id for the DocumentAssembler node id.
        return PipeUtils.has_component_with_id(pipe, NLP_NODE_IDS.DOCUMENT_ASSEMBLER)

    @staticmethod
    def has_table_extractor(pipe: NLUPipeline):
        """Check for NLUPipieline if it contains any table extracting OCR component"""
        return PipeUtils.has_component_with_id(pipe,
                                               [OCR_NODE_IDS.PDF2TEXT_TABLE, OCR_NODE_IDS.PPT2TEXT_TABLE,
                                                OCR_NODE_IDS.DOC2TEXT_TABLE, OCR_NODE_IDS.IMAGE_TABLE_DETECTOR,
                                                ])

    @staticmethod
    def get_component_idx_by_id(pipe: NLUPipeline, node_id: JslAnnoId):
        """Find first occurrence of component in pipe by ID and returns index """
        for i, c in enumerate(pipe.components):
            if c.name == node_id:
                return i
        raise Exception(f'Could not find component {node_id} in pipe {pipe}')

    @staticmethod
    def add_tokenizer_to_pipe_if_missing(pipe: NLUPipeline):
        """add tokenizer to pipe if it is missing
        :param pipe: pipe
        :return: Pipe with tokenizer if missing
        """
        if PipeUtils.has_component_with_id(pipe, [NLP_NODE_IDS.TOKENIZER, NLP_NODE_IDS.TOKEN_ASSEMBLER,
                                                  NLP_NODE_IDS.REGEX_TOKENIZER, NLP_NODE_IDS.RECURISVE_TOKENIZER,
                                                  NLP_NODE_IDS.WORD_SEGMENTER]):
            return pipe
        from nlu.pipe.component_resolution import resolve_feature
        tokenizer = resolve_feature(NLP_FEATURES.TOKEN)
        # Feed the tokenizer from the pipe's current output level col (document/sentence).
        tokenizer.spark_input_column_names = [pipe.component_output_level]
        tokenizer.spark_output_column_names = [NLP_FEATURES.TOKEN]
        tokenizer.model.setInputCols(pipe.component_output_level)
        tokenizer.model.setOutputCol(NLP_FEATURES.TOKEN)
        # Find the document/sentence component and add tokenizer right after that
        for i, c in enumerate(pipe.components):
            if pipe.component_output_level in c.spark_output_column_names:
                pipe.components.insert(i + 1, tokenizer)
        return pipe

    @staticmethod
    def configure_component_output_levels(pipe: NLUPipeline, new_output_level=''):
        '''
        This method configures sentenceEmbeddings and Classifier components to output at a specific level.
        Generally this substitutes all `sentence` columns to `document` and vice versa.
        Adds SentenceDetector to pipeline if none exists
        This method is called the first time .predict() is called and every time the pipe_prediction_output_level changed
        If pipe_prediction_output_level == Document, then sentence embeddings will be fed on Document col and
        classifiers receive doc_embeds/doc_raw column, depending on if the classifier works with or without embeddings
        If pipe_prediction_output_level == sentence, then sentence embeddings will be fed on sentence col and
        classifiers receive sentence_embeds/sentence_raw column, depending on if the classifier works with or without
        embeddings. If sentence detector is missing, one will be added.
        :param pipe: NLU pipeline
        :param new_output_level: The new output level to apply, either sentence or document
        :return: Nlu pipeline, with all components output levels configured to new_output_level
        '''
        if not PipeUtils.has_document_assembler(pipe):
            # When loaded from OCR, we might not have a documentAssembler in pipe
            pipe.is_fitted = False
            document_assembler = ComponentUniverse.components[NLP_NODE_IDS.DOCUMENT_ASSEMBLER]()
            document_assembler.set_metadata(document_assembler.get_default_model(), 'document_assembler',
                                            'document_assembler', 'xx', False, Licenses.open_source)
            pipe.components.insert(0, document_assembler)
        if new_output_level == 'sentence':
            if not PipeUtils.has_sentence_detector(pipe):
                logger.info("Adding missing Sentence Detector")
                pipe.is_fitted = False
                sentence_detector = ComponentUniverse.components[NLP_NODE_IDS.SENTENCE_DETECTOR_DL]()
                sentence_detector.set_metadata(sentence_detector.get_default_model(), 'detect_sentence',
                                               'sentence_detector_dl', 'en', False, Licenses.open_source)
                insert_idx = PipeUtils.get_component_idx_by_id(pipe, NLP_NODE_IDS.DOCUMENT_ASSEMBLER)
                # insert After doc assembler
                pipe.components.insert(insert_idx + 1, sentence_detector)
            return PipeUtils.configure_component_output_levels_to_sentence(pipe)
        elif new_output_level == 'document':
            return 
PipeUtils.configure_component_output_levels_to_document(pipe)

    @staticmethod
    def check_if_component_is_in_pipe(pipe: NLUPipeline, component_name_to_check, check_strong=True):
        """Check if a component_to_resolve with a given name is already in a component_list """
        for c in pipe.components:
            # check_strong: exact name match; otherwise substring match.
            if check_strong and component_name_to_check == c.info.name:
                return True
            elif not check_strong and component_name_to_check in c.info.name:
                return True
        return False

    @staticmethod
    def check_if_there_component_with_col_in_components(component_list, features, except_component):
        """For a given list of features and a list of components, see if there are components taht provide this feature
        If yes, True, otherwise False
        """
        for c in component_list:
            # Skip the component whose own outputs we are resolving for.
            if c.out_types[0] != except_component.out_types[0]:
                for f in ComponentUtils.clean_irrelevant_features(c.info.spark_output_column_names, True):
                    if f in features:
                        return True
        return False

    @staticmethod
    def is_leaf_node(c, pipe: NLUPipeline) -> bool:
        """Check if a component_to_resolve is a leaf in the DAG.
        We verify by checking if any other_c is feeding from os_components.
        If yes, it is not a leaf. If nobody feeds from os_components, it's a leaf.
        """
        # NOTE(review): this body appears mangled/dead -- `inputs` is unused, the bare
        # `1` is a no-op, and the function can only ever return False; it never actually
        # checks whether anything feeds from `c`. Needs reconstruction against VCS history.
        inputs = c.info.inputs
        for other_c in pipe.components:
            if c is not other_c:
                for f in other_c.info.inputs:
                    1
        return False

    @staticmethod
    def clean_AT_storage_refs(pipe: NLUPipeline):
        """Removes AT notation from all columns.
        Useful to reset component_list back to default state"""
        for c in pipe.components:
            if c.info.loaded_from_pretrained_pipe:
                continue
            # Strip the '@storage_ref' suffix from every in/out col and type.
            c.info.inputs = [f.split('@')[0] for f in c.info.inputs]
            c.out_types = [f.split('@')[0] for f in c.out_types]
            c.info.spark_input_column_names = [f.split('@')[0] for f in c.info.spark_input_column_names]
            c.info.spark_output_column_names = [f.split('@')[0] for f in c.info.spark_output_column_names]
            # Re-sync spark col names to the cleaned defaults.
            c.info.spark_input_column_names = c.info.inputs.copy()
            c.info.spark_output_column_names = c.out_types.copy()
        return pipe

    @staticmethod
    def rename_duplicate_cols(pipe: NLUPipeline):
        """Rename cols with duplicate names"""
        for i, c in enumerate(pipe.components):
            for other_c in pipe.components:
                if c is other_c:
                    continue
                if c.loaded_from_pretrained_pipe:
                    continue
                # Suffix the component index to disambiguate colliding output cols.
                if c.spark_output_column_names[0] == other_c.spark_output_column_names[0]:
                    c.spark_output_column_names[0] = f'{c.spark_output_column_names[0]}_{str(i)}'
        return pipe

    @staticmethod
    def find_trainable_embed_consumer(pipe: NLUPipeline):
        """Find traianble component_to_resolve which consumes emeddings.
        Returns index of component_to_resolve and type of embedding if found, otherwise returns -1 and None"""
        for i, c in enumerate(pipe.components):
            if c.trainable and c.has_storage_ref:
                return pipe.components.index(c), ComponentUtils.extract_embed_col(c, 'input')
        return -1, None

    @staticmethod
    def remove_convertable_storage_refs(required_features_ref, conversion_candidates, provided_features_ref):
        """Remove required storage ref features if conversion candidate has it, so that storage ref provider
        will not be downloaded twice
        """
        if len(conversion_candidates) == 0:
            return required_features_ref, conversion_candidates
        # ComponentUtils.extract_storage_ref_AT_notation_for_embeds
        for candidate in conversion_candidates:
            # candidate_at_storage_ref_feature = ComponentUtils.extract_storage_ref_AT_notation_for_embeds(
            #     candidate.component_candidate, 'output')
            if candidate.component_candidate is None:
                continue
            # NOTE(review): required_features_ref is mutated (remove) while being
            # iterated -- this can skip elements; verify against callers.
            for feature in required_features_ref:
                # if feature not in provided_features_ref:
                #     # TODO revisit this after deep test
                #     # Feature not yet manifested by creating corresponding anno
                #     # Unless its also a storage ref candidate. In this scenario, the Feature is manifested but the Converter is missing.
                #     # Remove the feature from requirements, since its already there and will otherwise cause storage ref resolution to manifest again
                #     continue
                required_storage_ref = feature.split('@')[-1]
                if required_storage_ref == candidate.storage_ref:  # or candidate_at_storage_ref_feature == feature
                    # The feature is already provided, but not converted. We can remove it
                    required_features_ref.remove(feature)
        return required_features_ref, conversion_candidates

    @staticmethod
    def update_converter_storage_refs_and_cols(pipe: NLUPipeline, provided_features_ref, required_features_ref):
        """Storage ref of converters is initially empty string, i.e. '' .
        This method checks if any convertable embeddings are provided, if yes it will update storage ref of
        converter, update the input/output columns with colname@storage_ref notation and mark it as resolved by
        removing it from the corrosponding lists"""
        for c in pipe.components:
            if c.name in [NLP_NODE_IDS.SENTENCE_EMBEDDINGS_CONVERTER, NLP_NODE_IDS.CHUNK_EMBEDDINGS_CONVERTER]:
                # Check if there are candidates that feed the converter, any word Embedding will work
                if c.storage_ref != '':
                    # If storage_ref is not '' then this is converter is already fixed, nothing to do
                    continue
                for other_c in pipe.components:
                    if other_c.has_storage_ref and other_c.type == AnnoTypes.TOKEN_EMBEDDING:
                        # Get original embed cols
                        in_embed = ComponentUtils.extract_embed_col(c, 'input')
                        out_embed = ComponentUtils.extract_embed_col(c, 'output')
                        if len(in_embed.split('@')) == 2:
                            # Storage ref is already on annotator, we dont ned to fix this
                            continue
                        c.spark_output_column_names.remove(out_embed)
                        c.spark_input_column_names.remove(in_embed)
                        provided_features_ref.remove(out_embed + '@')
                        required_features_ref.remove(in_embed + '@')
                        storage_ref = StorageRefUtils.extract_storage_ref(other_c)
                        # Re-add the cols with the resolved '@storage_ref' suffix.
                        in_embed = in_embed + '@' + storage_ref
                        out_embed = out_embed + '@' + storage_ref
                        c.spark_output_column_names.append(out_embed)
                        c.spark_input_column_names.append(in_embed)
                        provided_features_ref.append(out_embed)
                        required_features_ref.append(in_embed)
                        c.storage_ref = storage_ref
        return provided_features_ref, required_features_ref

    @staticmethod
    def add_metadata_to_pipe(pipe: NLUPipeline):
        """Write metadata flags to the pipeline based on its components: OCR/licensed/NLP
        component presence, table-QA, span-classifier, audio and image flags.
        To be extended in the future """
        py_class_to_anno_id = AnnoClassRef.get_ocr_pyclass_2_anno_id_dict()
        for c in pipe.components:
            # Check for OCR componments
            if c.jsl_anno_py_class in py_class_to_anno_id.keys():
                pipe.contains_ocr_components = True
            # Check for licensed components
            if c.license in [Licenses.ocr, Licenses.hc]:
                pipe.has_licensed_components = True
            # Check for NLP Component, which is any open source
            if c.license == Licenses.open_source \
                    and c.name != NLP_NODE_IDS.WAV2VEC_FOR_CTC \
                    and c.name != NLP_NODE_IDS.HUBERT_FOR_CTC \
                    and c.name != NLP_NODE_IDS.AUDIO_ASSEMBLER:
                # TODO Table Assembler/VIT/ Other non txt open source
                pipe.has_nlp_components = True
            if c.type == AnnoTypes.QUESTION_TABLE_ANSWERER:
                pipe.has_table_qa_models = True
            if c.type == AnnoTypes.CHUNK_MAPPER:
                pipe.prefer_light = True
            if c.type == AnnoTypes.QUESTION_SPAN_CLASSIFIER:
                pipe.has_span_classifiers = True
            if c.type == AnnoTypes.SPEECH_RECOGNIZER:
                pipe.contains_audio_components = True
            if c.type == AnnoTypes.IMAGE_CLASSIFICATION:
                # Image classification pipes are treated as OCR-only, not NLP.
                pipe.contains_ocr_components = True
                pipe.has_nlp_components = False
            if c.jsl_anno_py_class == 'ImageAssembler':
                pipe.contains_ocr_components = True
        return pipe

    @staticmethod
    def replace_untrained_component_with_trained(nlu_pipe: NLUPipeline, spark_transformer_pipe):
        """Write metadata fields to pipeline, for now only whether it contains OCR components or not.
To be extended in the future :return: :param nlu_pipe: NLU pipeline, which contains one untrained component :param spark_transformer_pipe: Spark Pipeline which contains fitted component version of the untrained one :return: NLU pipeline component list, where untrained component is replaced with a trained one """ # Go through NLU pip and find the untrained component and replace with the trained one for i, trainable_c in enumerate(nlu_pipe.components): if trainable_c.trainable: # Construct trained NLU component with the trained Spark Model if trainable_c.license == Licenses.open_source: trained_class_name = AnnoClassRef.JSL_anno2_py_class[trainable_c.trained_mirror_anno] untrained_class_name = AnnoClassRef.JSL_anno2_py_class[trainable_c.jsl_anno_class_id] trained_model = PipeUtils.get_model_of_class_from_spark_pipe(spark_transformer_pipe, trained_class_name) trained_component = jsl_id_to_empty_component(trainable_c.trained_mirror_anno).set_metadata( trained_model, trainable_c.trained_mirror_anno, trainable_c.trained_mirror_anno, nlu_pipe.lang, False, Licenses.open_source) elif trainable_c.license == Licenses.hc: trained_class_name = AnnoClassRef.JSL_anno_HC_ref_2_py_class[trainable_c.trained_mirror_anno] untrained_class_name = AnnoClassRef.JSL_anno_HC_ref_2_py_class[trainable_c.jsl_anno_class_id] trained_model = PipeUtils.get_model_of_class_from_spark_pipe(spark_transformer_pipe, trained_class_name) trained_component = jsl_id_to_empty_component(trainable_c.trained_mirror_anno).set_metadata( trained_model, trainable_c.trained_mirror_anno, trainable_c.trained_mirror_anno, nlu_pipe.lang, False, Licenses.hc) # update col names on new model_anno_obj trained_component.spark_input_column_names = trainable_c.spark_input_column_names trained_component.spark_output_column_names = trainable_c.spark_output_column_names trained_component.model.setInputCols(trained_component.spark_input_column_names) 
trained_component.model.setOutputCol(trained_component.spark_output_column_names[0]) # Replace component in pipe nlu_pipe.components.remove(trainable_c) # nlu_pipe.components.insert(i, trained_component) # remove the component from the NlpuPipe dict Keys and add the trained one pipe_key_to_delete = None for k in nlu_pipe.keys(): if nlu_pipe[k].__class__.__name__ == untrained_class_name: pipe_key_to_delete = k del nlu_pipe[pipe_key_to_delete] # TODOf NER or other trainable, make sure we ad at the right place! nlu_pipe.add(trained_component, idx=i) return nlu_pipe.components @staticmethod def get_model_of_class_from_spark_pipe(spark_transformer_pipe, class_name): for model in spark_transformer_pipe.stages: if model.__class__.name == class_name: return model raise ValueError(f"Could not find model_anno_obj of requested class = {class_name}") @staticmethod def contains_t5_or_gpt(pipe: NLUPipeline): return PipeUtils.has_component_with_id(pipe, [NLP_NODE_IDS.GPT2, NLP_NODE_IDS.T5_TRANSFORMER]) @staticmethod def add_sentence_detector_to_pipe_if_required(pipe: NLUPipeline): """ 1. For Tabla-QA the Question Tapas Col should originate from a doc type -> doc_question -> sent_question | (Context/Questions) -> Multi-Doc - => TAPAS -> doc_context -> assembled_table | right after the Multi-Doc-assembler we add a sentence Detector. 
Sentence Detectors Input is doc_question and we update TAPAS to take sent_question instead of doc_question :param pipe: """ if not pipe.has_table_qa_models: return pipe PipeUtils.has_sentence_detector(pipe) # Create Sentence Detector & Set inputs to Document_question sent_detector = ComponentUniverse.components[NLP_NODE_IDS.SENTENCE_DETECTOR_DL]() sent_detector.set_metadata(sent_detector.get_default_model(), 'detect_sentence', 'sentence_detector_dl', 'en', False, Licenses.open_source) sent_detector.set_input(str(NLP_FEATURES.DOCUMENT_QUESTION)) # Insert Sentence Detector right after Multi-Doc multi_doc_idx = PipeUtils.get_component_idx_by_id(pipe, NLP_NODE_IDS.MULTI_DOCUMENT_ASSEMBLER) pipe.components.insert(multi_doc_idx + 1, sent_detector) # Update Tapas to use sentence_detector_question instead of doc_quesiton pipe.components[PipeUtils.get_component_idx_by_id(pipe, NLP_NODE_IDS.TAPAS_FOR_QA)].set_input( [str(NLP_FEATURES.ASSEMBLED_TABULAR_DATA), str(NLP_FEATURES.SENTENCE)]) return pipe
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/utils/pipe_utils.py
pipe_utils.py
import glob
import logging
import os
from typing import List

import pyspark
import numpy as np
import pandas as pd

logger = logging.getLogger('nlu')


class OcrDataConversionUtils:
    """Validate OCR input data (paths to files/folders) and collect usable file paths."""

    @staticmethod
    def validate_OCR_compatible_inputs(data):
        """Validate for input data that it contains a path pointing to file or jsl_folder.

        Accepts list/str/ndarray of paths, or pandas/Spark objects with a 'path'
        column. Returns None for unrecognized input types (original behavior).
        """
        if isinstance(data, List):
            return OcrDataConversionUtils.check_iterable_paths_are_valid(data)
        if isinstance(data, str):
            return OcrDataConversionUtils.check_iterable_paths_are_valid([data])
        if isinstance(data, pd.DataFrame):
            return 'path' in data.columns
        if isinstance(data, pd.Series):
            # NOTE(review): raises if data.name is None — presumably callers always
            # pass a named series; confirm.
            return 'path' in data.name
        if isinstance(data, pyspark.sql.dataframe.DataFrame):
            return 'path' in data.columns
        if isinstance(data, np.ndarray):
            return OcrDataConversionUtils.check_iterable_paths_are_valid(data)

    @staticmethod
    def check_iterable_paths_are_valid(iterable_paths):
        """Validate for iterable data input if all elements point to file or jsl_folder.

        Prints a warning for each invalid path; returns True only if every path exists.
        """
        paths_validness = []
        for p in iterable_paths:
            if os.path.isdir(p) or os.path.isfile(p):
                paths_validness.append(True)
            else:
                # Typo fix: "Could validate path" -> "Could not validate path"
                print(f'Warning : Invalid path for jsl_folder or file in input. Could not validate path.\n'
                      f'NLU will try and ignore this issue but you might run into errors.\n'
                      f'Please make sure all paths are valid\n')
                print(f'For path = {p}')
                paths_validness.append(False)
        return all(paths_validness)

    @staticmethod
    def check_all_paths_point_to_accepted_file_type(paths, file_types):
        """Validate that all paths point to a file type defined by file_types.

        TODO: not implemented yet (stub in the original).
        """
        pass

    @staticmethod
    def glob_files_of_accepted_type(paths, file_types):
        """Get all paths which point to correct file types from iterable paths which can
        contain file and jsl_folder paths.

        1. A path pointing to a file is accepted when suffixed with one of file_types.
        2. A path pointing to a jsl_folder is recursively searched for valid files.

        BUG FIX: the original passed recursive=True but used a pattern without '**',
        so folders were NOT searched recursively despite the docstring's promise.
        The '**' pattern matches the audio sibling implementation.
        """
        accepted_file_paths = []
        for p in paths:
            for t in file_types:
                t = t.lower()
                if os.path.isfile(p):
                    if p.lower().split('.')[-1] == t:
                        accepted_file_paths.append(p)
                elif os.path.isdir(p):
                    accepted_file_paths += glob.glob(p + f'/**/*.{t.upper()}', recursive=True) + \
                                           glob.glob(p + f'/**/*.{t}', recursive=True)
                else:
                    print(f"Invalid path = {p} pointing neither to file or jsl_folder on this machine")
        return accepted_file_paths

    @staticmethod
    def extract_iterable_paths_from_data(data):
        """Extract an iterable object containing paths from input data."""
        if isinstance(data, List):
            return data
        if isinstance(data, str):
            return [data]
        if isinstance(data, pd.DataFrame):
            return list(data['path'].values)
        if isinstance(data, pd.Series):
            return list(data.values)
        if isinstance(data, pyspark.sql.dataframe.DataFrame):
            return [p['path'] for p in data.select('path').collect()]
        if isinstance(data, np.ndarray):
            return list(data)

    @staticmethod
    def get_accepted_ocr_file_types(pipe):
        """Get all file types/suffixes that can be processed by the pipeline."""
        accepted_files = []
        for c in pipe.components:
            if c.applicable_file_types:
                accepted_files += c.applicable_file_types
        return list(set(accepted_files))
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/utils/ocr_data_conversion_utils.py
ocr_data_conversion_utils.py
import logging

import sparknlp
from pyspark.sql.functions import monotonically_increasing_id

from nlu.pipe.utils.audio_data_conversion_utils import AudioDataConversionUtils
from nlu.pipe.utils.ocr_data_conversion_utils import OcrDataConversionUtils

logger = logging.getLogger('nlu')

from nlu.pipe.pipe_logic import PipeUtils
import pandas as pd
from nlu.pipe.utils.data_conversion_utils import DataConversionUtils


def __predict_standard_spark(pipe, data, output_level, positions, keep_stranger_features, metadata,
                             drop_irrelevant_cols, return_spark_df, get_embeddings):
    """Run the vanilla Spark pipeline on the data and return pandas (or raw Spark) results."""
    # Convert whatever the user passed into a Spark DataFrame
    data, stranger_features, output_datatype = DataConversionUtils.to_spark_df(data, pipe.spark, pipe.raw_text_column,
                                                                               is_span_data=pipe.has_span_classifiers,
                                                                               is_tabular_qa_data=pipe.has_table_qa_models,
                                                                               )
    # Transform with the fitted Spark pipeline
    data = pipe.vanilla_transformer_pipe.transform(data)
    # Either hand back the raw Spark DataFrame or convert to a nicer (pandas) format
    if return_spark_df:
        return data
    return pipe.pythonify_spark_dataframe(data,
                                          keep_stranger_features=keep_stranger_features,
                                          stranger_features=stranger_features,
                                          output_metadata=metadata,
                                          drop_irrelevant_cols=drop_irrelevant_cols,
                                          positions=positions,
                                          output_level=output_level,
                                          get_embeddings=get_embeddings
                                          )


def predict_multi_threaded_light_pipe(pipe, data, output_level, positions, keep_stranger_features, metadata,
                                      drop_irrelevant_cols, get_embeddings):
    """Predict with the multithreaded LightPipeline. Does NOT yield embeddings."""
    # Convert input to pandas, annotate with the light pipe, then join the
    # annotations back onto the original input columns.
    data, stranger_features, output_datatype = DataConversionUtils.to_pandas_df(data, pipe.raw_text_column)
    data = data.join(pd.DataFrame(pipe.light_transformer_pipe.fullAnnotate(data.text.values)))
    return pipe.pythonify_spark_dataframe(data,
                                          keep_stranger_features=keep_stranger_features,
                                          stranger_features=stranger_features,
                                          output_metadata=metadata,
                                          drop_irrelevant_cols=drop_irrelevant_cols,
                                          positions=positions,
                                          output_level=output_level,
                                          get_embeddings=get_embeddings
                                          )


def __predict_ocr_spark(pipe, data, output_level, positions, keep_stranger_features, metadata, drop_irrelevant_cols,
                        get_embeddings):
    """Predict with an OCR-capable pipe.

    Validates that data points to folders/image files, loads them via
    spark.read.format('image'), transforms, then pythonifies the result.
    """
    pipe.fit()
    OcrDataConversionUtils.validate_OCR_compatible_inputs(data)
    paths = OcrDataConversionUtils.extract_iterable_paths_from_data(data)
    accepted_file_types = OcrDataConversionUtils.get_accepted_ocr_file_types(pipe)
    file_paths = OcrDataConversionUtils.glob_files_of_accepted_type(paths, accepted_file_types)
    spark = sparknlp.start()  # Fetches Spark Session that has already been licensed
    data = pipe.vanilla_transformer_pipe.transform(spark.read.format("image").load(file_paths)).withColumn(
        'origin_index', monotonically_increasing_id().alias('origin_index'))
    return pipe.pythonify_spark_dataframe(data,
                                          keep_stranger_features=keep_stranger_features,
                                          output_metadata=metadata,
                                          drop_irrelevant_cols=drop_irrelevant_cols,
                                          positions=positions,
                                          output_level=output_level,
                                          get_embeddings=get_embeddings
                                          )


def __predict_audio_spark(pipe, data, output_level, positions, keep_stranger_features, metadata,
                          drop_irrelevant_cols, get_embeddings):
    """Predict with an audio-capable pipe. Requires librosa to load audio files."""
    pipe.fit()
    try:
        import librosa
    except:
        raise ImportError('The librosa library is not installed and required for audio features! '
                          'Run pip install librosa ')
    sample_rate = 16000
    AudioDataConversionUtils.validate_paths(data)
    paths = AudioDataConversionUtils.extract_iterable_paths_from_data(data)
    accepted_file_types = AudioDataConversionUtils.get_accepted_audio_file_types(pipe)
    file_paths = AudioDataConversionUtils.glob_files_of_accepted_type(paths, accepted_file_types)
    data = AudioDataConversionUtils.data_to_spark_audio_df(data=file_paths, sample_rate=sample_rate,
                                                           spark=sparknlp.start())
    data = pipe.vanilla_transformer_pipe.transform(data).withColumn(
        'origin_index', monotonically_increasing_id().alias('origin_index'))
    return pipe.pythonify_spark_dataframe(data,
                                          keep_stranger_features=keep_stranger_features,
                                          output_metadata=metadata,
                                          drop_irrelevant_cols=drop_irrelevant_cols,
                                          positions=positions,
                                          output_level=output_level,
                                          get_embeddings=get_embeddings
                                          )


def __predict__(pipe, data, output_level, positions, keep_stranger_features, metadata, multithread,
                drop_irrelevant_cols, return_spark_df, get_embeddings):
    '''
    Annotates a Pandas Dataframe/Pandas Series/Numpy Array/Spark DataFrame/Python List strings /Python String
    :param data: Data to predict on
    :param output_level: output level, either document/sentence/chunk/token
    :param positions: whether to output indexes that map predictions back to position in origin string
    :param keep_stranger_features: whether to keep columns in the dataframe that are not generated by pandas.
        I.e. when you send a dataframe with 10 columns and only one of them is named text, the returned dataframe
        will only contain the text column when set to false
    :param metadata: whether to keep additional metadata in final df or not, like confidences of every possible
        class for predictions.
    :param multithread: Whether to use multithreading based light pipeline. In some cases, this may cause errors.
    :param drop_irrelevant_cols: Whether to drop cols of different output levels, i.e. when predicting token level
        and drop_irrelevant_cols = True then chunk, sentence and Doc will be dropped
    :param return_spark_df: Prediction results will be returned right after transforming with the Spark NLP pipeline
    :return:
    '''
    # --- Output-level configuration ---
    if output_level == '' and not pipe.has_table_qa_models:
        # Default sentence level for all components
        if pipe.has_nlp_components and not PipeUtils.contains_t5_or_gpt(pipe) and not pipe.has_span_classifiers:
            pipe.component_output_level = 'sentence'
            pipe.components = PipeUtils.configure_component_output_levels(pipe, 'sentence')
    else:
        if pipe.has_nlp_components and output_level in ['document', 'sentence']:
            # Pipe must be re-configured for document/sentence level
            pipe.component_output_level = output_level
            pipe.components = PipeUtils.configure_component_output_levels(pipe, output_level)
        elif pipe.has_nlp_components and output_level in ['token']:
            # Add tokenizer if not in pipe, default its inputs to sentence
            pipe.component_output_level = 'sentence'
            pipe.components = PipeUtils.configure_component_output_levels(pipe, 'sentence')
            pipe = PipeUtils.add_tokenizer_to_pipe_if_missing(pipe)

    if get_embeddings is None:
        # Grab embeds if nlu ref is of type embed
        get_embeddings = True if 'embed' in pipe.nlu_ref else False

    # --- Fit (train if needed) and configure light pipe usage ---
    if not pipe.is_fitted:
        if pipe.has_trainable_components:
            pipe.fit(data)
        else:
            pipe.fit()
    pipe.__configure_light_pipe_usage__(DataConversionUtils.size_of(data), multithread)

    # --- Dispatch to the right prediction backend ---
    if pipe.contains_ocr_components and pipe.contains_audio_components:
        """ Idea: Expect Array of Paths.
        For every path classify file ending and use it to correctly handle Img or Audio stuff """
        raise Exception('Cannot mix Audio and OCR components in a Pipe?')

    if pipe.contains_audio_components:
        return __predict_audio_spark(pipe, data, output_level, positions, keep_stranger_features,
                                     metadata, drop_irrelevant_cols, get_embeddings=get_embeddings)

    if pipe.contains_ocr_components:
        # OCR processing
        try:
            return __predict_ocr_spark(pipe, data, output_level, positions, keep_stranger_features,
                                       metadata, drop_irrelevant_cols, get_embeddings=get_embeddings)
        except Exception as err:
            logger.warning(f"Predictions Failed={err}")
            pipe.print_exception_err(err)
            raise Exception("Failure to process data with NLU OCR pipe")

    if return_spark_df:
        try:
            return __predict_standard_spark(pipe, data, output_level, positions, keep_stranger_features,
                                            metadata, drop_irrelevant_cols, return_spark_df, get_embeddings)
        except Exception as err:
            logger.warning(f"Predictions Failed={err}")
            pipe.print_exception_err(err)
            raise Exception("Failure to process data with NLU")
    # Parentheses make the original operator precedence explicit (no behavior change).
    # NOTE(review): prefer_light alone forces the light path even when embeddings
    # were requested — looks intentional per the ChunkMapper comment; confirm.
    elif (not get_embeddings and multithread) or pipe.prefer_light:
        # In some scenarios we prefer light, because of bugs in ChunkMapper...
        # Try multithreaded with fallback to vanilla. No embeddings in this mode.
        try:
            return predict_multi_threaded_light_pipe(pipe, data, output_level, positions, keep_stranger_features,
                                                     metadata, drop_irrelevant_cols, get_embeddings=get_embeddings)
        except Exception as err:
            logger.warning(
                f"Multithreaded mode with Light pipeline failed. trying to predict again with non multithreaded mode, "
                f"err={err}")
            try:
                return __predict_standard_spark(pipe, data, output_level, positions, keep_stranger_features,
                                                metadata, drop_irrelevant_cols, return_spark_df, get_embeddings)
            except Exception as err:
                logger.warning(f"Predictions Failed={err}")
                pipe.print_exception_err(err)
                raise Exception("Failure to process data with NLU")
    else:
        # Standard predict with no fallback
        try:
            return __predict_standard_spark(pipe, data, output_level, positions, keep_stranger_features,
                                            metadata, drop_irrelevant_cols, return_spark_df, get_embeddings)
        except Exception as err:
            logger.warning(f"Predictions Failed={err}")
            pipe.print_exception_err(err)
            raise Exception("Failure to process data with NLU")


def debug_print_pipe_cols(pipe):
    """Print each component's input -> name -> output column mapping."""
    for c in pipe.components:
        print(f'{c.spark_input_column_names}->{c.name}->{c.spark_output_column_names}')
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/utils/predict_helper.py
predict_helper.py
from pyspark.sql.types import *
import glob
import logging
import os
from typing import List

import pyspark
import numpy as np
import pandas as pd

logger = logging.getLogger('nlu')


class AudioDataConversionUtils:
    """Validate Audio Data Files and Create Spark DataFrames from them"""

    @staticmethod
    def validate_paths(data):
        """Validate for input data that it contains a path pointing to a file or folder
        of audio files readable with librosa. Returns None for unrecognized types
        (original behavior)."""
        if isinstance(data, List):
            return AudioDataConversionUtils.check_iterable_paths_are_valid(data)
        if isinstance(data, str):
            return AudioDataConversionUtils.check_iterable_paths_are_valid([data])
        if isinstance(data, pd.DataFrame):
            return 'path' in data.columns
        if isinstance(data, pd.Series):
            # NOTE(review): raises if data.name is None — presumably callers always
            # pass a named series; confirm.
            return 'path' in data.name
        if isinstance(data, pyspark.sql.dataframe.DataFrame):
            return 'path' in data.columns
        if isinstance(data, np.ndarray):
            return AudioDataConversionUtils.check_iterable_paths_are_valid(data)

    @staticmethod
    def check_iterable_paths_are_valid(iterable_paths):
        """Validate for iterable data input if all elements point to file or jsl_folder."""
        paths_validness = []
        for p in iterable_paths:
            if os.path.isdir(p) or os.path.isfile(p):
                paths_validness.append(True)
            else:
                # Typo fix: "Could validate path" -> "Could not validate path"
                print(f'Warning : Invalid path for jsl_folder or file in input. Could not validate path.\n'
                      f'NLU will try and ignore this issue but you might run into errors.\n'
                      f'Please make sure all paths are valid\n')
                print(f'For path = {p}')
                paths_validness.append(False)
        return all(paths_validness)

    @staticmethod
    def check_all_paths_point_to_accepted_file_type(paths, file_types):
        """Validate that all paths point to a file type defined by file_types.

        TODO: not implemented yet (stub in the original).
        """
        pass

    @staticmethod
    def data_to_spark_audio_df(data, sample_rate, spark):
        """Load audio file(s) with librosa and build a Spark DataFrame with columns
        raw_audio (array<float>, one row per file) and sampling_rate (long).

        :param data: a single path (str) or an iterable of paths
        :param sample_rate: target sampling rate passed to librosa.load
        :param spark: active Spark session used to create the DataFrame
        """
        import librosa
        import typing
        if isinstance(data, str):
            # BUG FIX: original used `data.toList()` (numpy has `tolist()`, not
            # `toList()`) and did not wrap the sample list in an outer list, so the
            # DataFrame columns had mismatched lengths. One file -> one row, matching
            # the iterable branch below.
            samples, _ = librosa.load(data, sr=sample_rate)
            df = pd.DataFrame({
                "raw_audio": [samples.tolist()],
                "sampling_rate": [sample_rate]
            })
        elif isinstance(data, typing.Iterable):
            data_ = []
            for d in data:
                samples, _ = librosa.load(d, sr=sample_rate)
                data_.append(samples.tolist())
            df = pd.DataFrame({
                # data_ is a List[List[float]], one inner list per audio file
                "raw_audio": data_,
                "sampling_rate": [sample_rate] * len(data_)
            })
        schema = StructType([StructField("raw_audio", ArrayType(FloatType())),
                             StructField("sampling_rate", LongType())])
        data = spark.createDataFrame(df, schema)
        return data

    @staticmethod
    def glob_files_of_accepted_type(paths, file_types):
        """Get all paths which point to correct file types from iterable paths which can
        contain file and jsl_folder paths.

        1. A path pointing to a file is accepted when suffixed with one of file_types.
        2. A path pointing to a jsl_folder is recursively searched for valid files.
        """
        accepted_file_paths = []
        for p in paths:
            for t in file_types:
                t = t.lower()
                if os.path.isfile(p):
                    if p.lower().split('.')[-1] == t:
                        accepted_file_paths.append(p)
                elif os.path.isdir(p):
                    accepted_file_paths += glob.glob(p + f'/**/*.{t}', recursive=True)
                else:
                    print(f"Invalid path = {p} pointing neither to file or jsl_folder on this machine")
        return accepted_file_paths

    @staticmethod
    def extract_iterable_paths_from_data(data):
        """Extract an iterable object containing paths from input data."""
        if isinstance(data, List):
            return data
        if isinstance(data, str):
            return [data]
        if isinstance(data, pd.DataFrame):
            return list(data['path'].values)
        if isinstance(data, pd.Series):
            return list(data.values)
        if isinstance(data, pyspark.sql.dataframe.DataFrame):
            return [p['path'] for p in data.select('path').collect()]
        if isinstance(data, np.ndarray):
            return list(data)

    @staticmethod
    def get_accepted_audio_file_types(pipe):
        """Get all file types/suffixes that can be processed by the pipeline."""
        accepted_files = []
        for c in pipe.components:
            if c.applicable_file_types:
                accepted_files += c.applicable_file_types
        return list(set(accepted_files))
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/utils/audio_data_conversion_utils.py
audio_data_conversion_utils.py
import logging

from nlu.pipe.nlu_component import NluComponent
from nlu.pipe.utils.resolution import uid_to_storage_ref as uid2storageref

logger = logging.getLogger('nlu')

"""Storage Ref logic operations and utils"""


class StorageRefUtils:
    @staticmethod
    def has_storage_ref(component: NluComponent):
        """Storage ref is either on the model_anno_obj or nlu component_to_resolve defined"""
        return component.has_storage_ref

    @staticmethod
    def extract_storage_ref(component: NluComponent):
        """Extract storage ref from either a NLU component_to_resolve or NLP Annotator.

        Raises ValueError when the component carries no storage ref at all.
        """
        # Guard clause instead of if/else: bail out early when nothing to extract
        if not StorageRefUtils.has_storage_ref(component):
            raise ValueError(
                f'Tried to extract storage ref from component_to_resolve which has no storageref ! Component = {component}')
        return StorageRefUtils.nlp_extract_storage_ref_nlp_model(component)

    @staticmethod
    def fallback_storage_ref_resolutions(storage_ref):
        """For every storage ref result, check whether the ref is defined as a UID with a
        known fallback; return the fallback when available, otherwise the original."""
        return uid2storageref.mappings.get(storage_ref, storage_ref)

    @staticmethod
    def has_component_storage_ref_or_anno_storage_ref(component: NluComponent):
        """Storage ref is either on the model_anno_obj or nlu component_to_resolve defined"""
        return component.has_storage_ref

    @staticmethod
    def nlp_component_has_storage_ref(model):
        """Check if a storage ref is defined on the Spark NLP Annotator model_anno_obj"""
        return any(param.name == 'storageRef' for param in model.extractParamMap())

    @staticmethod
    def extract_storage_ref_from_component(component):
        """Extract storage ref from a NLU component_to_resolve which embellished a Spark NLP Annotator"""
        if StorageRefUtils.nlu_component_has_storage_ref(component):
            return component.info.storage_ref
        if StorageRefUtils.nlp_component_has_storage_ref(component):
            return StorageRefUtils.nlp_extract_storage_ref_nlp_model(component)
        return ''

    @staticmethod
    def nlu_extract_storage_ref_nlp_model(component):
        """Extract storage ref from a NLU component_to_resolve which embellished a Spark NLP Annotator"""
        return component.model.extractParamMap()[component.model.getParam('storageRef')]

    @staticmethod
    def nlu_component_has_storage_ref(component):
        """Check if a storage ref is defined on the Spark NLP Annotator embellished by the NLU Component"""
        return hasattr(component.info, 'storage_ref')

    @staticmethod
    def nlp_extract_storage_ref_nlp_model(component: NluComponent):
        """Extract storage ref from a NLU component_to_resolve which embellished a Spark NLP Annotator.

        Embedding converters don't have a storageRef attribute on the annotator class,
        but the NLU component carries one — fall back to it when the param is absent.
        """
        param_map = component.model.extractParamMap()
        for param in param_map:
            if param.name == 'storageRef':
                storage_ref = param_map[component.model.getParam('storageRef')]
                # For untrained components storage ref will be none
                return storage_ref or ''
        # No storageRef param on the annotator: use the NLU component's own attribute
        return component.storage_ref or ''
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/utils/resolution/storage_ref_utils.py
storage_ref_utils.py
import logging

import nlu
from nlu import Licenses
from nlu.info import AllComponentsInfo
from nlu.spellbook import Spellbook

logger = logging.getLogger('nlu')


def check_if_nlu_ref_is_licensed(nlu_ref):
    """check if a nlu_ref is pointing to a licensed or open source model_anno_obj.
    This works by just checking if the NLU ref points to a healthcare model_anno_obj or not.

    :param nlu_ref: NLU reference to test
    :return: True if the reference resolves to a healthcare (licensed) entry, else False
    """
    for lang, universe in Spellbook.healthcare_component_alias_references.items():
        for hc_nlu_ref, hc_nlp_ref in universe.items():
            if hc_nlu_ref == nlu_ref:
                return True
    for lang, universe in Spellbook.pretrained_healthcare_model_references.items():
        for hc_nlu_ref, hc_nlp_ref in universe.items():
            if hc_nlu_ref == nlu_ref:
                return True
    return False


def parse_language_from_nlu_ref(nlu_ref):
    """Parse a ISO language identifier from a NLU reference which can be used to load a Spark NLP model_anno_obj.

    :param nlu_ref: NLU reference, e.g. 'de.ner' or 'xx.de.translate_to.en'
    :return: ISO language code; 'xx' for translators, 'en' when nothing else matches
    """
    infos = nlu_ref.split('.')
    lang = None
    for split in infos:
        if split in nlu.Spellbook.pretrained_models_references.keys():
            lang = split
    if not lang:
        # translators are internally handled as 'xx'
        if 'translate_to' in nlu_ref and not 't5' in nlu_ref:
            # Special case for translate_to, will be prefixed with 'xx' if not already prefixed with xx
            # because there are only xx.<ISO>.translate_to references
            # BUGFIX: original code assigned an unused local `language = 'xx'`, so translators
            # incorrectly fell through to lang='en'. Assign `lang` instead.
            lang = 'xx'
            nlu_reference = nlu_ref if nlu_ref[0:3] == 'xx.' else 'xx.' + nlu_ref
            logger.info(f'Setting lang as xx for nlu_ref={nlu_reference}')
    if not lang:
        lang = 'en'
    logger.info(f'Parsed Nlu_ref={nlu_ref} as lang={lang}')
    return lang


def nlu_ref_to_nlp_ref(nlu_ref):
    """Resolve a NLU reference to just its Spark NLP reference.

    :param nlu_ref: NLU reference to resolve
    :return: the Spark NLP reference (nlp_ref) or None if unresolvable
    """
    # BUGFIX: nlu_ref_to_nlp_metadata returns 6 values (including model_params);
    # the original unpacked only 5, raising ValueError on every call.
    _, _, nlp_ref, _, _, _ = nlu_ref_to_nlp_metadata(nlu_ref)
    return nlp_ref


def nlu_ref_to_nlp_metadata(nlu_ref, is_recursive_call=False):
    """Resolve every piece of metadata NLU tracks for a NLU reference.

    Searches the open source, healthcare and OCR namespaces (pipes, models and
    aliases) in order; falls back to multilingual NER models and finally retries
    once with an 'en.' prefix.

    :param nlu_ref: NLU reference to resolve
    :param is_recursive_call: internal flag guarding the single 'en.'-prefixed retry
    :return: (lang, nlu_ref, nlp_ref, license_type, is_pipe, model_params)
    """
    model_params = None
    lang = parse_language_from_nlu_ref(nlu_ref)
    nlp_ref = None
    license_type = Licenses.open_source
    is_pipe = False

    if 'translate_to' in nlu_ref:
        # We append here xx and set lang as xx so users don't have to specify it
        lang = 'xx'
        if 'xx' not in nlu_ref:
            nlu_ref = 'xx.' + nlu_ref

    # 1. check if open source pipeline
    if lang in Spellbook.pretrained_pipe_references.keys():
        if nlu_ref in Spellbook.pretrained_pipe_references[lang].keys():
            nlp_ref = Spellbook.pretrained_pipe_references[lang][nlu_ref]
            is_pipe = True

    # 2. check if open source model_anno_obj
    if lang in Spellbook.pretrained_models_references.keys():
        if nlu_ref in Spellbook.pretrained_models_references[lang].keys():
            nlp_ref = Spellbook.pretrained_models_references[lang][nlu_ref]
            logger.info(f'Found Spark NLP reference in pretrained models namespace = {nlp_ref}')

    # 3. check if open source alias
    if nlu_ref in Spellbook.component_alias_references.keys():
        sparknlp_data = Spellbook.component_alias_references[nlu_ref]
        nlp_ref = sparknlp_data[0]
        is_pipe = 'component_list' in sparknlp_data[1]
        if len(sparknlp_data) == 3:
            model_params = sparknlp_data[2]

    # 4. check if healthcare pipe
    if lang in Spellbook.pretrained_healthcare_pipe_references.keys():
        if nlu_ref in Spellbook.pretrained_healthcare_pipe_references[lang].keys():
            nlp_ref = Spellbook.pretrained_healthcare_pipe_references[lang][nlu_ref]
            license_type = Licenses.hc
            is_pipe = True

    # 5. check if healthcare model_anno_obj
    if lang in Spellbook.pretrained_healthcare_model_references.keys():
        if nlu_ref in Spellbook.pretrained_healthcare_model_references[lang].keys():
            nlp_ref = Spellbook.pretrained_healthcare_model_references[lang][nlu_ref]
            license_type = Licenses.hc

    # 6. check if healthcare alias
    if nlu_ref in Spellbook.healthcare_component_alias_references.keys():
        sparknlp_data = Spellbook.healthcare_component_alias_references[nlu_ref]
        nlp_ref = sparknlp_data[0]
        is_pipe = 'component_list' in sparknlp_data[1]
        license_type = Licenses.hc

    # 7. check if ocr model_anno_obj
    if nlu_ref in Spellbook.ocr_model_references.keys():
        nlp_ref = Spellbook.ocr_model_references[nlu_ref]
        license_type = Licenses.ocr

    # Check if multi lingual ner
    if not nlp_ref and 'ner' in nlu_ref:
        all_component_info = AllComponentsInfo()
        if lang in all_component_info.all_multi_lang_base_ner_languages:
            lang = 'xx'
            nlp_ref = 'ner_wikiner_glove_840B_300'
            nlu_ref = 'xx.ner.wikiner_glove_840B_300'
        if lang in all_component_info.all_multi_lang_xtreme_ner_languages:
            lang = 'xx'
            nlp_ref = 'ner_xtreme_glove_840B_300'
            nlu_ref = 'xx.ner.xtreme_glove_840B_300'

    # Search again but with en. prefixed, enables all refs to work without en prefix
    if not nlp_ref and not is_recursive_call:
        return nlu_ref_to_nlp_metadata('en.' + nlu_ref, is_recursive_call=True)

    return lang, nlu_ref, nlp_ref, license_type, is_pipe, model_params
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/utils/resolution/nlu_ref_utils.py
nlu_ref_utils.py
import logging
from nlu.pipe.utils.resolution.nlu_ref_utils import *
from nlu.spellbook import Spellbook
from nlu.universe.feature_universes import NLP_FEATURES

logger = logging.getLogger('nlu')


def resolve_storage_ref(lang, storage_ref, missing_component_type):
    """Returns a nlp_ref, nlu_ref and whether it is a licensed model_anno_obj or not and an updated language, if multi lingual.

    Resolution cascade (first match wins):
      1. hardcoded storage_ref -> nlu_ref mappings (licensed first, then open source)
      2. storage_ref used directly as a nlu_ref / nlp_ref in the model namespaces
      3. defaults for training mode (USE / GloVe) when storage_ref is empty

    :param lang: ISO language of the pipeline being resolved
    :param storage_ref: storage ref string taken from an annotator
    :param missing_component_type: NLP_FEATURES value describing what must be provided
    :return: (nlu_ref, nlp_ref, is_licensed, lang)
    """
    logger.info(
        f"Resolving storage_ref={storage_ref} for lang={lang} and missing_component_type={missing_component_type}")
    nlu_ref, nlp_ref, is_licensed = None, None, False
    # get nlu ref

    # check if storage_ref is hardcoded
    if lang in Spellbook.licensed_storage_ref_2_nlu_ref.keys() and storage_ref in \
            Spellbook.licensed_storage_ref_2_nlu_ref[lang].keys():
        nlu_ref = Spellbook.licensed_storage_ref_2_nlu_ref[lang][storage_ref]
        is_licensed = True
    elif lang in Spellbook.storage_ref_2_nlu_ref.keys() and storage_ref in Spellbook.storage_ref_2_nlu_ref[
            lang].keys():
        nlu_ref = Spellbook.storage_ref_2_nlu_ref[lang][
            storage_ref]  # a HC model_anno_obj may use OS storage_ref_provider, so we dont know yet if it is licensed or not
    # Map the nlu_ref found above to a nlp_ref, or treat storage_ref itself as a ref.
    if lang in Spellbook.pretrained_models_references.keys() and nlu_ref in \
            Spellbook.pretrained_models_references[lang].keys():
        nlp_ref = Spellbook.pretrained_models_references[lang][nlu_ref]
    elif lang in Spellbook.pretrained_healthcare_model_references.keys() and nlu_ref in \
            Spellbook.pretrained_healthcare_model_references[lang].keys():
        nlp_ref = Spellbook.pretrained_healthcare_model_references[lang][nlu_ref]
        is_licensed = True

    # check if storage_ref matches nlu_ref and get NLP_ref
    elif lang in Spellbook.licensed_storage_ref_2_nlu_ref.keys() and storage_ref in \
            Spellbook.licensed_storage_ref_2_nlu_ref[lang].keys():
        nlu_ref = storage_ref
        nlp_ref = Spellbook.licensed_storage_ref_2_nlu_ref[lang][nlu_ref]
    elif lang in Spellbook.pretrained_models_references.keys() and storage_ref in \
            Spellbook.pretrained_models_references[lang].keys():
        nlu_ref = storage_ref
        nlp_ref = Spellbook.pretrained_models_references[lang][nlu_ref]

    # check if storage_ref matches nlp_ref and get nlp and nlu ref
    elif lang in Spellbook.pretrained_healthcare_model_references.keys():
        if storage_ref in Spellbook.pretrained_healthcare_model_references[lang].values():
            inv_namespace = {v: k for k, v in Spellbook.pretrained_healthcare_model_references[lang].items()}
            nlp_ref = storage_ref
            nlu_ref = inv_namespace[nlp_ref]
            is_licensed = True

    # Multilingual refs force lang to 'xx'.
    if nlu_ref is not None and 'xx.' in nlu_ref:
        lang = 'xx'

    if nlp_ref is None and nlu_ref is not None:
        # cast NLU ref to NLP ref
        # NOTE(review): if lang was just reassigned to 'xx' above, these lookups may
        # KeyError when the ref is not present under 'xx' — confirm all xx.* refs exist there.
        if is_licensed:
            nlp_ref = Spellbook.pretrained_healthcare_model_references[lang][nlu_ref]
        else:
            nlp_ref = Spellbook.pretrained_models_references[lang][nlu_ref]

    if nlp_ref is not None and nlu_ref is None:
        # cast NLP ref to NLU ref
        if is_licensed:
            inv_namespace = {v: k for k, v in Spellbook.pretrained_healthcare_model_references[lang].items()}
            nlu_ref = inv_namespace[nlp_ref]
        else:
            inv_namespace = {v: k for k, v in Spellbook.pretrained_models_references[lang].items()}
            nlu_ref = inv_namespace[nlp_ref]

    # NOTE(review): `== None` should be `is None` (PEP 8); behavior is the same here.
    if nlu_ref == None and nlp_ref == None:
        # todo enfore storage ref when training
        logger.info(f"COULD NOT RESOLVE STORAGE_REF={storage_ref}")
        if storage_ref == '':
            # Empty storage ref: assume a trainable component and fall back to defaults.
            if missing_component_type == NLP_FEATURES.SENTENCE_EMBEDDINGS:
                logger.info("Using default storage_ref USE, assuming training mode")
                storage_ref = 'en.embed_sentence.use'  # this enables default USE embeds for traianble components
                nlp_ref = 'tfhub_use'
                nlu_ref = storage_ref
            elif missing_component_type == NLP_FEATURES.WORD_EMBEDDINGS:
                logger.info("Using default storage_ref GLOVE, assuming training mode")
                storage_ref = 'en.glove'  # this enables default USE embeds for traianble components
                nlp_ref = 'glove_100d'
                nlu_ref = storage_ref
        else:
            # Last resort: use the raw storage_ref as both refs.
            nlp_ref = storage_ref
            nlu_ref = storage_ref

    if nlu_ref is not None:
        is_licensed = check_if_nlu_ref_is_licensed(nlu_ref)

    logger.info(f'Resolved storageref = {storage_ref} to NLU_ref = {nlu_ref} and NLP_ref = {nlp_ref}')
    return nlu_ref, nlp_ref, is_licensed, lang


def set_storage_ref_and_resolution_on_component_info(c, storage_ref):
    """Sets a storage ref on a components component_to_resolve info and returns the component_to_resolve """
    c.storage_ref = storage_ref
    return c
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/pipe/utils/resolution/storage_ref_resolution_utils.py
storage_ref_resolution_utils.py
import logging

import nlu
import requests

logger = logging.getLogger('nlu')


class ModelHubUtils():
    """Utilities to resolve NLU/NLP references against the John Snow Labs Modelhub."""
    modelhub_json_url = 'https://nlp.johnsnowlabs.com/models.json'
    # NOTE(review): fetched once at import time; a network failure here prevents
    # importing this module at all — TODO confirm this is acceptable.
    data = requests.get(modelhub_json_url).json()

    @staticmethod
    def NLU_ref_to_NLP_ref(nlu_ref: str, lang: str = None) -> str:
        """Resolve a Spark NLU reference to a NLP reference.

        :param nlu_ref: which nlu model_anno_obj's nlp refrence to return.
        :param lang: what language is the model_anno_obj in. When None, all languages are scanned.
        :return: Spark nlp model_anno_obj name, or '' when nothing matches
        """
        nlu_namespaces_to_check = [nlu.Spellbook.pretrained_pipe_references,
                                   nlu.Spellbook.pretrained_models_references,
                                   nlu.Spellbook.pretrained_healthcare_model_references,
                                   nlu.Spellbook.licensed_storage_ref_2_nlu_ref,
                                   nlu.Spellbook.storage_ref_2_nlu_ref]
        if lang:
            for namespace in nlu_namespaces_to_check:
                if lang in namespace.keys() and nlu_ref in namespace[lang]:
                    return namespace[lang][nlu_ref]
        else:
            # No language given: scan every language of every namespace.
            for namespace in nlu_namespaces_to_check:
                for namespace_lang in namespace:
                    if nlu_ref in namespace[namespace_lang]:
                        return namespace[namespace_lang][nlu_ref]
        # Aliases map directly (not nested by language).
        for _nlp_ref, nlp_ref_type in nlu.Spellbook.component_alias_references.items():
            if _nlp_ref == nlu_ref:
                return nlp_ref_type[0]
        return ''

    @staticmethod
    def get_url_by_nlu_refrence(nlu_ref: str) -> str:
        """Resolves a URL for an NLU reference.

        :param nlu_ref: Which nlu refrence's url to return.
        :return: url to modelhub (the generic models page when unresolvable)
        """
        # getting spark refrence for given nlu refrence
        if not nlu_ref:
            return 'https://nlp.johnsnowlabs.com/models'
        if nlu_ref.split(".")[0] not in nlu.Spellbook.pretrained_models_references.keys():
            nlu_ref = "en." + nlu_ref
        nlp_refrence = ModelHubUtils.NLU_ref_to_NLP_ref(nlu_ref)
        # BUGFIX: NLU_ref_to_NLP_ref returns '' (never None) on a miss, so the
        # original `== None` check was dead code. Use a falsy check instead.
        if not nlp_refrence:
            print(f"{nlp_refrence} {nlu_ref}")
            return 'https://nlp.johnsnowlabs.com/models'
        else:
            # First pass: require the language to match the ref as well as the name.
            for model in ModelHubUtils.data:
                if (model['language'] in nlu_ref.split(".") or model['language'] in nlp_refrence.split('_')) and \
                        model['name'] == nlp_refrence:
                    return f"https://nlp.johnsnowlabs.com/{model['url']}"
            for model in ModelHubUtils.data:
                # Retry, but no respect to lang
                if model['name'] == nlp_refrence:
                    return f"https://nlp.johnsnowlabs.com/{model['url']}"
        return 'https://nlp.johnsnowlabs.com/models'

    @staticmethod
    def return_json_entry(nlu_ref: str) -> dict:
        """Resolves a Json entry for a nlu_refrence.

        :param nlu_ref: What nlp_refrence to resolve
        :return: Json entry of that nlu reference, or None when no entry matches
        """
        if nlu_ref.split(".")[0] not in nlu.Spellbook.pretrained_models_references.keys():
            nlu_ref = "en." + nlu_ref
        nlp_refrence = ModelHubUtils.NLU_ref_to_NLP_ref(nlu_ref)
        language = nlu_ref.split(".")[0]
        for model in ModelHubUtils.data:
            if model['language'] == language and model["name"] == nlp_refrence:
                return model
        return None  # explicit: no matching Modelhub entry found
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/utils/modelhub/modelhub_utils.py
modelhub_utils.py
import os
import sys
import logging

logger = logging.getLogger('nlu')


def get_pyspark_version():
    """Return the installed PySpark version string, e.g. '3.1.2'."""
    import pyspark
    return pyspark.version.__version__


def get_pyspark_major_and_minor():
    """Return [major, minor] of the installed PySpark version as strings."""
    import pyspark
    return pyspark.version.__version__.split('.')[:2]


def _pyspark_version_is(major, minor):
    """True if the installed PySpark version is exactly major.minor (string compare)."""
    v = get_pyspark_major_and_minor()
    return v[0] == major and v[1] == minor


def is_env_pyspark_3_1():
    """True if the environment runs PySpark 3.1.x."""
    return _pyspark_version_is('3', '1')


def is_env_pyspark_3_2():
    """True if the environment runs PySpark 3.2.x."""
    return _pyspark_version_is('3', '2')


def is_env_pyspark_3_0():
    """True if the environment runs PySpark 3.0.x."""
    return _pyspark_version_is('3', '0')


def is_env_pyspark_2_4():
    """True if the environment runs PySpark 2.4.x."""
    return _pyspark_version_is('2', '4')


def is_env_pyspark_2_3():
    """True if the environment runs PySpark 2.3.x."""
    return _pyspark_version_is('2', '3')


def is_env_pyspark_2_x():
    """True if the environment runs PySpark with major version <= 2."""
    v = get_pyspark_major_and_minor()
    return int(v[0]) <= 2


def is_env_pyspark_3_x():
    """True if the environment runs PySpark with major version == 3."""
    v = get_pyspark_major_and_minor()
    return int(v[0]) == 3


def check_pyspark_install():
    """Verify a NLU-compatible PySpark installation is present.

    :return: True if PySpark is installed and Spark NLP starts on a version < 3,
             False otherwise (a hint is printed).
    """
    # BUGFIX: `v` must exist before the inner except runs its f-string; in the
    # original, a failing `import sparknlp` raised NameError inside the handler,
    # which then surfaced as the misleading "No Pyspark installed!" message.
    v = '<unknown>'
    try:
        from pyspark.sql import SparkSession
        try:
            import sparknlp
            v = sparknlp.start().version
            spark_major = int(v.split('.')[0])
            if spark_major >= 3:
                raise Exception()
        except Exception:
            print(
                f"Detected pyspark version={v} Which is >=3.X\nPlease run '!pip install pyspark==2.4.7' or install any pyspark>=2.4.0 and pyspark<3")
            return False
    except Exception:
        print(
            "No Pyspark installed!\nPlease run '!pip install pyspark==2.4.7' or install any pyspark>=2.4.0 with pyspark<3")
        return False
    return True


def try_import_streamlit():
    """Try importing Streamlit; print an install hint when it is missing."""
    try:
        import streamlit as st
    except ImportError:
        print("You need to install Streamlit to run this functionality.")


def is_running_in_databricks():
    """ Check if the currently running Python Process is running in Databricks or not
    If any Environment Variable name contains 'DATABRICKS' this will return True, otherwise False"""
    for k in os.environ.keys():
        if 'DATABRICKS' in k:
            return True
    return False


def install_and_import_package(pkg_name, version='', import_name=''):
    """ Install Spark-NLP-Healthcare PyPI Package in current environment if it cannot be imported and license provided.

    :param pkg_name: PyPI package name to install
    :param version: optional exact version to pin; latest when empty
    :param import_name: module name to import when it differs from pkg_name
    """
    import importlib
    try:
        if import_name == '':
            importlib.import_module(pkg_name)
        else:
            importlib.import_module(import_name)
    except ImportError:
        import pip
        if version == '':
            logger.info(f"{pkg_name} could not be imported. Running 'pip install {pkg_name}'...")
        else:
            logger.info(f"{pkg_name} could not be imported. Running 'pip install {pkg_name}=={version}'...")
        pip_major_version = int(pip.__version__.split('.')[0])
        if pip_major_version in [10, 18, 19, 20]:
            # for these versions pip module does not support installing, we install via OS command straight into pip module
            py_path = sys.executable
            if version == '':
                os.system(f'{py_path} -m pip install {pkg_name}')
            else:
                os.system(f'{py_path} -m pip install {pkg_name}=={version}')
        else:
            if version == '':
                pip.main(['install', f'{pkg_name}'])
            else:
                pip.main(['install', f'{pkg_name}=={version}'])
    finally:
        import site
        from importlib import reload
        reload(site)
        # import name is not always the same name as pkg_name we want to import, so it must be specified via import name
        if import_name != '':
            globals()[import_name] = importlib.import_module(import_name)
        else:
            globals()[pkg_name] = importlib.import_module(pkg_name)


def try_import_pyspark_in_streamlit():
    """Try importing Pyspark or display warn message in streamlit"""
    try:
        import pyspark
        from pyspark.sql import SparkSession
    except ImportError:
        print("You need Pyspark installed to run NLU. Run <pip install pyspark==3.0.2>")
        try:
            import streamlit as st
            st.error(
                "You need Pyspark, Sklearn, Pyplot, Pandas, Numpy installed to run this app. Run <pip install pyspark==3.0.2 sklearn pyplot numpy pandas>")
        except Exception:
            return False
        return False
    return True


def try_import_spark_nlp():
    """Try importing Spark NLP; print an install hint and return False when missing."""
    try:
        import sparknlp
    except ImportError:
        print("You need Spark NLP to run NLU. run pip install spark-nlp")
        return False
    return True
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/utils/environment/env_utils.py
env_utils.py
import json
import os

from nlu.universe.annotator_class_universe import AnnoClassRef
from nlu.universe.component_universes import jsl_id_to_empty_component
from nlu.universe.universes import Licenses
# These Spark NLP imports are not unused and will be dynamically loaded, do not remove!
from sparknlp.annotator import *
from sparknlp.base import *

# we just use some classes here, so that intelij cleanup will not remove the imports
DocumentAssembler.name, BertEmbeddings.__name__


def is_pipe(model_path):
    """Check whether there is a pyspark component_list stored in path"""
    return 'stages' in os.listdir(model_path)


def is_model(model_path):
    """Check whether there is a model_anno_obj stored in path"""
    return 'metadata' in os.listdir(model_path)


def get_model_class(model_path):
    """Extract class from a model_anno_obj saved in model_path.

    :return: (fully qualified Java class name, Python class name)
    """
    with open(model_path + '/stages/part-00000', encoding="utf8") as json_file:
        java_class = json.load(json_file)['class']
        pyth_class = java_class.split('.')[-1]
        return java_class, pyth_class


def verify_and_create_model(model_path: str):
    """ Build model_anno_obj with requirements.
    Figures out class name by checking metadata json file; assumes metadata is always called part-00000.

    :param model_path: path of a locally stored Spark NLP model/pipeline stage
    :return: a NLU component wrapping the loaded model
    :raises ValueError: when no NLU component class can be derived from the stored class name
    """
    with open(model_path + '/metadata/' + 'part-00000', encoding="utf8") as json_f:
        class_name = json.load(json_f)['class'].split('.')[-1]
    # The last element in the Class name can be used to just load the model_anno_obj from disk!
    # Just call eval on it, which will give you the actual Python class reference which should have a .load() method
    try:
        # SECURITY NOTE(review): eval() on a string read from disk executes arbitrary
        # code if the metadata file is attacker-controlled — only load trusted model dirs.
        m = eval(class_name).load(model_path)
    except:
        # Open-source load failed: try resolving as a licensed annotator instead.
        from nlu.utils.environment.offline_load_utils_licensed import verify_model_licensed
        m = verify_model_licensed(class_name, model_path)
    os_annos = AnnoClassRef.get_os_pyclass_2_anno_id_dict()
    hc_annos = AnnoClassRef.get_hc_pyclass_2_anno_id_dict()
    ocr_annos = AnnoClassRef.get_ocr_pyclass_2_anno_id_dict()
    # component_type, nlu_anno_class, = resolve_annotator_class_to_nlu_component_info(class_name)
    # construct_component_from_identifier('xx', nlu_ref = class_name, nlp_ref = class_name, anno_class_name=class_name)
    if class_name in os_annos.keys():
        jsl_anno_id = os_annos[class_name]
        nlu_component = jsl_id_to_empty_component(jsl_anno_id)
        return nlu_component.set_metadata(m, jsl_anno_id, jsl_anno_id, 'xx', False, Licenses.open_source)
    elif class_name in hc_annos.keys():
        jsl_anno_id = hc_annos[class_name]
        nlu_component = jsl_id_to_empty_component(jsl_anno_id)
        return nlu_component.set_metadata(m, jsl_anno_id, jsl_anno_id, 'xx', False, Licenses.hc)
    elif class_name in ocr_annos.keys():
        # NOTE(review): OCR classes are recognized but not constructed — falls through to the raise below.
        pass
    raise ValueError(f'Could not detect correct Class for nlp class ={class_name}')


def test_check_if_string_in_file(file_name, string_to_search, regex=False):
    """ Check if any line in the file contains given string.

    :param file_name: path of the text file to scan
    :param string_to_search: literal substring or regex pattern to look for
    :param regex: treat string_to_search as a regex when True
    :return: True on first matching line, else False
    """
    # Open the file in read only mode
    # print('reading ', file_name)
    import re
    with open(file_name, 'r', encoding="utf8") as read_obj:
        # Read all lines in the file one by one
        for line in read_obj:
            # For each line, check if line contains the string
            if regex:
                if len(re.findall(string_to_search, line)) > 0:
                    return True
            else:
                if string_to_search in line:
                    return True
    return False


def NLP_ref_to_NLU_ref(nlp_ref, lang):
    """Resolve a Spark NLP reference to a NLU reference.

    Returns the first NLU ref whose namespace entry maps to nlp_ref for lang,
    or implicitly None when nothing matches.
    """
    from nlu import Spellbook
    # NOTE(review): pretrained_pipe_references is listed twice — harmless, but likely
    # one entry was meant to be pretrained_healthcare_pipe_references.
    nlu_namespaces_to_check = [Spellbook.pretrained_pipe_references, Spellbook.pretrained_models_references,
                               Spellbook.pretrained_healthcare_model_references,
                               Spellbook.pretrained_pipe_references, ]
    for dict_ in nlu_namespaces_to_check:
        if lang in dict_.keys():
            for reference in dict_[lang]:
                if dict_[lang][reference] == nlp_ref:
                    return reference
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/utils/environment/offline_load_utils.py
offline_load_utils.py
import importlib
import json
import site
from importlib import reload

# Star import provides os, sys and the is_env_pyspark_* helpers used below.
from nlu.utils.environment.env_utils import *


def ocr_version_is_missmatch(secret_version):
    """Check if installed OCR version is the same as the OCR secret
    :param secret_version: ocr secret version
    :return: True if missmatch, False if versions are fine
    """
    try:
        import sparkocr
    except ImportError:
        # Import failed, there can be no version missmatch
        return False
    # check versions
    installed_version = sparkocr.version()
    if installed_version == secret_version:
        return False
    return True


def healthcare_version_is_missmatch(secret_version):
    """Check if installed Spark NLP healthcare version is the same as the Healthcare secret
    :param secret_version: healthcare secret version
    :return: True if missmatch, False if versions are fine"""
    try:
        import sparknlp_jsl
    except ImportError:
        # Import failed, there can be no version missmatch
        return False
    # check versions
    installed_version = sparknlp_jsl.version()
    if installed_version == secret_version:
        return False
    return True


def uninstall_lib(pip_package_name):
    """Uninstall a pip package via the OS shell, then reload site-packages."""
    cmd = f'{sys.executable} -m pip uninstall {pip_package_name} -y '
    os.system(cmd)
    reload(site)


def check_if_secret_missmatch_and_uninstall_if_bad(secret_version, module_name, package_name):
    """Check if OCR/Healthcare lib installed version match up with the secrets provided.
    If not, this will uninstall the missmaching library
    :param module_name: module import name
    :param package_name: pipe package name
    :param secret_version: healthcare/ocr secret version provided
    :return: True if missmatch was uninstalled, False if no missmatch found and nothing was done
    """
    try:
        importlib.import_module(module_name)
    except ImportError:
        # Import failed, there can be no version missmatch
        return False
    if module_name == 'sparknlp_jsl':
        # get versions
        import sparknlp_jsl
        installed_version = sparknlp_jsl.version()
        if installed_version == secret_version:
            return False
    elif module_name == 'sparkocr':
        # get versions
        import sparkocr
        installed_version = sparkocr.version()
        if installed_version == secret_version:
            return False
    else:
        raise ValueError(f'Invalid module_name=={module_name}')
    print(
        f"Installed {module_name}=={installed_version} version not matching provided secret version=={secret_version}. "
        f"Uninstalling it..")
    # version missmatch, uninstall shell
    uninstall_lib(package_name)
    return True


def import_or_install_licensed_lib(JSL_SECRET, lib='healthcare'):
    """ Install Spark-NLP-Healthcare PyPI Package in current environment if it cannot be imported and license provided

    :param JSL_SECRET: secret whose leading segment encodes the library version
    :param lib: 'healthcare' or 'ocr'
    :raises ValueError: for any other lib value
    """
    hc_module_name = 'sparknlp_jsl'
    hc_pip_package_name = 'spark-nlp-jsl'
    hc_display_name = ' Spark NLP for Healthcare'
    ocr_pip_package_name = 'spark-ocr'
    ocr_module_name = 'sparkocr'
    ocr_display_name = ' Spark OCR'
    get_deps = True
    lib_version = JSL_SECRET.split('-')[0]
    missmatch = False
    if lib == 'healthcare':
        target_import = hc_module_name
        target_install = hc_pip_package_name
        display_name = hc_display_name
        missmatch = check_if_secret_missmatch_and_uninstall_if_bad(lib_version, hc_module_name, hc_pip_package_name)
    elif lib == 'ocr':
        # NOTE(review): unlike the healthcare branch, no missmatch check is done for OCR,
        # so an already-installed wrong OCR version is never replaced — confirm intended.
        target_import = ocr_module_name
        target_install = ocr_pip_package_name
        display_name = ocr_display_name
        # OCR version is suffixed with spark version
        if is_env_pyspark_2_3():
            lib_version = lib_version + '+spark23'
        if is_env_pyspark_2_4():
            lib_version = lib_version + '+spark24'
        if is_env_pyspark_3_0() or is_env_pyspark_3_1():
            lib_version = lib_version + '+spark30'
        # get_deps = False
    else:
        raise ValueError(f'Invalid install licensed install target ={lib}')
    try:
        # Try importing, if it fails install the pacakge
        importlib.import_module(target_import)
        if missmatch:
            raise ImportError
    except ImportError:
        # Install package since its missing
        import pip
        print(f"{display_name} could not be imported. Installing latest {target_install} PyPI package via pip...")
        import pyspark
        pip_major_version = int(pip.__version__.split('.')[0])
        if pip_major_version in [10, 18, 19, 20]:
            # for these versions pip module does not support installing from Python, we install via OS command.
            cmd = f'{sys.executable} -m pip install {target_install}=={lib_version} --extra-index-url https://pypi.johnsnowlabs.com/{JSL_SECRET}'
            if not get_deps:
                # NOTE(review): missing leading space — this concatenates '--no-deps' onto the
                # URL and would break the command (currently unreachable since get_deps is always True).
                cmd = cmd + '--no-deps'
            os.system(cmd)
        else:
            params = ['install', f'{target_install}=={lib_version}', '--extra-index-url',
                      f'https://pypi.johnsnowlabs.com/{JSL_SECRET}']
            if not get_deps:
                params.append('--no-deps')
            pip.main(params)
    finally:
        # Import module after installing package
        reload(site)
        globals()[target_import] = importlib.import_module(target_import)


def authenticate_enviroment_HC(SPARK_NLP_LICENSE, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY):
    """Set Secret environ variables for Spark Context"""
    os.environ['SPARK_NLP_LICENSE'] = SPARK_NLP_LICENSE
    os.environ['AWS_ACCESS_KEY_ID'] = AWS_ACCESS_KEY_ID
    os.environ['AWS_SECRET_ACCESS_KEY'] = AWS_SECRET_ACCESS_KEY


def authenticate_enviroment_OCR(SPARK_OCR_LICENSE, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY):
    """Set Secret environ variables for Spark Context"""
    os.environ['SPARK_OCR_LICENSE'] = SPARK_OCR_LICENSE
    os.environ['AWS_ACCESS_KEY_ID'] = AWS_ACCESS_KEY_ID
    os.environ['AWS_SECRET_ACCESS_KEY'] = AWS_SECRET_ACCESS_KEY


def authenticate_enviroment_HC_and_OCR(SPARK_NLP_LICENSE, SPARK_OCR_LICENSE, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY):
    """Set Secret environ variables for Spark Context"""
    authenticate_enviroment_HC(SPARK_NLP_LICENSE, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    authenticate_enviroment_OCR(SPARK_OCR_LICENSE, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)


def get_authenticated_spark_HC(HC_LICENSE, HC_SECRET, AWS_ACCESS_KEY, AWS_SECRET_KEY, gpu):
    """Install/authenticate Spark NLP for Healthcare and return a started Spark session."""
    import_or_install_licensed_lib(HC_SECRET, 'healthcare')
    authenticate_enviroment_HC(HC_LICENSE, AWS_ACCESS_KEY, AWS_SECRET_KEY)
    import sparknlp
    import sparknlp_jsl
    params = {"spark.driver.memory": "16G",
              "spark.kryoserializer.buffer.max": "2000M",
              "spark.driver.maxResultSize": "2000M"}
    if is_env_pyspark_3_x():
        return sparknlp_jsl.start(HC_SECRET, gpu=gpu, public=sparknlp.version(), params=params)
    else:
        raise ValueError(f"Current Spark version {get_pyspark_version()} not supported!")


def get_authenticated_spark_OCR(OCR_LICENSE, OCR_SECRET, AWS_ACCESS_KEY, AWS_SECRET_KEY, gpu):
    """Install/authenticate Spark OCR and start a Spark session.

    NOTE(review): unlike get_authenticated_spark_HC this does NOT return the session,
    and the gpu/params values are unused — confirm whether callers rely on the return value.
    """
    import_or_install_licensed_lib(OCR_SECRET, 'ocr')
    authenticate_enviroment_OCR(OCR_LICENSE, AWS_ACCESS_KEY, AWS_SECRET_KEY)
    import sparkocr
    import sparknlp
    params = {"spark.driver.memory": "16G",
              "spark.kryoserializer.buffer.max": "2000M",
              "spark.driver.maxResultSize": "2000M"}
    OS_version = sparknlp.version()
    spark = sparkocr.start(secret=OCR_SECRET, nlp_version=OS_version, )
    spark.sparkContext.setLogLevel('ERROR')


def get_authenticated_spark_HC_and_OCR(HC_LICENSE, HC_SECRET, OCR_LICENSE, OCR_SECRET, AWS_ACCESS_KEY, AWS_SECRET_KEY,
                                       gpu):
    """Install/authenticate both Healthcare and OCR and start a Spark session.

    NOTE(review): like get_authenticated_spark_OCR, the started session is not returned.
    """
    import_or_install_licensed_lib(HC_SECRET, 'healthcare')
    import_or_install_licensed_lib(OCR_SECRET, 'ocr')
    authenticate_enviroment_HC_and_OCR(HC_LICENSE, OCR_LICENSE, AWS_ACCESS_KEY, AWS_SECRET_KEY)
    import sparkocr
    import sparknlp
    params = {"spark.driver.memory": "16G",
              "spark.kryoserializer.buffer.max": "2000M",
              "spark.driver.maxResultSize": "2000M"}
    HC_version = HC_SECRET.split('-')[0]
    OS_version = sparknlp.version()
    spark = sparkocr.start(secret=OCR_SECRET, nlp_secret=HC_SECRET, nlp_version=OS_version, nlp_internal=HC_version)
    spark.sparkContext.setLogLevel('ERROR')


def get_authenticated_spark(SPARK_NLP_LICENSE, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, JSL_SECRET, gpu=False, ):
    """ Authenticates environment if not already done so and returns Spark Context with Healthcare Jar loaded
    0. If no Spark-NLP-Healthcare, install it via PyPi
    1. If not auth, run authenticate_enviroment()
    """
    import sparknlp
    authenticate_enviroment_HC(SPARK_NLP_LICENSE, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    import_or_install_licensed_lib(JSL_SECRET)
    import sparknlp_jsl
    params = {"spark.driver.memory": "16G",
              "spark.kryoserializer.buffer.max": "2000M",
              "spark.driver.maxResultSize": "2000M"}
    if is_env_pyspark_3_x():
        return sparknlp_jsl.start(JSL_SECRET, gpu=gpu, public=sparknlp.version(), params=params)
    else:
        raise ValueError(f"Current Spark version {get_pyspark_version()} not supported!")


def is_authorized_environment():
    """Check if auth secrets are set in environment"""
    SPARK_NLP_LICENSE = os.getenv('SPARK_NLP_LICENSE')
    AWS_ACCESS_KEY_ID = os.getenv('AWS_ACCESS_KEY_ID')
    AWS_SECRET_ACCESS_KEY = os.getenv('AWS_SECRET_ACCESS_KEY')
    return None not in [SPARK_NLP_LICENSE, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY]


def auth(HEALTHCARE_LICENSE_OR_JSON_PATH='/content/spark_nlp_for_healthcare.json', AWS_ACCESS_KEY_ID='',
         AWS_SECRET_ACCESS_KEY='', HEALTHCARE_SECRET='', OCR_LICENSE='', OCR_SECRET='', gpu=False):
    """ Authenticate environment for JSL Licensed models.
    Installs NLP-Healthcare if not in environment detected.
    Either provide path to spark_nlp_for_healthcare.json file as first param or manually enter them,
    HEALTHCARE_LICENSE_OR_JSON_PATH, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, HEALTHCARE_SECRET.
    Set gpu=true if you want to enable GPU mode

    :return: True when some credential combination was accepted, False otherwise
    """

    def has_empty_strings(iterable):
        """Check for a given list of strings, whether it has any empty strings or not"""
        return all(x == '' for x in iterable)

    hc_creds = [HEALTHCARE_LICENSE_OR_JSON_PATH, HEALTHCARE_SECRET]
    ocr_creds = [OCR_LICENSE, OCR_SECRET]
    aws_creds = [AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY]

    if os.path.exists(HEALTHCARE_LICENSE_OR_JSON_PATH):
        # Credentials provided via JSON file
        with open(HEALTHCARE_LICENSE_OR_JSON_PATH, encoding="utf8") as json_file:
            j = json.load(json_file)
            if 'SPARK_NLP_LICENSE' in j.keys() and 'SPARK_OCR_LICENSE' in j.keys():
                # HC and OCR creds provided
                get_authenticated_spark_HC_and_OCR(j['SPARK_NLP_LICENSE'], j['SECRET'],
                                                   j['SPARK_OCR_LICENSE'], j['SPARK_OCR_SECRET'],
                                                   j['AWS_ACCESS_KEY_ID'], j['AWS_SECRET_ACCESS_KEY'], gpu)
                return True
            if 'SPARK_NLP_LICENSE' in j.keys() and 'SPARK_OCR_LICENSE' not in j.keys():
                # HC creds provided but no OCR
                get_authenticated_spark_HC(j['SPARK_NLP_LICENSE'], j['SECRET'], j['AWS_ACCESS_KEY_ID'],
                                           j['AWS_SECRET_ACCESS_KEY'], gpu)
                return True
            if 'SPARK_NLP_LICENSE' not in j.keys() and 'SPARK_OCR_LICENSE' in j.keys():
                # OCR creds provided but no HC
                get_authenticated_spark_OCR(j['SPARK_OCR_LICENSE'], j['SPARK_OCR_SECRET'],
                                            j['AWS_ACCESS_KEY_ID'], j['AWS_SECRET_ACCESS_KEY'], gpu)
                return True
            # NOTE(review): get_authenticated_spark requires 4 positional args; this call
            # passes gpu as SPARK_NLP_LICENSE and would raise TypeError if ever reached.
            get_authenticated_spark(gpu)
        return True
    else:
        # Credentials provided as parameter
        if not has_empty_strings(hc_creds) and not has_empty_strings(ocr_creds) and not has_empty_strings(aws_creds):
            # HC + OCR credentials provided
            get_authenticated_spark_HC_and_OCR(HEALTHCARE_LICENSE_OR_JSON_PATH, HEALTHCARE_SECRET,
                                               OCR_LICENSE, OCR_SECRET,
                                               AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, gpu)
            return True
        elif not has_empty_strings(hc_creds) and has_empty_strings(ocr_creds) and not has_empty_strings(aws_creds):
            # HC creds provided, but no OCR
            get_authenticated_spark_HC(HEALTHCARE_LICENSE_OR_JSON_PATH, HEALTHCARE_SECRET,
                                       AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, gpu)
            return True
        elif has_empty_strings(hc_creds) and not has_empty_strings(ocr_creds) and not has_empty_strings(aws_creds):
            # OCR creds provided but no HC
            get_authenticated_spark_OCR(OCR_LICENSE, OCR_SECRET, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, gpu)
            return True
    return False
Aj-Zsl-nlu
/Aj_Zsl_nlu-4.2.0-py3-none-any.whl/nlu/utils/environment/authentication.py
authentication.py
""" Modul obslugujacy glowne okno interfejsu graficznego. """ from PyQt4.QtGui import QWidget from PyQt4.QtGui import QMessageBox from PyQt4.QtGui import QIcon from PyQt4.QtGui import QTreeView from PyQt4.QtGui import QTableView from PyQt4.QtGui import QSplitter from PyQt4.QtGui import QHBoxLayout from PyQt4.QtCore import Qt from django.core.management.base import CommandError from ajango.gui.controller import Controller import ajango def block_application(window): """ Wyswietla wiadomosc o niedostepnosci elementu gui i rzuca blad. """ message = QMessageBox(window) message.setText("Blad interfejsu graficznego.") message.setInformativeText("Oprogramowanie w budowie.") message.setWindowIcon(QIcon(ajango.__path__[0] + '/gui/resource/icon.png')) message.setIcon(QMessageBox.Critical) message.exec_() raise CommandError("Aplikacja nie gotowa.") class Window(QWidget): """ Klasa okienka interfejsu graficznego Ajango. """ def __init__(self, file_skeleton): self.tree = None self.table = None self.controller = Controller(self, file_skeleton) QWidget.__init__(self) self.init_ui() self.controller.build_tree() block_application(self) def get_tree(self): """ Zwraca drzewo okna. """ return self.tree def get_table(self): """ Zwraca okno z tabela. """ return self.table def init_ui(self): """ Inicjalizacja okienka. """ self.tree = QTreeView(self) self.table = QTableView(self) splitter = QSplitter(Qt.Horizontal) splitter.addWidget(self.tree) splitter.addWidget(self.table) vbox = QHBoxLayout() vbox.addWidget(splitter) self.setLayout(vbox) self.setGeometry(300, 300, 500, 200) self.setWindowTitle('Ajagno Editor') self.setWindowIcon(QIcon(ajango.__path__[0] + '/gui/resources/icon.png')) self.show()
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/ajango/gui/window.py
window.py
from django.core.management.base import CommandError
from ajango.core.generable import Generable
from ajango.core.hybrid import Hybrid
from ajango.database import get_table_by_name


class Result(object):
    """ Wrapper around a single query-result row. """

    def __init__(self, data):
        self.data = data

    def get_element(self, key):
        """ Return the serializable value stored under *key*. """
        return self.data.serializable_value(key)

    def set_data(self, data):
        """ Replace the wrapped row. """
        self.data = data


class Query(Hybrid, Generable):
    """ Query object: fetches rows from a table selected by name. """

    def __init__(self, param):
        self.xml_name = "QUERY"
        self.xml_permited = ["TABLE"]
        self.table = ""
        self.models = None
        self._table_object = None  # lazily-resolved model class cache
        Hybrid.__init__(self, param)

    def read_from_xml(self, xmldoc):
        """ Initialize from XML data (nothing to read at this level). """
        pass

    def read_from_dict(self, param):
        """ Initialize from a dictionary variable. """
        self.table = param['table']

    def check(self, name, xmldoc_elem):
        """ Read inner XML nodes; only a single TABLE node is allowed. """
        if name == "TABLE":
            if self.table != "":
                raise CommandError("There are only one table name.")
            self.table = xmldoc_elem.getAttribute('name')

    def set_models(self, models):
        """ Set the models module used for table lookup. """
        self.models = models

    def execute(self, view, view_name='view'):
        """ Emit generated view code that recreates this query. """
        view.add_import("models", ".")
        view.add_import("Query", "ajango.database.query")
        view.add_line("query = Query({ 'table': %r })" % self.table)
        view.add_line("query.set_models(models)")
        view.add_line("%s.set_query(query)" % view_name)

    def _get_table_object(self):
        """ Return (and cache) the model class named by ``self.table``. """
        # Fixed: identity comparison with None instead of ``== None``.
        if self._table_object is None:
            self._table_object = get_table_by_name(self.table, self.models)
        return self._table_object

    @staticmethod
    def _get_base(elems):
        """ Shared part of the ``get_*`` fetch methods: wrap rows in Result. """
        return [Result(elem) for elem in elems]

    def get_by_filter(self, option):
        """ Fetch rows selected by the filter (currently ``option['id']``). """
        table = self._get_table_object()
        id_param = option['id']
        elems = table.objects.filter(id=id_param)
        return Query._get_base(elems)

    def get_all(self):
        """ Fetch every row of the table. """
        table = self._get_table_object()
        elems = table.objects.all()
        return Query._get_base(elems)


class DataCreate(object):
    """ Inserts new rows into the database from form POST data. """

    def __init__(self, table):
        self.table = table
        self.models = None
        self._table_object = None
        self.data = None  # prepared-but-unsaved row object

    def set_models(self, models):
        """ Set the models module used for table lookup. """
        self.models = models

    def _get_table_object(self):
        """ Return (and cache) the model class named by ``self.table``. """
        if self._table_object is None:
            self._table_object = get_table_by_name(self.table, self.models)
        return self._table_object

    def create_from_post(self, post):
        """ Build a new row object from a form's POST dictionary. """
        table = self._get_table_object()
        table_inputs = []
        for elem in post.keys():
            # Skip bookkeeping fields that are not table columns.
            if elem == 'view_id' or elem.find('middlewaretoken') > -1:
                continue
            table_inputs.append(elem)
        data = table()
        for elem in table_inputs:
            setattr(data, elem, post[elem])
        self.data = data

    def save(self):
        """ Persist the prepared row. """
        self.data.save()


class DataEdit(object):
    """ Edits an existing database row from form POST data. """

    def __init__(self, table, editable_ob):
        self.table = table
        self.editable_ob = editable_ob

    def edit_with_post(self, post):
        """ Copy form POST values onto the editable row object. """
        table_inputs = []
        for elem in post.keys():
            # Skip bookkeeping fields that are not table columns.
            if elem == 'view_id' or elem.find('middlewaretoken') > -1 or elem == 'identity':
                print ("Element pominiety: %s" % elem)
                continue
            print("Element: %s " % elem)
            table_inputs.append(elem)
        for elem in table_inputs:
            setattr(self.editable_ob, elem, post[elem])

    def set_models(self, models):
        """ Set the model. (deprecated method) """
        pass

    def save(self):
        """ Persist the edited row. """
        self.editable_ob.save()
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/ajango/database/query.py
query.py
""" Modul zarzadzania polami wejsciowymi. """ from __future__ import print_function from django.core.management.base import CommandError from ajango.core.factory import FactoryBase from ajango.database.validate import ValidationManager from ajango.core.generable import Generable from ajango.core.hybrid import Hybrid, get_str_object_factory class InputFactoryObject(FactoryBase): """ Klasa fabryki pol wejsciowych. """ def init(self): """ Metoda inicujujaca. """ self.set_items('Input', { 'default' : 'ajango.database.inputs.default', 'number' : 'ajango.database.inputs.number' }) get_str_object_factory(self) def input_factory(params): """ Fabryka pol wejsciowych. """ return InputFactoryObject(params).get_from_params() #pylint: disable=R0902 class InputBase(Hybrid, Generable): """ Abstrakcyjna klasa pola wejsiowego. """ def __init__(self, params): self.type = "unKnown" self.theme = 'default' self.default = "" self.messages = [] self.has_prepare = False self.data = {} self.xml_name = "INPUT" self.pre_init() self.label = "" self.default = "" self.tag = "" self.validation_manager = ValidationManager() Hybrid.__init__(self, params) self.value = self.default self.post_init() def read_from_xml(self, xmldoc): """ Inicjalizacja z danych XML. """ self.label = self.getAttribute('label') self.default = self.getAttribute('default') self.tag = self.getAttribute('tag') def read_from_dict(self, params): """ Inicjalizacja ze zmiennej slownikowej. """ try: self.validation_manager = params['vm'] if self.validation_manager == None: self.validation_manager = ValidationManager() except KeyError: self.validation_manager = ValidationManager() try: self.label = params['label'] self.default = params['default'] self.tag = params['tag'] except KeyError: raise CommandError("Input element is invalid") def pre_init(self): """ Czynnosci przed inicjalizacja. """ self.add_permited(["VALIDATE"]) def post_init(self): """ Czynnosci po inicjalizacji. 
""" pass def execute(self, view, view_name='view'): """ Wykonanie zadan obiektu. """ self.validation_manager.execute(view) if len(self.validation_manager) > 0: view.add_line("%s.add_input({'type': %r, " "'label': %r, " "'tag': %r, " "'default': %r, " "'vm': vm })" % (view_name, self.type, self.label, self.tag, self.default)) else: view.add_line("%s.add_input({'type': %r, " "'label': %r, " "'tag': %r, " "'default': %r })" % (view_name, self.type, self.label, self.tag, self.default)) def set_value(self, value): """ Ustawienie wartosci. """ self.value = value def check(self, name, xmldoc_elem): """ Oczytanie nodow wewnetrznych. """ if name == 'VALIDATE': self.validation_manager.add_validate(xmldoc_elem) def prepare_data(self): """ Przygotowanie danych dla szablonu. """ if self.has_prepare: return # Id dla fromularza, do uzycia przy kilku widokach na jednej stronie self.has_prepare = True self.data['label'] = self.label self.data['default'] = self.default self.data['value'] = self.value self.data['tag'] = self.tag self.data['url'] = '%s/ajango_inputs/%s.html' % (self.theme, self.type) self.data['messages'] = self.messages def get_data(self): """ Pobieranie danych dla szablonu. """ self.prepare_data() return dict(self.data) def get_label(self): """ Pobranie opisu pola wejsiowego. """ self.prepare_data() return self.label def get_messages(self): """ Pobranie wiadomosci bledu pola wejsciowego. """ return self.messages def is_valid(self, data): """ Sprawdzenie pole wejsciowe jest poprawnie wypelnione. """ value = False try: text = data[self.tag] value = self.validation_manager.is_valid(text) self.messages += self.validation_manager.get_messages() except KeyError: self.messages.append("Cannot read data named %r from POST" % self.tag) return value class InputManager(object): """ Klasa zarzadzajaca obiektami pol wejsiowych. """ def __init__(self): self.inputs = [] self.messages = [] def add_input(self, input_ob): """ Dodaj pole wejsiowe. 
""" self.inputs.append(input_ob) def get_inputs(self): """ Zwraca tablice elementow kontenera. """ return self.inputs def is_valid(self, data): """ Sprawdza czy pola wejsiowe sa poprawnie wypelnione. """ result = True for elem in self.inputs: if not elem.is_valid(data): self.messages += elem.get_messages() result = False return result def set_data(self, data): """ Ustawienie elementow danymi z parametru. """ for elem in self.inputs: try: value = data[elem.tag] elem.value = value except KeyError: print("There are no tag %r in data object" % elem.tag) elem.value = elem.default def set_data_table(self, table_el): """ Ustwienie elementow na podstawie tabeli. """ for elem in self.inputs: value = table_el.serializable_value(elem.tag) elem.value = value def get_messages(self): """ Pobiera wiadomosci bledu pol wejsiowych. """ return self.messages
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/ajango/database/inputs/__init__.py
__init__.py
""" Modul domyslnej kolumny. """ from ajango.database.columns import ColumnBase class Column(ColumnBase): """ Klasa domyslnej kolumny. """ def __init__(self, xmldoc, param=None): self.presentation = [] ColumnBase.__init__(self, xmldoc, param) def pre_init(self): """ Czynnosci do wykonania przed inicjalizacja. """ self.type = 'default' self.add_permited(["PRESENTATION"]) def read_from_dict(self, params): """ Inicjalizacja ze zmiennej slownikowej. """ ColumnBase.read_from_dict(self, params) try: self.presentation = self.object['presentation'] except KeyError: self.presentation = [] def check(self, name, xmldoc_elem): """ Oczytanie nodow wewnetrznych. """ if name == 'PRESENTATION': presentation_type = xmldoc_elem.getAttribute('type') self.presentation.append(presentation_type) def execute(self, view, view_name="view"): """ Wykonanie czynnosci kolumny. """ if len(self.presentation) > 0: view.add_import("presentation_factory", "ajango.site.presentations") view.add_line("p = []") for elem in self.presentation: view.add_line("p.append(presentation_factory(%r))" % elem) view.add_line("%s.add_column({'type' : 'default', " "'label' : %r, " "'tag' : %r, " "'presentation' : p })" % (view_name, self.label, self.tag)) else: view.add_line("%s.add_column({'type' : 'default', " "'label' : %r, " "'tag' : %r })" % (view_name, self.label, self.tag)) def get_data(self, value=None): """ Pobranie danych dla szablonu. """ self.prepare_data() result = value['result'] data = dict(self.data) value = result.get_element(self.data['tag']) for pres in self.presentation: value = pres.get(value) data['value'] = value return data
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/ajango/database/columns/default.py
default.py
""" Modul zarzadzania kolumnami. """ from django.core.management.base import CommandError from ajango.core.factory import FactoryBase from ajango.core.generable import Generable from ajango.core.hybrid import Hybrid, get_str_object_factory import abc class ColumnFactoryObject(FactoryBase): """ Klasa obiektu zarzadzajacego kolumnami. """ def init(self): """ Metoda inicjalizujaca. """ self.set_items('Column', { 'default' : 'ajango.database.columns.default', 'button' : 'ajango.database.columns.button' }) get_str_object_factory(self) def column_factory(param): """ Fabryka kolumn. """ return ColumnFactoryObject(param).get_from_params() class ColumnBase(Hybrid, Generable): """ Klasa bazowa obiektu obslugujacego kolumny. """ __metaclass__ = abc.ABCMeta def __init__(self, xmldoc, param=None): self.type = "unKnown" self.theme = 'default' self.has_prepare = False self.data = {} self.xml_name = "COLUMN" self.label = "" self.tag = "" self.pre_init() Hybrid.__init__(self, xmldoc) self.post_init() def read_from_xml(self, xmldoc): """ Inicjalizacja z danych XML. """ self.label = self.getAttribute('label') self.tag = self.getAttribute('tag') def read_from_dict(self, params): """ Inicjalizacja ze zmiennej slownikowej. """ try: self.label = params['label'] self.tag = params['tag'] except KeyError: raise CommandError("Input element is invalid") def pre_init(self): """ Czynnosci do wykonania przed inicjalizacja. """ pass def post_init(self): """ Czynnosci do wykonania po inicjalizacji. """ pass def check(self, name, xml_doc_elem): """ Oczytanie nodow wewnetrznych. """ pass def prepare_data(self): """ Przygotowanie danych. """ if self.has_prepare: return self.has_prepare = True self.data['tag'] = self.tag self.data['label'] = self.label self.data['url'] = '%s/ajango_columns/%s.html' % (self.theme, self.type) @abc.abstractmethod def get_data(self, value=None): """ Pobranie danych dla szablonu. 
""" raise CommandError("Please implement this method") def get_label(self): """ Pobranie opisu kolumny. """ self.prepare_data() return self.label
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/ajango/database/columns/__init__.py
__init__.py
from django.core.management.base import CommandError
from ajango.core.factory import FactoryBase
from ajango.core.generable import Generable
from ajango.core.hybrid import Hybrid, get_str_object_factory
import abc


class ValidateFactoryObject(FactoryBase):
    """ Factory of validator objects. """

    def __init__(self, param=None):
        self.param = None
        FactoryBase.__init__(self, param)

    def init(self):
        """ Initializing method. """
        self.set_items('Validate', {
            'isempty' : 'ajango.database.validate.isempty',
            'regex' : 'ajango.database.validate.regex',
            'lt' : 'ajango.database.validate.lt',
            'gt' : 'ajango.database.validate.gt'
        })
        get_str_object_factory(self, 'isempty')


def validate_factory(param):
    """ Factory of validator objects. """
    return ValidateFactoryObject(param).get_from_params()


class ValidateBase(Hybrid, Generable):
    """ Abstract base class of a validator object. """
    __metaclass__ = abc.ABCMeta

    def __init__(self, param):
        self.has_prepare = False
        self.type = "unKnown"
        self.data = {}
        self.xml_name = "VALIDATE"
        self.pre_init()
        self.messages = []
        self.param = ""
        Hybrid.__init__(self, param)
        self.post_init()

    def pre_init(self):
        """ Steps performed before initialization. """
        pass

    def post_init(self):
        """ Steps performed after initialization. """
        pass

    def get_param(self):
        """ Return the validator's parameter. """
        return self.param

    def read_from_xml(self, xmldoc):
        """ Initialize from XML data. """
        self.param = self.getAttribute('param')

    def read_from_dict(self, params):
        """ Initialize from a dictionary variable. """
        try:
            self.param = params['param']
        except KeyError:
            raise CommandError("Input element is invalid")

    @abc.abstractmethod
    def is_valid(self, text):
        """ Check whether the input text passes; subclasses implement this. """
        raise CommandError("File not implemented yet.")

    def execute(self, view, view_name="view"):
        """ Emit generated view code that recreates this validator. """
        view.add_line("vm.add_validate({'type': %r, 'param': %r })"
                      % (self.type, self.param))

    def get_messages(self):
        """ Return the validator's error messages. """
        return self.messages

    def check(self, name, xmldoc_elem):
        """ Read inner XML nodes (none at this level). """
        pass


class ValidationManager(Generable):
    """ Container managing validator objects. """

    def __init__(self):
        self.validations = []
        self.messages = []

    def __len__(self):
        return len(self.validations)

    def add_validate(self, param):
        """ Add a validator object. """
        self.validations.append(validate_factory(param))

    def is_valid(self, text):
        """ Run validators in order; stop and fail on the first error. """
        for validator in self.validations:
            if not validator.is_valid(text):
                self.messages += validator.get_messages()
                return False
        return True

    def get_messages(self):
        """ Return the accumulated error messages. """
        return self.messages

    def execute(self, view, view_name="view"):
        """ Emit generated view code for all registered validators. """
        if self.validations:
            view.add_import("ValidationManager", "ajango.database.validate")
            view.add_line("vm = ValidationManager()")
            for validator in self.validations:
                validator.execute(view)
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/ajango/database/validate/__init__.py
__init__.py
from __future__ import print_function
from django.utils.datastructures import MultiValueDictKeyError
from ajango.site.sites import SiteBase
from ajango.database.inputs import input_factory
from ajango.database.inputs import InputManager
from ajango.database.query import DataEdit
from ajango.database import get_table_by_name


class Site(SiteBase):
    """ Data-editing page class. """

    def __init__(self, ob):
        self.inputs = InputManager()
        self.table = ""
        self.models = None
        self.editable_ob = None  # row object being edited, set in content()
        SiteBase.__init__(self, ob)
        self.identity = self.get_id()

    def init(self):
        """ Initializing method. """
        self.type = 'editable'

    def add_input(self, param):
        """ Add a data-entry field. """
        self.inputs.add_input(input_factory(param))

    def set_table_name(self, table):
        """ Set the name of the table to fetch. """
        self.table = table

    def set_models(self, models):
        """ Set the models module for the page. """
        self.models = models

    def _is_form_available(self):
        """ Check whether POST data addressed to this view is present. """
        try:
            return (self.request.method == 'POST' and
                    self.request.POST['view_id'] == ("view_%d" % self.view_id))
        except KeyError:
            return False

    def get_id(self):
        """ Return the id of the record being displayed.

        Tries the GET parameter first, then the POST 'identity' field;
        returns -1 when neither is present.
        """
        try:
            identity = self.request.GET['id']
        except MultiValueDictKeyError:
            try:
                identity = self.request.POST['identity']
            except MultiValueDictKeyError:
                identity = -1
        return identity

    def send_data(self, post):
        """ Write the submitted form data to the database. """
        data = DataEdit(self.table, self.editable_ob)
        data.set_models(self.models)
        data.edit_with_post(post)
        data.save()

    def content(self):
        """ Build the page data. """
        table_ob = get_table_by_name(self.table, self.models)
        try:
            self.editable_ob = table_ob.objects.filter(id=self.identity)[0]
        except IndexError:
            self.editable_ob = None
        data = []
        self.data['result'] = True
        self.data['identity'] = self.identity
        # Fixed: identity comparison with None instead of ``== None``.
        if self.editable_ob is None:
            self.data['result'] = False
            return
        if self._is_form_available():
            if self.inputs.is_valid(self.request.POST):
                self.send_data(self.request.POST)
                self.data['result'] = False
                return
            # Invalid form: redisplay with the submitted values.
            self.inputs.set_data(self.request.POST)
        else:
            self.inputs.set_data_table(self.editable_ob)
        for elem in self.inputs.get_inputs():
            data.append(elem.get_data())
        self.data['tabledata'] = data
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/ajango/site/sites/editable.py
editable.py
from django.utils.datastructures import MultiValueDictKeyError
from ajango.core.factory import FactoryBase
from django.core.management.base import CommandError
from django.shortcuts import render
from abc import ABCMeta, abstractmethod
import ajango


class SiteFactoryObject(FactoryBase):
    """ Site factory object. """

    def init(self):
        """ Initializing method. """
        self.set_items('Site', {
            'empty' : 'ajango.site.sites.empty',
            'input' : 'ajango.site.sites.input',
            'list' : 'ajango.site.sites.list',
            'display' : 'ajango.site.sites.display',
            'editable' : 'ajango.site.sites.editable',
            'raport' : 'ajango.site.sites.raport',
            'container' : 'ajango.site.sites.container'
        })


def site_factory(key, param):
    """ Site factory. """
    return SiteFactoryObject(param).get_class_factory(key)


#pylint: disable=R0902
class SiteBase(object):
    """ Abstract class handling a site. """
    __metaclass__ = ABCMeta

    # Class-wide counter used to hand out unique view ids.
    global_view_id_counter = 0

    def __init__(self, ob):
        self.view_id = SiteBase.global_view_id_counter
        # BUG FIX: the original ``self.global_view_id_counter = ... + 1``
        # only created an instance attribute, so the class-level counter
        # never advanced and every site received view_id 0.  Increment the
        # class attribute explicitly instead.
        SiteBase.global_view_id_counter += 1
        self.request = ob['request']
        self.theme = 'default'
        self.header = ""
        self.type = 'empty'  # default type for layout
        self.init()
        self.layout = 'ajango_layout.html'
        self.include = 'ajango_%s.html' % self.type
        self.data = dict()
        self.data['appName'] = ob['appName']
        self.data['viewName'] = ob['viewName']
        self.data['menu'] = []

    def set_view_id(self, view_id):
        """ Change the view id; needed when handling several views. """
        self.view_id = view_id

    def set_title(self, text):
        """ Set the page title. """
        self.data['title'] = text

    def set_header(self, text):
        """ Set the section header. """
        self.header = text

    def set_theme(self, theme):
        """ Set the layout theme.

        Ajango loads html files from the folder named by this value; the
        file names follow the documentation but can live in packages named
        by this variable.
        """
        self.theme = theme

    @abstractmethod
    def init(self):
        """ Initializing method; subclasses must implement it. """
        raise NotImplementedError()

    def set_include(self, text):
        """ Set the main page file appropriate for this site. """
        self.include = text

    def set_layout(self, html):
        """ Set the layout file. """
        self.layout = html

    @abstractmethod
    def content(self):
        """ Fill the variables delivered to the templates. """
        raise NotImplementedError()

    def make_content(self):
        """ Entry point called by the external interface. """
        if self.header != "":
            self.data['header'] = self.header
        self.data['include_view'] = self.theme + '/' + self.include
        self.data['view_id'] = self.view_id
        self.content()

    def make_content_and_get_data(self):
        """ Build the content and return the data the page produced. """
        self.make_content()
        return self.data

    def make_site(self):
        """ Render the page.

        Returns what the Django view function should return.
        """
        self.make_content()
        return render(self.request, self.theme + '/' + self.layout,
                      {'data_site' : self.data})

    def set_menu(self, tab):
        """ Set the menu. """
        self.data['menu'] = tab


class GetSite(object):
    """ Mixin for pages that read data via POST and GET. """
    __metaclass__ = ABCMeta

    @abstractmethod
    def get_request(self):
        """ Return the request object. """
        raise NotImplementedError()

    def get_id(self):
        """ Return the id of the displayed record, or -1 when absent/invalid. """
        try:
            request = self.get_request()
            identity = int(request.GET['id'])
        except MultiValueDictKeyError:
            identity = -1
        except ValueError:
            return -1
        return identity
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/ajango/site/sites/__init__.py
__init__.py
""" Pakiet zarzadzajacy projektem Django. """ import os.path from django.core.management.base import CommandError from ajango.core import add_to_python_array from ajango.core import file_to_buffor, buffor_to_file class ProjectManager(object): """ Klasa zarzadzajaca projektem Django. """ def __init__(self, project_name): if not os.path.isdir(project_name): raise CommandError("%r: Cannot find project" % project_name) self.project_name = project_name self._verify_file('settings.py') self._verify_file('urls.py') self._verify_file('wsgi.py') self.apps = [] self.urls = [] self.is_global_url = False def _verify_file(self, file_skeleton): """ Sprawdzenie czy istnieje plik. """ search_file = self.project_name + '/' + file_skeleton if not os.path.isfile(search_file): raise CommandError("Cannot find file: %r" % search_file) def add_application(self, app): """ Dodanie aplikacji do pliku settingsow projektu. """ self.apps.append(app) def add_url(self, viewname, appname): """ Add view url. """ self.urls.append("url(r'^%s/%s/', %s.views.%s, name=%r)," % (appname, viewname, appname, viewname, viewname)) def add_main_view_url(self, app): """ Zarejestrowanie widoku jako glowny widok aplikacji. """ appname = app.get_name() viewname = app.get_main_view().get_name() self.urls.append("url(r'^%s$', %s.views.%s, name='%s_main')," % (appname, appname, viewname, appname)) def add_main_url(self, app): """ Zarejestrowanie glownego ekranu dla aplikacji. """ if app == None: raise CommandError("Cannot set 'None' value as application") if self.is_global_url: raise CommandError("Cannot set two or more main application to url") self.is_global_url = True viewname = app.get_main_view().get_name() self.urls.append("url(r'^$', %s.views.%s, name='main')," % (app.get_name(), viewname)) def execute_url(self): """ Dodanie odpowiednich adresow url do pliku. 
""" address = self.project_name + "/urls.py" buffor = file_to_buffor(address) import_str = "" for app in self.apps: import_str += "import %s.views\n" % app buffor = import_str + buffor url_str = "" for url in self.urls: url_str += "\n %s" % url buffor = add_to_python_array(buffor, "urlpatterns", url_str) buffor_to_file(address, buffor) def execute_settings(self): """ dodanie aplikacji do pliku z ustawieniami settings.py""" address = self.project_name + "/settings.py" buffor = file_to_buffor(address) app_str = "" for app in self.apps: app_str += "\n '%s'," % app buffor = add_to_python_array(buffor, "INSTALLED_APPS", app_str) buffor_to_file(address, buffor) def execute(self): """ Wykonanie zadan obiektu. """ self.execute_url() self.execute_settings()
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/ajango/generator/project_manager.py
project_manager.py
""" Modul zarzadzania aplikacjami. """ import os import os.path from django.core.management.base import CommandError from django.core.management.templates import TemplateCommand from ajango.generator.config_global import ConfigGlobal from ajango.generator.views import view_factory from ajango.core.XMLReader import XMLReader from ajango.database.data_base_manager import DataBaseManager from ajango.generator.view_manager import ViewManager from ajango.generator.menu_manager import MenuManager from ajango.generator.renderer import ImportRenderer class Application(XMLReader): """ Klasa zarzadzajaca aplikacjami. """ def __init__(self, xmldoc, param=None): self.views = [] self.managers = {} self.main_view = None self.imp_renderer = ImportRenderer() self.options = None self.managers['view'] = ViewManager() self.managers['menu'] = MenuManager(self.managers['view'], self.imp_renderer) self.managers['database'] = DataBaseManager(self) self.options = None self.name = "" XMLReader.__init__(self, xmldoc, param) def init(self): """ Metoda inicjalizujaca. """ self.xml_name = "APPLICATION" self.add_permited(["VIEW"]) self.managers['view'].set_menu(self.managers['menu']) def set_options(self, options): """ Ustawienie opcji. """ self.options = options def is_exist(self): """ Sprawdzenie czy aplikacja juz istanieje. """ return os.path.isdir(self.get_name()) # pylint: disable=W0142 def make_new(self): """ Wykonanie nowej aplikacji. """ template_command = TemplateCommand() options = self.options template_command.validate_name(self.get_name(), 'app') try: os.mkdir(self.get_name()) except OSError: raise CommandError("Cannot create new application named %r." % self.get_name()) template_command.handle('app', self.get_name(), self.get_name(), **options) project_manager = ConfigGlobal().get('project_manager') project_manager.add_application(self) self.managers['database'].build_data_base(options) def get_name(self): """ Pobranie nazwy. 
""" return self.name def pre_render(self): """ Czynnosci przed inicjalizacja. """ self.name = self.getAttribute('name').lower() if self.name == '': raise CommandError("Missing parametr name in application") def check(self, name, xmldoc_elem): """ Oczytanie nodow wewnetrznych. """ if name == 'VIEW': new_view = view_factory({'xmldoc' : xmldoc_elem, 'imp_renderer': self.imp_renderer, 'app': self}) for once in self.views: if once.get_name() == new_view.get_name(): raise CommandError("Id %r is use by two or " "more views in %r" % (str(once), str(self))) if new_view.getAttribute("main").lower() == "main": if self.main_view != None: raise CommandError("There are two or more views" "sign as 'main' -> %r %r " % (new_view.get_name(), self.main_view.get_name())) self.main_view = new_view self.views.append(new_view) def post_renderer(self): """ Czynnosci po inicjalizacji. """ if self.main_view == None: self.main_view = self.views[0] project = ConfigGlobal().get('project_manager') project.add_main_view_url(self) def execution(self): """ Wygenerowanie kodu wszystkich widokow i dodanie ich do Managera widokow, ktory je wypisze. """ for elem in self.views: self.managers['view'].add(elem) self.managers['view'].save_all(self) def get_url(self): """ Zwraca link do plikow z aplikacja. """ url = "./" + self.get_name() return url def __str__(self): return self.get_name() def get_main_view(self): """ Pobranie glownego widoku. """ if self.main_view == None: try: return self.views[0] except IndexError: return None else: return self.main_view
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/ajango/generator/application.py
application.py
""" Pakiet generowania plikow jezyka Python. """ import abc TABULATOR = ' ' class Line(object): """ Generowanie linni kodu. """ def __init__(self, text, tab): self.text = text self.tab = tab def get_text(self): """ Pobranie tekstu kodu. """ return self.text def set_tab(self, tab=0): """ Ustawienie ilosci tabulacji w linni. """ self.tab = tab #pylint: disable=W0612 class BaseRenderer(object): """ Klasa bazowa obiektow generujacych kod. """ __metaclass__ = abc.ABCMeta @staticmethod def cout(text, tab, enter=True): """ Zwraca fragment kodu z odpowiednim odstepem. """ result = "" for i in range(tab): result += TABULATOR result = result + text if enter == True: result = result + "\n" return result @abc.abstractmethod def render(self, tab=0): """ Renderowanie elementu. """ raise NotImplementedError() class DefRenderer(BaseRenderer): """ Klasa obiektu renderujacego funkcje. """ def __init__(self, name, args=''): """ Metoda inicjalizujaca. """ self.name = name self.args = args self.line = [] def add_line(self, text, tab=0): """ Dodanie linni kodu w funkcji. """ self.line.append(DefRenderer.cout(text, tab, False)) def render(self, tab=0): """ Renderowanie funkcji. """ result = DefRenderer.cout("def %s(%s):" % (self.name, self.args), tab) i = 0 for once in self.line: result = result + DefRenderer.cout(once, tab + 1) i = i + 1 if i == 0: result = result + DefRenderer.cout("pass", tab + 1) result = result + "\n" return result class ImportRenderer(BaseRenderer): """ Klasa obiektu generujacego importy. """ def __init__(self): self.imports = [] def add_import(self, imp, from_import=None): """ Dodanie pojedynczego importu. """ if from_import == None: self.imports.append("import %s" % imp) else: self.imports.append("from %s import %s" % (from_import, imp)) def render(self, tab=0): """ Renderowanie importow. """ result = '' imp = set(self.imports) for once in imp: result = result + self.cout(once, tab) return result
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/ajango/generator/renderer.py
renderer.py
""" Generator building a project's applications from an Ajango skeleton file. """
from __future__ import print_function
from django.core.management.base import CommandError
from ajango.generator.project_manager import ProjectManager
from ajango.generator.application import Application
from ajango.generator.config_global import ConfigGlobal
from xml.dom import minidom
import os.path


class AjangoGenerator(object):
    """ Object generating applications based on a skeleton (XML) file.

    Parsing happens eagerly in __init__ via render(); make_apps() then
    materializes every parsed application.
    """

    def __init__(self, project_name, skeleton_file):
        self.project = ProjectManager(project_name)
        # Publish the project manager globally so other generator parts can reach it.
        ConfigGlobal().set('project_manager', self.project)
        self.options = None    # command options, injected later via set_options()
        self.apps = []         # every Application parsed from the skeleton
        self.main_app = None   # the application marked as "main" (or the first one)
        self.render(skeleton_file)

    def set_options(self, options):
        """ Set the options for this object. """
        self.options = options

    def render(self, skeleton_file):
        """ Build the object tree for the XML file.

        @raise CommandError: if the file is missing, the root tag is not
            AJANGO, an unknown tag appears, application names repeat, or
            more than one application is marked as main.
        """
        if not os.path.isfile(skeleton_file):
            raise CommandError("%r: File doesn't exist" % skeleton_file)
        print("Render: "+ skeleton_file)
        xmldoc_main = minidom.parse(skeleton_file)
        xmldoc = xmldoc_main.childNodes[0]
        if xmldoc.tagName.upper() != 'AJANGO':
            raise CommandError("This Ajango skeleton is not Valid")
        for elem in xmldoc.childNodes:
            if isinstance(elem, minidom.Element):
                # Render an application node.
                if elem.tagName.upper() == 'APPLICATION':
                    app = Application(elem)
                    for once in self.apps:
                        # Check that application names do not repeat.
                        if app.get_name() == once.get_name():
                            raise CommandError("There are two or more "
                                               "application named: %r"
                                               % app.get_name())
                    # NOTE(review): this treats `main="main"` as the marker;
                    # confirm the skeleton format really uses the value "main"
                    # (rather than e.g. "true") for this attribute.
                    if app.getAttribute("main").lower() == "main":
                        # Check that the application is not a second "main".
                        if self.main_app != None:
                            raise CommandError("There are two or more "
                                               "application signed as 'main'")
                        self.main_app = app
                    self.apps.append(app)
                    self.project.add_main_view_url(app)
                else:
                    raise CommandError("Unknown tag name: %r " % elem.tagName)
        if self.main_app == None:
            # If no application is marked as main, the first one becomes main.
            # NOTE(review): raises IndexError when the skeleton contains no
            # APPLICATION nodes at all -- confirm whether that case is valid.
            self.main_app = self.apps[0]
        self.project.add_main_url(self.main_app)

    def make_apps(self):
        """ Create all applications and finally execute the project manager. """
        for app in self.apps:
            if self.options != None:
                app.set_options(self.options)
            app.make_new()
            print("Building app: " + app.get_name())
            app.execution()
        project = ConfigGlobal().get('project_manager')
        project.execute()
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/ajango/generator/__init__.py
__init__.py
""" View-generator factory and abstract base class for view generators. """
from xml.dom import minidom
from ajango.core.factory import FactoryBase
from ajango.core.XMLReader import XMLReader
from ajango.generator.renderer import DefRenderer
from ajango.core.generable import Generable
from django.core.management.base import CommandError
import abc


class ViewFactoryObject(FactoryBase):
    """ Factory object producing view generators. """

    def init(self):
        """ Initialization method: register the built-in view types. """
        self.set_items('View', {
            'empty' : 'ajango.generator.views.empty',
            'input' : 'ajango.generator.views.input',
            'list' : 'ajango.generator.views.list',
            'display' : 'ajango.generator.views.display',
            'editable' : 'ajango.generator.views.editable',
            'raport' : 'ajango.generator.views.raport',
            'container' : 'ajango.generator.views.container',
        })
        if self.object['xmldoc'].tagName.upper() != 'VIEW':
            raise CommandError("Cannot create application from %r"
                               % self.object['xmldoc'].tagName)
        # The 'type' attribute selects which registered view module is used.
        self.str = self.object['xmldoc'].getAttribute('type').lower()

    def execution(self, fun):
        """ Execute the object's task: instantiate the selected view. """
        return fun(self.object['xmldoc'],
                   self.object['imp_renderer'],
                   self.object['app'])


def view_factory(param):
    """ Factory function for view generators. """
    return ViewFactoryObject(param).get_from_params()


#pylint: disable=R0902
class ViewBase(XMLReader, Generable):
    """ Abstract base class of view generators.

    Concrete views implement execute(); the XML node's TITLE and HEADER
    children are consumed by check().
    """
    __metaclass__ = abc.ABCMeta
    DEFAULT_APP_TITLE = "My new Application"

    def __init__(self, xmldoc, importRenderer, app):
        self.name = xmldoc.getAttribute('id').lower()
        if self.name == '':
            self.name = 'index'   # views without an id become the index view
        self.app = app
        self.code = ""
        self.type = ""   # concrete subclasses are expected to set this
        self.title = self.DEFAULT_APP_TITLE
        self.header = ""
        if isinstance(importRenderer, dict):
            # A prepared renderer dict was passed in; reuse it as-is.
            self.renderer = importRenderer
        else:
            self.renderer = {}
            self.renderer['code'] = DefRenderer(self.name, 'request')
            self.renderer['import'] = importRenderer
        self.xml_name = "VIEW"
        self.add_permited(["TITLE", "HEADER"])
        XMLReader.__init__(self, xmldoc)

    def check(self, name, xmldoc_elem):
        """ Read the inner nodes (TITLE and HEADER). """
        if name == 'TITLE':
            self.title = xmldoc_elem.getAttribute('value')
            return True
        elif name == 'HEADER':
            self.header = xmldoc_elem.getAttribute('value')
            return True
        return False

    def get_renderer(self, key):
        """ Return the renderer registered under *key*. """
        return self.renderer.get(key)

    def get_title(self):
        """ Return the title; falls back to the view name. """
        if self.title == "" or self.title == self.DEFAULT_APP_TITLE:
            return self.name
        return self.title

    def add_line(self, text, tab=0):
        """ Add a statement to the view's generated code. """
        self.renderer['code'].add_line(text, tab)

    def add_import(self, imp, from_elem=None):
        """ Add an import to the generated module. """
        self.renderer['import'].add_import(imp, from_elem)

    def get_name(self):
        """ Return the view name. """
        return self.name

    def make_view_execute(self, view_name="view"):
        """ Build the view inside the generated function body.

        @raise CommandError: when the concrete view never set self.type.
        """
        if self.type == "":
            raise CommandError("This view must have set type")
        self.add_import("site_factory", "ajango.site.sites")
        self.add_line("%s = site_factory(%r, {'request' : request,"
                      "'appName' : %r, 'viewName' : %r})"
                      % (view_name, self.type,
                         self.app.get_name(), self.name))
        self.execute(None, view_name)

    def make_execute(self, view_name="view"):
        """ Build the code; the function exported to the outside. """
        self.make_view_execute(view_name)
        self.add_line("%s.set_title(%r)" % (view_name, self.title))
        if self.header != "":
            self.add_line("%s.set_header(%r)" % (view_name, self.header))
        self.add_line("%s.set_menu(default_menu())" % view_name)
        self.add_line("return %s.make_site()" % view_name)

    @abc.abstractmethod
    def execute(self, view, view_name="view"):
        """ Execute the object's task; implemented by concrete views. """
        raise NotImplementedError()

    def __str__(self):
        return self.get_name()

    #pylint: disable=R0201
    def is_display_in_menu(self):
        """ Whether this view is shown in the application's main menu. """
        return False
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/ajango/generator/views/__init__.py
__init__.py
""" Modul parsowania xml. """ from __future__ import print_function from xml.dom import minidom from django.core.management.base import CommandError from django.utils.termcolors import make_style from django.core.management.color import supports_color import abc class XMLReader(object): """ Klasa obiektu parsowania XML. """ __metaclass__ = abc.ABCMeta def __init__(self, xmldoc, param=None): self.xml_xmldoc = xmldoc self.add_permited([]) self.xml_name = "" self.object = param self.init() self.render() self.xml_permited = [] def init(self): """ Modul inicjalizujacy. """ pass def add_permited(self, permited): """ Dodanie tagow ktore moga byc parsowane wewnatrz elementu. """ try: self.xml_permited += permited except AttributeError: self.xml_permited = permited def get_xml_doc(self): """ Pobranie xmldoc. """ return self.xml_xmldoc # pylint: disable=C0103 def getAttribute(self, name): """ Pobranie atrybutu dla noda. """ return self.xml_xmldoc.getAttribute(name) def _is_permited(self, name): """ Sprawdzenie czy element moze byc parsowany. """ if self.xml_permited == []: return True for elem in self.xml_permited: if elem.upper() == name.upper(): return True return False def pre_render(self): """ Czynnosci do wykonania przez inicjalizacja. """ pass def post_render(self): """ Czynnosci do wykonania po inicjalizacji. """ pass def render(self): """ Renderowanie elementow. 
""" if supports_color(): yellow = make_style(fg='yellow') else: yellow = lambda text: text print("Rendering '" + yellow(self.xml_xmldoc.tagName.upper()) + "'") if (self.xml_name != "") and (self.xml_name.upper() != self.get_xml_doc().tagName.upper()): raise CommandError("Cannot create object from %r" % self.get_xml_doc().tagName) self.pre_render() for elem in self.get_xml_doc().childNodes: if isinstance(elem, minidom.Element): if self._is_permited(elem.tagName): self.check(elem.tagName.upper(), elem) else: raise CommandError("Unknown element: %r" % elem.tagName) self.post_render() @abc.abstractmethod def check(self, name, xmldoc_elem): """ Oczytanie nodow wewnetrznych. """ raise CommandError("XMLReader must have check method")
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/ajango/core/XMLReader.py
XMLReader.py
""" Factory base module. """
import importlib
from django.core.management.base import CommandError
from django.core.exceptions import ImproperlyConfigured
from django.utils.termcolors import make_style
from django.core.management.color import supports_color
from django.conf import settings
from abc import ABCMeta


class FactoryBase(object):
    """ Base class implementing a factory.

    Subclasses call set_items() from init() to register a class name plus a
    key -> module-path table; objects are then created by key, optionally
    extended from settings.AJANGO_FACTORY.
    """
    __metaclass__ = ABCMeta

    def __init__(self, param=None):
        self.class_name = ''     # class looked up inside each registered module
        self.base_address = {}   # factory key -> module path
        self.object = param      # parameters handed to the created object
        self.str = None          # default key used by get_from_params()
        self.init()

    def init(self):
        """ Initialization hook; subclasses override to call set_items().
        @param self: Factory object
        """
        pass

    def execution(self, fun):
        """ Execute the object's task.
        @param self: Factory object
        @param fun: Function initializing the object created by the factory
        """
        return fun(self.object)

    def _get_base_address(self):
        """ Return the table of initializing classes.
        @param self: Factory object
        """
        return self.base_address

    def _create_object(self, key):
        """ Create the object registered under *key*.
        @param self: Factory object
        @param key: Object key
        @type key: str
        @raise CommandError: for an unknown key or an unimportable module
        """
        obj = "unKnown"
        try:
            base_address = self._get_base_address()
            obj = base_address[key]
            module = importlib.import_module(obj)
            fun = getattr(module, self.class_name)
            return self.execution(fun)
        except KeyError:
            raise CommandError("Doesn't know %s type: %r"
                               % (self.class_name, key))
        except ImportError:
            raise CommandError("Module %r doesn't exist" % obj)

    def get_class_factory(self, key):
        """ Return an object based on *key*.
        @param self: Factory object
        @param key: Object key
        @type key: str
        """
        if supports_color():
            blue = make_style(fg='cyan')
        else:
            blue = lambda text: text
        print("Create '" + blue(key) + "' from '"
              + blue(type(self).__name__) + "'")
        return self._create_object(key)

    def get_from_params(self):
        """ Return an object based on the factory's own data (self.str).
        @param self: Factory object
        """
        if supports_color():
            blue = make_style(fg='cyan')
        else:
            blue = lambda text: text
        print("Create '" + blue(self.str) + "' from '"
              + blue(type(self).__name__) + "'")
        return self._create_object(self.str)

    def __add_modules(self, modules):
        """ Register new modules in the factory.
        @param self: Factory object
        @param modules: Set of objects available in the factory
        @type modules: Dictionary whose B{key} is the factory key and whose
            B{value} is the address of the module containing the object.
        @raise CommandError: when a key is already registered
        """
        for elem in modules.keys():
            if elem in self.base_address:
                # Re-registering a key would silently shadow an object.
                raise CommandError("Cannot rewrite %r key" % elem)
            self.base_address[elem] = modules[elem]

    def __set_class_name(self, class_name):
        """ Set the class name the factory loads from each module.

        Stores the name and then tries to read extra entries from the
        settings file.
        @param self: Factory object
        @param class_name: Name of the class inside the module file
        @type class_name: str
        @raise CommandError: when the class name was already set
        """
        if self.class_name != "":
            raise CommandError("Cannot update class_name for factory [%r]"
                               % class_name)
        self.class_name = class_name
        self.__read_items_from_settings()

    def set_items(self, class_name, modules):
        """ Configure the factory.

        This method should be called from
        L{init(self) <ajango.core.factory.FactoryBase.init>} in the factory
        class.  It may be called only once; calling it again may cause
        undefined errors.
        @param self: Factory object
        @param class_name: Name of the class inside the module file
        @type class_name: str
        @param modules: Set of objects available in the factory
        @type modules: Dictionary whose B{key} is the factory key and whose
            B{value} is the address of the module containing the object.
        """
        self.__set_class_name(class_name)
        self.__add_modules(modules)

    def __read_items_from_settings(self):
        """ Load additional factory objects from the settings.

        More about registering extra factory objects in the description of
        the L{Factory <ajango.core.factory>} module.
        @param self: Factory object
        """
        try:
            if not self.class_name in settings.AJANGO_FACTORY:
                # The factory has no additional elements defined.
                return
            tab = settings.AJANGO_FACTORY[self.class_name]
            self.__add_modules(tab)
        except AttributeError:
            # No definition of additional factory objects at all.
            return
        except ImproperlyConfigured:
            # The settings.py file is not properly configured.
            return
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/ajango/core/factory.py
factory.py
""" Django settings for the generated project (Django 1.9 style layout). """
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '(8pd&#(njbqz&ni$c&6&3zqvk(4^$kaw9((yx%se7w-)y+@+)@'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Ajango's bundled application providing automatically generated views.
    'ajango.contrib.automatic',
]

MIDDLEWARE_CLASSES = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'project.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'project.wsgi.application'


# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}


# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME':
            'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME':
            'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME':
            'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME':
            'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]


# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/

STATIC_URL = '/static/'
Ajango
/Ajango-1.0.0.tar.gz/Ajango-1.0.0/project/settings.py
settings.py