{
  "_name_or_path": "facebook/dinov2-large",
  "apply_layernorm": true,
  "architectures": [
    "Dinov2ForImageClassification"
  ],
  "attention_probs_dropout_prob": 0.0,
  "drop_path_rate": 0.0,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 1024,
  "id2label": {
    "0": "ALGAE",
    "1": "Acr",
    "2": "Acr_Br",
    "3": "Anem",
    "4": "CCA",
    "5": "Ech",
    "6": "Fts",
    "7": "Gal",
    "8": "Gon",
    "9": "Mtp",
    "10": "P",
    "11": "Poc",
    "12": "Por",
    "13": "R",
    "14": "RDC",
    "15": "S",
    "16": "SG",
    "17": "Sarg",
    "18": "Ser",
    "19": "Slt",
    "20": "Sp",
    "21": "Turf",
    "22": "UNK"
  },
  "image_size": 128,
  "initializer_range": 0.02,
  "label2id": {
    "ALGAE": 0,
    "Acr": 1,
    "Acr_Br": 2,
    "Anem": 3,
    "CCA": 4,
    "Ech": 5,
    "Fts": 6,
    "Gal": 7,
    "Gon": 8,
    "Mtp": 9,
    "P": 10,
    "Poc": 11,
    "Por": 12,
    "R": 13,
    "RDC": 14,
    "S": 15,
    "SG": 16,
    "Sarg": 17,
    "Ser": 18,
    "Slt": 19,
    "Sp": 20,
    "Turf": 21,
    "UNK": 22
  },
  "layer_norm_eps": 1e-06,
  "layerscale_value": 1.0,
  "mlp_ratio": 4,
  "model_type": "dinov2",
  "num_attention_heads": 16,
  "num_channels": 3,
  "num_hidden_layers": 24,
  "out_features": [
    "stage24"
  ],
  "out_indices": [
    24
  ],
  "patch_size": 14,
  "problem_type": "multi_label_classification",
  "qkv_bias": true,
  "reshape_hidden_states": true,
  "stage_names": [
    "stem",
    "stage1",
    "stage2",
    "stage3",
    "stage4",
    "stage5",
    "stage6",
    "stage7",
    "stage8",
    "stage9",
    "stage10",
    "stage11",
    "stage12",
    "stage13",
    "stage14",
    "stage15",
    "stage16",
    "stage17",
    "stage18",
    "stage19",
    "stage20",
    "stage21",
    "stage22",
    "stage23",
    "stage24"
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.44.2",
  "use_swiglu_ffn": false,
  "initial_learning_rate": 0.001,
  "train_batch_size": 64,
  "eval_batch_size": 64,
  "optimizer": {
    "type": "Adam"
  },
  "lr_scheduler_type": {
    "type": "ReduceLROnPlateau"
  },
  "patience_lr_scheduler": 5,
  "factor_lr_scheduler": 0.1,
  "weight_decay": 0.0001,
  "early_stopping_patience": 10,
  "freeze_encoder": true,
  "data_augmentation": true,
  "num_epochs": 150
}
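
For context, below is a minimal sketch of how a Transformers config with this label set is typically consumed. The fine-tuned checkpoint path is not given in the config, so the snippet starts from the facebook/dinov2-large base checkpoint named in "_name_or_path" and rebuilds the 23-class head as an illustration; it is not the original training or inference code, and preprocessing details (e.g. the 128-pixel crop implied by "image_size") may differ from the processor defaults.

# Hedged sketch: load a DINOv2-Large backbone with the 23 reef labels from this
# config using Hugging Face Transformers (Dinov2ForImageClassification and
# AutoImageProcessor are real APIs; the classifier head is freshly initialized
# here because no fine-tuned checkpoint id appears in the config).
import torch
from transformers import AutoImageProcessor, Dinov2ForImageClassification

labels = [
    "ALGAE", "Acr", "Acr_Br", "Anem", "CCA", "Ech", "Fts", "Gal", "Gon", "Mtp",
    "P", "Poc", "Por", "R", "RDC", "S", "SG", "Sarg", "Ser", "Slt", "Sp",
    "Turf", "UNK",
]
id2label = {i: name for i, name in enumerate(labels)}
label2id = {name: i for i, name in enumerate(labels)}

# Base checkpoint from "_name_or_path"; hidden_size=1024, 24 layers, patch_size=14
# all come from the DINOv2-Large architecture declared above.
processor = AutoImageProcessor.from_pretrained("facebook/dinov2-large")
model = Dinov2ForImageClassification.from_pretrained(
    "facebook/dinov2-large",
    num_labels=len(labels),
    id2label=id2label,
    label2id=label2id,
)

# Inference on a single PIL image; sigmoid matches the declared
# problem_type "multi_label_classification".
# inputs = processor(images=image, return_tensors="pt")
# with torch.no_grad():
#     probs = torch.sigmoid(model(**inputs).logits)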