{
  "_name_or_path": "xhlu/tapas-nq-hn-retriever-large-0",
  "aggregation_labels": null,
  "aggregation_loss_weight": 1.0,
  "aggregation_temperature": 1.0,
  "allow_empty_column_selection": false,
  "answer_loss_cutoff": null,
  "answer_loss_importance": 1.0,
  "architectures": [
    "TapasModel"
  ],
  "attention_probs_dropout_prob": 0.1,
  "average_approximation_function": "ratio",
  "average_logits_per_cell": false,
  "cell_selection_preference": null,
  "disable_per_token_loss": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "huber_loss_delta": null,
  "init_cell_selection_weights_to_zero": false,
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "layer_norm_eps": 1e-12,
  "max_num_columns": 32,
  "max_num_rows": 64,
  "max_position_embeddings": 1024,
  "model_type": "tapas",
  "no_aggregation_label_index": null,
  "num_aggregation_labels": 0,
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 0,
  "positive_label_weight": 10.0,
  "reset_position_index_per_cell": false,
  "select_one_column": true,
  "softmax_temperature": 1.0,
  "torch_dtype": "float32",
  "transformers_version": "4.11.3",
  "type_vocab_size": [
    3,
    256,
    256,
    2,
    256,
    256,
    10
  ],
  "type_vocab_sizes": [
    3,
    256,
    256,
    2,
    256,
    256,
    10
  ],
  "use_answer_as_supervision": null,
  "use_gumbel_for_aggregation": false,
  "use_gumbel_for_cells": false,
  "use_normalized_answer_loss": false,
  "vocab_size": 30522
}
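
For reference, a minimal sketch (Python) of loading this configuration and checkpoint with the transformers library. The repo id is taken from the "_name_or_path" field above and the model class from "architectures"; both are assumptions, so adjust the id if the files live under a different Hub path.

from transformers import TapasConfig, TapasModel

# Repo id copied from "_name_or_path" in the config above; this is an
# assumption and may not match the actual Hub path of this repository.
repo_id = "xhlu/tapas-nq-hn-retriever-large-0"

# Parse the config shown above; the dimensions confirm a BERT-large-sized
# encoder (hidden_size=1024, num_hidden_layers=24, num_attention_heads=16).
config = TapasConfig.from_pretrained(repo_id)
print(config.hidden_size, config.num_hidden_layers)  # 1024 24

# Instantiate the class named in "architectures" with the stored weights.
model = TapasModel.from_pretrained(repo_id)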