fschlatt committed
Commit e062892 · 1 Parent(s): ca73d7b

update config

Files changed (1)
  1. config.json +3 -4
config.json CHANGED
@@ -1,10 +1,10 @@
 {
-  "_name_or_path": "experiments/wandb/run-20240605_134328-5wbd9pb9/files/huggingface_checkpoint",
   "add_extra_token": true,
   "architectures": [
     "SetEncoderElectraModel"
   ],
   "attention_probs_dropout_prob": 0.1,
+  "backbone_model_type": "electra",
   "classifier_dropout": null,
   "depth": 100,
   "doc_length": 256,
@@ -16,15 +16,14 @@
   "intermediate_size": 4096,
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
-  "model_type": "set-encoder-electra",
+  "model_type": "set-encoder",
   "num_attention_heads": 16,
   "num_hidden_layers": 24,
-  "other_sequence_embedding": false,
   "pad_token_id": 0,
+  "pooling_strategy": "first",
   "position_embedding_type": "absolute",
   "query_length": 32,
   "sample_missing_docs": true,
-  "save_step": 939,
   "summary_activation": "gelu",
   "summary_last_dropout": 0.1,
   "summary_type": "first",