seongwoon committed
Commit 6f689d8 · 1 Parent(s): 39ff6ff
Files changed (1): config.json (+2, -3)
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "allenai/specter2",
+  "_name_or_path": "/mnt/user2/.cache/torch/sentence_transformers/allenai_specter2",
   "adapters": {
     "adapters": {},
     "config_map": {},
@@ -7,7 +7,7 @@
     "fusions": {}
   },
   "architectures": [
-    "BertForMaskedLM"
+    "BertModel"
   ],
   "attention_probs_dropout_prob": 0.1,
   "classifier_dropout": null,
@@ -21,7 +21,6 @@
   "model_type": "bert",
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
-  "output_hidden_states": true,
   "pad_token_id": 0,
   "position_embedding_type": "absolute",
   "torch_dtype": "float32",