eclec committed on
Commit
720eab9
·
1 Parent(s): 0cdab44

Training in progress, epoch 0

Browse files
Files changed (3) hide show
  1. config.json +17 -5
  2. pytorch_model.bin +2 -2
  3. training_args.bin +1 -1
config.json CHANGED
@@ -1,11 +1,23 @@
1
  {
2
- "_name_or_path": "allenai/longformer-base-4096",
3
  "architectures": [
4
  "LongformerForSequenceClassification"
5
  ],
6
  "attention_mode": "longformer",
7
  "attention_probs_dropout_prob": 0.1,
8
  "attention_window": [
 
 
 
 
 
 
 
 
 
 
 
 
9
  512,
10
  512,
11
  512,
@@ -24,14 +36,14 @@
24
  "gradient_checkpointing": false,
25
  "hidden_act": "gelu",
26
  "hidden_dropout_prob": 0.1,
27
- "hidden_size": 768,
28
  "id2label": {
29
  "0": "NOT_REJECTED",
30
  "1": "REJECTED"
31
  },
32
  "ignore_attention_mask": false,
33
  "initializer_range": 0.02,
34
- "intermediate_size": 3072,
35
  "label2id": {
36
  "NOT_REJECTED": 0,
37
  "REJECTED": 1
@@ -39,8 +51,8 @@
39
  "layer_norm_eps": 1e-05,
40
  "max_position_embeddings": 4098,
41
  "model_type": "longformer",
42
- "num_attention_heads": 12,
43
- "num_hidden_layers": 12,
44
  "onnx_export": false,
45
  "pad_token_id": 1,
46
  "problem_type": "single_label_classification",
 
1
  {
2
+ "_name_or_path": "allenai/longformer-large-4096",
3
  "architectures": [
4
  "LongformerForSequenceClassification"
5
  ],
6
  "attention_mode": "longformer",
7
  "attention_probs_dropout_prob": 0.1,
8
  "attention_window": [
9
+ 512,
10
+ 512,
11
+ 512,
12
+ 512,
13
+ 512,
14
+ 512,
15
+ 512,
16
+ 512,
17
+ 512,
18
+ 512,
19
+ 512,
20
+ 512,
21
  512,
22
  512,
23
  512,
 
36
  "gradient_checkpointing": false,
37
  "hidden_act": "gelu",
38
  "hidden_dropout_prob": 0.1,
39
+ "hidden_size": 1024,
40
  "id2label": {
41
  "0": "NOT_REJECTED",
42
  "1": "REJECTED"
43
  },
44
  "ignore_attention_mask": false,
45
  "initializer_range": 0.02,
46
+ "intermediate_size": 4096,
47
  "label2id": {
48
  "NOT_REJECTED": 0,
49
  "REJECTED": 1
 
51
  "layer_norm_eps": 1e-05,
52
  "max_position_embeddings": 4098,
53
  "model_type": "longformer",
54
+ "num_attention_heads": 16,
55
+ "num_hidden_layers": 24,
56
  "onnx_export": false,
57
  "pad_token_id": 1,
58
  "problem_type": "single_label_classification",
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e9f74542581bbccf1194256a309f56f0b277d6ec9ccc051008deac0a7767d919
3
- size 594739281
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6d5bb47fee091229e8e149568c13adcaf73a02567f6cd9f2ae2b52eba9fa7a5d
3
+ size 1738600369
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b37d494b740eb8716d42ac6cb1ff9778e0ad55c2b3c076e78b2d2b017f1bfcc6
3
  size 3963
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7455f37f756b8941e475a8752b273b1b7d76b12b9d6585e25abd9ca1fc85b31c
3
  size 3963