naghamghanim committed on
Commit
a8e607d
·
1 Parent(s): 3d53eb5
Files changed (4) hide show
  1. args.json +51 -0
  2. checkpoint_3.pt +3 -0
  3. predictions.txt +0 -0
  4. tag_vocab.pkl +3 -0
args.json ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "output_path": "/var/home/nhamad/WojoodNER/output-flat",
3
+ "train_path": "/var/home/nhamad/WojoodNER/flat/train.txt",
4
+ "val_path": "/var/home/nhamad/WojoodNER/flat/val.txt",
5
+ "test_path": "/var/home/nhamad/WojoodNER/flat/test.txt",
6
+ "bert_model": "aubmindlab/bert-base-arabertv2",
7
+ "gpus": [
8
+ 0
9
+ ],
10
+ "log_interval": 10,
11
+ "batch_size": 8,
12
+ "num_workers": 0,
13
+ "data_config": {
14
+ "fn": "arabiner.data.datasets.DefaultDataset",
15
+ "kwargs": {
16
+ "max_seq_len": 512
17
+ }
18
+ },
19
+ "trainer_config": {
20
+ "fn": "arabiner.trainers.BertTrainer",
21
+ "kwargs": {
22
+ "max_epochs": 50
23
+ }
24
+ },
25
+ "network_config": {
26
+ "fn": "arabiner.nn.BertSeqTagger",
27
+ "kwargs": {
28
+ "dropout": 0.1,
29
+ "bert_model": "aubmindlab/bert-base-arabertv2",
30
+ "num_labels": 42
31
+ }
32
+ },
33
+ "optimizer": {
34
+ "fn": "torch.optim.AdamW",
35
+ "kwargs": {
36
+ "lr": 0.0001
37
+ }
38
+ },
39
+ "lr_scheduler": {
40
+ "fn": "torch.optim.lr_scheduler.ExponentialLR",
41
+ "kwargs": {
42
+ "gamma": 1
43
+ }
44
+ },
45
+ "loss": {
46
+ "fn": "torch.nn.CrossEntropyLoss",
47
+ "kwargs": {}
48
+ },
49
+ "overwrite": false,
50
+ "seed": 1
51
+ }
checkpoint_3.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e62f6ccc6df2e5683edf4d1adcf1f01e0621f4d156fd8f8f44f25cf4a84649fc
3
+ size 1618243112
predictions.txt ADDED
The diff for this file is too large to render. See raw diff
 
tag_vocab.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0152df42d8ffed2a49a9647db1c1644c1aeb8dbcba4c21cb58c27c0c9b85db44
3
+ size 3778