adityaprakhar committed (verified)
Commit 0a4c059
1 Parent(s): ffd4f7f

Training in progress, step 500

config.json CHANGED
@@ -1,9 +1,12 @@
 {
-  "_name_or_path": "microsoft/layoutlm-base-uncased",
+  "_name_or_path": "microsoft/layoutlm-base-cased",
   "architectures": [
     "LayoutLMForTokenClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 0,
+  "eos_token_id": 2,
+  "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
@@ -45,18 +48,18 @@
     "parties": 14,
     "telephone": 15
   },
-  "layer_norm_eps": 1e-12,
+  "layer_norm_eps": 1e-05,
   "max_2d_position_embeddings": 1024,
-  "max_position_embeddings": 512,
+  "max_position_embeddings": 514,
   "model_type": "layoutlm",
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
-  "output_past": true,
-  "pad_token_id": 0,
+  "pad_token_id": 1,
   "position_embedding_type": "absolute",
+  "tokenizer_class": "RobertaTokenizer",
   "torch_dtype": "float32",
   "transformers_version": "4.39.0.dev0",
-  "type_vocab_size": 2,
+  "type_vocab_size": 1,
   "use_cache": true,
-  "vocab_size": 30522
+  "vocab_size": 50265
 }
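The updated config points the checkpoint at microsoft/layoutlm-base-cased and declares RobertaTokenizer with a RoBERTa-style vocabulary (vocab_size 50265, pad_token_id 1, max_position_embeddings 514). A minimal loading sketch under assumptions: the repo id below is a hypothetical placeholder (the commit page does not name the repository), and tokenizer files are assumed to be present in the repo.

from transformers import AutoConfig, AutoTokenizer, LayoutLMForTokenClassification

# Hypothetical repo id; replace with the actual model repository.
repo_id = "adityaprakhar/<model-repo>"

config = AutoConfig.from_pretrained(repo_id)
# AutoTokenizer honors the "tokenizer_class": "RobertaTokenizer" field added in this commit.
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = LayoutLMForTokenClassification.from_pretrained(repo_id)

print(config.vocab_size)       # 50265 after this commit
print(tokenizer.pad_token_id)  # 1 after this commit
print(config.num_labels)       # 16, given label ids up to 15 ("parties": 14, "telephone": 15)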
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:11469270f146982cc43488070cfffd0032bf952be3aeeb4a5563e66e10f798a3
-size 450585896
+oid sha256:c83e1c6866bbe6514ea6df92aad4a354a2d6454fa1cfddddadb65ad48ae09a9b
+size 511239464
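The weights are stored as a Git LFS pointer, so the diff only shows the new sha256 oid and byte size. A sketch of verifying a downloaded copy against that pointer, reusing the hypothetical repo id from above:

import hashlib
import os
from huggingface_hub import hf_hub_download

# Hypothetical repo id; the commit page does not name the repository.
path = hf_hub_download("adityaprakhar/<model-repo>", "model.safetensors")

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

# Values taken from the LFS pointer in this commit.
assert sha.hexdigest() == "c83e1c6866bbe6514ea6df92aad4a354a2d6454fa1cfddddadb65ad48ae09a9b"
assert os.path.getsize(path) == 511239464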
runs/Mar15_05-42-14_91b63dc5e9e0/events.out.tfevents.1710481335.91b63dc5e9e0.375.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:19f585e52bd8a54f228ff794a69b01a99fca89232f2ce22e4bcd82bdcc367627
+size 5530
runs/Mar15_05-42-24_91b63dc5e9e0/events.out.tfevents.1710481345.91b63dc5e9e0.375.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:614cea363599591478b8b841d49fdb90dc1eb707e0c5af67cb8533c00857c429
+size 8092
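The two ADDED files under runs/ are TensorBoard event logs written during training. A sketch of reading the logged scalars with the tensorboard package, assuming the run directory has been downloaded locally at the path shown in the repo:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Path as it appears in the repo; adjust to wherever the files were downloaded.
ea = EventAccumulator("runs/Mar15_05-42-24_91b63dc5e9e0")
ea.Reload()

for tag in ea.Tags()["scalars"]:  # tag names depend on the trainer, e.g. "train/loss"
    events = ea.Scalars(tag)
    print(tag, [(e.step, e.value) for e in events[:3]])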
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:43c4e79aadf045e89982d6e95ea6046773154367159de0bae998da2375bfed0a
+oid sha256:75e0aa4d7ef5fa7f679513fd9693ea2003c2dbc8d1f3a3f1975eea1da8265893
 size 4856
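training_args.bin is a pickled TrainingArguments object saved by the Trainer; only its hash changed here (same 4856-byte size). A sketch of inspecting it after downloading the file locally; recent PyTorch versions need weights_only=False to unpickle non-tensor objects, so only do this for a trusted source:

import torch

# Pickled TrainingArguments dataclass, not a tensor checkpoint.
args = torch.load("training_args.bin", weights_only=False)
print(args.per_device_train_batch_size, args.learning_rate, args.max_steps)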