Samuael committed (verified)
Commit 5db001c · Parent(s): 7977595

Training in progress, step 500

Files changed (3):
  1. config.json (+4 -3)
  2. model.safetensors (+2 -2)
  3. training_args.bin (+2 -2)
config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "_name_or_path": "Samuael/ethiopic-sec2sec-tigrinya",
   "architectures": [
     "T5ForConditionalGeneration"
   ],
@@ -15,16 +16,16 @@
   "is_encoder_decoder": true,
   "is_gated_act": false,
   "layer_norm_epsilon": 1e-09,
-  "max_length": 512,
+  "max_length": null,
   "model_type": "t5",
   "num_decoder_layers": 8,
   "num_heads": 8,
-  "num_layers": 10,
+  "num_layers": 6,
   "pad_token_id": 0,
   "relative_attention_max_distance": 128,
   "relative_attention_num_buckets": 32,
   "torch_dtype": "float32",
-  "transformers_version": "4.44.2",
+  "transformers_version": "4.46.3",
   "use_cache": true,
   "vocab_size": 15100
 }
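
Taken together, the hunk records the Hub repo in "_name_or_path", lifts the hard "max_length": 512 cap, shrinks the encoder from 10 to 6 layers (num_layers; the 8 decoder layers are unchanged), and bumps the pinned transformers version to 4.46.3. A quick way to confirm the new values is to load the config from the Hub; this is a minimal sketch, assuming the Samuael/ethiopic-sec2sec-tigrinya repo is public and that the short commit hash from the header resolves as a revision (the full hash always does):

# Minimal sketch, not part of this commit: verify the updated config.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(
    "Samuael/ethiopic-sec2sec-tigrinya",
    revision="5db001c",  # pin to this commit; omit to get the latest
)
assert config.num_layers == 6          # was 10
assert config.num_decoder_layers == 8  # unchanged
print(config.transformers_version)     # "4.46.3"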
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:432de7a78e7296a8f2f3cde555405a9458b1fdd5b997a688ffd172d30bfd41fa
-size 291090192
+oid sha256:ed7db500a336eb91bfc135c27f94f55116c0f27d4d22df914291fc5f631d7810
+size 240738368
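
The new blob is 291,090,192 − 240,738,368 = 50,351,824 bytes smaller, i.e. roughly 12.6 M float32 parameters at 4 bytes each (per the "torch_dtype": "float32" above), consistent with the encoder shrinking from 10 to 6 layers in config.json. Since the LFS oid is just the SHA-256 of the blob, a download can be checked against this pointer; a minimal sketch, assuming huggingface_hub is installed and the repo is public:

# Minimal sketch, not part of this commit: verify a download against the pointer.
import hashlib
import os
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    "Samuael/ethiopic-sec2sec-tigrinya",
    "model.safetensors",
    revision="5db001c",  # pin to this commit so the pointer above applies
)
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)
print(h.hexdigest() == "ed7db500a336eb91bfc135c27f94f55116c0f27d4d22df914291fc5f631d7810")
print(os.path.getsize(path) == 240738368)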
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b16424f44ff60a6a6eed109b3eac228c282406e761e270523b3b1eb704abba0a
-size 5304
+oid sha256:0e5c54bb775b754ca568de85f6cbc4d3a88b148e5792d67fb6391aae6bb9ecf9
+size 5432
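
Unlike the weights, training_args.bin is a pickled TrainingArguments object that the Trainer saves alongside each checkpoint, which is why it is only a few kilobytes; the 5,304 → 5,432 byte change presumably reflects re-serialization under the newer transformers version. It can be inspected directly; a minimal sketch, assuming transformers is installed locally (needed to unpickle the class) and, since unpickling executes arbitrary code, that you trust the file:

# Minimal sketch, not part of this commit: inspect the pickled training arguments.
import torch
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    "Samuael/ethiopic-sec2sec-tigrinya", "training_args.bin", revision="5db001c"
)
# weights_only=False because this is an arbitrary pickle, not a tensor file;
# only do this for files from a source you trust.
args = torch.load(path, weights_only=False)
print(args.save_steps)  # the "step 500" commit message hints at save_steps=500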