codingfaf committed on
Commit
4ca8181
1 Parent(s): 897e804

Training in progress epoch 0

Files changed (3)
  1. README.md +5 -4
  2. tf_model.h5 +1 -1
  3. tokenizer.json +2 -14
README.md CHANGED
@@ -15,7 +15,8 @@ probably proofread and complete it, then remove this comment. -->
 
  This model is a fine-tuned version of [t5-small](https://huggingface.co/t5-small) on an unknown dataset.
  It achieves the following results on the evaluation set:
- - Train Loss: 2.9839
+ - Train Loss: 3.2989
+ - Validation Loss: 3.0574
  - Epoch: 0
 
  ## Model description
@@ -40,9 +41,9 @@ The following hyperparameters were used during training:
 
  ### Training results
 
- | Train Loss | Epoch |
- |:----------:|:-----:|
- | 2.9839 | 0 |
+ | Train Loss | Validation Loss | Epoch |
+ |:----------:|:---------------:|:-----:|
+ | 3.2989 | 3.0574 | 0 |
 
 
  ### Framework versions
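The commit also refreshes the TensorFlow weights (tf_model.h5) that this card describes. As a minimal usage sketch, not part of the commit itself, the checkpoint can be loaded with the standard `transformers` TF classes; the repository id below is a placeholder, since the actual repo name is not shown in this view.

```python
# Minimal sketch, not part of the commit. "codingfaf/<model-name>" is a
# placeholder for the actual repository this commit belongs to.
from transformers import AutoTokenizer, TFAutoModelForSeq2SeqLM

repo_id = "codingfaf/<model-name>"  # hypothetical: the real repo id is not shown in the diff

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = TFAutoModelForSeq2SeqLM.from_pretrained(repo_id)  # loads the updated tf_model.h5

# t5-small is a text-to-text model; the task prefix here is illustrative only.
inputs = tokenizer("summarize: The quick brown fox jumps over the lazy dog.", return_tensors="tf")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```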
tf_model.h5 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:d72b464f0dbd86ca129d547c91bb5c62afc624266f0a953db6afb3e9bd114c0d
+ oid sha256:bf3f155d925c793ecb74d0abe55fcb39ec43c25e4e1e0e0b7bb3deddae980e6b
  size 373902664
tokenizer.json CHANGED
@@ -1,19 +1,7 @@
  {
    "version": "1.0",
-   "truncation": {
-     "direction": "Right",
-     "max_length": 128,
-     "strategy": "LongestFirst",
-     "stride": 0
-   },
-   "padding": {
-     "strategy": "BatchLongest",
-     "direction": "Right",
-     "pad_to_multiple_of": null,
-     "pad_id": 0,
-     "pad_type_id": 0,
-     "pad_token": "<pad>"
-   },
+   "truncation": null,
+   "padding": null,
    "added_tokens": [
      {
        "id": 0,