abecode committed · verified
Commit d1d0964 · Parent: 73a3fbd

Training in progress, step 500
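This commit message is the one the transformers Trainer writes automatically when it pushes an intermediate checkpoint to the Hub. A minimal sketch of a training setup that would produce such a commit every 500 steps (the output directory is a placeholder, and `model` / `train_dataset` are assumed to be defined elsewhere; none of this is taken from the repo itself):

from transformers import Trainer, TrainingArguments

args = TrainingArguments(
    output_dir="my-distilbert-run",  # hypothetical output dir / Hub repo name
    save_steps=500,                  # checkpoint every 500 steps
    push_to_hub=True,                # each save becomes a "Training in progress, step N" commit
    report_to="tensorboard",         # writes the runs/.../events.out.tfevents.* file seen below
)
trainer = Trainer(model=model, args=args, train_dataset=train_dataset)
trainer.train()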

model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8d7e9b95eeaa34a72628fbfe0d88e1159f304a65d854282166250b1da155dddb
+oid sha256:a562be820d20114f2ded7182fee700526b0774ea6ff55f7109fa3d7713a38a0b
 size 267844872
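Both versions of model.safetensors are Git LFS pointer files: only the sha256 object id changes between checkpoints, while the payload stays at 267,844,872 bytes, consistent with a DistilBERT-base-sized model in float32. After downloading the actual weights, the pointer's oid can be checked against the file; a minimal sketch (the local path is an assumption):

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file so the ~268 MB of weights are never fully in memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "a562be820d20114f2ded7182fee700526b0774ea6ff55f7109fa3d7713a38a0b"  # oid from this commit
assert sha256_of("model.safetensors") == expected, "downloaded weights do not match the LFS pointer"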
runs/Sep26_21-31-47_ce04dca57a00/events.out.tfevents.1727386320.ce04dca57a00.5261.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b6da9aa38aa8aeddb1aec2ecff29d0160af6b016efa5cc1b9b41e7a8b6922be2
+size 5917
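The added file is a TensorBoard event log (itself stored through LFS). Once fetched, its scalars can be read without launching TensorBoard, for example with the event accumulator; the "train/loss" tag is an assumption about what the Trainer logged:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Sep26_21-31-47_ce04dca57a00")
acc.Reload()                    # parse the events.out.tfevents.* files in the directory
print(acc.Tags()["scalars"])    # list the scalar tags that were actually logged
for event in acc.Scalars("train/loss"):  # hypothetical tag name
    print(event.step, event.value)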
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -43,13 +43,13 @@
 },
 "clean_up_tokenization_spaces": true,
 "cls_token": "[CLS]",
-"do_lower_case": false,
+"do_lower_case": true,
 "mask_token": "[MASK]",
 "model_max_length": 512,
 "pad_token": "[PAD]",
 "sep_token": "[SEP]",
 "strip_accents": null,
 "tokenize_chinese_chars": true,
-"tokenizer_class": "BertTokenizer",
+"tokenizer_class": "DistilBertTokenizer",
 "unk_token": "[UNK]"
 }
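Two fields flip in this hunk: do_lower_case goes from false to true and tokenizer_class from BertTokenizer to DistilBertTokenizer, so text will now be lowercased before WordPiece tokenization. A quick way to see the effect, assuming a local clone of this repo (the path is a placeholder):

from transformers import DistilBertTokenizer

tok = DistilBertTokenizer.from_pretrained(".")  # hypothetical path to a local checkout
print(tok.do_lower_case)            # True after this commit
print(tok.tokenize("Hello World"))  # e.g. ['hello', 'world'] once lowercasing is on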
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c834580bfe88cc56c1bfeaf060f96afa219228a89c140fa81247ad74b6d7977e
+oid sha256:bb238ad8145ea22df9d83edf0fed6431d7953adae6ec595cb3cabfdd02b9bbfc
 size 5240
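training_args.bin is a pickled TrainingArguments object saved by the Trainer, which is why its hash changes even though the size does not. It can be inspected with torch.load; note that unpickling executes code, so only do this for repos you trust (the printed fields are standard TrainingArguments attributes):

import torch

# weights_only=False is needed on recent PyTorch because this is a pickled
# TrainingArguments object, not a plain tensor file.
args = torch.load("training_args.bin", weights_only=False)
print(args.save_steps, args.learning_rate, args.num_train_epochs)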
vocab.txt CHANGED
The diff for this file is too large to render. See raw diff