tnieva committed
Commit a8f0e73 · 1 Parent(s): 78acd20

End of training

config.json CHANGED
@@ -11,7 +11,7 @@
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt2",
-  "n_ctx": 32,
+  "n_ctx": 16,
   "n_embd": 768,
   "n_head": 12,
   "n_inner": null,
@@ -35,5 +35,5 @@
   "torch_dtype": "float32",
   "transformers_version": "4.21.3",
   "use_cache": true,
-  "vocab_size": 50000
+  "vocab_size": 368
 }
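
This commit shrinks the model: the context window (`n_ctx`) drops from 32 to 16 tokens and the vocabulary from 50,000 to 368 entries. A minimal sketch for sanity-checking the new values after cloning the repo (the local path is an assumption; point it at your checkout):

```python
# Minimal sketch: confirm the config values recorded in this commit.
# Assumes a local clone of this repo in the current directory.
from transformers import AutoConfig

config = AutoConfig.from_pretrained(".")  # reads config.json
assert config.n_ctx == 16
assert config.vocab_size == 368
print(config.model_type, config.n_embd, config.n_head)  # gpt2 768 12
```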
merges.txt CHANGED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:61df43402d0db70fef3e7023836a171186ea0fd713961f61e4d288164c96253f
-size 509604585
+oid sha256:60a0246073f5eeadf0d66ad3da4d22791b12113aa4c78930a07abf342f2242bd
+size 357135081
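
The weights live in Git LFS, so the diff only swaps the pointer. The 152,469,504-byte drop (509,604,585 → 357,135,081) is exactly the smaller token-embedding matrix: (50000 − 368) × 768 params × 4 bytes. A minimal sketch for checking a pulled file against the new pointer, assuming `git lfs pull` has materialized it:

```python
# Minimal sketch: recompute the SHA-256 recorded in the LFS pointer.
import hashlib

def sha256_of(path, chunk=1 << 20):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(chunk), b""):
            h.update(block)
    return h.hexdigest()

print(sha256_of("pytorch_model.bin"))
# expected: 60a0246073f5eeadf0d66ad3da4d22791b12113aa4c78930a07abf342f2242bd
```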
runs/Sep14_14-15-59_699859ccfd6e/1663164965.6651332/events.out.tfevents.1663164965.699859ccfd6e.55.5 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:570cfab0a4c5ffd2deb4a07314faa56b9e105c6067546388dc86bd523123a7a7
+size 5391
runs/Sep14_14-15-59_699859ccfd6e/events.out.tfevents.1663164965.699859ccfd6e.55.4 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:19393840888817520054fe358c5269751ae2a42a9ac14a14603f3d48f7a43f29
+size 4239
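
Both added files are TensorBoard event logs from this training run. A minimal sketch for reading them locally, assuming the `tensorboard` package is installed and the run directory has been pulled via LFS:

```python
# Minimal sketch: list and dump the scalars logged during training.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Sep14_14-15-59_699859ccfd6e")
acc.Reload()
for tag in acc.Tags()["scalars"]:
    print(tag, [e.value for e in acc.Scalars(tag)][:5])
```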
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -3,7 +3,7 @@
   "bos_token": "<|endoftext|>",
   "eos_token": "<|endoftext|>",
   "model_max_length": 1024,
-  "name_or_path": "huggingface-course/code-search-net-tokenizer",
+  "name_or_path": "tnieva/test-tokenizer",
   "special_tokens_map_file": null,
   "tokenizer_class": "GPT2Tokenizer",
   "unk_token": "<|endoftext|>"
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:387e8e3a1052467461437e3ff42e801dfd5ea2ca751e70794a63295c5bda5a81
+oid sha256:1628c8dc73059969ca473122bd2e5e88b98eddc98034f3830bcbd394d9b23ca8
 size 3311
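
`training_args.bin` is a pickled `TrainingArguments` object; the size is unchanged (3311 bytes) but the hash differs, so some argument values changed between runs. A minimal sketch for inspecting it; it is a plain pickle, so only load files you trust, ideally with `transformers`/`torch` versions close to the 4.21.3 used here:

```python
# Minimal sketch: unpickle and inspect the recorded training arguments.
import torch

args = torch.load("training_args.bin")
print(type(args).__name__)  # expected: TrainingArguments
print(args.output_dir, args.num_train_epochs, args.per_device_train_batch_size)
```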
vocab.json CHANGED
The diff for this file is too large to render. See raw diff
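
merges.txt, tokenizer.json, and vocab.json were rewritten wholesale, presumably rebuilt for the new 368-token vocabulary, so the viewer can only link to the raw files. A minimal sketch for fetching one at exactly this commit; `REPO_ID` is a placeholder for this model's actual Hub id:

```python
# Minimal sketch: download a too-large-to-render file at commit a8f0e73.
from huggingface_hub import hf_hub_download

REPO_ID = "tnieva/<this-model>"  # hypothetical; substitute the real repo id
path = hf_hub_download(repo_id=REPO_ID, filename="vocab.json", revision="a8f0e73")
print(path)
```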