balaguru.s committed · fad03c5
Parent(s): 026f4bd
added tokenizer in the config file
Checkpoints_1_6_M/gpt2-python-language-model/checkpoint-5130/config.json
CHANGED
@@ -12,6 +12,7 @@
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt2",
+  "tokenizer_name" : "gpt2",
   "n_ctx": 1024,
   "n_embd": 768,
   "n_head": 12,
config.json
CHANGED
@@ -12,6 +12,7 @@
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt2",
+  "tokenizer_name" : "gpt2",
   "n_ctx": 1024,
   "n_embd": 768,
   "n_head": 12,
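A minimal sketch (not part of this commit) of how the new "tokenizer_name" entry could be consumed when loading the model. Note that "tokenizer_name" is not a standard transformers config key, so it is not read automatically; the sketch looks it up explicitly and falls back to "gpt2". The repo_id value is a placeholder, not the actual repository path.

# Hypothetical usage sketch: load the config, read the custom
# "tokenizer_name" key added in this commit, and build tokenizer + model.
from transformers import AutoConfig, AutoTokenizer, AutoModelForCausalLM

repo_id = "path/or/repo-id-of-this-model"  # placeholder

config = AutoConfig.from_pretrained(repo_id)
# Extra keys in config.json are kept as attributes on the config object,
# so the value added here is available as config.tokenizer_name.
tokenizer_name = getattr(config, "tokenizer_name", "gpt2")

tokenizer = AutoTokenizer.from_pretrained(tokenizer_name)
model = AutoModelForCausalLM.from_pretrained(repo_id, config=config)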