totallynotbrent committed
Commit 819436e · 1 Parent(s): 62e5164

Update config.json

Files changed (1): config.json (+5 -5)
config.json CHANGED

@@ -2,7 +2,7 @@
   "_name_or_path": "microsoft/DialoGPT-large",
   "activation_function": "gelu_new",
   "architectures": [
-    "GPT3LMHeadModel"
+    "GPT2LMHeadModel"
   ],
   "attn_pdrop": 0.1,
   "bos_token_id": 50256,
@@ -10,7 +10,7 @@
   "eos_token_id": 50256,
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
-  "model_type": "gpt3",
+  "model_type": "gpt2",
   "n_ctx": 1024,
   "n_embd": 1280,
   "n_head": 20,
@@ -28,11 +28,11 @@
   "summary_use_proj": true,
   "task_specific_params": {
     "conversational": {
-      "max_length": 10000
+      "max_length": 1000
     }
   },
   "torch_dtype": "float32",
   "transformers_version": "4.27.4",
   "use_cache": true,
-  "vocab_size": 150257
-}
+  "vocab_size": 50257
+}
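
A quick way to sanity-check the corrected config is to load it with transformers' AutoConfig and confirm the values touched by this commit. This is a minimal sketch; the repo id below is a placeholder (the commit does not name the repository), so substitute the actual repo id or a local directory containing this config.json.

from transformers import AutoConfig

# Placeholder path: replace with the actual repo id or a local checkout
# that contains the config.json edited in this commit.
cfg = AutoConfig.from_pretrained("path/to/this-repo")

# Each assertion mirrors a value corrected by this commit.
assert cfg.model_type == "gpt2"                      # was "gpt3"
assert cfg.architectures == ["GPT2LMHeadModel"]      # was ["GPT3LMHeadModel"]
assert cfg.vocab_size == 50257                       # was 150257
assert cfg.task_specific_params["conversational"]["max_length"] == 1000  # was 10000

With the old values, AutoConfig.from_pretrained would fail outright: "gpt3" is not a registered model_type in transformers, and vocab_size 150257 would not match the checkpoint's embedding matrix, so the fix restores the standard GPT-2 settings that DialoGPT-large actually uses.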