ArthurZ committed
Commit 3288e28
1 Parent(s): 9fd1db2

Upload config

Files changed (1)
  1. config.json +13 -13
config.json CHANGED
@@ -1,11 +1,11 @@
 {
-  "_name_or_path": "openai/whisper-medium",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
-  "architectures": [
-    "WhisperForConditionalGeneration"
-  ],
   "attention_dropout": 0.0,
+  "begin_suppress_tokens": [
+    220,
+    50257
+  ],
   "bos_token_id": 50257,
   "d_model": 1024,
   "decoder_attention_heads": 16,
@@ -19,14 +19,16 @@
   "encoder_layerdrop": 0.0,
   "encoder_layers": 24,
   "eos_token_id": 50257,
-  "feature_size": 1,
   "init_std": 0.02,
-  "input_channels": 1,
   "is_encoder_decoder": true,
   "max_source_positions": 1500,
   "max_target_positions": 448,
   "model_type": "whisper",
-  "non_speech_tokens": [
+  "num_hidden_layers": 24,
+  "num_mel_bins": 80,
+  "pad_token_id": 0,
+  "scale_embedding": false,
+  "suppress_tokens": [
     1,
     2,
     6,
@@ -110,13 +112,11 @@
     42863,
     47425,
     49870,
-    50254
+    50254,
+    50257,
+    50360,
+    50359
   ],
-  "num_hidden_layers": 24,
-  "num_mel_bins": 80,
-  "pad_token_id": 0,
-  "scale_embedding": false,
-  "torch_dtype": "float32",
   "transformers_version": "4.23.0.dev0",
   "use_cache": true,
   "vocab_size": 51865
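For reference, a minimal sketch (not part of the commit) of what the renamed and added keys do: "non_speech_tokens" becomes "suppress_tokens", and the new "begin_suppress_tokens" lists ids that are only blocked at the first generated position. The snippet assumes a recent transformers install; the value lists are abbreviated, and apply_suppression is a hypothetical helper written for illustration, not the library's own logits processing.

import torch
from transformers import WhisperConfig

# Values copied from the config in this commit; keys not repeated here fall back
# to WhisperConfig defaults, so this is an illustration, not an exact rebuild of
# the uploaded file.
config = WhisperConfig(
    vocab_size=51865,
    d_model=1024,
    encoder_layers=24,
    decoder_attention_heads=16,
    num_mel_bins=80,
    max_source_positions=1500,
    max_target_positions=448,
    begin_suppress_tokens=[220, 50257],                      # added by this commit
    suppress_tokens=[1, 2, 6, 50254, 50257, 50360, 50359],   # abbreviated; full list in the diff
)

# What the two fields mean at decode time (illustration only): suppressed ids
# are masked to -inf in the logits at every step, "begin" ids only at the very
# first generated step.
def apply_suppression(logits: torch.Tensor, step: int, cfg: WhisperConfig) -> torch.Tensor:
    logits = logits.clone()
    logits[:, cfg.suppress_tokens] = float("-inf")
    if step == 0:
        logits[:, cfg.begin_suppress_tokens] = float("-inf")
    return logits

dummy = torch.zeros(1, config.vocab_size)
masked = apply_suppression(dummy, step=0, cfg=config)
assert torch.isinf(masked[0, 220]).item()  # token 220 (a space) is blocked at step 0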