huseinzol05 committed on
Commit 73f5e7f
Parent: 0560c8f

Upload WhisperForConditionalGeneration

Files changed (3)
  1. config.json +4 -95
  2. generation_config.json +1 -3
  3. model.safetensors +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "malaysian-whisper-small-v2/checkpoint-55900",
+  "_name_or_path": "malaysian-whisper-small-v2/checkpoint-75600",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "apply_spec_augment": false,
@@ -7,10 +7,7 @@
     "WhisperForConditionalGeneration"
   ],
   "attention_dropout": 0.0,
-  "begin_suppress_tokens": [
-    220,
-    50257
-  ],
+  "begin_suppress_tokens": null,
   "bos_token_id": 50257,
   "classifier_proj_size": 256,
   "d_model": 768,
@@ -47,7 +44,7 @@
   "mask_time_length": 10,
   "mask_time_min_masks": 2,
   "mask_time_prob": 0.05,
-  "max_length": 448,
+  "max_length": null,
   "max_source_positions": 1500,
   "max_target_positions": 448,
   "median_filter_width": 7,
@@ -56,96 +53,8 @@
   "num_mel_bins": 80,
   "pad_token_id": 50257,
   "scale_embedding": false,
-  "suppress_tokens": [
-    1,
-    2,
-    7,
-    8,
-    9,
-    10,
-    14,
-    25,
-    26,
-    27,
-    28,
-    29,
-    31,
-    58,
-    59,
-    60,
-    61,
-    62,
-    63,
-    90,
-    91,
-    92,
-    93,
-    359,
-    503,
-    522,
-    542,
-    873,
-    893,
-    902,
-    918,
-    922,
-    931,
-    1350,
-    1853,
-    1982,
-    2460,
-    2627,
-    3246,
-    3253,
-    3268,
-    3536,
-    3846,
-    3961,
-    4183,
-    4667,
-    6585,
-    6647,
-    7273,
-    9061,
-    9383,
-    10428,
-    10929,
-    11938,
-    12033,
-    12331,
-    12562,
-    13793,
-    14157,
-    14635,
-    15265,
-    15618,
-    16553,
-    16604,
-    18362,
-    18956,
-    20075,
-    21675,
-    22520,
-    26130,
-    26161,
-    26435,
-    28279,
-    29464,
-    31650,
-    32302,
-    32470,
-    36865,
-    42863,
-    47425,
-    49870,
-    50254,
-    50258,
-    50360,
-    50361,
-    50362
-  ],
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.43.0.dev0",
+  "transformers_version": "4.46.3",
   "use_cache": true,
   "use_weighted_layer_sum": false,
   "vocab_size": 51865
generation_config.json CHANGED
@@ -250,8 +250,6 @@
     49870,
     50254,
     50258,
-    50358,
-    50359,
     50360,
     50361,
     50362
@@ -260,5 +258,5 @@
     "transcribe": 50359,
     "translate": 50358
   },
-  "transformers_version": "4.43.0.dev0"
+  "transformers_version": "4.46.3"
 }
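
Token ids 50358 and 50359 are the same ids this file maps to "translate" and "transcribe", so removing them from suppress_tokens means the task tokens are no longer blocked during generation. A hedged usage sketch, assuming a placeholder model path and a dummy 16 kHz waveform; language="ms" is only an example argument:

import numpy as np
from transformers import WhisperForConditionalGeneration, WhisperProcessor

path = "malaysian-whisper-small-v2"  # placeholder; substitute the actual repo id
processor = WhisperProcessor.from_pretrained(path)
model = WhisperForConditionalGeneration.from_pretrained(path)

audio = np.zeros(16000, dtype=np.float32)  # stand-in for a real 16 kHz mono recording
inputs = processor(audio, sampling_rate=16000, return_tensors="pt")

# Force the transcribe task; its token id (50359) is no longer suppressed.
generated = model.generate(inputs.input_features, language="ms", task="transcribe")
print(processor.batch_decode(generated, skip_special_tokens=True))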
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8d52e8a36b250a7b2b84f45b83b8aac9ff44fa2eee2da19545dfc7da0a382073
+oid sha256:a495d379e23e95b72825e362cf8e75484823a565ad285c7965347228d261c725
 size 483525680
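
Only the sha256 oid in the Git LFS pointer changes while the size stays at 483525680 bytes, consistent with same-shape weights from a later checkpoint. A small verification sketch, assuming the resolved model.safetensors has been downloaded next to the script:

import hashlib

expected = "a495d379e23e95b72825e362cf8e75484823a565ad285c7965347228d261c725"

h = hashlib.sha256()
with open("model.safetensors", "rb") as f:  # path to the resolved LFS file, not the pointer
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert h.hexdigest() == expected, "model.safetensors does not match the LFS pointer"
print("sha256 OK:", h.hexdigest())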