pere committed on
Commit bf871bf
1 Parent(s): 58ead6a
Files changed (3):
  1. config.json +1 -1
  2. generation_config.json +3 -139
  3. tf_model.h5 +3 -0
config.json CHANGED
@@ -147,7 +147,7 @@
     50362
   ],
   "torch_dtype": "float32",
-  "transformers_version": "4.36.1",
+  "transformers_version": "4.36.2",
   "use_cache": true,
   "use_weighted_layer_sum": false,
   "vocab_size": 51865
generation_config.json CHANGED
@@ -1,30 +1,5 @@
 {
-  "alignment_heads": [
-    [
-      2,
-      2
-    ],
-    [
-      3,
-      0
-    ],
-    [
-      3,
-      2
-    ],
-    [
-      3,
-      3
-    ],
-    [
-      3,
-      4
-    ],
-    [
-      3,
-      5
-    ]
-  ],
+  "_from_model_config": true,
   "begin_suppress_tokens": [
     220,
     50257
@@ -35,7 +10,7 @@
   "forced_decoder_ids": [
     [
       1,
-      50288
+      50259
     ],
     [
       2,
@@ -46,114 +21,8 @@
       50363
     ]
   ],
-  "is_multilingual": true,
-  "lang_to_id": {
-    "<|af|>": 50327,
-    "<|am|>": 50334,
-    "<|ar|>": 50272,
-    "<|as|>": 50350,
-    "<|az|>": 50304,
-    "<|ba|>": 50355,
-    "<|be|>": 50330,
-    "<|bg|>": 50292,
-    "<|bn|>": 50302,
-    "<|bo|>": 50347,
-    "<|br|>": 50309,
-    "<|bs|>": 50315,
-    "<|ca|>": 50270,
-    "<|cs|>": 50283,
-    "<|cy|>": 50297,
-    "<|da|>": 50285,
-    "<|de|>": 50261,
-    "<|el|>": 50281,
-    "<|en|>": 50259,
-    "<|es|>": 50262,
-    "<|et|>": 50307,
-    "<|eu|>": 50310,
-    "<|fa|>": 50300,
-    "<|fi|>": 50277,
-    "<|fo|>": 50338,
-    "<|fr|>": 50265,
-    "<|gl|>": 50319,
-    "<|gu|>": 50333,
-    "<|haw|>": 50352,
-    "<|ha|>": 50354,
-    "<|he|>": 50279,
-    "<|hi|>": 50276,
-    "<|hr|>": 50291,
-    "<|ht|>": 50339,
-    "<|hu|>": 50286,
-    "<|hy|>": 50312,
-    "<|id|>": 50275,
-    "<|is|>": 50311,
-    "<|it|>": 50274,
-    "<|ja|>": 50266,
-    "<|jw|>": 50356,
-    "<|ka|>": 50329,
-    "<|kk|>": 50316,
-    "<|km|>": 50323,
-    "<|kn|>": 50306,
-    "<|ko|>": 50264,
-    "<|la|>": 50294,
-    "<|lb|>": 50345,
-    "<|ln|>": 50353,
-    "<|lo|>": 50336,
-    "<|lt|>": 50293,
-    "<|lv|>": 50301,
-    "<|mg|>": 50349,
-    "<|mi|>": 50295,
-    "<|mk|>": 50308,
-    "<|ml|>": 50296,
-    "<|mn|>": 50314,
-    "<|mr|>": 50320,
-    "<|ms|>": 50282,
-    "<|mt|>": 50343,
-    "<|my|>": 50346,
-    "<|ne|>": 50313,
-    "<|nl|>": 50271,
-    "<|nn|>": 50342,
-    "<|no|>": 50288,
-    "<|oc|>": 50328,
-    "<|pa|>": 50321,
-    "<|pl|>": 50269,
-    "<|ps|>": 50340,
-    "<|pt|>": 50267,
-    "<|ro|>": 50284,
-    "<|ru|>": 50263,
-    "<|sa|>": 50344,
-    "<|sd|>": 50332,
-    "<|si|>": 50322,
-    "<|sk|>": 50298,
-    "<|sl|>": 50305,
-    "<|sn|>": 50324,
-    "<|so|>": 50326,
-    "<|sq|>": 50317,
-    "<|sr|>": 50303,
-    "<|su|>": 50357,
-    "<|sv|>": 50273,
-    "<|sw|>": 50318,
-    "<|ta|>": 50287,
-    "<|te|>": 50299,
-    "<|tg|>": 50331,
-    "<|th|>": 50289,
-    "<|tk|>": 50341,
-    "<|tl|>": 50348,
-    "<|tr|>": 50268,
-    "<|tt|>": 50351,
-    "<|uk|>": 50280,
-    "<|ur|>": 50290,
-    "<|uz|>": 50337,
-    "<|vi|>": 50278,
-    "<|yi|>": 50335,
-    "<|yo|>": 50325,
-    "<|zh|>": 50260
-  },
-  "language": "<|no|>",
-  "max_initial_timestamp_index": 1,
   "max_length": 448,
-  "no_timestamps_token_id": 50363,
   "pad_token_id": 50257,
-  "return_timestamps": false,
   "suppress_tokens": [
     1,
     2,
@@ -244,10 +113,5 @@
     50361,
     50362
   ],
-  "task": "transcribe",
-  "task_to_id": {
-    "transcribe": 50359,
-    "translate": 50358
-  },
-  "transformers_version": "4.36.1"
+  "transformers_version": "4.36.2"
 }
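
With "language", "task", "lang_to_id", and "task_to_id" removed from generation_config.json (and the default forced decoder token changed from <|no|> 50288 to <|en|> 50259), language and task now have to be requested at generation time. A minimal sketch of that, assuming a hypothetical repo id "NbAiLab/whisper-checkpoint" and dummy input features:

import torch
from transformers import WhisperProcessor, WhisperForConditionalGeneration

repo_id = "NbAiLab/whisper-checkpoint"  # placeholder, replace with the real model id
processor = WhisperProcessor.from_pretrained(repo_id)
model = WhisperForConditionalGeneration.from_pretrained(repo_id)

# Dummy log-mel features (batch=1, 80 mel bins, 3000 frames); in practice use
# processor(audio, sampling_rate=16000, return_tensors="pt").input_features
input_features = torch.zeros(1, 80, 3000)

# Build the decoder prompt for Norwegian transcription explicitly, since the
# trimmed generation config no longer pins it
forced_ids = processor.get_decoder_prompt_ids(language="norwegian", task="transcribe")
predicted_ids = model.generate(input_features, forced_decoder_ids=forced_ids)
print(processor.batch_decode(predicted_ids, skip_special_tokens=True))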
tf_model.h5 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:36c1e873b00c1f8ecda0bafa5ee8d21e4afe8f561aeb85abb752b20b310db15c
+size 151253640
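
The new tf_model.h5 (a Git LFS pointer to the ~151 MB TensorFlow weights) makes the checkpoint loadable from TensorFlow as well. A minimal loading sketch, again with the hypothetical repo id used above:

from transformers import TFWhisperForConditionalGeneration

# Picks up tf_model.h5 from the repo; no PyTorch-to-TF conversion needed
tf_model = TFWhisperForConditionalGeneration.from_pretrained("NbAiLab/whisper-checkpoint")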