aktheroy committed on
Commit
2a54874
·
verified ·
1 Parent(s): 10c93c5

Upload 8 files

Browse files
Files changed (3) hide show
  1. README.md +7 -4
  2. special_tokens_map.json +5 -35
  3. tokenizer_config.json +1 -1
README.md CHANGED
@@ -1,6 +1,6 @@
1
  ---
2
- library_name: transformers
3
- tags: []
4
  ---
5
 
6
  # Model Card for Model ID
@@ -15,7 +15,7 @@ tags: []
15
 
16
  <!-- Provide a longer summary of what this model is. -->
17
 
18
- This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated.
19
 
20
  - **Developed by:** [More Information Needed]
21
  - **Funded by [optional]:** [More Information Needed]
@@ -196,4 +196,7 @@ Carbon emissions can be estimated using the [Machine Learning Impact calculator]
196
 
197
  ## Model Card Contact
198
 
199
- [More Information Needed]
 
 
 
 
1
  ---
2
+ base_model: facebook/m2m100_418M
3
+ library_name: peft
4
  ---
5
 
6
  # Model Card for Model ID
 
15
 
16
  <!-- Provide a longer summary of what this model is. -->
17
 
18
+
19
 
20
  - **Developed by:** [More Information Needed]
21
  - **Funded by [optional]:** [More Information Needed]
 
196
 
197
  ## Model Card Contact
198
 
199
+ [More Information Needed]
200
+ ### Framework versions
201
+
202
+ - PEFT 0.14.0
special_tokens_map.json CHANGED
@@ -101,39 +101,9 @@
101
  "__zh__",
102
  "__zu__"
103
  ],
104
- "bos_token": {
105
- "content": "<s>",
106
- "lstrip": false,
107
- "normalized": false,
108
- "rstrip": false,
109
- "single_word": false
110
- },
111
- "eos_token": {
112
- "content": "</s>",
113
- "lstrip": false,
114
- "normalized": false,
115
- "rstrip": false,
116
- "single_word": false
117
- },
118
- "pad_token": {
119
- "content": "<pad>",
120
- "lstrip": false,
121
- "normalized": false,
122
- "rstrip": false,
123
- "single_word": false
124
- },
125
- "sep_token": {
126
- "content": "</s>",
127
- "lstrip": false,
128
- "normalized": false,
129
- "rstrip": false,
130
- "single_word": false
131
- },
132
- "unk_token": {
133
- "content": "<unk>",
134
- "lstrip": false,
135
- "normalized": false,
136
- "rstrip": false,
137
- "single_word": false
138
- }
139
  }
 
101
  "__zh__",
102
  "__zu__"
103
  ],
104
+ "bos_token": "<s>",
105
+ "eos_token": "</s>",
106
+ "pad_token": "<pad>",
107
+ "sep_token": "</s>",
108
+ "unk_token": "<unk>"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
109
  }
tokenizer_config.json CHANGED
@@ -946,7 +946,7 @@
946
  "sep_token": "</s>",
947
  "sp_model_kwargs": {},
948
  "src_lang": "en",
949
- "tgt_lang": "el",
950
  "tokenizer_class": "M2M100Tokenizer",
951
  "unk_token": "<unk>"
952
  }
 
946
  "sep_token": "</s>",
947
  "sp_model_kwargs": {},
948
  "src_lang": "en",
949
+ "tgt_lang": "hi",
950
  "tokenizer_class": "M2M100Tokenizer",
951
  "unk_token": "<unk>"
952
  }