cmeraki committed
Commit c203660
1 parent: 114d7e4

commit files to HF hub

Files changed (2):
  1. config.json (+3 -3)
  2. generation_config.json (+1 -1)
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "cmeraki/mimi_124m_8cb",
+  "_name_or_path": "11mlabs/indri-0.1-124m-tts",
   "activation_function": "gelu",
   "architectures": [
     "GPT2LMHeadModel"
@@ -8,7 +8,7 @@
   "bos_token_id": 50256,
   "custom_pipelines": {
     "indri-tts": {
-      "impl": "tts_pipeline.IndriTTSPipeline",
+      "impl": "11mlabs/indri-0.1-124m-tts--tts_pipeline.IndriTTSPipeline",
       "pt": [
         "AutoModelForCausalLM"
       ],
@@ -36,7 +36,7 @@
   "summary_type": "cls_index",
   "summary_use_proj": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.46.0",
+  "transformers_version": "4.46.3",
   "use_bias": false,
   "use_cache": true,
   "vocab_size": 70016
generation_config.json CHANGED
@@ -6,5 +6,5 @@
   "max_length": 1024,
   "temperature": 0.5,
   "top_k": 15,
-  "transformers_version": "4.46.0"
+  "transformers_version": "4.46.3"
 }
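
These sampling values are the defaults that generate() picks up unless overridden per call. A quick sketch to confirm what GenerationConfig resolves for this repo (repo id assumed from the config.json diff above):

# Fetch the generation defaults shipped with the model and inspect the
# fields this file sets; they apply to every generate() call by default.
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("11mlabs/indri-0.1-124m-tts")
print(gen.max_length, gen.temperature, gen.top_k)  # expected: 1024 0.5 15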