thangdao committed
Commit bf36572 · verified · 1 Parent(s): c7169f9

Upload config

Files changed (1)
config.json +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "merged_model/qwen_vl_chat_merge_lora-3000/",
+  "_name_or_path": "qwen_vl_chat_merge_lora-3000",
   "architectures": [
     "QWenLMHeadModel"
   ],
@@ -30,7 +30,7 @@
   "tie_word_embeddings": false,
   "tokenizer_type": "QWenTokenizer",
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.38.2",
+  "transformers_version": "4.32.0",
   "use_cache": true,
   "use_dynamic_ntk": true,
   "use_flash_attn": false,