Commit f00b0b6
Parent(s): e8ea55a

update config.json
config.json  CHANGED  (+2 -2)

@@ -12,7 +12,7 @@
   "llm_attn_implementation": "flash_attention_2",
   "llm_config": {
     "_attn_implementation_autoset": true,
-    "_name_or_path": "Qwen/Qwen2.5-
+    "_name_or_path": "Qwen/Qwen2.5-3B-Instruct",
     "add_cross_attention": false,
     "architectures": [
       "Qwen2ForCausalLM"
@@ -105,7 +105,7 @@
     "architectures": null,
     "backbone_config": {
       "_attn_implementation_autoset": true,
-      "_name_or_path": "apple/aimv2-
+      "_name_or_path": "apple/aimv2-huge-patch14-448",
       "add_cross_attention": false,
       "architectures": [
         "AIMv2Model"
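
As a quick sanity check (not part of the commit; the local filename is an assumption), a minimal Python sketch that walks the updated config.json and prints every "_name_or_path" entry, which after this change should include "Qwen/Qwen2.5-3B-Instruct" under llm_config and "apple/aimv2-huge-patch14-448" under backbone_config:

    import json

    # Sketch only: recursively collect "_name_or_path" values from the
    # downloaded config.json to verify the two edited entries above.
    def find_name_or_path(node, prefix=""):
        if isinstance(node, dict):
            for key, value in node.items():
                path = f"{prefix}.{key}" if prefix else key
                if key == "_name_or_path":
                    print(f"{path} = {value!r}")
                else:
                    find_name_or_path(value, path)
        elif isinstance(node, list):
            for i, item in enumerate(node):
                find_name_or_path(item, f"{prefix}[{i}]")

    with open("config.json") as f:  # assumed local copy of the repo's config.json
        find_name_or_path(json.load(f))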