John6666 committed on
Commit 2e3965e
1 Parent(s): 23aa4a5

Upload 6 files

Files changed (5)
  1. README.md +2 -0
  2. constants.py +17 -5
  3. llmdolphin.py +42 -0
  4. requirements.txt +1 -1
  5. utils.py +29 -15
README.md CHANGED
@@ -9,6 +9,8 @@ app_file: app.py
 license: mit
 short_description: Text-to-Image
 pinned: true
+preload_from_hub:
+- madebyollin/sdxl-vae-fp16-fix config.json,diffusion_pytorch_model.safetensors
 hf_oauth: true
 ---
 
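The new preload_from_hub entry asks the Space build step to cache config.json and diffusion_pytorch_model.safetensors from madebyollin/sdxl-vae-fp16-fix before the app starts, so the fp16-safe SDXL VAE is already on disk at runtime. A rough Python equivalent of that prefetch, for illustration only (the Space relies on the metadata above, not on code like this):

from huggingface_hub import hf_hub_download

# Pull the same files the README metadata lists into the local HF cache.
for filename in ("config.json", "diffusion_pytorch_model.safetensors"):
    hf_hub_download(repo_id="madebyollin/sdxl-vae-fp16-fix", filename=filename)
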
constants.py CHANGED
@@ -7,10 +7,10 @@ from stablepy import (
 )
 
 # - **Download Models**
-DOWNLOAD_MODEL = "https://civitai.com/api/download/models/574369, https://huggingface.co/TechnoByte/MilkyWonderland/resolve/main/milkyWonderland_v40.safetensors"
+DOWNLOAD_MODEL = "https://huggingface.co/TechnoByte/MilkyWonderland/resolve/main/milkyWonderland_v40.safetensors"
 
 # - **Download VAEs**
-DOWNLOAD_VAE = "https://huggingface.co/nubby/blessed-sdxl-vae-fp16-fix/resolve/main/sdxl_vae-fp16fix-c-1.1-b-0.5.safetensors?download=true, https://huggingface.co/nubby/blessed-sdxl-vae-fp16-fix/resolve/main/sdxl_vae-fp16fix-blessed.safetensors?download=true, https://huggingface.co/digiplay/VAE/resolve/main/vividReal_v20.safetensors?download=true, https://huggingface.co/fp16-guy/anything_kl-f8-anime2_vae-ft-mse-840000-ema-pruned_blessed_clearvae_fp16_cleaned/resolve/main/vae-ft-mse-840000-ema-pruned_fp16.safetensors?download=true"
+DOWNLOAD_VAE = "https://huggingface.co/fp16-guy/anything_kl-f8-anime2_vae-ft-mse-840000-ema-pruned_blessed_clearvae_fp16_cleaned/resolve/main/vae-ft-mse-840000-ema-pruned_fp16.safetensors?download=true"
 
 # - **Download LoRAs**
 DOWNLOAD_LORA = "https://huggingface.co/Leopain/color/resolve/main/Coloring_book_-_LineArt.safetensors, https://civitai.com/api/download/models/135867, https://huggingface.co/Linaqruf/anime-detailer-xl-lora/resolve/main/anime-detailer-xl.safetensors?download=true, https://huggingface.co/Linaqruf/style-enhancer-xl-lora/resolve/main/style-enhancer-xl.safetensors?download=true, https://huggingface.co/ByteDance/Hyper-SD/resolve/main/Hyper-SD15-8steps-CFG-lora.safetensors?download=true, https://huggingface.co/ByteDance/Hyper-SD/resolve/main/Hyper-SDXL-8steps-CFG-lora.safetensors?download=true"
@@ -24,7 +24,9 @@ LOAD_DIFFUSERS_FORMAT_MODEL = [
     'John6666/lyh-anime-flux-v2a1-fp8-flux',
     'John6666/carnival-unchained-v10-fp8-flux',
     'Freepik/flux.1-lite-8B-alpha',
-    'ostris/OpenFLUX.1',
+    'shauray/FluxDev-HyperSD-merged',
+    'mikeyandfriends/PixelWave_FLUX.1-dev_03',
+    'terminusresearch/FluxBooru-v0.3',
     'John6666/noobai-xl-nai-xl-epsilonpred10version-sdxl',
     'John6666/noobai-xl-nai-xl-epsilonpred075version-sdxl',
     'John6666/noobai-xl-nai-xl-epsilonpred05version-sdxl',
@@ -38,6 +40,7 @@ LOAD_DIFFUSERS_FORMAT_MODEL = [
     'John6666/illustriousxl-mmmix-v50-sdxl',
     'John6666/illustrious-pencil-xl-v200-sdxl',
     'John6666/obsession-illustriousxl-v21-sdxl',
+    'John6666/obsession-illustriousxl-v30-sdxl',
     'eienmojiki/Anything-XL',
     'eienmojiki/Starry-XL-v5.2',
     'John6666/meinaxl-v2-sdxl',
@@ -86,6 +89,8 @@ LOAD_DIFFUSERS_FORMAT_MODEL = [
     'yodayo-ai/kivotos-xl-2.0',
     'yodayo-ai/holodayo-xl-2.1',
     'yodayo-ai/clandestine-xl-1.0',
+    'ostris/OpenFLUX.1',
+    'shuttleai/shuttle-3-diffusion',
     'digiplay/majicMIX_sombre_v2',
     'digiplay/majicMIX_realistic_v6',
     'digiplay/majicMIX_realistic_v7',
@@ -120,8 +125,8 @@ DIFFUSERS_FORMAT_LORAS = [
 
 DOWNLOAD_EMBEDS = [
     'https://huggingface.co/datasets/Nerfgun3/bad_prompt/blob/main/bad_prompt_version2.pt',
-    'https://huggingface.co/embed/negative/resolve/main/EasyNegativeV2.safetensors',
-    'https://huggingface.co/embed/negative/resolve/main/bad-hands-5.pt',
+    # 'https://huggingface.co/embed/negative/resolve/main/EasyNegativeV2.safetensors',
+    # 'https://huggingface.co/embed/negative/resolve/main/bad-hands-5.pt',
 ]
 
 CIVITAI_API_KEY = os.environ.get("CIVITAI_API_KEY")
@@ -291,6 +296,13 @@ MODEL_TYPE_CLASS = {
     "diffusers:FluxPipeline": "FLUX",
 }
 
+DIFFUSECRAFT_CHECKPOINT_NAME = {
+    "sd1.5": "SD 1.5",
+    "sdxl": "SDXL",
+    "flux-dev": "FLUX",
+    "flux-schnell": "FLUX",
+}
+
 POST_PROCESSING_SAMPLER = ["Use same sampler"] + [
     name_s for name_s in scheduler_names if "Auto-Loader" not in name_s
 ]
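The new DIFFUSECRAFT_CHECKPOINT_NAME table translates the checkpoint-type tags that stablepy's checkpoint_model_type helper reports for a local file ("sd1.5", "sdxl", "flux-dev", "flux-schnell") into the model-type labels the rest of the app already uses ("SD 1.5", "SDXL", "FLUX"); get_model_type in utils.py below consumes it. A minimal lookup sketch, assuming these four tags are the only ones that come back:

from constants import DIFFUSECRAFT_CHECKPOINT_NAME

def checkpoint_label(tag: str, default: str = "SD 1.5") -> str:
    # Hypothetical helper: fall back to the app's default label for unknown tags,
    # mirroring how get_model_type() defaults to "SD 1.5" on failure.
    return DIFFUSECRAFT_CHECKPOINT_NAME.get(tag, default)

print(checkpoint_label("flux-dev"))  # -> "FLUX"
print(checkpoint_label("sdxl"))      # -> "SDXL"
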
llmdolphin.py CHANGED
@@ -27,6 +27,7 @@ llm_models = {
     "mn-12b-lyra-v2a1-q5_k_m.gguf": ["HalleyStarbun/MN-12B-Lyra-v2a1-Q5_K_M-GGUF", MessagesFormatterType.CHATML],
     "L3-8B-Tamamo-v1.i1-Q5_K_M.gguf": ["mradermacher/L3-8B-Tamamo-v1-i1-GGUF", MessagesFormatterType.LLAMA_3],
     "MN-Chinofun-12B-2.i1-Q4_K_M.gguf": ["mradermacher/MN-Chinofun-12B-2-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Mistral-Nemo-Prism-12B-v2.i1-Q4_K_M.gguf": ["mradermacher/Mistral-Nemo-Prism-12B-v2-i1-GGUF", MessagesFormatterType.MISTRAL],
     "Mahou-1.5-mistral-nemo-12B.i1-Q4_K_M.gguf": ["mradermacher/Mahou-1.5-mistral-nemo-12B-i1-GGUF", MessagesFormatterType.MISTRAL],
     "MN-12B-Mag-Mell-Q4_K_M.gguf": ["inflatebot/MN-12B-Mag-Mell-R1-GGUF", MessagesFormatterType.MISTRAL],
     "Qwen-modelstock-15B.i1-Q4_K_M.gguf": ["mradermacher/Qwen-modelstock-15B-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
@@ -51,6 +52,7 @@ llm_models = {
     "Nemo-12B-Marlin-v7.Q4_K_M.gguf": ["mradermacher/Nemo-12B-Marlin-v7-GGUF", MessagesFormatterType.MISTRAL],
     "Nemo-12B-Marlin-v8.Q4_K_S.gguf": ["mradermacher/Nemo-12B-Marlin-v8-GGUF", MessagesFormatterType.MISTRAL],
     "NemoDori-v0.2-Upscaled.1-14B.Q4_K_M.gguf": ["mradermacher/NemoDori-v0.2-Upscaled.1-14B-GGUF", MessagesFormatterType.MISTRAL],
+    "MT-Gen2-GIMMMA-gemma-2-9B.Q4_K_M.gguf": ["mradermacher/MT-Gen2-GIMMMA-gemma-2-9B-GGUF", MessagesFormatterType.ALPACA],
     "Fireball-12B-v1.0.i1-Q4_K_M.gguf": ["mradermacher/Fireball-12B-v1.0-i1-GGUF", MessagesFormatterType.MISTRAL],
     "Fireball-Mistral-Nemo-Base-2407-sft-v2.2a.Q4_K_M.gguf": ["mradermacher/Fireball-Mistral-Nemo-Base-2407-sft-v2.2a-GGUF", MessagesFormatterType.MISTRAL],
     "T-III-12B.Q4_K_M.gguf": ["mradermacher/T-III-12B-GGUF", MessagesFormatterType.CHATML],
@@ -70,12 +72,52 @@ llm_models = {
     "Rocinante-12B-v2h-Q4_K_M.gguf": ["BeaverAI/Rocinante-12B-v2h-GGUF", MessagesFormatterType.MISTRAL],
     "Mistral-Nemo-12B-ArliAI-RPMax-v1.1.i1-Q4_K_M.gguf": ["mradermacher/Mistral-Nemo-12B-ArliAI-RPMax-v1.1-i1-GGUF", MessagesFormatterType.MISTRAL],
     "Pans_Gutenbergum_V0.1.Q4_K_M.gguf": ["mradermacher/Pans_Gutenbergum_V0.1-GGUF", MessagesFormatterType.MISTRAL],
+    "ChronoStar-Unleashed-v0.1.i1-Q4_K_M.gguf": ["mradermacher/ChronoStar-Unleashed-v0.1-i1-GGUF", MessagesFormatterType.MISTRAL],
     "Trinas_Nectar-8B-model_stock.i1-Q4_K_M.gguf": ["mradermacher/Trinas_Nectar-8B-model_stock-i1-GGUF", MessagesFormatterType.MISTRAL],
     "ChatWaifu_Magnum_V0.2.Q4_K_M.gguf": ["mradermacher/ChatWaifu_Magnum_V0.2-GGUF", MessagesFormatterType.MISTRAL],
     "ChatWaifu_12B_v2.0.Q5_K_M.gguf": ["mradermacher/ChatWaifu_12B_v2.0-GGUF", MessagesFormatterType.MISTRAL],
     "ChatWaifu_22B_v2.0_preview.Q4_K_S.gguf": ["mradermacher/ChatWaifu_22B_v2.0_preview-GGUF", MessagesFormatterType.MISTRAL],
     "ChatWaifu_v1.4.Q5_K_M.gguf": ["mradermacher/ChatWaifu_v1.4-GGUF", MessagesFormatterType.MISTRAL],
     "ChatWaifu_v1.3.1.Q4_K_M.gguf": ["mradermacher/ChatWaifu_v1.3.1-GGUF", MessagesFormatterType.MISTRAL],
+    "magnum-12b-v2.5-kto.i1-Q4_K_M.gguf": ["mradermacher/magnum-12b-v2.5-kto-i1-GGUF", MessagesFormatterType.CHATML],
+    "L3.1-8B-Slush.i1-Q5_K_M.gguf": ["mradermacher/L3.1-8B-Slush-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "QandoraExp-7B-Persona.i1-Q5_K_M.gguf": ["mradermacher/QandoraExp-7B-Persona-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Pantheon-RP-1.6-12b-Nemo-KTO.i1-Q4_K_M.gguf": ["mradermacher/Pantheon-RP-1.6-12b-Nemo-KTO-i1-GGUF", MessagesFormatterType.CHATML],
+    "Unaligned-Base-8b-1024K.i1-Q5_K_M.gguf": ["mradermacher/Unaligned-Base-8b-1024K-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "Mistral-Nemo-Prism-12B-v6.Q4_K_M.gguf": ["mradermacher/Mistral-Nemo-Prism-12B-v6-GGUF", MessagesFormatterType.MISTRAL],
+    "hermes-llama3-roleplay-1000-v3.Q5_K_M.gguf": ["mradermacher/hermes-llama3-roleplay-1000-v3-GGUF", MessagesFormatterType.LLAMA_3],
+    "Prismatic-12b.Q4_K_M.gguf": ["mradermacher/Prismatic-12b-GGUF", MessagesFormatterType.MISTRAL],
+    "Qwen-2.5-Aether-SlerpFusion-7B.i1-Q5_K_M.gguf": ["mradermacher/Qwen-2.5-Aether-SlerpFusion-7B-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "miscii-14b-1028.i1-Q4_K_M.gguf": ["mradermacher/miscii-14b-1028-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "DarkAtom-12B-v3.i1-Q4_K_M.gguf": ["mradermacher/DarkAtom-12B-v3-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Mistral-Nemo-Prism-12B.i1-Q4_K_M.gguf": ["mradermacher/Mistral-Nemo-Prism-12B-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "QandoraExp-7B-v2.Q5_K_M.gguf": ["mradermacher/QandoraExp-7B-v2-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Kunocchini-7b-128k-test.Q5_K_M.gguf": ["mradermacher/Kunocchini-7b-128k-test-GGUF", MessagesFormatterType.MISTRAL],
+    "MT2-Gen2-BGMAMU-gemma-2-9B.Q4_K_M.gguf": ["mradermacher/MT2-Gen2-BGMAMU-gemma-2-9B-GGUF", MessagesFormatterType.ALPACA],
+    "dolphin-mixtral-2x7b.i1-Q4_K_M.gguf": ["mradermacher/dolphin-mixtral-2x7b-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "NightyGurps-14b-v1.1.Q4_K_M.gguf": ["mradermacher/NightyGurps-14b-v1.1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "MT2-Gen2-BB-gemma-2-MTMMT5-9B.Q4_K_M.gguf": ["mradermacher/MT2-Gen2-BB-gemma-2-MTMMT5-9B-GGUF", MessagesFormatterType.ALPACA],
+    "MT2-Gen2-IMM-gemma-2-9B.Q4_K_M.gguf": ["mradermacher/MT2-Gen2-IMM-gemma-2-9B-GGUF", MessagesFormatterType.ALPACA],
+    "Unaligned-RP-Base-8b-3.Q5_K_M.gguf": ["mradermacher/Unaligned-RP-Base-8b-3-GGUF", MessagesFormatterType.LLAMA_3],
+    "Unaligned-RP-Base-8b-2.Q5_K_M.gguf": ["mradermacher/Unaligned-RP-Base-8b-2-GGUF", MessagesFormatterType.LLAMA_3],
+    "LongWriter-Qwen2.5-7B-Instruct.i1-Q5_K_M.gguf": ["mradermacher/LongWriter-Qwen2.5-7B-Instruct-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Everyone-LLM-7b-Base.i1-Q5_K_M.gguf": ["mradermacher/Everyone-LLM-7b-Base-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "QwenSlerp8-7B.Q5_K_M.gguf": ["mradermacher/QwenSlerp8-7B-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Tess-10.7B-v1.5.Q4_K_M.gguf": ["mradermacher/Tess-10.7B-v1.5-GGUF", MessagesFormatterType.LLAMA_3],
+    "Rombos-Coder-V2.5-Qwen-7b.i1-Q5_K_M.gguf": ["mradermacher/Rombos-Coder-V2.5-Qwen-7b-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Qwen2.5-Gutenberg-Doppel-14B.Q4_K_M.gguf": ["mradermacher/Qwen2.5-Gutenberg-Doppel-14B-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "MT-Gen2-MU-gemma-2-MT1RAv0.1t0.25-9B.Q4_K_M.gguf": ["mradermacher/MT-Gen2-MU-gemma-2-MT1RAv0.1t0.25-9B-GGUF", MessagesFormatterType.ALPACA],
+    "MT1-Gen2-GP-gemma-2-MT1DMv1-9B.Q4_K_M.gguf": ["mradermacher/MT1-Gen2-GP-gemma-2-MT1DMv1-9B-GGUF", MessagesFormatterType.ALPACA],
+    "MT1-Gen2-GMA-gemma-2-9B.Q4_K_M.gguf": ["mradermacher/MT1-Gen2-GMA-gemma-2-9B-GGUF", MessagesFormatterType.ALPACA],
+    "MT1-Gen2-MMMU-gemma-2-9B.Q4_K_M.gguf": ["mradermacher/MT1-Gen2-MMMU-gemma-2-9B-GGUF", MessagesFormatterType.ALPACA],
+    "LGRC-7B-slerp.Q5_K_M.gguf": ["mradermacher/LGRC-7B-slerp-GGUF", MessagesFormatterType.MISTRAL],
+    "Ice0.32-10.11-RP.Q5_K_M.gguf": ["mradermacher/Ice0.32-10.11-RP-GGUF", MessagesFormatterType.MISTRAL],
+    "Vecteus-v1.i1-Q5_K_M.gguf": ["mradermacher/Vecteus-v1-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "L3.1-BaeZel-8B-Della.Q4_K_S.gguf": ["mradermacher/L3.1-BaeZel-8B-Della-GGUF", MessagesFormatterType.LLAMA_3],
+    "Qwen2.5-14B-Instruct-SEALONG.i1-Q4_K_M.gguf": ["mradermacher/Qwen2.5-14B-Instruct-SEALONG-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Qwen2.5-7B-Instruct-SEALONG.i1-Q5_K_M.gguf": ["mradermacher/Qwen2.5-7B-Instruct-SEALONG-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Fraken-Maid-TW-K-Slerp.i1-Q5_K_M.gguf": ["mradermacher/Fraken-Maid-TW-K-Slerp-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "AutoTrain-Qwen-Rui-Elite.Q5_K_M.gguf": ["mradermacher/AutoTrain-Qwen-Rui-Elite-GGUF", MessagesFormatterType.OPEN_CHAT],
     "dolphin-2.6-mistral-7b-dpo-laser.Q4_K_S.gguf": ["mradermacher/dolphin-2.6-mistral-7b-dpo-laser-GGUF", MessagesFormatterType.MISTRAL],
     "Flowable-Docs-Llama-3.1-8B.Q5_K_M.gguf": ["mradermacher/Flowable-Docs-Llama-3.1-8B-GGUF", MessagesFormatterType.LLAMA_3],
     "slimorca-gemma2-9b-fft.Q4_K_M.gguf": ["mradermacher/slimorca-gemma2-9b-fft-GGUF", MessagesFormatterType.ALPACA],
requirements.txt CHANGED
@@ -4,7 +4,7 @@ diffusers
 invisible_watermark
 transformers
 xformers
-git+https://github.com/R3gm/stablepy.git@ed51089 # -b refactor_sampler_fix
+git+https://github.com/R3gm/stablepy.git@f686946 # -b refactor_sampler_fix
 torch==2.2.0
 gdown
 opencv-python
utils.py CHANGED
@@ -7,11 +7,14 @@ from constants import (
     HF_TOKEN,
     MODEL_TYPE_CLASS,
     DIRECTORY_LORAS,
+    DIFFUSECRAFT_CHECKPOINT_NAME,
 )
 from huggingface_hub import HfApi
+from huggingface_hub import snapshot_download
 from diffusers import DiffusionPipeline
 from huggingface_hub import model_info as model_info_data
 from diffusers.pipelines.pipeline_loading_utils import variant_compatible_siblings
+from stablepy.diffusers_vanilla.utils import checkpoint_model_type
 from pathlib import PosixPath
 from unidecode import unidecode
 import urllib.parse
@@ -283,10 +286,15 @@ def get_model_type(repo_id: str):
     api = HfApi(token=os.environ.get("HF_TOKEN")) # if use private or gated model
     default = "SD 1.5"
     try:
-        model = api.model_info(repo_id=repo_id, timeout=5.0)
-        tags = model.tags
-        for tag in tags:
-            if tag in MODEL_TYPE_CLASS.keys(): return MODEL_TYPE_CLASS.get(tag, default)
+        if os.path.exists(repo_id):
+            tag = checkpoint_model_type(repo_id)
+            return DIFFUSECRAFT_CHECKPOINT_NAME[tag]
+        else:
+            model = api.model_info(repo_id=repo_id, timeout=5.0)
+            tags = model.tags
+            for tag in tags:
+                if tag in MODEL_TYPE_CLASS.keys(): return MODEL_TYPE_CLASS.get(tag, default)
+
     except Exception:
         return default
     return default
@@ -371,17 +379,23 @@ def download_diffuser_repo(repo_name: str, model_type: str, revision: str = "mai
     if len(variant_filenames):
         variant = "fp16"
 
-    cached_folder = DiffusionPipeline.download(
-        pretrained_model_name=repo_name,
-        force_download=False,
-        token=token,
-        revision=revision,
-        # mirror="https://hf-mirror.com",
-        variant=variant,
-        use_safetensors=True,
-        trust_remote_code=False,
-        timeout=5.0,
-    )
+    if model_type == "FLUX":
+        cached_folder = snapshot_download(
+            repo_id=repo_name,
+            allow_patterns="transformer/*"
+        )
+    else:
+        cached_folder = DiffusionPipeline.download(
+            pretrained_model_name=repo_name,
+            force_download=False,
+            token=token,
+            revision=revision,
+            # mirror="https://hf-mirror.com",
+            variant=variant,
+            use_safetensors=True,
+            trust_remote_code=False,
+            timeout=5.0,
+        )
 
     if isinstance(cached_folder, PosixPath):
         cached_folder = cached_folder.as_posix()
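Taken together, get_model_type now recognizes local checkpoint files through stablepy's checkpoint_model_type plus the DIFFUSECRAFT_CHECKPOINT_NAME map, and download_diffuser_repo skips the full pipeline download for FLUX repos, fetching only transformer/* via snapshot_download. A minimal usage sketch, assuming the signatures shown in the diff stay as-is:

# from utils import get_model_type, download_diffuser_repo  # as patched above

# Hub repo ids are resolved through model tags; a local .safetensors path would
# instead go through checkpoint_model_type() and DIFFUSECRAFT_CHECKPOINT_NAME.
model_type = get_model_type("Freepik/flux.1-lite-8B-alpha")  # expected "FLUX"

# For FLUX, only the transformer/ weights are cached (snapshot_download with
# allow_patterns); other model types still use DiffusionPipeline.download().
cached_folder = download_diffuser_repo("Freepik/flux.1-lite-8B-alpha", model_type)
print(cached_folder)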