Spaces:
Running
on
Zero
Running
on
Zero
Upload 6 files
Browse files- app.py +22 -18
- dc.py +27 -40
- llmdolphin.py +26 -0
- lora_dict.json +7 -0
- modutils.py +122 -52
- null.png +0 -0
app.py
CHANGED
@@ -4,11 +4,10 @@ import numpy as np
|
|
4 |
|
5 |
# DiffuseCraft
|
6 |
from dc import (infer, _infer, pass_result, get_diffusers_model_list, get_samplers,
|
7 |
-
get_vaes, enable_model_recom_prompt, enable_diffusers_model_detail,
|
8 |
-
|
9 |
-
|
10 |
-
|
11 |
-
preset_quality, preset_styles, process_style_prompt)
|
12 |
# Translator
|
13 |
from llmdolphin import (dolphin_respond_auto, dolphin_parse_simple,
|
14 |
get_llm_formats, get_dolphin_model_format, get_dolphin_models,
|
@@ -41,8 +40,9 @@ css = """
|
|
41 |
#col-container { margin: 0 auto; !important; }
|
42 |
#result { max-width: 520px; max-height: 520px; margin: 0px auto; !important; }
|
43 |
.lora { min-width: 480px; !important; }
|
44 |
-
|
45 |
-
.
|
|
|
46 |
"""
|
47 |
|
48 |
with gr.Blocks(fill_width=True, elem_id="container", css=css, delete_cache=(60, 3600)) as demo:
|
@@ -80,7 +80,7 @@ with gr.Blocks(fill_width=True, elem_id="container", css=css, delete_cache=(60,
|
|
80 |
model_name = gr.Dropdown(label="Model", info="You can enter a huggingface model repo_id to want to use.",
|
81 |
choices=get_diffusers_model_list(), value=get_diffusers_model_list()[0],
|
82 |
allow_custom_value=True, interactive=True, min_width=320)
|
83 |
-
model_info = gr.Markdown(
|
84 |
with gr.Column(scale=1):
|
85 |
model_detail = gr.Checkbox(label="Show detail of model in list", value=False)
|
86 |
|
@@ -141,17 +141,20 @@ with gr.Blocks(fill_width=True, elem_id="container", css=css, delete_cache=(60,
|
|
141 |
lora5_md = gr.Markdown(value="", visible=False)
|
142 |
with gr.Accordion("From URL", open=True, visible=True):
|
143 |
with gr.Row():
|
144 |
-
lora_search_civitai_basemodel = gr.CheckboxGroup(label="Search LoRA for", choices=
|
145 |
-
lora_search_civitai_sort = gr.Radio(label="Sort", choices=
|
146 |
-
lora_search_civitai_period = gr.Radio(label="Period", choices=
|
147 |
with gr.Row():
|
148 |
lora_search_civitai_query = gr.Textbox(label="Query", placeholder="oomuro sakurako...", lines=1)
|
149 |
-
lora_search_civitai_tag = gr.
|
150 |
-
|
|
|
151 |
with gr.Row():
|
152 |
-
lora_search_civitai_result = gr.Dropdown(label="Search Results", choices=[("", "")], value="", allow_custom_value=True, visible=False)
|
153 |
lora_search_civitai_json = gr.JSON(value={}, visible=False)
|
154 |
-
lora_search_civitai_desc = gr.Markdown(value="", visible=False)
|
|
|
|
|
|
|
155 |
lora_download_url = gr.Textbox(label="LoRA URL", placeholder="https://civitai.com/api/download/models/28907", lines=1)
|
156 |
lora_download = gr.Button("Get and set LoRA and apply to prompt")
|
157 |
|
@@ -254,10 +257,10 @@ with gr.Blocks(fill_width=True, elem_id="container", css=css, delete_cache=(60,
|
|
254 |
lora5_copy.click(apply_lora_prompt, [prompt, lora5_info], [prompt], queue=False, show_api=False)
|
255 |
|
256 |
gr.on(
|
257 |
-
triggers=[lora_search_civitai_submit.click, lora_search_civitai_query.submit
|
258 |
fn=search_civitai_lora,
|
259 |
-
inputs=[lora_search_civitai_query, lora_search_civitai_basemodel, lora_search_civitai_sort, lora_search_civitai_period, lora_search_civitai_tag],
|
260 |
-
outputs=[lora_search_civitai_result, lora_search_civitai_desc, lora_search_civitai_submit, lora_search_civitai_query],
|
261 |
scroll_to_output=True,
|
262 |
queue=True,
|
263 |
show_api=False,
|
@@ -273,6 +276,7 @@ with gr.Blocks(fill_width=True, elem_id="container", css=css, delete_cache=(60,
|
|
273 |
queue=True,
|
274 |
show_api=False,
|
275 |
)
|
|
|
276 |
|
277 |
recom_prompt.change(enable_model_recom_prompt, [recom_prompt], [recom_prompt], queue=False, show_api=False)
|
278 |
gr.on(
|
|
|
4 |
|
5 |
# DiffuseCraft
|
6 |
from dc import (infer, _infer, pass_result, get_diffusers_model_list, get_samplers,
|
7 |
+
get_vaes, enable_model_recom_prompt, enable_diffusers_model_detail, extract_exif_data, esrgan_upscale, UPSCALER_KEYS,
|
8 |
+
preset_quality, preset_styles, process_style_prompt, get_all_lora_tupled_list, update_loras, apply_lora_prompt,
|
9 |
+
download_my_lora, search_civitai_lora, update_civitai_selection, select_civitai_lora, search_civitai_lora_json)
|
10 |
+
from modutils import get_t2i_model_info, get_civitai_tag, CIVITAI_SORT, CIVITAI_PERIOD, CIVITAI_BASEMODEL
|
|
|
11 |
# Translator
|
12 |
from llmdolphin import (dolphin_respond_auto, dolphin_parse_simple,
|
13 |
get_llm_formats, get_dolphin_model_format, get_dolphin_models,
|
|
|
40 |
#col-container { margin: 0 auto; !important; }
|
41 |
#result { max-width: 520px; max-height: 520px; margin: 0px auto; !important; }
|
42 |
.lora { min-width: 480px; !important; }
|
43 |
+
.title { font-size: 3em; align-items: center; text-align: center; }
|
44 |
+
.info { align-items: center; text-align: center; }
|
45 |
+
.desc [src$='#float'] { float: right; margin: 20px; }
|
46 |
"""
|
47 |
|
48 |
with gr.Blocks(fill_width=True, elem_id="container", css=css, delete_cache=(60, 3600)) as demo:
|
|
|
80 |
model_name = gr.Dropdown(label="Model", info="You can enter a huggingface model repo_id to want to use.",
|
81 |
choices=get_diffusers_model_list(), value=get_diffusers_model_list()[0],
|
82 |
allow_custom_value=True, interactive=True, min_width=320)
|
83 |
+
model_info = gr.Markdown(elem_classes="info")
|
84 |
with gr.Column(scale=1):
|
85 |
model_detail = gr.Checkbox(label="Show detail of model in list", value=False)
|
86 |
|
|
|
141 |
lora5_md = gr.Markdown(value="", visible=False)
|
142 |
with gr.Accordion("From URL", open=True, visible=True):
|
143 |
with gr.Row():
|
144 |
+
lora_search_civitai_basemodel = gr.CheckboxGroup(label="Search LoRA for", choices=CIVITAI_BASEMODEL, value=["Pony", "SDXL 1.0"])
|
145 |
+
lora_search_civitai_sort = gr.Radio(label="Sort", choices=CIVITAI_SORT, value="Highest Rated")
|
146 |
+
lora_search_civitai_period = gr.Radio(label="Period", choices=CIVITAI_PERIOD, value="AllTime")
|
147 |
with gr.Row():
|
148 |
lora_search_civitai_query = gr.Textbox(label="Query", placeholder="oomuro sakurako...", lines=1)
|
149 |
+
lora_search_civitai_tag = gr.Dropdown(label="Tag", choices=get_civitai_tag(), value=get_civitai_tag()[0], allow_custom_value=True)
|
150 |
+
lora_search_civitai_user = gr.Textbox(label="Username", lines=1)
|
151 |
+
lora_search_civitai_submit = gr.Button("Search on Civitai")
|
152 |
with gr.Row():
|
|
|
153 |
lora_search_civitai_json = gr.JSON(value={}, visible=False)
|
154 |
+
lora_search_civitai_desc = gr.Markdown(value="", visible=False, elem_classes="desc")
|
155 |
+
with gr.Accordion("Select from Gallery", open=False):
|
156 |
+
lora_search_civitai_gallery = gr.Gallery([], label="Results", allow_preview=False, columns=5, show_share_button=False, interactive=False)
|
157 |
+
lora_search_civitai_result = gr.Dropdown(label="Search Results", choices=[("", "")], value="", allow_custom_value=True, visible=False)
|
158 |
lora_download_url = gr.Textbox(label="LoRA URL", placeholder="https://civitai.com/api/download/models/28907", lines=1)
|
159 |
lora_download = gr.Button("Get and set LoRA and apply to prompt")
|
160 |
|
|
|
257 |
lora5_copy.click(apply_lora_prompt, [prompt, lora5_info], [prompt], queue=False, show_api=False)
|
258 |
|
259 |
gr.on(
|
260 |
+
triggers=[lora_search_civitai_submit.click, lora_search_civitai_query.submit],
|
261 |
fn=search_civitai_lora,
|
262 |
+
inputs=[lora_search_civitai_query, lora_search_civitai_basemodel, lora_search_civitai_sort, lora_search_civitai_period, lora_search_civitai_tag, lora_search_civitai_user, lora_search_civitai_gallery],
|
263 |
+
outputs=[lora_search_civitai_result, lora_search_civitai_desc, lora_search_civitai_submit, lora_search_civitai_query, lora_search_civitai_gallery],
|
264 |
scroll_to_output=True,
|
265 |
queue=True,
|
266 |
show_api=False,
|
|
|
276 |
queue=True,
|
277 |
show_api=False,
|
278 |
)
|
279 |
+
lora_search_civitai_gallery.select(update_civitai_selection, None, [lora_search_civitai_result], queue=False, show_api=False)
|
280 |
|
281 |
recom_prompt.change(enable_model_recom_prompt, [recom_prompt], [recom_prompt], queue=False, show_api=False)
|
282 |
gr.on(
|
dc.py
CHANGED
@@ -783,7 +783,7 @@ from PIL import Image
|
|
783 |
import random, json
|
784 |
from modutils import (safe_float, escape_lora_basename, to_lora_key, to_lora_path,
|
785 |
get_local_model_list, get_private_lora_model_lists, get_valid_lora_name,
|
786 |
-
get_valid_lora_path, get_valid_lora_wt, get_lora_info,
|
787 |
normalize_prompt_list, get_civitai_info, search_lora_on_civitai, translate_to_en)
|
788 |
|
789 |
sd_gen = GuiSD()
|
@@ -893,35 +893,6 @@ def enable_diffusers_model_detail(is_enable: bool = False, model_name: str = "")
|
|
893 |
return gr.update(value=is_enable), gr.update(value=new_value, choices=get_diffusers_model_list())
|
894 |
|
895 |
|
896 |
-
def get_t2i_model_info(repo_id: str):
|
897 |
-
from huggingface_hub import HfApi
|
898 |
-
api = HfApi()
|
899 |
-
try:
|
900 |
-
if " " in repo_id or not api.repo_exists(repo_id): return ""
|
901 |
-
model = api.model_info(repo_id=repo_id)
|
902 |
-
except Exception as e:
|
903 |
-
print(f"Error: Failed to get {repo_id}'s info. {e}")
|
904 |
-
return ""
|
905 |
-
if model.private or model.gated: return ""
|
906 |
-
tags = model.tags
|
907 |
-
info = []
|
908 |
-
url = f"https://huggingface.co/{repo_id}/"
|
909 |
-
if not 'diffusers' in tags: return ""
|
910 |
-
if 'diffusers:FluxPipeline' in tags:
|
911 |
-
info.append("FLUX.1")
|
912 |
-
elif 'diffusers:StableDiffusionXLPipeline' in tags:
|
913 |
-
info.append("SDXL")
|
914 |
-
elif 'diffusers:StableDiffusionPipeline' in tags:
|
915 |
-
info.append("SD1.5")
|
916 |
-
if model.card_data and model.card_data.tags:
|
917 |
-
info.extend(list_sub(model.card_data.tags, ['text-to-image', 'stable-diffusion', 'stable-diffusion-api', 'safetensors', 'stable-diffusion-xl']))
|
918 |
-
info.append(f"DLs: {model.downloads}")
|
919 |
-
info.append(f"likes: {model.likes}")
|
920 |
-
info.append(model.last_modified.strftime("lastmod: %Y-%m-%d"))
|
921 |
-
md = f"Model Info: {', '.join(info)}, [Model Repo]({url})"
|
922 |
-
return gr.update(value=md)
|
923 |
-
|
924 |
-
|
925 |
def load_model_prompt_dict():
|
926 |
import json
|
927 |
dict = {}
|
@@ -1209,30 +1180,46 @@ def update_loras(prompt, lora1, lora1_wt, lora2, lora2_wt, lora3, lora3_wt, lora
|
|
1209 |
gr.update(value=tag5, label=label5, visible=on5), gr.update(visible=on5), gr.update(value=md5, visible=on5)
|
1210 |
|
1211 |
|
1212 |
-
def search_civitai_lora(query, base_model, sort=
|
1213 |
-
global
|
1214 |
-
|
|
|
|
|
|
|
1215 |
if not items: return gr.update(choices=[("", "")], value="", visible=False),\
|
1216 |
-
gr.update(value="", visible=False), gr.update(visible=True), gr.update(visible=True)
|
1217 |
-
|
1218 |
choices = []
|
|
|
1219 |
for item in items:
|
1220 |
base_model_name = "Pony🐴" if item['base_model'] == "Pony" else item['base_model']
|
1221 |
name = f"{item['name']} (for {base_model_name} / By: {item['creator']} / Tags: {', '.join(item['tags'])})"
|
1222 |
value = item['dl_url']
|
1223 |
choices.append((name, value))
|
1224 |
-
|
|
|
1225 |
if not choices: return gr.update(choices=[("", "")], value="", visible=False),\
|
1226 |
-
gr.update(value="", visible=False), gr.update(visible=True), gr.update(visible=True)
|
1227 |
-
|
|
|
|
|
1228 |
md = result['md'] if result else ""
|
1229 |
return gr.update(choices=choices, value=choices[0][1], visible=True), gr.update(value=md, visible=True),\
|
1230 |
-
gr.update(visible=True), gr.update(visible=True)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1231 |
|
1232 |
|
1233 |
def select_civitai_lora(search_result):
|
1234 |
if not "http" in search_result: return gr.update(value=""), gr.update(value="None", visible=True)
|
1235 |
-
result =
|
1236 |
md = result['md'] if result else ""
|
1237 |
return gr.update(value=search_result), gr.update(value=md, visible=True)
|
1238 |
|
|
|
783 |
import random, json
|
784 |
from modutils import (safe_float, escape_lora_basename, to_lora_key, to_lora_path,
|
785 |
get_local_model_list, get_private_lora_model_lists, get_valid_lora_name,
|
786 |
+
get_valid_lora_path, get_valid_lora_wt, get_lora_info, CIVITAI_SORT, CIVITAI_PERIOD,
|
787 |
normalize_prompt_list, get_civitai_info, search_lora_on_civitai, translate_to_en)
|
788 |
|
789 |
sd_gen = GuiSD()
|
|
|
893 |
return gr.update(value=is_enable), gr.update(value=new_value, choices=get_diffusers_model_list())
|
894 |
|
895 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
896 |
def load_model_prompt_dict():
|
897 |
import json
|
898 |
dict = {}
|
|
|
1180 |
gr.update(value=tag5, label=label5, visible=on5), gr.update(visible=on5), gr.update(value=md5, visible=on5)
|
1181 |
|
1182 |
|
1183 |
+
def search_civitai_lora(query, base_model=[], sort=CIVITAI_SORT[0], period=CIVITAI_PERIOD[0], tag="", user="", gallery=[]):
|
1184 |
+
global civitai_last_results, civitai_last_choices, civitai_last_gallery
|
1185 |
+
civitai_last_choices = [("", "")]
|
1186 |
+
civitai_last_gallery = []
|
1187 |
+
civitai_last_results = {}
|
1188 |
+
items = search_lora_on_civitai(query, base_model, 100, sort, period, tag, user)
|
1189 |
if not items: return gr.update(choices=[("", "")], value="", visible=False),\
|
1190 |
+
gr.update(value="", visible=False), gr.update(visible=True), gr.update(visible=True), gr.update(visible=True)
|
1191 |
+
civitai_last_results = {}
|
1192 |
choices = []
|
1193 |
+
gallery = []
|
1194 |
for item in items:
|
1195 |
base_model_name = "Pony🐴" if item['base_model'] == "Pony" else item['base_model']
|
1196 |
name = f"{item['name']} (for {base_model_name} / By: {item['creator']} / Tags: {', '.join(item['tags'])})"
|
1197 |
value = item['dl_url']
|
1198 |
choices.append((name, value))
|
1199 |
+
gallery.append((item['img_url'], name))
|
1200 |
+
civitai_last_results[value] = item
|
1201 |
if not choices: return gr.update(choices=[("", "")], value="", visible=False),\
|
1202 |
+
gr.update(value="", visible=False), gr.update(visible=True), gr.update(visible=True), gr.update(visible=True)
|
1203 |
+
civitai_last_choices = choices
|
1204 |
+
civitai_last_gallery = gallery
|
1205 |
+
result = civitai_last_results.get(choices[0][1], "None")
|
1206 |
md = result['md'] if result else ""
|
1207 |
return gr.update(choices=choices, value=choices[0][1], visible=True), gr.update(value=md, visible=True),\
|
1208 |
+
gr.update(visible=True), gr.update(visible=True), gr.update(value=gallery)
|
1209 |
+
|
1210 |
+
|
1211 |
+
def update_civitai_selection(evt: gr.SelectData):
|
1212 |
+
try:
|
1213 |
+
selected_index = evt.index
|
1214 |
+
selected = civitai_last_choices[selected_index][1]
|
1215 |
+
return gr.update(value=selected)
|
1216 |
+
except Exception:
|
1217 |
+
return gr.update(visible=True)
|
1218 |
|
1219 |
|
1220 |
def select_civitai_lora(search_result):
|
1221 |
if not "http" in search_result: return gr.update(value=""), gr.update(value="None", visible=True)
|
1222 |
+
result = civitai_last_results.get(search_result, "None")
|
1223 |
md = result['md'] if result else ""
|
1224 |
return gr.update(value=search_result), gr.update(value=md, visible=True)
|
1225 |
|
llmdolphin.py
CHANGED
@@ -59,11 +59,37 @@ llm_models = {
|
|
59 |
"Qwen2.5-14B_Uncensored_Instruct.Q4_K_M.gguf": ["mradermacher/Qwen2.5-14B_Uncensored_Instruct-GGUF", MessagesFormatterType.OPEN_CHAT],
|
60 |
"EVA-Qwen2.5-14B-v0.0.i1-IQ4_XS.gguf": ["mradermacher/EVA-Qwen2.5-14B-v0.0-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
61 |
"MN-12B-Vespa-x1.i1-Q4_K_M.gguf": ["mradermacher/MN-12B-Vespa-x1-i1-GGUF", MessagesFormatterType.CHATML],
|
|
|
62 |
"Trinas_Nectar-8B-model_stock.i1-Q4_K_M.gguf": ["mradermacher/Trinas_Nectar-8B-model_stock-i1-GGUF", MessagesFormatterType.MISTRAL],
|
63 |
"ChatWaifu_12B_v2.0.Q5_K_M.gguf": ["mradermacher/ChatWaifu_12B_v2.0-GGUF", MessagesFormatterType.MISTRAL],
|
64 |
"ChatWaifu_22B_v2.0_preview.Q4_K_S.gguf": ["mradermacher/ChatWaifu_22B_v2.0_preview-GGUF", MessagesFormatterType.MISTRAL],
|
65 |
"ChatWaifu_v1.4.Q5_K_M.gguf": ["mradermacher/ChatWaifu_v1.4-GGUF", MessagesFormatterType.MISTRAL],
|
66 |
"ChatWaifu_v1.3.1.Q4_K_M.gguf": ["mradermacher/ChatWaifu_v1.3.1-GGUF", MessagesFormatterType.MISTRAL],
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
67 |
"ModeliCo-8B.i1-Q5_K_M.gguf": ["mradermacher/ModeliCo-8B-i1-GGUF", MessagesFormatterType.LLAMA_3],
|
68 |
"Llama3-8B-function-calling-dpo-slerp.i1-Q5_K_M.gguf": ["mradermacher/Llama3-8B-function-calling-dpo-slerp-i1-GGUF", MessagesFormatterType.LLAMA_3],
|
69 |
"Aspire1.2-8B-TIES.i1-Q5_K_M.gguf": ["mradermacher/Aspire1.2-8B-TIES-i1-GGUF", MessagesFormatterType.LLAMA_3],
|
|
|
59 |
"Qwen2.5-14B_Uncensored_Instruct.Q4_K_M.gguf": ["mradermacher/Qwen2.5-14B_Uncensored_Instruct-GGUF", MessagesFormatterType.OPEN_CHAT],
|
60 |
"EVA-Qwen2.5-14B-v0.0.i1-IQ4_XS.gguf": ["mradermacher/EVA-Qwen2.5-14B-v0.0-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
61 |
"MN-12B-Vespa-x1.i1-Q4_K_M.gguf": ["mradermacher/MN-12B-Vespa-x1-i1-GGUF", MessagesFormatterType.CHATML],
|
62 |
+
"Mistral-Nemo-12B-ArliAI-RPMax-v1.1.i1-Q4_K_M.gguf": ["mradermacher/Mistral-Nemo-12B-ArliAI-RPMax-v1.1-i1-GGUF", MessagesFormatterType.MISTRAL],
|
63 |
"Trinas_Nectar-8B-model_stock.i1-Q4_K_M.gguf": ["mradermacher/Trinas_Nectar-8B-model_stock-i1-GGUF", MessagesFormatterType.MISTRAL],
|
64 |
"ChatWaifu_12B_v2.0.Q5_K_M.gguf": ["mradermacher/ChatWaifu_12B_v2.0-GGUF", MessagesFormatterType.MISTRAL],
|
65 |
"ChatWaifu_22B_v2.0_preview.Q4_K_S.gguf": ["mradermacher/ChatWaifu_22B_v2.0_preview-GGUF", MessagesFormatterType.MISTRAL],
|
66 |
"ChatWaifu_v1.4.Q5_K_M.gguf": ["mradermacher/ChatWaifu_v1.4-GGUF", MessagesFormatterType.MISTRAL],
|
67 |
"ChatWaifu_v1.3.1.Q4_K_M.gguf": ["mradermacher/ChatWaifu_v1.3.1-GGUF", MessagesFormatterType.MISTRAL],
|
68 |
+
"Aster-G2-9B-v1.Q4_K_S.gguf": ["mradermacher/Aster-G2-9B-v1-GGUF", MessagesFormatterType.ALPACA],
|
69 |
+
"nemo-12b-rp-merge.Q4_K_S.gguf": ["mradermacher/nemo-12b-rp-merge-GGUF", MessagesFormatterType.MISTRAL],
|
70 |
+
"SthenoMix3.3.Q5_K_M.gguf": ["mradermacher/SthenoMix3.3-GGUF", MessagesFormatterType.LLAMA_3],
|
71 |
+
"Celestial-Harmony-14b-v1.0-Experimental-1016-Q4_K_M.gguf": ["bartowski/Celestial-Harmony-14b-v1.0-Experimental-1016-GGUF", MessagesFormatterType.MISTRAL],
|
72 |
+
"Gemma-2-Ataraxy-v4c-9B.Q4_K_M.gguf": ["mradermacher/Gemma-2-Ataraxy-v4c-9B-GGUF", MessagesFormatterType.ALPACA],
|
73 |
+
"Gemma-2-Ataraxy-v4b-9B.Q4_K_M.gguf": ["mradermacher/Gemma-2-Ataraxy-v4b-9B-GGUF", MessagesFormatterType.ALPACA],
|
74 |
+
"L3.1-EtherealRainbow-v1.0-rc1-8B.Q5_K_M.gguf": ["mradermacher/L3.1-EtherealRainbow-v1.0-rc1-8B-GGUF", MessagesFormatterType.LLAMA_3],
|
75 |
+
"MN-Lulanum-12B-FIX.i1-Q4_K_M.gguf": ["mradermacher/MN-Lulanum-12B-FIX-i1-GGUF", MessagesFormatterType.MISTRAL],
|
76 |
+
"Ministral-8B-Instruct-2410-HF-Q4_K_M.gguf": ["bartowski/Ministral-8B-Instruct-2410-HF-GGUF-TEST", MessagesFormatterType.MISTRAL],
|
77 |
+
"QevaCoT-7B-Stock.Q5_K_M.gguf": ["mradermacher/QevaCoT-7B-Stock-GGUF", MessagesFormatterType.OPEN_CHAT],
|
78 |
+
"Mixtronix-8B.i1-Q4_K_M.gguf": ["mradermacher/Mixtronix-8B-i1-GGUF", MessagesFormatterType.CHATML],
|
79 |
+
"Tsunami-0.5x-7B-Instruct.i1-Q5_K_M.gguf": ["mradermacher/Tsunami-0.5x-7B-Instruct-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
|
80 |
+
"mt3-gemma-2-9b-q6_k.gguf": ["zelk12/MT3-gemma-2-9B-Q6_K-GGUF", MessagesFormatterType.ALPACA],
|
81 |
+
"NeuralDaredevil-SuperNova-Lite-7B-DARETIES-abliterated.Q5_K_M.gguf": ["mradermacher/NeuralDaredevil-SuperNova-Lite-7B-DARETIES-abliterated-GGUF", MessagesFormatterType.LLAMA_3],
|
82 |
+
"MadMix-Unleashed-12B.Q4_K_M.gguf": ["mradermacher/MadMix-Unleashed-12B-GGUF", MessagesFormatterType.MISTRAL],
|
83 |
+
"Gemma-2-Ataraxy-v4a-Advanced-9B.i1-Q4_K_M.gguf": ["mradermacher/Gemma-2-Ataraxy-v4a-Advanced-9B-i1-GGUF", MessagesFormatterType.ALPACA],
|
84 |
+
"writing-roleplay-20k-context-nemo-12b-v1.0.i1-Q4_K_M.gguf": ["mradermacher/writing-roleplay-20k-context-nemo-12b-v1.0-i1-GGUF", MessagesFormatterType.MISTRAL],
|
85 |
+
"GEMMA2-9b-Pollux-exp.Q4_K_M.gguf": ["mradermacher/GEMMA2-9b-Pollux-exp-GGUF", MessagesFormatterType.ALPACA],
|
86 |
+
"Gemma-2-Ataraxy-v4a-Advanced-9B.Q4_K_M.gguf": ["mradermacher/Gemma-2-Ataraxy-v4a-Advanced-9B-GGUF", MessagesFormatterType.ALPACA],
|
87 |
+
"llama-3.1-8b-titanfusion-mix-2.1-q4_k_m-imat.gguf": ["bunnycore/Llama-3.1-8B-TitanFusion-Mix-2.1-Q4_K_M-GGUF", MessagesFormatterType.LLAMA_3],
|
88 |
+
"Gemma-2-Ataraxy-v4-Advanced-9B.Q4_K_M.gguf": ["mradermacher/Gemma-2-Ataraxy-v4-Advanced-9B-GGUF", MessagesFormatterType.ALPACA],
|
89 |
+
"Gemma-2-9B-ArliAI-RPMax-v1.1.i1-Q4_K_S.gguf": ["mradermacher/Gemma-2-9B-ArliAI-RPMax-v1.1-i1-GGUF", MessagesFormatterType.ALPACA],
|
90 |
+
"SuperNeuralDreadDevil-8b.Q5_K_M.gguf": ["mradermacher/SuperNeuralDreadDevil-8b-GGUF", MessagesFormatterType.LLAMA_3],
|
91 |
+
"astral-fusion-neural-happy-l3.1-8b-q4_0.gguf": ["ZeroXClem/Astral-Fusion-Neural-Happy-L3.1-8B-Q4_0-GGUF", MessagesFormatterType.LLAMA_3],
|
92 |
+
"LexiMaid-L3-8B.Q5_K_M.gguf": ["mradermacher/LexiMaid-L3-8B-GGUF", MessagesFormatterType.LLAMA_3],
|
93 |
"ModeliCo-8B.i1-Q5_K_M.gguf": ["mradermacher/ModeliCo-8B-i1-GGUF", MessagesFormatterType.LLAMA_3],
|
94 |
"Llama3-8B-function-calling-dpo-slerp.i1-Q5_K_M.gguf": ["mradermacher/Llama3-8B-function-calling-dpo-slerp-i1-GGUF", MessagesFormatterType.LLAMA_3],
|
95 |
"Aspire1.2-8B-TIES.i1-Q5_K_M.gguf": ["mradermacher/Aspire1.2-8B-TIES-i1-GGUF", MessagesFormatterType.LLAMA_3],
|
lora_dict.json
CHANGED
@@ -4381,6 +4381,13 @@
|
|
4381 |
"https://civitai.com/models/577378",
|
4382 |
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/459bd20d-a9d6-4a0b-8947-7dcebc061c0f/width=450/19781986.jpeg"
|
4383 |
],
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
4384 |
"genshin_v4": [
|
4385 |
"hina_(genshin_impact) / sethos_(genshin_impact) / raiden_shogun_mitake",
|
4386 |
"Pony",
|
|
|
4381 |
"https://civitai.com/models/577378",
|
4382 |
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/459bd20d-a9d6-4a0b-8947-7dcebc061c0f/width=450/19781986.jpeg"
|
4383 |
],
|
4384 |
+
"genbaneko_v4_illustrious_uo_1024-000040": [
|
4385 |
+
"genbaneko / cat, headwear, hat, grey headwear, baseball cap, / speech bubble, speech text,",
|
4386 |
+
"SDXL 1.0",
|
4387 |
+
"Shigotoneko(Genbaneko) Style - illustrious | \u4ed5\u4e8b\u732b\uff08\u73fe\u5834\u732b\uff09",
|
4388 |
+
"https://civitai.com/models/859355",
|
4389 |
+
"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/0f145509-d867-418c-b545-0c0e49275f48/width=450/34849585.jpeg"
|
4390 |
+
],
|
4391 |
"genshin_v4": [
|
4392 |
"hina_(genshin_impact) / sethos_(genshin_impact) / raiden_shogun_mitake",
|
4393 |
"Pony",
|
modutils.py
CHANGED
@@ -2,11 +2,16 @@ import spaces
|
|
2 |
import json
|
3 |
import gradio as gr
|
4 |
import os
|
|
|
5 |
from pathlib import Path
|
6 |
from PIL import Image
|
7 |
-
|
|
|
|
|
|
|
8 |
import urllib.parse
|
9 |
-
import
|
|
|
10 |
|
11 |
|
12 |
from env import (HF_LORA_PRIVATE_REPOS1, HF_LORA_PRIVATE_REPOS2,
|
@@ -38,7 +43,6 @@ def list_sub(a, b):
|
|
38 |
|
39 |
|
40 |
def is_repo_name(s):
|
41 |
-
import re
|
42 |
return re.fullmatch(r'^[^/]+?/[^/]+?$', s)
|
43 |
|
44 |
|
@@ -99,10 +103,12 @@ def download_hf_file(directory, url, progress=gr.Progress(track_tqdm=True)):
|
|
99 |
repo_id, filename, subfolder, repo_type = split_hf_url(url)
|
100 |
try:
|
101 |
print(f"Downloading {url} to {directory}")
|
102 |
-
if subfolder is not None: hf_hub_download(repo_id=repo_id, filename=filename, subfolder=subfolder, repo_type=repo_type, local_dir=directory, token=hf_token)
|
103 |
-
else: hf_hub_download(repo_id=repo_id, filename=filename, repo_type=repo_type, local_dir=directory, token=hf_token)
|
|
|
104 |
except Exception as e:
|
105 |
print(f"Failed to download: {e}")
|
|
|
106 |
|
107 |
|
108 |
def download_things(directory, url, hf_token="", civitai_api_key=""):
|
@@ -224,7 +230,6 @@ def save_gallery_images(images, progress=gr.Progress(track_tqdm=True)):
|
|
224 |
|
225 |
|
226 |
def download_private_repo(repo_id, dir_path, is_replace):
|
227 |
-
from huggingface_hub import snapshot_download
|
228 |
if not hf_read_token: return
|
229 |
try:
|
230 |
snapshot_download(repo_id=repo_id, local_dir=dir_path, allow_patterns=['*.ckpt', '*.pt', '*.pth', '*.safetensors', '*.bin'], use_auth_token=hf_read_token)
|
@@ -263,7 +268,6 @@ def get_private_model_list(repo_id, dir_path):
|
|
263 |
|
264 |
|
265 |
def download_private_file(repo_id, path, is_replace):
|
266 |
-
from huggingface_hub import hf_hub_download
|
267 |
file = Path(path)
|
268 |
newpath = Path(f'{file.parent.name}/{escape_lora_basename(file.stem)}{file.suffix}') if is_replace else file
|
269 |
if not hf_read_token or newpath.exists(): return
|
@@ -387,7 +391,9 @@ except Exception as e:
|
|
387 |
loras_dict = {"None": ["", "", "", "", ""], "": ["", "", "", "", ""]} | private_lora_dict.copy()
|
388 |
civitai_not_exists_list = []
|
389 |
loras_url_to_path_dict = {} # {"URL to download": "local filepath", ...}
|
390 |
-
|
|
|
|
|
391 |
all_lora_list = []
|
392 |
|
393 |
|
@@ -411,9 +417,6 @@ private_lora_model_list = get_private_lora_model_lists()
|
|
411 |
|
412 |
def get_civitai_info(path):
|
413 |
global civitai_not_exists_list
|
414 |
-
import requests
|
415 |
-
from urllib3.util import Retry
|
416 |
-
from requests.adapters import HTTPAdapter
|
417 |
if path in set(civitai_not_exists_list): return ["", "", "", "", ""]
|
418 |
if not Path(path).exists(): return None
|
419 |
user_agent = get_user_agent()
|
@@ -448,7 +451,7 @@ def get_civitai_info(path):
|
|
448 |
|
449 |
|
450 |
def get_lora_model_list():
|
451 |
-
loras = list_uniq(get_private_lora_model_lists() + get_local_model_list(directory_loras)
|
452 |
loras.insert(0, "None")
|
453 |
loras.insert(0, "")
|
454 |
return loras
|
@@ -523,7 +526,6 @@ def download_lora(dl_urls: str):
|
|
523 |
|
524 |
|
525 |
def copy_lora(path: str, new_path: str):
|
526 |
-
import shutil
|
527 |
if path == new_path: return new_path
|
528 |
cpath = Path(path)
|
529 |
npath = Path(new_path)
|
@@ -587,7 +589,6 @@ def get_valid_lora_path(query: str):
|
|
587 |
|
588 |
|
589 |
def get_valid_lora_wt(prompt: str, lora_path: str, lora_wt: float):
|
590 |
-
import re
|
591 |
wt = lora_wt
|
592 |
result = re.findall(f'<lora:{to_lora_key(lora_path)}:(.+?)>', prompt)
|
593 |
if not result: return wt
|
@@ -596,7 +597,6 @@ def get_valid_lora_wt(prompt: str, lora_path: str, lora_wt: float):
|
|
596 |
|
597 |
|
598 |
def set_prompt_loras(prompt, prompt_syntax, model_name, lora1, lora1_wt, lora2, lora2_wt, lora3, lora3_wt, lora4, lora4_wt, lora5, lora5_wt):
|
599 |
-
import re
|
600 |
if not "Classic" in str(prompt_syntax): return lora1, lora1_wt, lora2, lora2_wt, lora3, lora3_wt, lora4, lora4_wt, lora5, lora5_wt
|
601 |
lora1 = get_valid_lora_name(lora1, model_name)
|
602 |
lora2 = get_valid_lora_name(lora2, model_name)
|
@@ -716,7 +716,6 @@ def apply_lora_prompt(prompt: str = "", lora_info: str = ""):
|
|
716 |
|
717 |
|
718 |
def update_loras(prompt, prompt_syntax, lora1, lora1_wt, lora2, lora2_wt, lora3, lora3_wt, lora4, lora4_wt, lora5, lora5_wt):
|
719 |
-
import re
|
720 |
on1, label1, tag1, md1 = get_lora_info(lora1)
|
721 |
on2, label2, tag2, md2 = get_lora_info(lora2)
|
722 |
on3, label3, tag3, md3 = get_lora_info(lora3)
|
@@ -763,7 +762,6 @@ def update_loras(prompt, prompt_syntax, lora1, lora1_wt, lora2, lora2_wt, lora3,
|
|
763 |
|
764 |
|
765 |
def get_my_lora(link_url):
|
766 |
-
from pathlib import Path
|
767 |
before = get_local_model_list(directory_loras)
|
768 |
for url in [url.strip() for url in link_url.split(',')]:
|
769 |
if not Path(f"{directory_loras}/{url.split('/')[-1]}").exists():
|
@@ -800,7 +798,6 @@ def upload_file_lora(files, progress=gr.Progress(track_tqdm=True)):
|
|
800 |
|
801 |
|
802 |
def move_file_lora(filepaths):
|
803 |
-
import shutil
|
804 |
for file in filepaths:
|
805 |
path = Path(shutil.move(Path(file).resolve(), Path(f"./{directory_loras}").resolve()))
|
806 |
newpath = Path(f'{path.parent.name}/{escape_lora_basename(path.stem)}{path.suffix}')
|
@@ -823,11 +820,13 @@ def move_file_lora(filepaths):
|
|
823 |
)
|
824 |
|
825 |
|
|
|
|
|
|
|
|
|
|
|
826 |
def get_civitai_info(path):
|
827 |
global civitai_not_exists_list, loras_url_to_path_dict
|
828 |
-
import requests
|
829 |
-
from requests.adapters import HTTPAdapter
|
830 |
-
from urllib3.util import Retry
|
831 |
default = ["", "", "", "", ""]
|
832 |
if path in set(civitai_not_exists_list): return default
|
833 |
if not Path(path).exists(): return None
|
@@ -865,16 +864,14 @@ def get_civitai_info(path):
|
|
865 |
|
866 |
|
867 |
def search_lora_on_civitai(query: str, allow_model: list[str] = ["Pony", "SDXL 1.0"], limit: int = 100,
|
868 |
-
sort: str = "Highest Rated", period: str = "AllTime", tag: str = ""):
|
869 |
-
import requests
|
870 |
-
from requests.adapters import HTTPAdapter
|
871 |
-
from urllib3.util import Retry
|
872 |
user_agent = get_user_agent()
|
873 |
headers = {'User-Agent': user_agent, 'content-type': 'application/json'}
|
874 |
base_url = 'https://civitai.com/api/v1/models'
|
875 |
-
params = {'types': ['LORA'], 'sort': sort, 'period': period, 'limit': limit, 'nsfw': 'true'}
|
876 |
if query: params["query"] = query
|
877 |
if tag: params["tag"] = tag
|
|
|
878 |
session = requests.Session()
|
879 |
retries = Retry(total=5, backoff_factor=1, status_forcelist=[500, 502, 503, 504])
|
880 |
session.mount("https://", HTTPAdapter(max_retries=retries))
|
@@ -891,46 +888,129 @@ def search_lora_on_civitai(query: str, allow_model: list[str] = ["Pony", "SDXL 1
|
|
891 |
for j in json['items']:
|
892 |
for model in j['modelVersions']:
|
893 |
item = {}
|
894 |
-
if model['baseModel'] not in set(allow_model): continue
|
895 |
item['name'] = j['name']
|
896 |
-
item['creator'] = j['creator']['username']
|
897 |
-
item['tags'] = j['tags']
|
898 |
-
item['model_name'] = model['name']
|
899 |
-
item['base_model'] = model['baseModel']
|
|
|
900 |
item['dl_url'] = model['downloadUrl']
|
901 |
-
item['md'] =
|
|
|
|
|
|
|
|
|
|
|
|
|
902 |
items.append(item)
|
903 |
return items
|
904 |
|
905 |
|
906 |
-
def search_civitai_lora(query, base_model, sort=
|
907 |
-
global
|
908 |
-
|
|
|
|
|
|
|
909 |
if not items: return gr.update(choices=[("", "")], value="", visible=False),\
|
910 |
-
gr.update(value="", visible=False), gr.update(visible=True), gr.update(visible=True)
|
911 |
-
|
912 |
choices = []
|
|
|
913 |
for item in items:
|
914 |
base_model_name = "Pony🐴" if item['base_model'] == "Pony" else item['base_model']
|
915 |
name = f"{item['name']} (for {base_model_name} / By: {item['creator']} / Tags: {', '.join(item['tags'])})"
|
916 |
value = item['dl_url']
|
917 |
choices.append((name, value))
|
918 |
-
|
|
|
919 |
if not choices: return gr.update(choices=[("", "")], value="", visible=False),\
|
920 |
-
gr.update(value="", visible=False), gr.update(visible=True), gr.update(visible=True)
|
921 |
-
|
|
|
|
|
922 |
md = result['md'] if result else ""
|
923 |
return gr.update(choices=choices, value=choices[0][1], visible=True), gr.update(value=md, visible=True),\
|
924 |
-
gr.update(visible=True), gr.update(visible=True)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
925 |
|
926 |
|
927 |
def select_civitai_lora(search_result):
|
928 |
if not "http" in search_result: return gr.update(value=""), gr.update(value="None", visible=True)
|
929 |
-
result =
|
930 |
md = result['md'] if result else ""
|
931 |
return gr.update(value=search_result), gr.update(value=md, visible=True)
|
932 |
|
933 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
934 |
LORA_BASE_MODEL_DICT = {
|
935 |
"diffusers:StableDiffusionPipeline": ["SD 1.5"],
|
936 |
"diffusers:StableDiffusionXLPipeline": ["Pony", "SDXL 1.0"],
|
@@ -1175,15 +1255,6 @@ preset_quality = {k["name"]: (k["prompt"], k["negative_prompt"]) for k in qualit
|
|
1175 |
|
1176 |
|
1177 |
def process_style_prompt(prompt: str, neg_prompt: str, styles_key: str = "None", quality_key: str = "None", type: str = "Auto"):
|
1178 |
-
def to_list(s):
|
1179 |
-
return [x.strip() for x in s.split(",") if not s == ""]
|
1180 |
-
|
1181 |
-
def list_sub(a, b):
|
1182 |
-
return [e for e in a if e not in b]
|
1183 |
-
|
1184 |
-
def list_uniq(l):
|
1185 |
-
return sorted(set(l), key=l.index)
|
1186 |
-
|
1187 |
animagine_ps = to_list("anime artwork, anime style, vibrant, studio anime, highly detailed, masterpiece, best quality, very aesthetic, absurdres")
|
1188 |
animagine_nps = to_list("lowres, (bad), text, error, fewer, extra, missing, worst quality, jpeg artifacts, low quality, watermark, unfinished, displeasing, oldest, early, chromatic aberration, signature, extra digits, artistic error, username, scan, [abstract]")
|
1189 |
pony_ps = to_list("source_anime, score_9, score_8_up, score_7_up, masterpiece, best quality, very aesthetic, absurdres")
|
@@ -1335,7 +1406,6 @@ def set_textual_inversion_prompt(textual_inversion_gui, prompt_gui, neg_prompt_g
|
|
1335 |
|
1336 |
|
1337 |
def get_model_pipeline(repo_id: str):
|
1338 |
-
from huggingface_hub import HfApi
|
1339 |
api = HfApi(token=HF_TOKEN)
|
1340 |
default = "StableDiffusionPipeline"
|
1341 |
try:
|
|
|
2 |
import json
|
3 |
import gradio as gr
|
4 |
import os
|
5 |
+
import re
|
6 |
from pathlib import Path
|
7 |
from PIL import Image
|
8 |
+
import shutil
|
9 |
+
import requests
|
10 |
+
from requests.adapters import HTTPAdapter
|
11 |
+
from urllib3.util import Retry
|
12 |
import urllib.parse
|
13 |
+
import pandas as pd
|
14 |
+
from huggingface_hub import HfApi, HfFolder, hf_hub_download, snapshot_download
|
15 |
|
16 |
|
17 |
from env import (HF_LORA_PRIVATE_REPOS1, HF_LORA_PRIVATE_REPOS2,
|
|
|
43 |
|
44 |
|
45 |
def is_repo_name(s):
|
|
|
46 |
return re.fullmatch(r'^[^/]+?/[^/]+?$', s)
|
47 |
|
48 |
|
|
|
103 |
repo_id, filename, subfolder, repo_type = split_hf_url(url)
|
104 |
try:
|
105 |
print(f"Downloading {url} to {directory}")
|
106 |
+
if subfolder is not None: path = hf_hub_download(repo_id=repo_id, filename=filename, subfolder=subfolder, repo_type=repo_type, local_dir=directory, token=hf_token)
|
107 |
+
else: path = hf_hub_download(repo_id=repo_id, filename=filename, repo_type=repo_type, local_dir=directory, token=hf_token)
|
108 |
+
return path
|
109 |
except Exception as e:
|
110 |
print(f"Failed to download: {e}")
|
111 |
+
return None
|
112 |
|
113 |
|
114 |
def download_things(directory, url, hf_token="", civitai_api_key=""):
|
|
|
230 |
|
231 |
|
232 |
def download_private_repo(repo_id, dir_path, is_replace):
|
|
|
233 |
if not hf_read_token: return
|
234 |
try:
|
235 |
snapshot_download(repo_id=repo_id, local_dir=dir_path, allow_patterns=['*.ckpt', '*.pt', '*.pth', '*.safetensors', '*.bin'], use_auth_token=hf_read_token)
|
|
|
268 |
|
269 |
|
270 |
def download_private_file(repo_id, path, is_replace):
|
|
|
271 |
file = Path(path)
|
272 |
newpath = Path(f'{file.parent.name}/{escape_lora_basename(file.stem)}{file.suffix}') if is_replace else file
|
273 |
if not hf_read_token or newpath.exists(): return
|
|
|
391 |
loras_dict = {"None": ["", "", "", "", ""], "": ["", "", "", "", ""]} | private_lora_dict.copy()
|
392 |
civitai_not_exists_list = []
|
393 |
loras_url_to_path_dict = {} # {"URL to download": "local filepath", ...}
|
394 |
+
civitai_last_results = {} # {"URL to download": {search results}, ...}
|
395 |
+
civitai_last_choices = [("", "")]
|
396 |
+
civitai_last_gallery = []
|
397 |
all_lora_list = []
|
398 |
|
399 |
|
|
|
417 |
|
418 |
def get_civitai_info(path):
|
419 |
global civitai_not_exists_list
|
|
|
|
|
|
|
420 |
if path in set(civitai_not_exists_list): return ["", "", "", "", ""]
|
421 |
if not Path(path).exists(): return None
|
422 |
user_agent = get_user_agent()
|
|
|
451 |
|
452 |
|
453 |
def get_lora_model_list():
|
454 |
+
loras = list_uniq(get_private_lora_model_lists() + DIFFUSERS_FORMAT_LORAS + get_local_model_list(directory_loras))
|
455 |
loras.insert(0, "None")
|
456 |
loras.insert(0, "")
|
457 |
return loras
|
|
|
526 |
|
527 |
|
528 |
def copy_lora(path: str, new_path: str):
|
|
|
529 |
if path == new_path: return new_path
|
530 |
cpath = Path(path)
|
531 |
npath = Path(new_path)
|
|
|
589 |
|
590 |
|
591 |
def get_valid_lora_wt(prompt: str, lora_path: str, lora_wt: float):
|
|
|
592 |
wt = lora_wt
|
593 |
result = re.findall(f'<lora:{to_lora_key(lora_path)}:(.+?)>', prompt)
|
594 |
if not result: return wt
|
|
|
597 |
|
598 |
|
599 |
def set_prompt_loras(prompt, prompt_syntax, model_name, lora1, lora1_wt, lora2, lora2_wt, lora3, lora3_wt, lora4, lora4_wt, lora5, lora5_wt):
|
|
|
600 |
if not "Classic" in str(prompt_syntax): return lora1, lora1_wt, lora2, lora2_wt, lora3, lora3_wt, lora4, lora4_wt, lora5, lora5_wt
|
601 |
lora1 = get_valid_lora_name(lora1, model_name)
|
602 |
lora2 = get_valid_lora_name(lora2, model_name)
|
|
|
716 |
|
717 |
|
718 |
def update_loras(prompt, prompt_syntax, lora1, lora1_wt, lora2, lora2_wt, lora3, lora3_wt, lora4, lora4_wt, lora5, lora5_wt):
|
|
|
719 |
on1, label1, tag1, md1 = get_lora_info(lora1)
|
720 |
on2, label2, tag2, md2 = get_lora_info(lora2)
|
721 |
on3, label3, tag3, md3 = get_lora_info(lora3)
|
|
|
762 |
|
763 |
|
764 |
def get_my_lora(link_url):
|
|
|
765 |
before = get_local_model_list(directory_loras)
|
766 |
for url in [url.strip() for url in link_url.split(',')]:
|
767 |
if not Path(f"{directory_loras}/{url.split('/')[-1]}").exists():
|
|
|
798 |
|
799 |
|
800 |
def move_file_lora(filepaths):
|
|
|
801 |
for file in filepaths:
|
802 |
path = Path(shutil.move(Path(file).resolve(), Path(f"./{directory_loras}").resolve()))
|
803 |
newpath = Path(f'{path.parent.name}/{escape_lora_basename(path.stem)}{path.suffix}')
|
|
|
820 |
)
|
821 |
|
822 |
|
823 |
+
CIVITAI_SORT = ["Highest Rated", "Most Downloaded", "Newest"]
|
824 |
+
CIVITAI_PERIOD = ["AllTime", "Year", "Month", "Week", "Day"]
|
825 |
+
CIVITAI_BASEMODEL = ["Pony", "SD 1.5", "SDXL 1.0", "Flux.1 D", "Flux.1 S"]
|
826 |
+
|
827 |
+
|
828 |
def get_civitai_info(path):
|
829 |
global civitai_not_exists_list, loras_url_to_path_dict
|
|
|
|
|
|
|
830 |
default = ["", "", "", "", ""]
|
831 |
if path in set(civitai_not_exists_list): return default
|
832 |
if not Path(path).exists(): return None
|
|
|
864 |
|
865 |
|
866 |
def search_lora_on_civitai(query: str, allow_model: list[str] = ["Pony", "SDXL 1.0"], limit: int = 100,
|
867 |
+
sort: str = "Highest Rated", period: str = "AllTime", tag: str = "", user: str = "", page: int = 1):
|
|
|
|
|
|
|
868 |
user_agent = get_user_agent()
|
869 |
headers = {'User-Agent': user_agent, 'content-type': 'application/json'}
|
870 |
base_url = 'https://civitai.com/api/v1/models'
|
871 |
+
params = {'types': ['LORA'], 'sort': sort, 'period': period, 'limit': limit, 'page': int(page), 'nsfw': 'true'}
|
872 |
if query: params["query"] = query
|
873 |
if tag: params["tag"] = tag
|
874 |
+
if user: params["username"] = user
|
875 |
session = requests.Session()
|
876 |
retries = Retry(total=5, backoff_factor=1, status_forcelist=[500, 502, 503, 504])
|
877 |
session.mount("https://", HTTPAdapter(max_retries=retries))
|
|
|
888 |
for j in json['items']:
|
889 |
for model in j['modelVersions']:
|
890 |
item = {}
|
891 |
+
if len(allow_model) != 0 and model['baseModel'] not in set(allow_model): continue
|
892 |
item['name'] = j['name']
|
893 |
+
item['creator'] = j['creator']['username'] if 'creator' in j.keys() and 'username' in j['creator'].keys() else ""
|
894 |
+
item['tags'] = j['tags'] if 'tags' in j.keys() else []
|
895 |
+
item['model_name'] = model['name'] if 'name' in model.keys() else ""
|
896 |
+
item['base_model'] = model['baseModel'] if 'baseModel' in model.keys() else ""
|
897 |
+
item['description'] = model['description'] if 'description' in model.keys() else ""
|
898 |
item['dl_url'] = model['downloadUrl']
|
899 |
+
item['md'] = ""
|
900 |
+
if 'images' in model.keys() and len(model["images"]) != 0:
|
901 |
+
item['img_url'] = model["images"][0]["url"]
|
902 |
+
item['md'] += f'<img src="{model["images"][0]["url"]}#float" alt="thumbnail" width="150" height="240"><br>'
|
903 |
+
else: item['img_url'] = "/home/user/app/null.png"
|
904 |
+
item['md'] += f'''Model URL: [https://civitai.com/models/{j["id"]}](https://civitai.com/models/{j["id"]})<br>Model Name: {item["name"]}<br>
|
905 |
+
Creator: {item["creator"]}<br>Tags: {", ".join(item["tags"])}<br>Base Model: {item["base_model"]}<br>Description: {item["description"]}'''
|
906 |
items.append(item)
|
907 |
return items
|
908 |
|
909 |
|
910 |
+
def search_civitai_lora(query, base_model=None, sort=CIVITAI_SORT[0], period=CIVITAI_PERIOD[0], tag="", user="", gallery=None):
    """Search Civitai for LoRA models and refresh the related Gradio widgets.

    Queries the Civitai API via search_lora_on_civitai() and rebuilds the
    module-level caches (civitai_last_results / civitai_last_choices /
    civitai_last_gallery) that the gallery-selection handler reads later.

    Returns a 5-tuple of gr.update()s:
    (result dropdown, info markdown, two visibility toggles, gallery).
    """
    global civitai_last_results, civitai_last_choices, civitai_last_gallery
    # None sentinels instead of mutable default arguments ([] defaults are
    # shared across calls in Python).
    if base_model is None: base_model = []
    # Reset the caches up front so a failed search never serves stale data.
    civitai_last_choices = [("", "")]
    civitai_last_gallery = []
    civitai_last_results = {}
    items = search_lora_on_civitai(query, base_model, 100, sort, period, tag, user)
    if not items:
        return gr.update(choices=[("", "")], value="", visible=False),\
            gr.update(value="", visible=False), gr.update(visible=True), gr.update(visible=True), gr.update(visible=True)
    choices = []
    gallery = []
    for item in items:
        base_model_name = "Pony🐴" if item['base_model'] == "Pony" else item['base_model']
        name = f"{item['name']} (for {base_model_name} / By: {item['creator']} / Tags: {', '.join(item['tags'])})"
        value = item['dl_url']
        choices.append((name, value))
        gallery.append((item['img_url'], name))
        civitai_last_results[value] = item
    if not choices:
        return gr.update(choices=[("", "")], value="", visible=False),\
            gr.update(value="", visible=False), gr.update(visible=True), gr.update(visible=True), gr.update(visible=True)
    civitai_last_choices = choices
    civitai_last_gallery = gallery
    # Miss sentinel is None (falsy), not the string "None": a truthy str
    # default would crash on result['md'] below if the key were ever absent.
    result = civitai_last_results.get(choices[0][1])
    md = result['md'] if result else ""
    return gr.update(choices=choices, value=choices[0][1], visible=True), gr.update(value=md, visible=True),\
        gr.update(visible=True), gr.update(visible=True), gr.update(value=gallery)
|
936 |
+
|
937 |
+
|
938 |
+
def update_civitai_selection(evt: gr.SelectData):
    """Mirror a gallery click into the search-result dropdown."""
    try:
        _, dl_url = civitai_last_choices[evt.index]
        return gr.update(value=dl_url)
    except Exception:
        # Stale cache or out-of-range index: leave the dropdown untouched.
        return gr.update(visible=True)
|
945 |
|
946 |
|
947 |
def select_civitai_lora(search_result):
    """Reflect a dropdown selection into the URL textbox and info markdown.

    *search_result* is the selected download URL; any value that does not
    contain "http" is treated as no selection.
    """
    if "http" not in search_result:
        return gr.update(value=""), gr.update(value="None", visible=True)
    # BUG FIX: default must be None (falsy), not the string "None" — on a
    # cache miss the truthy str default made result['md'] raise TypeError.
    result = civitai_last_results.get(search_result)
    md = result['md'] if result else ""
    return gr.update(value=search_result), gr.update(value=md, visible=True)
|
952 |
|
953 |
|
954 |
+
def download_my_lora_flux(dl_urls: str, lora):
    """Download LoRA file(s) from *dl_urls*, then refresh the LoRA dropdown.

    Keeps the current *lora* selection when the download yields no path.
    """
    downloaded_path = download_lora(dl_urls)
    selected = downloaded_path if downloaded_path else lora
    return gr.update(value=selected, choices=get_all_lora_tupled_list())
|
959 |
+
|
960 |
+
|
961 |
+
def apply_lora_prompt_flux(lora_info: str):
    """Build a comma-joined trigger-word prompt from a "/"-separated LoRA info string.

    Returns "" when there is no info (the literal string "None").
    """
    if lora_info == "None": return ""
    # The original re-checked `str(lora_info) != "None"` below the early
    # return above — that branch was unreachable and has been dropped.
    lora_tags = lora_info.replace("/", ",").split(",")
    lora_prompts = normalize_prompt_list(lora_tags)
    return ", ".join(list_uniq(lora_prompts))
|
968 |
+
|
969 |
+
|
970 |
+
def update_loras_flux(prompt, lora, lora_wt):
    """Refresh the Flux LoRA widgets after a selection change."""
    enabled, label, tag, md = get_lora_info(lora)
    lora_choices = get_all_lora_tupled_list()
    return (gr.update(value=prompt),
            gr.update(value=lora, choices=lora_choices),
            gr.update(value=lora_wt),
            gr.update(value=tag, label=label, visible=enabled),
            gr.update(value=md, visible=enabled))
|
975 |
+
|
976 |
+
|
977 |
+
def search_civitai_lora_json(query, base_model):
    """Search Civitai for LoRAs and return the raw results keyed by download URL."""
    items = search_lora_on_civitai(query, base_model)
    results = {item['dl_url']: item for item in items} if items else {}
    return gr.update(value=results)
|
984 |
+
|
985 |
+
|
986 |
+
def get_civitai_tag():
    """Fetch up to 200 tag names from the Civitai tags API.

    Returns [""] followed by tag names sorted by model count (descending);
    returns just [""] on any network or API failure.
    """
    default = [""]
    user_agent = get_user_agent()
    headers = {'User-Agent': user_agent, 'content-type': 'application/json'}
    base_url = 'https://civitai.com/api/v1/tags'
    params = {'limit': 200}
    session = requests.Session()
    # Retry transient 5xx server errors with exponential backoff.
    retries = Retry(total=5, backoff_factor=1, status_forcelist=[500, 502, 503, 504])
    session.mount("https://", HTTPAdapter(max_retries=retries))
    try:
        r = session.get(base_url, params=params, headers=headers, stream=True, timeout=(3.0, 15))
        if not r.ok: return default
        j = dict(r.json()).copy()
        if "items" not in j.keys(): return default
        items = [[str(item.get("name", "")), int(item.get("modelCount", 0))] for item in j["items"]]
        if not items: return default  # empty DataFrame would make sort_values raise
        df = pd.DataFrame(items)
        # BUG FIX: sort_values() returns a new DataFrame; the original call
        # discarded the result, so the tags were never actually sorted.
        df = df.sort_values(1, ascending=False)
        return [""] + [row[0] for row in df.values.tolist()]
    except Exception as e:
        print(e)
        return default
|
1012 |
+
|
1013 |
+
|
1014 |
LORA_BASE_MODEL_DICT = {
|
1015 |
"diffusers:StableDiffusionPipeline": ["SD 1.5"],
|
1016 |
"diffusers:StableDiffusionXLPipeline": ["Pony", "SDXL 1.0"],
|
|
|
1255 |
|
1256 |
|
1257 |
def process_style_prompt(prompt: str, neg_prompt: str, styles_key: str = "None", quality_key: str = "None", type: str = "Auto"):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1258 |
animagine_ps = to_list("anime artwork, anime style, vibrant, studio anime, highly detailed, masterpiece, best quality, very aesthetic, absurdres")
|
1259 |
animagine_nps = to_list("lowres, (bad), text, error, fewer, extra, missing, worst quality, jpeg artifacts, low quality, watermark, unfinished, displeasing, oldest, early, chromatic aberration, signature, extra digits, artistic error, username, scan, [abstract]")
|
1260 |
pony_ps = to_list("source_anime, score_9, score_8_up, score_7_up, masterpiece, best quality, very aesthetic, absurdres")
|
|
|
1406 |
|
1407 |
|
1408 |
def get_model_pipeline(repo_id: str):
|
|
|
1409 |
api = HfApi(token=HF_TOKEN)
|
1410 |
default = "StableDiffusionPipeline"
|
1411 |
try:
|
null.png
ADDED