Update src/webui.py
src/webui.py  (+53 -17)
@@ -21,6 +21,35 @@ def get_current_models(models_dir):
     items_to_remove = ['hubert_base.pt', 'MODELS.txt', 'public_models.json', 'rmvpe.pt']
     return [item for item in models_list if item not in items_to_remove]

+def filter_records(query):
+    # Ensure the query is not empty
+    if not query:
+        return [["Error", "Error", "Error", "Error"]]
+
+    response = requests.get(
+        "https://huggingface.co/api/models",
+        params={"search": query, "filter": "", "sort": "downloads", "direction": "-1", "limit": "20", "full": "True"},
+    )
+
+    if response.status_code == 200:
+        response_json = response.json()
+        models = []
+
+        for model in response_json:
+            if "rvc" in model["tags"] or "huggingartists" in model["tags"]:
+                for file in model.get("siblings", []):
+                    if file.get("rfilename", "").endswith(".pth"):
+                        models.append([
+                            model.get("id").split("/")[-1],
+                            model.get("author"),
+                            model.get("downloads"),
+                            f"https://huggingface.co/{model.get('id')}/resolve/main/{file.get('rfilename')}",
+                        ])
+
+        return models
+
+    else:
+        return [["Error", "Error", "Error", "Error"]]

 def update_models_list():
     models_l = get_current_models(rvc_models_dir)
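Note on the new helper: filter_records queries the public https://huggingface.co/api/models endpoint and assumes requests is already imported at the top of src/webui.py. The following is a minimal standalone sketch of the same query for trying the search outside the web UI; the search_rvc_models name, the timeout, and the raise_for_status() check are illustrative additions and not part of this commit.

# Standalone sketch of the Hub API query used by filter_records (helper name is illustrative).
import requests

def search_rvc_models(query: str, limit: int = 20):
    """Return [name, author, downloads, download URL] rows for .pth checkpoints."""
    response = requests.get(
        "https://huggingface.co/api/models",
        params={"search": query, "sort": "downloads", "direction": "-1",
                "limit": str(limit), "full": "True"},
        timeout=30,  # not in the commit; added so a hung request cannot block the sketch
    )
    response.raise_for_status()

    rows = []
    for model in response.json():
        tags = model.get("tags", [])
        if "rvc" in tags or "huggingartists" in tags:
            for file in model.get("siblings", []):
                filename = file.get("rfilename", "")
                if filename.endswith(".pth"):
                    rows.append([
                        model["id"].split("/")[-1],
                        model.get("author"),
                        model.get("downloads"),
                        f"https://huggingface.co/{model['id']}/resolve/main/{filename}",
                    ])
    return rows

if __name__ == "__main__":
    for row in search_rvc_models("RVC"):
        print(row)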
@@ -174,25 +203,32 @@ if __name__ == '__main__':
         )
         gr.Markdown("Duplicate the old CPU space for use in private: [](https://huggingface.co/spaces/r3gm/AICoverGen_old_stable_cpu?duplicate=true)\n\n")

-        with gr.Tab("
-            #
-
-
-
-
-
-
-
-
-
+        with gr.Tab("Search"):
+            gr.Markdown("# Model Search")
+
+            with gr.Column():
+                with gr.Row():
+                    query_input = gr.Textbox(
+                        label="Search Query",
+                        placeholder="Enter model name or keyword...",
+                        lines=1,
+                        min_width=320
+                    )
+
+                with gr.Row():
+                    output_df = gr.Dataframe(
+                        headers=["name", "author", "downloads", "downloadURL"],
+                        datatype=["str", "str", "number", "str"],
+                        row_count=5,
+                        col_count=(4, "fixed"),
+                    )
+
+                query_input.submit(
+                    fn=filter_records,
+                    inputs=query_input,
+                    outputs=output_df
                 )

-            # Button to trigger the search
-            filter_btn = gr.Button("Search")
-
-            # Connect the button click to the filter_models function
-            filter_btn.click(fn=lambda query: filter_models([], query), inputs=model_query, outputs=model_display)
-

         with gr.Tab("Generate"):
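The Search tab follows a simple wiring pattern: pressing Enter in the query textbox calls filter_records and fills the dataframe with the returned rows. Below is a minimal self-contained sketch of that pattern; the stub_search function and the standalone Blocks app are illustrative stand-ins, while the widget arguments mirror the ones added in this commit.

# Minimal sketch of the Textbox -> Dataframe wiring used by the new Search tab.
# stub_search is a placeholder; the real app passes filter_records instead.
import gradio as gr

def stub_search(query):
    # Placeholder rows in the same [name, author, downloads, downloadURL] shape.
    if not query:
        return [["Error", "Error", "Error", "Error"]]
    return [[f"{query}-model", "someone", 123, "https://huggingface.co/..."]]

with gr.Blocks() as demo:
    with gr.Tab("Search"):
        gr.Markdown("# Model Search")
        with gr.Column():
            query_input = gr.Textbox(label="Search Query",
                                     placeholder="Enter model name or keyword...",
                                     lines=1)
            output_df = gr.Dataframe(headers=["name", "author", "downloads", "downloadURL"],
                                     datatype=["str", "str", "number", "str"],
                                     row_count=5,
                                     col_count=(4, "fixed"))
            # Pressing Enter in the textbox runs the search and fills the table.
            query_input.submit(fn=stub_search, inputs=query_input, outputs=output_df)

if __name__ == "__main__":
    demo.launch()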