Update app.py
app.py CHANGED
@@ -9,10 +9,18 @@ from apscheduler.schedulers.background import BackgroundScheduler
 
 HF_TOKEN = os.environ.get("HF_TOKEN")
 
-api = HfApi()
 
+def process_model(
+    model_id: str,
+    file_path: str,
+    key: str,
+    value: str,
+    oauth_token: gr.OAuthToken | None,
+):
+    if oauth_token.token is None:
+        raise ValueError("You must be logged in to use gguf-metadata-updater")
 
-def process_model(model_id: str, file_path: str, key: str, value: str, hf_token):
+    api = HfApi(token=oauth_token.token)
 
     MODEL_NAME = model_id.split("/")[-1]
 
@@ -33,29 +41,40 @@ def process_model(model_id: str, file_path: str, key: str, value: str, hf_token)
     api.upload_folder(
         folder_path=MODEL_NAME,
         repo_id=model_id,
-        allow_patterns=["*.gguf"
-        token=hf_token,
+        allow_patterns=["*.gguf"],
     )
     print("Uploaded successfully!")
 
     return "Processing complete."
 
 
-
-
-
-    inputs=[
-        gr.Textbox(lines=1, label="Model ID"),
-        gr.Textbox(lines=1, label="File path"),
-        gr.Textbox(lines=1, label="Key"),
-        gr.Textbox(lines=1, label="Value"),
-        gr.Textbox(lines=1, label="Token"),
-    ],
-    outputs="text",
-)
+with gr.Blocks() as demo:
+    gr.Markdown("You must be logged in to use GGUF metadata updated.")
+    gr.LoginButton(min_width=250)
 
-
-
+    model_id = HuggingfaceHubSearch(
+        label="Hub Model ID",
+        placeholder="Search for model id on Huggingface",
+        search_type="model",
+    )
+
+    file_path = gr.Textbox(lines=1, label="File path")
+
+    key = gr.Textbox(lines=1, label="Key")
+
+    value = gr.Textbox(lines=1, label="Value")
+
+    iface = gr.Interface(
+        fn=process_model,
+        inputs=[model_id, file_path, key, value],
+        outputs=[
+            gr.Markdown(label="output"),
+            gr.Image(show_label=False),
+        ],
+        title="Update metadata for a GGUF file",
+        description="The space takes an HF repo, a file within that repo, a metadata key, and new metadata value to update it to.",
+        api_name=False,
+    )
 
 
 def restart_space():
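Taken together, the two hunks move the Space from a pasted-token workflow to Hugging Face OAuth: process_model now takes a gr.OAuthToken | None argument that Gradio fills in for the logged-in visitor (which is why it does not appear in the inputs list), HfApi is built from that token, the model picker becomes a HuggingfaceHubSearch component, and the gr.Interface is created inside a gr.Blocks page that carries a gr.LoginButton. The upload change is smaller: the old call left the allow_patterns list unclosed and passed token=hf_token, while the new call closes the list and relies on the already-authenticated client. What follows is a minimal sketch of that wiring, not the Space's actual app.py: it assumes the Space enables OAuth (hf_oauth: true in the README metadata) and that HuggingfaceHubSearch comes from the gradio_huggingfacehub_search package, and it reduces the GGUF-editing body to a stub.

# Minimal sketch of the wiring this commit introduces; not the Space's full app.py.
import gradio as gr
from huggingface_hub import HfApi
from gradio_huggingfacehub_search import HuggingfaceHubSearch  # assumed package name


def process_model(
    model_id: str,
    file_path: str,
    key: str,
    value: str,
    oauth_token: gr.OAuthToken | None,  # injected by Gradio, not part of inputs
):
    # Gradio passes None here when the visitor has not logged in via the LoginButton.
    if oauth_token is None or oauth_token.token is None:
        raise ValueError("You must be logged in to use gguf-metadata-updater")
    api = HfApi(token=oauth_token.token)  # act on the Hub as the logged-in user
    # ... download the GGUF file, rewrite key to value, then re-upload with
    # api.upload_folder(folder_path=..., repo_id=model_id, allow_patterns=["*.gguf"]) ...
    return f"Would set {key}={value} in {model_id}/{file_path}"


with gr.Blocks() as demo:
    gr.Markdown("You must be logged in to use the GGUF metadata updater.")
    gr.LoginButton(min_width=250)  # starts the Hugging Face OAuth flow

    model_id = HuggingfaceHubSearch(
        label="Hub Model ID",
        placeholder="Search for model id on Huggingface",
        search_type="model",
    )
    file_path = gr.Textbox(lines=1, label="File path")
    key = gr.Textbox(lines=1, label="Key")
    value = gr.Textbox(lines=1, label="Value")

    # An Interface built inside a Blocks context is rendered into that page,
    # which is the pattern the commit uses.
    gr.Interface(
        fn=process_model,
        inputs=[model_id, file_path, key, value],
        outputs="text",
        title="Update metadata for a GGUF file",
        api_name=False,
    )

demo.launch()

One deliberate difference from the committed code: the sketch also checks whether oauth_token itself is None, since that is the value Gradio supplies when nobody is logged in, and calling oauth_token.token there would raise an AttributeError instead of the intended ValueError. Scoping HfApi to the OAuth token means the visitor logs in once with the button rather than pasting a token into a Textbox, and the uploads are made under the logged-in account.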