Spaces:
Running
Running
File size: 4,008 Bytes
110e4da 5be238a 7fa96f1 110e4da 5be238a 110e4da 9f96495 e8675f9 9f96495 110e4da 26580dd 110e4da e8675f9 26580dd 89e3ff3 110e4da 89e3ff3 110e4da 7fa96f1 89e3ff3 110e4da 89e3ff3 7fa96f1 89e3ff3 7fa96f1 89e3ff3 7fa96f1 110e4da 89e3ff3 110e4da 9f96495 110e4da 9f96495 e8675f9 9f96495 e8675f9 9f96495 5be238a e8675f9 6832797 e8675f9 5be238a |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 |
import gradio as gr
import subprocess
import os
import shutil
from huggingface_hub import HfApi, snapshot_download
from gradio_huggingfacehub_search import HuggingfaceHubSearch
from apscheduler.schedulers.background import BackgroundScheduler
HF_TOKEN = os.environ.get("HF_TOKEN")
def process_model(
    model_id: str,
    file_path: str,
    file_path_dropdown: str,
    key: str,
    value: str,
    oauth_token: gr.OAuthToken | None,
):
    """Download one GGUF file from the Hub, rewrite a metadata key, re-upload it.

    Args:
        model_id: Hub repo id ("owner/name") that contains the GGUF file.
        file_path: Path of the file inside the repo (free-text input).
        file_path_dropdown: Path selected from the dropdown; takes precedence
            over ``file_path`` when non-empty.
        key: GGUF metadata key to update.
        value: New value for that key.
        oauth_token: OAuth token of the logged-in Gradio user; required.

    Returns:
        A human-readable status string (success or error description).

    Raises:
        ValueError: If the user is not logged in.
    """
    # Guard the token object itself as well as its value: `oauth_token` is
    # None (not an object with a None .token) when nobody is logged in, and
    # the original `oauth_token.token` dereference crashed with AttributeError.
    if oauth_token is None or oauth_token.token is None:
        raise ValueError("You must be logged in to use gguf-metadata-updater")
    print(f"Model ID: {model_id}")
    print(f"file_path: {file_path}")
    print(f"file_path_dropdown: {file_path_dropdown}")
    print(f"key: {key}")
    print(f"value: {value}")
    model_name = model_id.split("/")[-1]
    # The dropdown (populated from the repo's file listing) wins over the
    # free-text box when it is set.
    repo_file_path = file_path_dropdown if file_path_dropdown else file_path
    print(f"FILE_PATH: {repo_file_path}")
    try:
        api = HfApi(token=oauth_token.token)
        # BUG FIX: derive the display name from the path actually used, not
        # from the free-text box (they differ when the dropdown is active).
        file_name = repo_file_path.split("/")[-1]
        print("Starting download")
        api.snapshot_download(
            repo_id=model_id,
            allow_patterns=repo_file_path,
            local_dir=model_name,
        )
        print("Model downloaded successfully!")
        # List form with shell=False: model/key/value are user-controlled, so
        # a shell string would be an injection vector. check=True makes a
        # failed script run surface as an error instead of a false success.
        subprocess.run(
            [
                "python3",
                "llama.cpp/gguf-py/scripts/gguf_set_metadata.py",
                f"{model_name}/{repo_file_path}",
                key,
                value,
                "--force",
            ],
            check=True,
        )
        print(f"Model metadata {key} updated to {value} successfully!")
        print("Reuploading file")
        # Upload gguf files
        api.upload_folder(
            folder_path=model_name,
            commit_message=f"Updating {file_name} metadata {key} to {value}",
            repo_id=model_id,
            allow_patterns=["*.gguf"],
        )
        print("Uploaded successfully!")
        return "Processing complete"
    except Exception as e:
        return f"Something went wrong: {e}"
    finally:
        # Clean up the download on success AND failure so a failed run does
        # not leave a stale copy behind for the next request.
        shutil.rmtree(model_name, ignore_errors=True)
with gr.Blocks() as demo:
    gr.Markdown("You must be logged in to use GGUF metadata updater.")
    gr.LoginButton(min_width=250)
    model_id = HuggingfaceHubSearch(
        label="Hub Model ID",
        placeholder="Search for model id on Huggingface",
        search_type="model",
    )
    file_path = gr.Textbox(lines=1, label="File path")
    # Hidden until a repo is selected and its GGUF files can be listed;
    # then it replaces the free-text box (see update_file_path below).
    file_path_dropdown = gr.Dropdown(["None"], label="File", visible=False)
    key = gr.Textbox(lines=1, label="Key")
    value = gr.Textbox(lines=1, label="Value")
    iface = gr.Interface(
        fn=process_model,
        inputs=[model_id, file_path, file_path_dropdown, key, value],
        outputs=[
            gr.Markdown(label="output"),
        ],
        title="Update metadata for a GGUF file",
        description="The space takes an HF repo, a file within that repo, a metadata key, and new metadata value to update it to.",
        api_name=False,
    )

    def update_file_path(model_id: str):
        """List the repo's GGUF files and swap the free-text box for a dropdown.

        For sharded models (``...-NNNNN-of-NNNNN.gguf``) only the first shard
        is offered, since the metadata script is run against a single file.

        Returns:
            A pair of updates for (file_path textbox, file_path_dropdown):
            on success hide the textbox and show the dropdown populated with
            the GGUF paths; on any listing failure fall back to the textbox.
        """
        try:
            api = HfApi()
            files = []
            for repo_file in api.list_repo_tree(
                repo_id=model_id,
                recursive=True,
            ):
                # Require the real extension; bare endswith("gguf") also
                # matched names that merely end in the letters "gguf".
                if not repo_file.path.endswith(".gguf"):
                    continue
                # Skip every shard except the first of a sharded model.
                if "of-0000" in repo_file.path and "0001-of-" not in repo_file.path:
                    continue
                files.append(repo_file.path)
            return gr.update(visible=False), gr.update(visible=True, choices=files)
        except Exception:
            # Repo may be private or half-typed while the user searches;
            # quietly fall back to manual path entry.
            return gr.update(visible=True), gr.update(visible=False)

    model_id.change(
        fn=update_file_path, inputs=model_id, outputs=[file_path, file_path_dropdown]
    )
def restart_space():
    """Factory-reboot this Space so accumulated state/disk usage is cleared."""
    # NOTE(review): repo_id ends in "-updated" while the login error in
    # process_model says "-updater" -- confirm which spelling the Space uses.
    api = HfApi()
    api.restart_space(
        repo_id="bartowski/gguf-metadata-updated",
        token=HF_TOKEN,
        factory_reboot=True,
    )


# Schedule the reboot every 6 hours (21600 seconds).
scheduler = BackgroundScheduler()
scheduler.add_job(func=restart_space, trigger="interval", seconds=21600)
scheduler.start()

# Launch the interface
demo.queue(default_concurrency_limit=1, max_size=5).launch(debug=True, show_api=False)
|