Spaces: Running on L4
Commit: added card
app.py CHANGED
@@ -8,8 +8,7 @@ import gradio as gr
 from huggingface_hub import HfApi
 from huggingface_hub import ModelCard
 
-from
-from apscheduler.schedulers.background import BackgroundScheduler
+from textwrap import dedent
 
 HF_PATH = "https://huggingface.co/"
 
@@ -92,12 +91,44 @@ def button_click(hf_model_id, conv_template, quantization, oauth_token: gr.OAuth
 
     # push to HF
     user_name = api.whoami()["name"]
-    api.create_repo(repo_id=f"{user_name}/{mlc_model_name}", private=True)
+    created_repo_url = api.create_repo(repo_id=f"{user_name}/{mlc_model_name}", private=True)
+    created_repo_id = created_repo_url.repo_id
 
     api.upload_large_folder(folder_path=f"./dist/{mlc_model_name}",
-
-
+                            repo_id=f"{user_name}/{mlc_model_name}",
+                            repo_type="model")
 
+    # push model card to HF
+    card = ModelCard.load(hf_model_id, token=oauth_token.token)
+    if not card.data.tags:
+        card.data.tags = []
+    card.data.tags.append("mlc-ai")
+    card.data.tags.append("MLC-Weight-Conversion")
+    card.data.base_model = hf_model_id
+
+    card.text = dedent(
+        f"""
+        # {created_repo_id}
+        This model was compiled using MLC-LLM with {quantization} quantization from [{hf_model_id}]({HF_PATH}{hf_model_id}).
+        The conversion was done using the [MLC-Weight-Conversion](https://huggingface.co/spaces/mlc-ai/MLC-Weight-Conversion) space.
+
+        To run this model, please first install [MLC-LLM](https://llm.mlc.ai/docs/install/mlc_llm.html#install-mlc-packages).
+
+        To chat with the model on your terminal:
+        ```bash
+        mlc_llm chat HF://{created_repo_id}
+        ```
+
+        For more information on how to use MLC-LLM, please visit the MLC-LLM [documentation](https://llm.mlc.ai/docs/index.html).
+        """
+    )
+    card.save("./dist/README.md")
+
+    api.upload_file(path_or_fileobj="./dist/README.md",
+                    path_in_repo="README.md",
+                    repo_id=created_repo_id,
+                    repo_type="model")
+
     os.system("rm -rf dist/")
 
     return "Successful"
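For context on the `created_repo_url.repo_id` change above: `HfApi.create_repo` returns a `RepoUrl` object, a `str` subclass that also exposes structured fields such as `repo_id` and `repo_type`, which is what lets the new code reuse the created repo's id in the later `upload_file` call. A minimal sketch of that behaviour, with a placeholder token and repo name (not taken from this Space):

```python
from huggingface_hub import HfApi

api = HfApi(token="hf_xxx")  # placeholder token

# create_repo returns a RepoUrl: it prints like the plain repo URL,
# but also carries structured attributes.
repo_url = api.create_repo(repo_id="your-username/demo-mlc-model",
                           private=True, exist_ok=True)

print(repo_url)            # https://huggingface.co/your-username/demo-mlc-model
print(repo_url.repo_id)    # your-username/demo-mlc-model
print(repo_url.repo_type)  # model

# The repo_id can then be passed to follow-up calls, e.g.
# api.upload_file(..., repo_id=repo_url.repo_id, repo_type="model")
```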