Spaces:
Sleeping
Sleeping
File size: 7,214 Bytes
00dd2d0 9d04ba5 00dd2d0 9d04ba5 00dd2d0 9d04ba5 7249cb2 9d04ba5 00dd2d0 9d04ba5 00dd2d0 9d04ba5 00dd2d0 9d04ba5 00dd2d0 9d04ba5 00dd2d0 9d04ba5 00dd2d0 9d04ba5 00dd2d0 9d04ba5 00dd2d0 9d04ba5 00dd2d0 9d04ba5 00dd2d0 9a09f64 a89ab80 9a09f64 00dd2d0 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 |
import contextlib
import re
import tempfile
from functools import lru_cache
import gradio as gr
from git import Repo
from httpx import Client
from huggingface_hub import create_repo, upload_folder
from toolz import groupby
# Module-level HTTP client, reused across GitHub API calls.
client = Client()
def clone_into_temp_dir(github_repo_url):
    """Clone a GitHub repository into a fresh temporary directory.

    Args:
        github_repo_url: URL of the repository to clone.

    Returns:
        (git.Repo, tempfile.TemporaryDirectory) — the caller is responsible
        for calling ``.cleanup()`` on the directory when done.
    """
    temp_dir = tempfile.TemporaryDirectory()
    # Fix: pass the directory *path* (.name), not the TemporaryDirectory
    # object itself, to Repo.clone_from.
    return Repo.clone_from(github_repo_url, temp_dir.name), temp_dir
# NOTE(review): removed two leftover debug statements that cloned
# https://github.com/chen-zichen/XplainLLM_dataset/ at import time.
# The results were unused (push_to_hf performs its own clone) and the
# TemporaryDirectory objects they created were never cleaned up, so the
# calls only slowed startup and leaked disk space.
def upload_directory_to_hf(
    repo_id: str,
    directory: str,
    token: str,
    private: bool = False,
):
    """Create (or reuse) a Hub dataset repo and upload *directory* into it.

    Args:
        repo_id: Destination dataset repo id, e.g. ``"user/name"``.
        directory: Local folder whose contents are uploaded under ``data``.
        token: Hugging Face token with write access to the namespace.
        private: Create the repo as private when it does not already exist.
    """
    # exist_ok=True makes repeated migrations to the same repo idempotent.
    create_repo(
        repo_id,
        token=token,
        exist_ok=True,
        repo_type="dataset",
        private=private,
    )
    # (the unused `url` / `commit_url` return values were dropped)
    upload_folder(
        folder_path=directory,
        path_in_repo="data",
        repo_id=repo_id,
        repo_type="dataset",
        token=token,
        commit_message="Migrated from GitHub",
        ignore_patterns=[
            "*.git*",
            "*README.md*",
            "*.DS_Store",
            "*.env",
        ],  # ignore git files, README, and .env files
    )
def push_to_hf(
    source_github_repository, destination_hf_hub_repository, hf_token, subdirectory=None
):
    """Clone a GitHub repository and upload its contents to a Hub dataset repo.

    Args:
        source_github_repository: GitHub URL to clone.
        destination_hf_hub_repository: Target dataset repo id (``user/name``).
        hf_token: Hugging Face token with write access.
        subdirectory: Optional list from the folder dropdown; when non-empty,
            only its first entry is migrated.

    Returns:
        Markdown string linking to the migrated dataset.
    """
    gr.Info("Cloning source GitHub repository...")
    repo, temporary_directory = clone_into_temp_dir(source_github_repository)
    gr.Info("Cloning source GitHub repository...Done")
    try:
        gr.Info("Syncing with Hugging Face Hub...")
        if subdirectory:
            src_directory = f"{repo.working_dir}/{subdirectory[0]}"
        else:
            src_directory = repo.working_dir
        upload_directory_to_hf(
            repo_id=destination_hf_hub_repository,
            directory=src_directory,
            token=hf_token,
            private=False,
        )
        gr.Info("Syncing with Hugging Face Hub...Done")
    finally:
        # Remove the local clone even if the upload fails.
        temporary_directory.cleanup()
    # Fix: the original link was missing the "/" after "datasets".
    return f"Pushed the dataset to [{destination_hf_hub_repository}](https://huggingface.co/datasets/{destination_hf_hub_repository})"
def extract_user_name_and_repo_from_url(github_url: str):
    """Return ``(owner, repo)`` parsed from a GitHub URL, or ``None``.

    A trailing ``.git`` suffix on the repo name is stripped, since the name
    is later interpolated into GitHub REST API URLs where the suffix would
    make the request fail.
    """
    pattern = r"https://github.com/([^/]+)/([^/]+)"
    if match := re.search(pattern, github_url):
        return match[1], match[2].removesuffix(".git")
    print("No match found in the GitHub URL.")
    return None
def get_files_and_directories(response):
    """Split a GitHub ``git/trees`` API response into files and directories.

    Args:
        response: HTTP response whose JSON body contains a ``"tree"`` list of
            ``{"type": "blob"|"tree", "path": ...}`` entries.

    Returns:
        ``{"files": [paths...], "directories": [paths...]}``
    """
    tree = response.json()["tree"]
    # "blob" entries are files, "tree" entries are directories; plain
    # comprehensions replace the previous toolz.groupby round-trip.
    files = [item["path"] for item in tree if item["type"] == "blob"]
    directories = [item["path"] for item in tree if item["type"] == "tree"]
    return {"files": files, "directories": directories}
@lru_cache(maxsize=128)
def list_git_repo_files_and_directories(repo_url: str, branch: str = "main"):
    """List the top-level files and directories of a GitHub repo branch.

    Results are LRU-cached per (repo_url, branch). Returns ``None`` when the
    URL cannot be parsed or the GitHub API request does not succeed.
    """
    parsed = extract_user_name_and_repo_from_url(repo_url)
    if parsed is None:
        return None
    owner, repo_name = parsed
    api_url = f"https://api.github.com/repos/{owner}/{repo_name}/git/trees/{branch}"
    response = client.get(api_url)
    if response.status_code != 200:
        return None
    return get_files_and_directories(response)
def show_files_and_directories(url: str):
    """Build the folder/file dropdown components for a GitHub repo URL.

    Listing failures (bad URL, network error, unknown repo) leave the choices
    empty instead of raising, so the Gradio change-callback always returns a
    valid pair of components. (The original returned ``None`` on failure and
    carried a debug ``print``; both fixed.)
    """
    files, directories = [], []
    # Best-effort: any listing error simply yields empty dropdowns.
    with contextlib.suppress(Exception):
        listing = list_git_repo_files_and_directories(url)
        if listing is not None:
            directories = listing.get("directories", [])
            files = listing.get("files", [])
    return gr.Dropdown(
        label="Directories",
        choices=directories,
        max_choices=1,
        visible=True,
        interactive=True,
        multiselect=True,
    ), gr.Dropdown(
        label="Files",
        choices=files,
        max_choices=None,
        visible=True,
        interactive=True,
        multiselect=True,
    )
# HTML/markdown blurb explaining the benefits of hosting datasets on the Hub.
# NOTE(review): not referenced anywhere in the visible UI code below —
# confirm whether it is used elsewhere or is dead.
html_text_app_description = """
Whilst GitHub is great for hosting code the Hugging Face Datasets Hub is a better place to host datasets.
Some of the benefits of hosting datasets on the Hugging Face Datasets Hub are:
<br>
<ul>
<li>Hosting for large datasets</li>
<li>An interactive preview of your dataset</li>
<li>Access to the dataset via many tools and libraries including; datasets, pandas, polars, dask and DuckDB</li>
</ul>
<br>
This app will help you migrate a dataset currently hosted on GitHub to the Hugging Face Datasets Hub.
"""
# --- Gradio UI -------------------------------------------------------------
with gr.Blocks(theme=gr.themes.Base()) as demo:
    gr.HTML(
        """<h1 style='text-align: center;'> GitHub to Hugging Face Hub Dataset Migration Tool</h1>
    <center><i> &#10024; Migrate a dataset in a few steps &#10024;</i></center>"""
    )
    gr.HTML(
        """<center> GitHub is a great place for sharing code but the Hugging Face Hub has many advantages for sharing datasets.
    <br> This Space will guide you through the process of migrating a dataset from GitHub to the Hugging Face Hub. </center>"""
    )
    # Step 1: where the dataset lives now.
    gr.Markdown("### Location of existing dataset")
    gr.Markdown("URL for the GitHub repository where the dataset is currently hosted")
    source_github_repository = gr.Textbox(lines=1, label="Source GitHub Repository URL")
    # Step 2 (optional): narrow the migration to a folder and/or files.
    gr.Markdown("### Select files and folder to migrate")
    gr.Markdown(
        "(Optional): select a specific folder and/or files to migrate from the GitHub repository."
    )
    folder_in_github_repo = gr.Dropdown(
        None,
        label="Folder in the GitHub Repository to migrate",
        allow_custom_value=True,
        visible=True,
    )
    files_in_github_repo = gr.Dropdown(
        None,
        label="Files in GitHub Repository to migrate",
        allow_custom_value=True,
        visible=True,
    )
    # Repopulate both dropdowns whenever the source URL changes.
    source_github_repository.change(
        show_files_and_directories,
        [source_github_repository],
        [folder_in_github_repo, files_in_github_repo],
    )
    # Step 3: destination repo on the Hub plus authentication.
    gr.Markdown("### Destination for your migrated dataset")
    gr.Markdown("Destination repository for your dataset on the Hugging Face Hub")
    destination_hf_hub_repository = gr.Textbox(
        label="Destination Hugging Face Repository",
        placeholder="i.e. <hugging face username>/<repository_name>",
    )
    gr.Markdown("## Authentication")
    gr.Markdown(
        """You need to provide a token with write access to the namespace you want to upload to.
    You can generate/access your Hugging Face token from [here](https://huggingface.co/settings/token)."""
    )  # fixed user-facing typo "Hugging FAce"
    hf_token = gr.Textbox(label="Hugging Face Token", type="password")
    submit_btn = gr.Button("Migrate Dataset")  # renamed from `summit_btn` (typo)
    result = gr.Markdown(label="Summary", visible=True)
    submit_btn.click(
        push_to_hf,
        [
            source_github_repository,
            destination_hf_hub_repository,
            hf_token,
            folder_in_github_repo,
        ],
        [result],
    )
    # Fixed broken markdown link: "(" was missing before the URL.
    gr.Markdown(
        "If you have any questions or feedback feel free to reach out to us on using the [Discussion tab](https://huggingface.co/spaces/librarian-bots/github-to-huggingface-dataset-migration-tool/discussions/1)"
    )
demo.launch()
|