namespace-Pt committed
Commit 712dc96 · Parent: 91130cc

Delete push_to_hub.py with huggingface_hub

Files changed (1):
  push_to_hub.py +0 -106
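Per the commit message, the deletion was performed with huggingface_hub rather than through the web UI. A minimal sketch of such a programmatic deletion via HfApi.delete_file; the repo id and repo type below are hypothetical placeholders, since the page does not show which repository this commit belongs to:

from huggingface_hub import HfApi

api = HfApi()
api.delete_file(
    path_in_repo="push_to_hub.py",
    repo_id="namespace-Pt/example-repo",  # hypothetical: actual repo id not shown on this page
    repo_type="dataset",                  # assumption: drop or change if this is a model repo
    commit_message="Delete push_to_hub.py with huggingface_hub",
)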
push_to_hub.py DELETED
@@ -1,106 +0,0 @@
- from pathlib import Path
- from datasets import load_dataset, load_from_disk
- from dataclasses import dataclass, field
- from huggingface_hub import HfApi
- from transformers import AutoModel, AutoTokenizer, HfArgumentParser
- from typing import Optional, List
-
-
- @dataclass
- class DownloadArgs:
-     model_cache_dir: str = field(
-         default='/share/LMs',
-         metadata={'help': 'Default path to save language models'}
-     )
-     model_name_or_path: Optional[str] = field(
-         default=None,
-         metadata={'help': 'Path to pretrained model or model identifier from huggingface.co/models'}
-     )
-     dataset_cache_dir: str = field(
-         default='/share/peitian/Data/Datasets/huggingface',
-         metadata={'help': 'Default path to save huggingface datasets'}
-     )
-     dataset_name_or_path: Optional[str] = field(
-         default=None,
-         metadata={'help': 'Dataset name'}
-     )
-     data_files: Optional[dict] = field(
-         default=None,
-         metadata={'help': 'Data files for json dataset.'}
-     )
-     dataset_from_disk: bool = field(
-         default=False,
-         metadata={'help': 'Load dataset from disk?'}
-     )
-
-     file: Optional[str] = field(
-         default=None,
-         metadata={'help': 'File to upload.'}
-     )
-     file_in_repo: Optional[str] = field(
-         default=None,
-         metadata={'help': 'File name in repository.'}
-     )
-
-     hub_name: Optional[str] = field(
-         default=None,
-         metadata={'help': 'Name of the huggingface repo.'}
-     )
-
-     revision: str = field(
-         default=None,
-         metadata={'help': 'Remote code revision'}
-     )
-     resume_download: bool = field(
-         default=True,
-         metadata={'help': 'Resume downloading'}
-     )
-     def __post_init__(self):
-         # folder or model not exists
-         if self.model_name_or_path is not None:
-             tokenizer = AutoTokenizer.from_pretrained(self.model_name_or_path, cache_dir=self.model_cache_dir, trust_remote_code=True)
-             model = AutoModel.from_pretrained(self.model_name_or_path, cache_dir=self.model_cache_dir, trust_remote_code=True)
-             # use loop to force success upload
-             while 1:
-                 try:
-                     tokenizer.push_to_hub(self.hub_name)
-                     break
-                 except:
-                     pass
-             while 1:
-                 try:
-                     model.push_to_hub(self.hub_name)
-                     break
-                 except:
-                     pass
-
-         if self.dataset_name_or_path is not None:
-             if self.dataset_from_disk:
-                 dataset = load_from_disk(self.dataset_name_or_path)
-             else:
-                 dataset = load_dataset(self.dataset_name_or_path, data_files=self.data_files, cache_dir=self.dataset_cache_dir)
-             # use loop to force success upload
-             while 1:
-                 try:
-                     dataset.push_to_hub(self.hub_name)
-                     break
-                 except:
-                     pass
-
-         if self.file is not None:
-             api = HfApi()
-             if self.file_in_repo is None:
-                 self.file_in_repo = Path(self.file).name
-             api.upload_file(
-                 path_or_fileobj=self.file,
-                 path_in_repo=self.file_in_repo,
-                 repo_id=self.hub_name,
-                 repo_type="dataset",
-             )
-
-
- if __name__ == "__main__":
-     parser = HfArgumentParser([DownloadArgs])
-     args, = parser.parse_args_into_dataclasses()
-
-
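A note on the retry logic in the deleted script: the `while 1` loops with a bare `except: pass` retry forever and silently discard every error, including unrecoverable ones such as authentication failures. A hedged sketch of a bounded alternative with backoff; the helper name and parameters are illustrative, not part of the original script:

import time

def push_with_retry(push_fn, max_attempts=5, base_delay=10):
    # Retry a push a bounded number of times, surfacing errors instead of
    # silencing them with a bare except.
    for attempt in range(1, max_attempts + 1):
        try:
            push_fn()
            return
        except Exception as exc:
            print(f"attempt {attempt}/{max_attempts} failed: {exc}")
            if attempt == max_attempts:
                raise
            time.sleep(base_delay * attempt)  # linear backoff between attempts

# Usage mirroring the deleted script, e.g.:
#   push_with_retry(lambda: model.push_to_hub(hub_name))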