Add manual hf_token
app.py CHANGED

@@ -31,17 +31,19 @@ def stop_generation():
     """
     stop_event.set()

-def get_hf_token():
+def get_hf_token(hf_token):
     """
-    Retrieve the Hugging Face token from the
+    Retrieve the Hugging Face token from the provided input.
+
+    Args:
+        hf_token (str): The Hugging Face token.

     Returns:
         str: The Hugging Face token.
     """
-
-
-
-    return token
+    if not hf_token:
+        raise ValueError("Hugging Face token not provided.")
+    return hf_token

 def generate_synthetic_dataset(
     llm_model,
@@ -54,6 +56,7 @@ def generate_synthetic_dataset(
     language,
     additional_description,
     num_entries,
+    hf_token,
     hf_repo_name,
     llm_env_vars,
 ):
@@ -71,6 +74,7 @@ def generate_synthetic_dataset(
         language (str): The language of the dataset.
         additional_description (str): Additional description for the dataset.
         num_entries (int): The number of entries in the dataset.
+        hf_token (str): The Hugging Face token.
         hf_repo_name (str): The Hugging Face repository name.
         llm_env_vars (str): Comma-separated environment variables for the LLM.

@@ -78,7 +82,7 @@ def generate_synthetic_dataset(
         str: A message indicating the result of the dataset generation.
     """

-    hf_token = get_hf_token()
+    hf_token = get_hf_token(hf_token)
     os.environ["HF_TOKEN"] = hf_token

     for var in llm_env_vars.split(","):
@@ -97,6 +101,7 @@ def generate_synthetic_dataset(
         domains,
         language,
         num_entries,
+        hf_token,
         hf_repo_name,
         llm_env_vars,
     ):
@@ -183,9 +188,12 @@ def ui_main():
         """
     )

-    gr.
-
-
+    with gr.Row():
+        hf_token = gr.Textbox(
+            label="Hugging Face Token",
+            placeholder="Your HF Token",
+            type="password",
+        )

     with gr.Row():
         llm_model = gr.Textbox(
@@ -263,6 +271,7 @@ def ui_main():
             language,
             additional_description,
             num_entries,
+            hf_token,
             hf_repo_name,
             llm_env_vars,
         ],
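
For context, the commit threads a manually entered token from the new password-type Gradio Textbox through generate_synthetic_dataset and into the HF_TOKEN environment variable, with get_hf_token now validating the value instead of reading it implicitly. The following standalone sketch illustrates that flow; it is not the full app, and the repo-name field, result box, and "Generate" button here are hypothetical placeholders used only to show the wiring.

# Minimal sketch: UI Textbox -> get_hf_token() validation -> HF_TOKEN env var.
import os
import gradio as gr

def get_hf_token(hf_token):
    # Fail fast when the token field is left empty, as in the commit above.
    if not hf_token:
        raise ValueError("Hugging Face token not provided.")
    return hf_token

def generate_synthetic_dataset(hf_token, hf_repo_name):
    # Export the user-supplied token so downstream Hugging Face calls can use it.
    os.environ["HF_TOKEN"] = get_hf_token(hf_token)
    return f"Token set; would push the dataset to {hf_repo_name}"

with gr.Blocks() as demo:
    with gr.Row():
        hf_token = gr.Textbox(
            label="Hugging Face Token",
            placeholder="Your HF Token",
            type="password",
        )
    hf_repo_name = gr.Textbox(label="Hugging Face Repo Name")  # placeholder field
    result = gr.Textbox(label="Result")  # placeholder output
    generate_btn = gr.Button("Generate")  # hypothetical button name
    generate_btn.click(
        generate_synthetic_dataset,
        inputs=[hf_token, hf_repo_name],
        outputs=result,
    )

if __name__ == "__main__":
    demo.launch()

Setting os.environ["HF_TOKEN"] mirrors the original flow, so downstream huggingface_hub calls that read HF_TOKEN should keep working without further changes.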