Spaces:
Running
Running
Commit
·
e575418
1
Parent(s):
51b49ff
update etag_timeout, TIMEOUT=300
Browse files
- app.py +3 -2
- src/backend.py +8 -7
- src/envs.py +1 -0
app.py
CHANGED
@@ -13,7 +13,8 @@ from src.envs import (
|
|
13 |
UNZIP_TARGET_DIR,
|
14 |
TIME_DURATION,
|
15 |
EVAL_K_VALUES,
|
16 |
-
SUBMIT_INFOS_TABLE_COLS
|
|
|
17 |
)
|
18 |
from src.css_html_js import custom_css
|
19 |
|
@@ -36,7 +37,7 @@ def load_submit_infos_df():
|
|
36 |
repo_id=SUBMIT_INFOS_REPO,
|
37 |
repo_type="dataset",
|
38 |
local_dir=SUBMIT_INFOS_DIR,
|
39 |
-
etag_timeout=
|
40 |
)
|
41 |
submit_infos_save_path = os.path.join(SUBMIT_INFOS_DIR, SUBMIT_INFOS_FILE_NAME)
|
42 |
|
|
|
13 |
UNZIP_TARGET_DIR,
|
14 |
TIME_DURATION,
|
15 |
EVAL_K_VALUES,
|
16 |
+
SUBMIT_INFOS_TABLE_COLS,
|
17 |
+
TIMEOUT,
|
18 |
)
|
19 |
from src.css_html_js import custom_css
|
20 |
|
|
|
37 |
repo_id=SUBMIT_INFOS_REPO,
|
38 |
repo_type="dataset",
|
39 |
local_dir=SUBMIT_INFOS_DIR,
|
40 |
+
etag_timeout=TIMEOUT,
|
41 |
)
|
42 |
submit_infos_save_path = os.path.join(SUBMIT_INFOS_DIR, SUBMIT_INFOS_FILE_NAME)
|
43 |
|
src/backend.py
CHANGED
@@ -15,7 +15,8 @@ from src.envs import (
|
|
15 |
API,
|
16 |
ZIP_CACHE_DIR,
|
17 |
SEARCH_RESULTS_REPO, RESULTS_REPO, SUBMIT_INFOS_REPO,
|
18 |
-
make_clickable_model
|
|
|
19 |
)
|
20 |
|
21 |
logger = logging.getLogger(__name__)
|
@@ -171,7 +172,7 @@ def pull_search_results(
|
|
171 |
repo_id=SUBMIT_INFOS_REPO,
|
172 |
repo_type="dataset",
|
173 |
local_dir=submit_infos_dir,
|
174 |
-
etag_timeout=
|
175 |
)
|
176 |
|
177 |
logger.warning(f"Start from commit: {start_commit_id}")
|
@@ -181,7 +182,7 @@ def pull_search_results(
|
|
181 |
repo_type="dataset",
|
182 |
revision=start_commit_id,
|
183 |
local_dir=hf_search_results_repo_dir,
|
184 |
-
etag_timeout=
|
185 |
allow_patterns=['*.json']
|
186 |
)
|
187 |
cur_file_paths = get_file_list(hf_search_results_repo_dir, allowed_suffixes=['.json'])
|
@@ -190,7 +191,7 @@ def pull_search_results(
|
|
190 |
repo_id=SEARCH_RESULTS_REPO,
|
191 |
repo_type="dataset",
|
192 |
local_dir=hf_search_results_repo_dir,
|
193 |
-
etag_timeout=
|
194 |
allow_patterns=['*.json']
|
195 |
)
|
196 |
cur_file_paths = get_file_list(hf_search_results_repo_dir, allowed_suffixes=['.json'])
|
@@ -204,13 +205,13 @@ def pull_search_results(
|
|
204 |
repo_id=RESULTS_REPO,
|
205 |
repo_type="dataset",
|
206 |
local_dir=hf_eval_results_repo_dir,
|
207 |
-
etag_timeout=
|
208 |
)
|
209 |
API.snapshot_download(
|
210 |
repo_id=SEARCH_RESULTS_REPO,
|
211 |
repo_type="dataset",
|
212 |
local_dir=hf_search_results_repo_dir,
|
213 |
-
etag_timeout=
|
214 |
allow_patterns=['*.json']
|
215 |
)
|
216 |
except Exception as e:
|
@@ -247,7 +248,7 @@ def pull_search_results(
|
|
247 |
repo_id=SEARCH_RESULTS_REPO,
|
248 |
repo_type="dataset",
|
249 |
local_dir=ZIP_CACHE_DIR,
|
250 |
-
etag_timeout=
|
251 |
allow_patterns=[f'*{zip_file_name}']
|
252 |
)
|
253 |
zip_file_path = get_zip_file_path(zip_file_name)
|
|
|
15 |
API,
|
16 |
ZIP_CACHE_DIR,
|
17 |
SEARCH_RESULTS_REPO, RESULTS_REPO, SUBMIT_INFOS_REPO,
|
18 |
+
make_clickable_model,
|
19 |
+
TIMEOUT,
|
20 |
)
|
21 |
|
22 |
logger = logging.getLogger(__name__)
|
|
|
172 |
repo_id=SUBMIT_INFOS_REPO,
|
173 |
repo_type="dataset",
|
174 |
local_dir=submit_infos_dir,
|
175 |
+
etag_timeout=TIMEOUT,
|
176 |
)
|
177 |
|
178 |
logger.warning(f"Start from commit: {start_commit_id}")
|
|
|
182 |
repo_type="dataset",
|
183 |
revision=start_commit_id,
|
184 |
local_dir=hf_search_results_repo_dir,
|
185 |
+
etag_timeout=TIMEOUT,
|
186 |
allow_patterns=['*.json']
|
187 |
)
|
188 |
cur_file_paths = get_file_list(hf_search_results_repo_dir, allowed_suffixes=['.json'])
|
|
|
191 |
repo_id=SEARCH_RESULTS_REPO,
|
192 |
repo_type="dataset",
|
193 |
local_dir=hf_search_results_repo_dir,
|
194 |
+
etag_timeout=TIMEOUT,
|
195 |
allow_patterns=['*.json']
|
196 |
)
|
197 |
cur_file_paths = get_file_list(hf_search_results_repo_dir, allowed_suffixes=['.json'])
|
|
|
205 |
repo_id=RESULTS_REPO,
|
206 |
repo_type="dataset",
|
207 |
local_dir=hf_eval_results_repo_dir,
|
208 |
+
etag_timeout=TIMEOUT
|
209 |
)
|
210 |
API.snapshot_download(
|
211 |
repo_id=SEARCH_RESULTS_REPO,
|
212 |
repo_type="dataset",
|
213 |
local_dir=hf_search_results_repo_dir,
|
214 |
+
etag_timeout=TIMEOUT,
|
215 |
allow_patterns=['*.json']
|
216 |
)
|
217 |
except Exception as e:
|
|
|
248 |
repo_id=SEARCH_RESULTS_REPO,
|
249 |
repo_type="dataset",
|
250 |
local_dir=ZIP_CACHE_DIR,
|
251 |
+
etag_timeout=TIMEOUT,
|
252 |
allow_patterns=[f'*{zip_file_name}']
|
253 |
)
|
254 |
zip_file_path = get_zip_file_path(zip_file_name)
|
src/envs.py
CHANGED
@@ -34,6 +34,7 @@ HF_EVAL_RESULTS_REPO_DIR = os.path.join(CACHE_PATH, "eval_results")
|
|
34 |
UNZIP_TARGET_DIR = os.path.join(CACHE_PATH, "unzip_target_dir")
|
35 |
|
36 |
TIME_DURATION = 600 # seconds
|
|
|
37 |
|
38 |
EVAL_K_VALUES = [1, 3, 5, 10, 50, 100, 1000]
|
39 |
|
|
|
34 |
UNZIP_TARGET_DIR = os.path.join(CACHE_PATH, "unzip_target_dir")
|
35 |
|
36 |
TIME_DURATION = 600 # seconds
|
37 |
+
TIMEOUT=300 # seconds
|
38 |
|
39 |
EVAL_K_VALUES = [1, 3, 5, 10, 50, 100, 1000]
|
40 |
|