github-actions[bot] committed
Commit · 0d6e70c · 1 Parent(s): 4d34d9b
Update from GitHub Actions
main.py CHANGED
@@ -378,11 +378,11 @@ async def proxy(request: Request):
     # Create a scraper that goes through the proxy
     # Create a cloudscraper instance
     scraper = cloudscraper.create_scraper(
-        browser={
-            'browser': 'chrome',
-            'platform': 'windows',
-            'mobile': False
-        },
+        # browser={
+        #     'browser': 'chrome',
+        #     'platform': 'windows',
+        #     'mobile': False
+        # },
         debug=True,
         delay=10
     )
@@ -412,8 +412,8 @@ async def proxy(request: Request):
     home_url = f"{parsed_url.scheme}://{parsed_url.netloc}"
 
     # Retry fetching the home page response
-    max_retries =
-    retry_delay =
+    max_retries = 3
+    retry_delay = 1  # retry interval in seconds
     home_response = None
 
     for attempt in range(max_retries):
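For reference, a minimal sketch of the two cloudscraper configurations the first hunk toggles between: with the browser dict, cloudscraper limits its User-Agent choice to desktop Chrome on Windows; with the block commented out (the state after this commit), it falls back to its default User-Agent selection, while debug and delay stay the same. The target URL below is a placeholder, not taken from main.py.

import cloudscraper

# Before this commit: pin the User-Agent pool to desktop Chrome on Windows.
scraper_pinned = cloudscraper.create_scraper(
    browser={
        'browser': 'chrome',
        'platform': 'windows',
        'mobile': False
    },
    debug=True,   # log challenge/request details
    delay=10      # seconds to wait on the Cloudflare IUAM challenge
)

# After this commit: no browser block, so cloudscraper picks its default User-Agent.
scraper_default = cloudscraper.create_scraper(debug=True, delay=10)

# Either scraper behaves like a requests.Session.
resp = scraper_default.get("https://example.com")  # placeholder URL
print(resp.status_code)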
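The second hunk only introduces max_retries = 3 and retry_delay = 1; the body of the retry loop is outside this diff. A plausible sketch, assuming the loop fetches home_url with the scraper and sleeps retry_delay seconds between failed attempts (the URL, timeout, and error handling below are illustrative assumptions, not code from main.py):

import time

import cloudscraper

scraper = cloudscraper.create_scraper(debug=True, delay=10)

home_url = "https://example.com"  # in main.py this is built from parsed_url
max_retries = 3
retry_delay = 1  # retry interval in seconds

home_response = None
for attempt in range(max_retries):
    try:
        home_response = scraper.get(home_url, timeout=15)
        if home_response.status_code == 200:
            break  # got a usable home page response
    except Exception as exc:
        print(f"Attempt {attempt + 1}/{max_retries} failed: {exc}")
    time.sleep(retry_delay)  # back off before the next attempt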