github-actions[bot] committed on
Commit
baa8d8f
·
1 Parent(s): b2e89e3

Update from GitHub Actions

Browse files
Files changed (1) hide show
  1. main.py +8 -6
main.py CHANGED
@@ -383,8 +383,8 @@ async def proxy(request: Request):
383
  # 'platform': 'windows',
384
  # 'mobile': False
385
  # },
386
- debug=True,
387
- delay=10
388
  )
389
 
390
  # 从请求中获取cookies并设置到scraper
@@ -409,16 +409,18 @@ async def proxy(request: Request):
409
  if not home_url:
410
  # 从target_url中提取home_url
411
  parsed_url = urlparse(target_url)
412
- home_url = f"{parsed_url.scheme}://{parsed_url.netloc}"
413
 
414
  # 重试获取主页响应
415
  max_retries = 10
416
- retry_delay = 2 # 重试间隔秒数
417
  home_response = None
418
-
419
  for attempt in range(max_retries):
420
  try:
421
- home_response = scraper.get(home_url, headers={"sec-fetch-dest": "document"})
 
 
422
  print(f"主页{home_url}响应 (尝试 {attempt + 1}): {home_response.status_code}")
423
 
424
  if home_response.status_code == 200:
 
383
  # 'platform': 'windows',
384
  # 'mobile': False
385
  # },
386
+ #debug=True,
387
+ #delay=10
388
  )
389
 
390
  # 从请求中获取cookies并设置到scraper
 
409
  if not home_url:
410
  # 从target_url中提取home_url
411
  parsed_url = urlparse(target_url)
412
+ home_url = f"{parsed_url.scheme}://{parsed_url.netloc}/"
413
 
414
  # 重试获取主页响应
415
  max_retries = 10
416
+ retry_delay = 1 # 重试间隔秒数
417
  home_response = None
418
+
419
  for attempt in range(max_retries):
420
  try:
421
+ home_response = scraper.get(home_url, headers= {
422
+ "sec-fetch-dest": "document"
423
+ })
424
  print(f"主页{home_url}响应 (尝试 {attempt + 1}): {home_response.status_code}")
425
 
426
  if home_response.status_code == 200: