Update api/validate.py
api/validate.py  CHANGED  (+40 -41)
@@ -1,14 +1,26 @@
-# api/validate.py
-
 import re
 import time
 import asyncio
-import httpx
+import aiohttp
 from typing import Optional
 
 base_url = "https://www.blackbox.ai"
 headers = {
-    ...
+    'accept': '*/*',
+    'accept-language': 'en-US,en;q=0.9',
+    'cache-control': 'no-cache',
+    'content-type': 'application/json',
+    'origin': base_url,
+    'pragma': 'no-cache',
+    'priority': 'u=1, i',
+    'referer': f'{base_url}/',
+    'sec-ch-ua': '"Not?A_Brand";v="99", "Chromium";v="130"',
+    'sec-ch-ua-mobile': '?0',
+    'sec-ch-ua-platform': '"Linux"',
+    'sec-fetch-dest': 'empty',
+    'sec-fetch-mode': 'cors',
+    'sec-fetch-site': 'same-origin',
+    'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36'
 }
 
 # Cache variables

@@ -26,45 +38,32 @@ async def getHid(force_refresh: bool = False) -> Optional[str]:
         return cached_hid
 
     try:
-        async with httpx.AsyncClient(...) as client:
-            # Use a regular expression to find specific `static/chunks` paths.
-            pattern = r"static/chunks/app/layout-[a-zA-Z0-9]+\.js"
-            match = re.search(pattern, content)
-
-            if match:
-                # Construct the full URL of the JS file.
-                js_path = match.group()
-                full_url = f"{base_url}/_next/{js_path}"
-
-                js_response.raise_for_status()
-
-                h_pattern = r'h="([0-9a-f-]+)"'
-                h_match = re.search(h_pattern, js_response.text)
-
+        async with aiohttp.ClientSession(headers=headers) as session:
+            async with session.get(base_url) as response:
+                if response.status != 200:
+                    print("Failed to load the page.")
+                    return None
+
+                page_content = await response.text()
+                js_files = re.findall(r'static/chunks/\d{4}-[a-fA-F0-9]+\.js', page_content)
+
+            key_pattern = re.compile(r'w="([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})"')
+
+            for js_file in js_files:
+                js_url = f"{base_url}/_next/{js_file}"
+                async with session.get(js_url) as js_response:
+                    if js_response.status == 200:
+                        js_content = await js_response.text()
+                        match = key_pattern.search(js_content)
+                        if match:
+                            h_value = match.group(1)
+                            print("Found the h-value:", h_value)
+                            # Update the cache
+                            cached_hid = h_value
+                            cache_time = current_time
+                            return h_value
+
+            print("The h-value was not found in any JS content.")
             return None
-    except httpx.RequestError as e:
+    except Exception as e:
         print(f"An error occurred during the request: {e}")
         return None
-    except httpx.HTTPStatusError as e:
-        print(f"HTTP error occurred: {e}")
-        return None
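
The two regular expressions added in this commit do the heavy lifting: re.findall collects candidate Next.js chunk paths ("static/chunks/NNNN-<hash>.js") from the HTML, and key_pattern pulls the UUID assigned to w= out of each bundle. A minimal, self-contained sketch of that extraction step, run against made-up snippets (the HTML and JS strings below are illustrative only, not real blackbox.ai content):

import re

# Illustrative inputs only; real pages and bundles differ.
page_content = '<script src="/_next/static/chunks/4736-deadbeef01.js"></script>'
js_content = 'e.w="12345678-90ab-cdef-1234-567890abcdef",e.v=1'

# Same patterns as in the updated getHid()
js_files = re.findall(r'static/chunks/\d{4}-[a-fA-F0-9]+\.js', page_content)
key_pattern = re.compile(
    r'w="([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})"'
)

print(js_files)                           # ['static/chunks/4736-deadbeef01.js']
match = key_pattern.search(js_content)
print(match.group(1) if match else None)  # 12345678-90ab-cdef-1234-567890abcdef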
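
For completeness, a hypothetical usage sketch (not part of the commit): it assumes the file is importable as api.validate, that the module-level cached_hid and cache_time referenced under "# Cache variables" are declared global inside getHid() so the assignments in this hunk actually update them, and that force_refresh=True bypasses the cache as the signature suggests.

import asyncio

from api.validate import getHid  # module path assumed from the file name in this commit

async def main() -> None:
    hid = await getHid()                      # scrapes the page and JS chunks on a cache miss
    cached = await getHid()                   # should be served from the module-level cache
    fresh = await getHid(force_refresh=True)  # bypasses the cache (per the function signature)
    print(hid, cached == hid, fresh)

if __name__ == "__main__":
    asyncio.run(main())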