import asyncio
import re
import time
from typing import Optional

import aiohttp

base_url = "https://www.blackbox.ai"
headers = {
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
}

# Cache variables
cached_hid = None  # last validated h-value, or None if never fetched
cache_time = 0  # time.time() at which cached_hid was stored
CACHE_DURATION = 36000  # Cache duration in seconds (10 hours)

# Compiled once at import time instead of on every getHid() call.
# Matches a single- or double-quoted UUID and captures the UUID itself.
_UUID_RE = re.compile(
    r'["\']([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-'
    r'[0-9a-fA-F]{4}-[0-9a-fA-F]{12})["\']'
)
# Matches Next.js chunk paths like "static/chunks/1234-abcdef.js".
_JS_CHUNK_RE = re.compile(r'static/chunks/\d{4}-[a-fA-F0-9]+\.js')

# Upper bound per session so a stalled server cannot hang the caller forever.
_REQUEST_TIMEOUT = aiohttp.ClientTimeout(total=30)


def _is_valid_context(text_around: str) -> bool:
    """Heuristic filter: treat a UUID as the wanted h-value only when the
    surrounding text looks like an assignment (some ``<letter>=`` nearby)."""
    return any(char + '=' in text_around for char in 'abcdefghijklmnopqrstuvwxyz')


async def getHid(force_refresh: bool = False) -> Optional[str]:
    """Fetch and cache the site's "h" UUID value from its JS chunk files.

    Scans the landing page for Next.js chunk URLs, downloads each chunk, and
    returns the first quoted UUID that appears in an assignment-like context.
    Successful lookups are cached in module globals for CACHE_DURATION seconds.

    Args:
        force_refresh: When True, bypass the cache and re-fetch.

    Returns:
        The validated UUID string, or None on any HTTP/network failure or if
        no candidate UUID is found.
    """
    global cached_hid, cache_time
    current_time = time.time()
    # Serve from cache unless a refresh is forced or the entry has expired.
    if not force_refresh and cached_hid and (current_time - cache_time) < CACHE_DURATION:
        print("Using cached_hid:", cached_hid)
        return cached_hid

    try:
        async with aiohttp.ClientSession(headers=headers, timeout=_REQUEST_TIMEOUT) as session:
            async with session.get(base_url) as response:
                if response.status != 200:
                    print("Failed to load the page.")
                    return None
                page_content = await response.text()

            for js_file in _JS_CHUNK_RE.findall(page_content):
                js_url = f"{base_url}/_next/{js_file}"
                async with session.get(js_url) as js_response:
                    if js_response.status != 200:
                        continue
                    js_content = await js_response.text()
                for match in _UUID_RE.finditer(js_content):
                    # Inspect a little surrounding text to reject UUIDs that
                    # are not assignment values (e.g. ids embedded in data).
                    start = max(0, match.start() - 10)
                    end = min(len(js_content), match.end() + 10)
                    context = js_content[start:end]
                    if _is_valid_context(context):
                        validated_value = match.group(1)
                        print("Found and validated h-value:", validated_value)
                        # Update the cache
                        cached_hid = validated_value
                        cache_time = current_time
                        return validated_value

            print("The h-value was not found in any JS content.")
            return None
    except Exception as e:
        # Boundary handler: network/decoding failures degrade to a logged
        # message and a None return instead of propagating to the caller.
        print(f"An error occurred during the request: {e}")
        return None