Commit 3950a15 · verified · 1 Parent(s): 3fd6c41

Create terabox.py

Files changed (1):
  1. utils/terabox.py +72 -0
utils/terabox.py ADDED
@@ -0,0 +1,72 @@
+ # filename: utils/terabox.py
+
+ import re
+ import asyncio
+ import logging
+ from functools import partial
+ import requests
+
+ import config
+
+ logger = logging.getLogger(__name__)
+
+ # A set of regex patterns to extract the short ID from various Terabox URL formats
+ TERABOX_URL_PATTERNS = [
+     r'terabox\.com/s/([a-zA-Z0-9_-]+)',
+     r'teraboxapp\.com/s/([a-zA-Z0-9_-]+)',
+     r'1024tera\.com/s/([a-zA-Z0-9_-]+)',
+     r'freeterabox\.com/s/([a-zA-Z0-9_-]+)',
+     r'terabox\.com/sharing/link\?surl=([a-zA-Z0-9_-]+)',
+     r'terasharelink\.com/s/([a-zA-Z0-9_-]+)',
+     r'4funbox\.com/s/([a-zA-Z0-9_-]+)',
+     r'box-links\.com/s/([a-zA-Z0-9_-]+)'
+ ]
+
+ async def extract_terabox_short_id(full_url: str) -> str | None:
+     """Extracts the unique short ID from a Terabox URL."""
+     for pattern in TERABOX_URL_PATTERNS:
+         if match := re.search(pattern, full_url, re.IGNORECASE):
+             return match.group(1)
+     return None
+
+ async def get_final_url_and_metadata(original_link: str) -> dict:
+     """
+     Contacts the worker API to get the direct download link and file metadata.
+     Returns a dictionary with details or an error.
+     """
+     payload = {"link": original_link}
+     headers = {
+         "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36"
+     }
+
+     try:
+         # We run the synchronous requests call in a separate thread to avoid blocking our async bot
+         loop = asyncio.get_event_loop()
+         response = await loop.run_in_executor(
+             None,
+             partial(requests.post, config.TERABOX_WORKER_URL, headers=headers, json=payload, timeout=30)
+         )
+         response.raise_for_status()
+         data = response.json()
+
+         # Check for expected keys in the worker's response
+         if data.get("ok") and all(k in data for k in ["file_name", "file_size_bytes", "download_link"]):
+             return {
+                 "success": True,
+                 "file_name": data["file_name"],
+                 "file_size": data["file_size_bytes"],
+                 "url": data["download_link"],
+                 "error": None
+             }
+         else:
+             error_message = data.get("message", "Worker returned incomplete or invalid data.")
+             logger.warning(f"Worker API returned an error for link {original_link}: {error_message}")
+             return {"success": False, "error": error_message}
+
+     except requests.exceptions.RequestException as e:
+         logger.error(f"Network error while contacting worker for link {original_link}: {e}")
+         return {"success": False, "error": "A network error occurred while contacting the processing service."}
+     except Exception as e:
+         logger.error(f"An unexpected error occurred in get_final_url_and_metadata for link {original_link}: {e}", exc_info=True)
+         return {"success": False, "error": "An unexpected server error occurred."}
+
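A minimal usage sketch (not part of the commit) showing how a bot handler might call these helpers. It assumes the committed module is importable as utils.terabox and that config.TERABOX_WORKER_URL points at a deployed worker; the handler name, the example link, and the print-based output are hypothetical placeholders.

import asyncio

from utils.terabox import extract_terabox_short_id, get_final_url_and_metadata

async def handle_terabox_link(link: str) -> None:
    # Hypothetical handler: names and output are illustrative only.
    short_id = await extract_terabox_short_id(link)
    if short_id is None:
        print("Not a recognized Terabox share link.")
        return

    result = await get_final_url_and_metadata(link)
    if result["success"]:
        # On success the helper returns file_name, file_size (bytes) and a direct url.
        print(f"{result['file_name']} ({result['file_size']} bytes) -> {result['url']}")
    else:
        print(f"Lookup failed: {result['error']}")

if __name__ == "__main__":
    asyncio.run(handle_terabox_link("https://terabox.com/s/EXAMPLE_ID"))

Running requests.post through run_in_executor keeps the bot's event loop responsive while the worker request is in flight, which is why the helper is awaited rather than called directly.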