understanding committed
Commit 7dd2d13 · verified · 1 parent: 6815c67

Update core/bot.py

Files changed (1)
  1. core/bot.py +106 -80
core/bot.py CHANGED
@@ -1,4 +1,4 @@
-# filename: core/bot.py
+# filename: core/bot.py (Corrected Version)
 
 import asyncio
 import logging
@@ -14,8 +14,10 @@ from utils import terabox, ffmpeg, helpers
 
 logger = logging.getLogger(__name__)
 
-# --- Core Bot Client ---
-bot = TelegramClient('terabox_bot_session', config.API_ID, config.API_HASH)
+# --- Core Bot Client (THIS LINE HAS BEEN CHANGED) ---
+# We now specify the 'data/' directory for the session file to ensure it's writable.
+bot = TelegramClient('data/terabox_bot_session', config.API_ID, config.API_HASH)
+
 
 # --- In-Memory State Management ---
 # Queues for the workers
@@ -58,10 +60,11 @@ async def scheduler_loop():
 
             # If the user has no more tasks, remove them from the turn order.
            # Otherwise, move them to the back of the line.
-            USER_TURN_ORDER.rotate(-1) # Move current user to the end
             if not ACTIVE_USER_TASKS[user_id]:
-                USER_TURN_ORDER.pop() # Remove them if their list is now empty
+                USER_TURN_ORDER.popleft() # Remove them if their list is now empty
                 del ACTIVE_USER_TASKS[user_id]
+            else:
+                USER_TURN_ORDER.rotate(-1) # Move current user to the end
         else:
             # Cleanup: If user is in the turn order but has no tasks, remove them.
             USER_TURN_ORDER.popleft()
@@ -73,119 +76,146 @@ async def _process_task(task: dict, worker_name: str):
     batch_id = task['batch_id']
     original_link = task['link']
     user_id = task['user_id']
-    metadata = task['metadata'] # Metadata is pre-fetched by the handler
+    metadata = task['metadata']
+    is_cached_task = task.get('cached', False)
 
     batch_info = BATCH_JOBS.get(batch_id)
     if not batch_info:
         logger.warning(f"[{worker_name}] Batch {batch_id} not found. Task for {original_link} skipped.")
         return
 
-    logger.info(f"[{worker_name}] Starting processing for link: {original_link}")
+    logger.info(f"[{worker_name}] Starting processing for link: {original_link} (Cached: {is_cached_task})")
 
     download_path = None
     final_file_path = None
+    thumbnail_path = None
     error_reason = None
-
+
     try:
-        # Step 1: Download the file from the direct link
-        download_path = await terabox.download_file_from_url(
-            url=metadata['url'],
-            dir_path="downloads",
-            filename=metadata['file_name']
-        )
-        if not download_path:
-            raise ValueError("File download failed.")
-
-        final_file_path = download_path
-
-        # Step 2: FFMPEG processing (if enabled and it's a video)
-        thumbnail_path = None
-        if config.ENABLE_FFMPEG and final_file_path.endswith(('.mp4', '.mkv', '.webm')):
-            # Remux to MP4 if needed (fast operation)
-            if not final_file_path.endswith('.mp4'):
-                remuxed_path = f"{os.path.splitext(final_file_path)[0]}.mp4"
-                remuxed_path = await ffmpeg.remux_to_mp4(final_file_path, remuxed_path)
-                if remuxed_path:
-                    os.remove(final_file_path) # remove original file
-                    final_file_path = remuxed_path
-
-            # Generate thumbnail (fast operation)
-            thumb_path = f"{os.path.splitext(final_file_path)[0]}.jpg"
-            thumbnail_path = await ffmpeg.generate_thumbnail(final_file_path, thumb_path)
-
-        # Step 3: Upload to backup channel & cache
-        caption = templates.BotResponses.FILE_CAPTION.format(
-            file_name=metadata['file_name'],
-            file_size=helpers.format_bytes(metadata['file_size']),
-            source_url=original_link
-        )
-
-        backup_message = await bot.send_file(
-            config.BACKUP_CHANNEL_ID,
-            file=final_file_path,
-            thumb=thumbnail_path,
-            caption=caption
-        )
-        await db_manager.add_to_cache(
-            short_id=task['short_id'],
-            file_id=backup_message.id,
-            file_name=metadata['file_name'],
-            file_size=metadata['file_size']
-        )
-
-        # Step 4: Deliver to user and schedule deletion
-        dm_message = await bot.send_file(
-            user_id,
-            file=backup_message, # Send using file_id from backup channel for speed
-            caption=caption
-        )
-        # Here you would add logic to call the APScheduler to delete dm_message in 30 mins
-
+        if is_cached_task:
+            # For cached tasks, we just need to forward the file
+            cached_file_id = metadata['file_id']
+            dm_message = await bot.send_file(
+                user_id,
+                file=int(cached_file_id), # Use file_id from cache
+                caption=templates.BotResponses.FILE_CAPTION.format(
+                    file_name=metadata['file_name'],
+                    file_size=helpers.format_bytes(metadata['file_size']),
+                    source_url=original_link
+                )
+            )
+        else:
+            # For new tasks, follow the full download -> process -> upload workflow
+            download_path = await terabox.download_file_from_url(
+                url=metadata['url'],
+                dir_path="downloads",
+                filename=metadata['file_name']
+            )
+            if not download_path:
+                raise ValueError("File download failed.")
+
+            final_file_path = download_path
+
+            if config.ENABLE_FFMPEG and final_file_path.lower().endswith(('.mp4', '.mkv', '.webm')):
+                if not final_file_path.lower().endswith('.mp4'):
+                    remuxed_path = f"{os.path.splitext(final_file_path)[0]}.mp4"
+                    remuxed_path = await ffmpeg.remux_to_mp4(final_file_path, remuxed_path)
+                    if remuxed_path:
+                        if os.path.exists(final_file_path): os.remove(final_file_path)
+                        final_file_path = remuxed_path
+
+                thumb_path_temp = f"{os.path.splitext(final_file_path)[0]}.jpg"
+                thumbnail_path = await ffmpeg.generate_thumbnail(final_file_path, thumb_path_temp)
+
+            caption = templates.BotResponses.FILE_CAPTION.format(
+                file_name=metadata['file_name'],
+                file_size=helpers.format_bytes(metadata['file_size']),
+                source_url=original_link
+            )
+
+            backup_message = await bot.send_file(
+                config.BACKUP_CHANNEL_ID,
+                file=final_file_path,
+                thumb=thumbnail_path,
+                caption=caption
+            )
+            await db_manager.add_to_cache(
+                short_id=task['short_id'],
+                file_id=str(backup_message.id),
+                file_name=metadata['file_name'],
+                file_size=metadata['file_size']
+            )
+
+            dm_message = await bot.send_file(
+                user_id,
+                file=backup_message,
+                caption=caption
+            )
     except Exception as e:
         logger.error(f"[{worker_name}] Error processing {original_link}: {e}", exc_info=True)
         error_reason = str(e)
 
     finally:
-        # Clean up local files
-        if download_path and os.path.exists(download_path):
-            os.remove(download_path)
-        if final_file_path and final_file_path != download_path and os.path.exists(final_file_path):
-            os.remove(final_file_path)
-        if 'thumbnail_path' in locals() and thumbnail_path and os.path.exists(thumbnail_path):
-            os.remove(thumbnail_path)
-
+        # Clean up local temporary files
+        for path in [download_path, final_file_path, thumbnail_path]:
+            if path and os.path.exists(path):
+                try:
+                    os.remove(path)
+                except OSError as e:
+                    logger.error(f"Error removing file {path}: {e}")
+
         # --- Final Step: Update Batch Status Safely ---
         async with batch_info['lock']:
             batch_info['processed_links'] += 1
             if error_reason:
                 batch_info['failed_links'].append({"link": original_link, "error": error_reason})
 
-            # Update progress message
             processed = batch_info['processed_links']
             total = batch_info['total_links']
             progress = processed / total
 
             try:
+                # Update progress message
                 await bot.edit_message(
                     batch_info['chat_id'],
                     batch_info['status_message_id'],
                     text=templates.BotResponses.BATCH_UPDATE_VALIDATED.format(
-                        batch_id=batch_id[:6],
+                        batch_id=batch_id,
                         valid_count=total,
                         total_count=batch_info['original_total'],
-                        skipped_count=batch_info['original_total'] - total,
+                        skipped_count=len(batch_info['skipped_links']),
                        progress_bar=helpers.create_progress_bar(progress),
                         processed_count=processed
                     )
                 )
             except Exception:
-                pass # Ignore if message can't be edited
+                pass
 
             # If batch is fully processed, send final summary
             if processed == total:
-                # Here you would call a final summary function
-                logger.info(f"[{worker_name}] Batch {batch_id[:6]} complete.")
-                # ... logic to send final summary and delete job from BATCH_JOBS ...
+                logger.info(f"[{worker_name}] Batch {batch_id} complete.")
+                failed_items = batch_info['skipped_links'] + batch_info['failed_links']
+
+                if failed_items:
+                    failed_details = "\n".join([f"- `{item['link']}` ({item['error']})" for item in failed_items])
+                    final_text = templates.BotResponses.BATCH_COMPLETE_SUMMARY.format(
+                        batch_id=batch_id,
+                        success_count=total - len(failed_items),
+                        skipped_count=len(failed_items),
+                        failed_details=failed_details
+                    )
+                else:
+                    final_text = templates.BotResponses.BATCH_COMPLETE_NO_SKIPS.format(
+                        batch_id=batch_id,
+                        success_count=total
+                    )
+
+                try:
+                    await bot.edit_message(batch_info['chat_id'], batch_info['status_message_id'], final_text)
+                except Exception:
+                    pass
+
+                del BATCH_JOBS[batch_id]
 
 
 async def worker(name: str, queue: asyncio.Queue):
@@ -197,19 +227,15 @@ async def worker(name: str, queue: asyncio.Queue):
         finally:
             queue.task_done()
 
-# --- Startup Function ---
+
 def start_bot_runtime():
     """Creates all the background tasks for the bot's engine."""
-    # Create Premium Workers
     for i in range(config.PREMIUM_WORKERS):
         asyncio.create_task(worker(f"PremiumWorker-{i+1}", PREMIUM_QUEUE))
 
-    # Create Free Workers
     for i in range(config.FREE_WORKERS):
         asyncio.create_task(worker(f"FreeWorker-{i+1}", FREE_QUEUE))
 
-    # Start the Fair-Share Scheduler
     asyncio.create_task(scheduler_loop())
 
     logger.info(f"Bot runtime started with {config.PREMIUM_WORKERS} premium and {config.FREE_WORKERS} free workers.")
-
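The old version carried a placeholder comment after the DM delivery ("Here you would add logic to call the APScheduler to delete dm_message in 30 mins"), and the new version drops the comment without adding the scheduling itself. A minimal sketch of one way that step could look, assuming APScheduler is a dependency and this code lives in core/bot.py where `bot` is the TelegramClient; the names `delete_scheduler` and `schedule_dm_deletion` and the 30-minute window are illustrative, not part of this commit:

# Hypothetical sketch, not part of this commit: auto-delete a delivered DM after 30 minutes.
# Assumes `bot` is the TelegramClient defined in core/bot.py and that an asyncio event loop
# is already running when the scheduler is started.
import logging
from datetime import datetime, timedelta, timezone

from apscheduler.schedulers.asyncio import AsyncIOScheduler

logger = logging.getLogger(__name__)

delete_scheduler = AsyncIOScheduler()


async def _delete_dm(chat_id: int, message_id: int):
    # Best-effort deletion; the message may already be gone or inaccessible.
    try:
        await bot.delete_messages(chat_id, message_id)
    except Exception as e:
        logger.warning(f"Could not delete message {message_id} for {chat_id}: {e}")


def schedule_dm_deletion(chat_id: int, message_id: int, minutes: int = 30):
    """Register a one-shot job that removes the delivered file message after `minutes`."""
    delete_scheduler.add_job(
        _delete_dm,
        trigger="date",
        run_date=datetime.now(timezone.utc) + timedelta(minutes=minutes),
        args=[chat_id, message_id],
    )

With something like this in place, both branches of _process_task could call schedule_dm_deletion(user_id, dm_message.id) right after dm_message is sent, and start_bot_runtime() could call delete_scheduler.start() once the event loop is running.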
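The changed client line also assumes a writable data/ directory exists before Telethon tries to create data/terabox_bot_session.session, and the download step assumes the same for downloads/. If the repo does not already guarantee this elsewhere (for example in a Dockerfile), a small guard such as the following sketch could sit above the client definition:

# Hypothetical guard, not part of this commit: make sure the session and download
# directories exist before the TelegramClient creates its .session file.
import os

for directory in ("data", "downloads"):
    os.makedirs(directory, exist_ok=True)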