from fastapi import FastAPI, File, UploadFile, Request, Form
from fastapi.responses import HTMLResponse, JSONResponse, StreamingResponse
from fastapi.staticfiles import StaticFiles
import requests
import asyncio
import mimetypes
from typing import Dict
import os
import shutil

app = FastAPI()

# Directory to store temporary chunks and final files
UPLOAD_DIR = "uploads"
os.makedirs(UPLOAD_DIR, exist_ok=True)

HTML_CONTENT = """ Radd PRO Uploader

Radd PRO Uploader

or drag and drop file here/paste image

Allowed file types: .zip, .mp4, .txt, .mp3, all image types, .pdf
"""


def _safe_name(name: str) -> str:
    """Reduce a client-supplied identifier to a bare, traversal-free file name.

    ``fileId`` and ``fileName`` arrive from the browser and are joined onto
    UPLOAD_DIR; without this check a value like ``../../etc/cron.d/x`` would
    escape the uploads directory.

    Raises:
        ValueError: if the value is empty or reduces to ``.`` / ``..``.
    """
    if not name:
        raise ValueError("empty name")
    # Normalize Windows separators, then strip any directory component.
    base = os.path.basename(name.replace("\\", "/"))
    if not base or base in (".", ".."):
        raise ValueError("unsafe name")
    return base


@app.get("/", response_class=HTMLResponse)
async def index():
    """Serve the single-page uploader UI."""
    return HTML_CONTENT


@app.post("/upload_chunk")
async def upload_chunk(
    fileId: str = Form(...),
    fileName: str = Form(...),
    totalChunks: int = Form(..., gt=0),
    chunkIndex: int = Form(..., ge=0),
    chunkSize: int = Form(..., gt=0),
    chunkData: UploadFile = File(...),
):
    """
    Endpoint to handle each chunk upload.

    Each chunk is written to ``uploads/<fileId>/chunk_<chunkIndex>``;
    ``/finalize_upload`` later stitches the chunks back together.
    ``fileName``/``totalChunks``/``chunkSize`` are accepted for interface
    compatibility with the client but are not needed per-chunk.
    """
    try:
        safe_id = _safe_name(fileId)
    except ValueError:
        return JSONResponse(content={"error": "Invalid fileId"}, status_code=400)

    # Create a temporary directory based on fileId to store chunks
    temp_dir = os.path.join(UPLOAD_DIR, safe_id)
    os.makedirs(temp_dir, exist_ok=True)

    chunk_file_path = os.path.join(temp_dir, f"chunk_{chunkIndex}")
    # Save the chunk to the temporary directory
    with open(chunk_file_path, "wb") as f:
        f.write(await chunkData.read())

    return {"status": "chunk received"}


@app.post("/finalize_upload")
async def finalize_upload(data: Dict):
    """Reassemble the uploaded chunks, push the file to the external
    service, and return a mirrored URL served via ``/rbxg/``.

    Expects a JSON body with ``fileId`` and ``fileName``.
    """
    fileId = data.get('fileId')
    fileName = data.get('fileName')
    # Missing keys previously reached os.path.join as None and crashed (500);
    # validate and sanitize before touching the filesystem.
    if not fileId or not fileName:
        return JSONResponse(content={"error": "Upload session does not exist"}, status_code=400)
    try:
        safe_id = _safe_name(fileId)
        safe_file = _safe_name(fileName)
    except ValueError:
        return JSONResponse(content={"error": "Upload session does not exist"}, status_code=400)

    temp_dir = os.path.join(UPLOAD_DIR, safe_id)
    if not os.path.exists(temp_dir):
        return JSONResponse(content={"error": "Upload session does not exist"}, status_code=400)

    # Get list of chunk files and sort them by their numeric index
    # (lexicographic order would put chunk_10 before chunk_2).
    chunk_files = [os.path.join(temp_dir, f) for f in os.listdir(temp_dir) if f.startswith('chunk_')]
    if not chunk_files:
        return JSONResponse(content={"error": "No chunks found for this file"}, status_code=400)
    chunk_files.sort(key=lambda x: int(os.path.basename(x).split("_")[-1]))

    # Combine chunks into the final file (streamed, not read whole into memory)
    final_file_path = os.path.join(temp_dir, safe_file)
    with open(final_file_path, "wb") as outfile:
        for chunk_file in chunk_files:
            with open(chunk_file, "rb") as infile:
                shutil.copyfileobj(infile, outfile)

    # Read the combined file content for the external upload
    with open(final_file_path, "rb") as f:
        file_content = f.read()

    # Obtain session cookies from the external site; both are required
    # for the CSRF-protected upload API.
    cookies = await get_cookies()
    if 'csrftoken' not in cookies or 'sessionid' not in cookies:
        return JSONResponse(content={"error": "Failed to obtain necessary cookies"}, status_code=500)

    # Get the content type based on the file extension
    content_type = get_content_type(fileName)

    upload_result = await initiate_upload(cookies, fileName, content_type)
    if not upload_result or 'upload_url' not in upload_result:
        return JSONResponse(content={"error": "Failed to initiate upload"}, status_code=500)

    upload_success = await retry_upload(upload_result['upload_url'], file_content, content_type)
    if not upload_success:
        return JSONResponse(content={"error": "File upload failed after multiple attempts"}, status_code=500)

    original_url = upload_result['serving_url']
    # Guard the split: an unexpected serving URL previously raised IndexError.
    if '/pbxt/' not in original_url:
        return JSONResponse(content={"error": "Failed to initiate upload"}, status_code=500)
    mirrored_url = f"/rbxg/{original_url.split('/pbxt/')[1]}"

    # Remove the temporary directory now that the file is mirrored
    shutil.rmtree(temp_dir, ignore_errors=True)

    return JSONResponse(content={"url": mirrored_url})


def get_content_type(filename: str) -> str:
    """Guess a MIME type from the file extension, defaulting to octet-stream."""
    return mimetypes.guess_type(filename)[0] or 'application/octet-stream'


@app.get("/rbxg/{path:path}")
async def handle_video_stream(path: str, request: Request):
    """Proxy-stream a file from replicate.delivery, forwarding Range requests
    so that video seeking works in the browser."""
    original_url = f'https://replicate.delivery/pbxt/{path}'
    range_header = request.headers.get('Range')
    upstream_headers = {'Range': range_header} if range_header else {}

    # NOTE(review): blocking requests call inside an async handler stalls the
    # event loop for the duration of the upstream connection.
    response = requests.get(original_url, headers=upstream_headers, stream=True)

    def generate():
        for chunk in response.iter_content(chunk_size=8192):
            yield chunk

    # Drop hop-by-hop/encoding headers: requests already de-chunks and
    # decodes the body, so forwarding them verbatim corrupts the response.
    headers = {
        k: v for k, v in response.headers.items()
        if k.lower() not in ('transfer-encoding', 'connection', 'content-encoding')
    }
    headers['Access-Control-Allow-Origin'] = '*'
    headers['Content-Disposition'] = 'inline'
    if response.status_code == 206:
        headers['Content-Range'] = response.headers.get('Content-Range')

    return StreamingResponse(generate(), status_code=response.status_code, headers=headers)


@app.get("/embed")
async def embed_video(url: str, thumbnail: str):
    """Return a minimal embed page for a mirrored video URL."""
    html = f''' '''
    return HTMLResponse(content=html)


async def get_cookies() -> Dict[str, str]:
    """Fetch the target page once to harvest csrftoken/sessionid cookies.

    Best-effort: returns an empty dict on any network error, which the
    caller treats as "cookies unavailable".
    """
    try:
        response = requests.get('https://replicate.com/levelsio/neon-tokyo', headers={
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36'
        })
        return dict(response.cookies)
    except Exception as e:
        print(f'Error fetching the page: {e}')
        return {}


async def initiate_upload(cookies: Dict[str, str], filename: str, content_type: str) -> Dict:
    """Ask the external API for a signed upload URL for *filename*.

    Returns the parsed JSON response (expected to contain ``upload_url``
    and ``serving_url``); re-raises on network/JSON errors after logging.
    """
    # Fix: the filename parameter was previously unused and the URL carried a
    # literal placeholder instead of the file name segment.
    url = f'https://replicate.com/api/upload/{filename}?content_type={content_type}'
    try:
        response = requests.post(url, cookies=cookies, headers={
            'X-CSRFToken': cookies.get('csrftoken'),
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36',
            'Referer': 'https://replicate.com/levelsio/neon-tokyo',
            'Origin': 'https://replicate.com',
            'Accept': '*/*',
            'Accept-Language': 'en-US,en;q=0.5',
            'Accept-Encoding': 'identity',
            'Sec-Fetch-Dest': 'empty',
            'Sec-Fetch-Mode': 'cors',
            'Sec-Fetch-Site': 'same-origin',
            'Sec-GPC': '1',
            'Priority': 'u=1, i'
        })
        return response.json()
    except Exception as e:
        print(f'Error initiating upload: {e}')
        raise


async def upload_file(upload_url: str, file_content: bytes, content_type: str) -> bool:
    """PUT the file bytes to the signed URL; True on HTTP 200, False otherwise."""
    try:
        response = requests.put(upload_url, data=file_content, headers={'Content-Type': content_type})
        return response.status_code == 200
    except Exception as e:
        print(f'Error uploading file: {e}')
        return False


async def retry_upload(upload_url: str, file_content: bytes, content_type: str,
                       max_retries: int = 5, delay: int = 1) -> bool:
    """Retry ``upload_file`` up to *max_retries* times with exponential backoff.

    Returns True as soon as one attempt succeeds, False after exhausting
    all retries.
    """
    retries = 0
    while retries < max_retries:
        try:
            if await upload_file(upload_url, file_content, content_type):
                return True
            print("Upload failed. Retrying...")
        except Exception as e:
            print(f"Error during upload: {e}")
        await asyncio.sleep(delay)
        delay = min(delay * 2, 60)  # Exponential backoff, capped at 60 seconds
        retries += 1
    return False