|
from asyncio import Queue, create_task |
|
from contextlib import asynccontextmanager |
|
from json import dumps, load, loads |
|
from pathlib import Path |
|
from proxybroker import Broker |
|
from fastapi import FastAPI, HTTPException |
|
from fastapi.responses import PlainTextResponse, JSONResponse |
|
from uvicorn import run as uvicorn_run |
|
from apscheduler.schedulers.asyncio import AsyncIOScheduler |
|
|
|
# Scheduler driving the periodic proxy re-scan (job registered below).
scheduler = AsyncIOScheduler()

# Resolve the directory next to this file; fall back to the parent of the
# CWD when __file__ is unavailable (e.g. interactive/embedded execution).
# A bare `except:` here would also hide real errors such as KeyboardInterrupt;
# only NameError is the failure mode we actually expect.
try:
    workdir = Path(__file__).parent
except NameError:
    workdir = Path.cwd().parent

# JSON file the proxy scans are persisted to and served from.
collected_json = workdir / 'proxies.json'

# ISO 3166-1 alpha-2 country codes proxybroker is allowed to source proxies from.
countries_list = ['US', 'CA', 'FR', 'FI', 'HR', 'ME', 'CH', 'SE', 'EE', 'DE', 'GB', 'IT', 'NL', 'PL', 'CZ', 'RS', 'RO', 'MD', 'AT', 'BE', 'BG', 'HU', 'DK', 'IS', 'KZ', 'LV', 'LT', 'LU', 'NO', 'PT', 'SK', 'SI']
|
|
|
|
|
def create_json_from_proxies(proxy_lines, filename):
    """Parse proxybroker repr lines into a JSON document written to *filename*.

    Each line is expected to look like ``<Proxy US 0.23s [HTTP: High] 1.2.3.4:8080>``
    or ``<Proxy FR 0.50s [SOCKS5] 5.6.7.8:1080>``.

    Args:
        proxy_lines: iterable of stringified proxy objects.
        filename: ``pathlib.Path`` to write the JSON document to.

    Returns:
        The same *filename* Path, after writing.
    """
    countries = set()
    proxies = []

    for line in proxy_lines:
        parts = line.split()
        # Blank or truncated lines used to raise IndexError; skip them instead.
        if len(parts) < 5:
            continue
        try:
            country = parts[1]
            ping = float(parts[2].strip('s'))  # e.g. '0.23s' -> 0.23
            protocol = parts[3].strip('[]')
            host = parts[4].rstrip('>')

            # An "[HTTP: High]" marker splits into two tokens, shifting the
            # host one token to the right.
            if "HTTP:" in protocol:
                protocol = "HTTP"
                host = parts[5].rstrip(']>')
        except (IndexError, ValueError):
            # Unparseable line — ignore rather than abort the whole dump.
            continue

        countries.add(country)
        proxies.append({"country": country, "ping": ping, "protocol": protocol, "host": host})

    data = {
        'countries': sorted(countries),
        'proxies': proxies,
    }
    filename.write_text(dumps(data, indent=4))
    return filename
|
|
|
|
|
async def collect_proxies(proxies_queue):
    """Drain *proxies_queue* until a ``None`` sentinel arrives.

    Returns the gathered items as a list of their string representations,
    printing a running count on a single console line as they come in.
    """
    gathered = []
    # A None item is the producer's end-of-stream marker.
    while (proxy := await proxies_queue.get()) is not None:
        gathered.append(f'{proxy}')
        print(f"\rtotal proxies: {len(gathered)}", end='')
    return gathered
|
|
|
|
|
async def find_proxies():
    """Run one proxybroker sweep and persist the results to ``collected_json``.

    Resets the output file to an explicit "nothing yet" payload first, then
    collects up to 150 verified proxies and rewrites the file with them.
    """
    placeholder = {'countries': None, 'proxies': []}
    collected_json.write_text(dumps(placeholder, indent=4))

    queue = Queue()
    finder = Broker(queue, timeout=5, max_conn=200, max_tries=3, verify_ssl=True)
    # The search is awaited to completion; results buffer in the queue and
    # are drained afterwards (the broker enqueues None when it is done).
    await finder.find(types=['HTTP', 'HTTPS', 'SOCKS5'], countries=countries_list, limit=150)
    found = await collect_proxies(queue)
    return create_json_from_proxies(found, collected_json)
|
|
|
|
|
# Re-scan for fresh proxies every hour; max_instances=1 prevents a new scan
# from starting while the previous one is still running.
scheduler.add_job(find_proxies, 'interval', max_instances=1, minutes=60)
|
|
|
|
|
@asynccontextmanager
async def app_lifespan(app: FastAPI):
    """FastAPI lifespan: start the scheduler and kick off an initial scan.

    The first proxy sweep runs as a background task so application startup
    is not blocked waiting for it.
    """
    scheduler.start()
    task = create_task(find_proxies())
    try:
        yield
    finally:
        # Cancel the initial scan if it is still running: awaiting a full
        # proxybroker sweep here could stall shutdown for a long time, and
        # the original code also skipped cleanup entirely if the app raised.
        task.cancel()
        try:
            await task
        except CancelledError:
            pass
        scheduler.shutdown()
|
|
|
|
|
# FastAPI application wired to the lifespan handler defined above.
app = FastAPI(lifespan=app_lifespan)
|
|
|
|
|
@app.post('/{full_path:path}')
async def reject_posts(full_path: str):
    """Reject any POST request — this API is read-only.

    Two fixes over the original: FastAPI has no '*' wildcard (it registered a
    literal path '*' that never matches normal requests — a catch-all path
    parameter is the supported form), and HTTPException must be *raised*, not
    returned; returning it serializes the exception object in a 200 response.
    """
    raise HTTPException(status_code=405)
|
|
|
|
|
@app.get('/proxies/')
async def get_proxies():
    """Return the most recently collected proxy list as JSON."""
    if collected_json.exists():
        return loads(collected_json.read_text())
    # 503 rather than 204: a 204 response must not carry a body, so the
    # error payload would be dropped or produce a malformed response.
    return JSONResponse({"error": "Proxy list is not ready yet."}, status_code=503)
|
|
|
|
|
@app.get('/')
async def read_root():
    """Landing endpoint returning a fixed plain-text greeting."""
    greeting = 'ну пролапс, ну и что'
    return PlainTextResponse(greeting, status_code=200)
|
|
|
|
|
# Local entry point: serve on localhost:7860; the long keep-alive timeout
# gives slow clients time between requests before the connection is dropped.
if __name__ == "__main__":
    uvicorn_run(app, host='127.0.0.1', port=7860, timeout_keep_alive=90)
|
|