import asyncio
import concurrent.futures as conc
import pathlib
import urllib.parse

import aiofile
import httpx
import orjson

from proxy_magic_session import get_async_session

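# Pipeline overview: main() streams wiki records from INPUT_JSONL into
# pages_queue, the HTMLWorker tasks fetch each page through the MediaWiki
# "action=parse" API, and jsonl_writer appends the decoded responses to
# OUTPUT_JSONL, one JSON object per line. orjson encoding/decoding is offloaded
# to a ProcessPoolExecutor so large payloads do not block the event loop.
# get_async_session comes from the local proxy_magic_session module (not shown
# here); it is assumed to return a ready-to-use httpx.AsyncClient, presumably
# configured to go through a proxy.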
CONCURRENT_WORKERS = 128

executor = conc.ProcessPoolExecutor(max_workers=64)

# Work items are (domain, path, page) tuples; results are dicts ready to dump.
pages_queue = asyncio.Queue(maxsize=1048576)
output_queue = asyncio.Queue(maxsize=int(CONCURRENT_WORKERS * 1.5))

INPUT_JSONL = pathlib.Path("fandom_wikis_pages_210224_v2.jsonl")
OUTPUT_JSONL = pathlib.Path("fandom_wikis_pages_contents_210224_v2.jsonl")

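# POST `url` with a budget of 10 attempts. Non-403 3xx/4xx statuses are
# retried, except 410 Gone which aborts immediately; responses whose bodies do
# not parse as JSON are retried as well. Transport and HTTP errors swap in a
# fresh session and back off for a second. Returns the last response received
# (the caller checks for status 200), or None if nothing usable came back.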
async def retry_url(url: str):
    loop = asyncio.get_running_loop()
    session: httpx.AsyncClient = get_async_session()
    session.cookies.clear()
    session.headers[
        "user-agent"
    ] = "Mozilla/6.2 (compatible; Microsoft Chrome 137.0; Apple Gecko 47.0 in AOL Firefox 37.6) Google Toolbar/1.3"
    tries = 10
    data = None
    while True:
        try:
            data = await session.post(url, follow_redirects=True)
            if 300 <= data.status_code < 500 and data.status_code != 403:
                # 410 Gone is permanent: stop retrying this URL.
                if data.status_code == 410:
                    break
                print(f"[W] RetryRequest | {url} {data.status_code}")
                tries -= 1
            else:
                try:
                    # Only accept responses whose body parses as JSON.
                    await loop.run_in_executor(executor, orjson.loads, data.content)
                    break
                except Exception:
                    tries -= 1
        except httpx.TransportError as e:
            await session.aclose()
            session = get_async_session()
            print(f"[W] Retry TransportError {url} {e}")
            await asyncio.sleep(1)
            tries -= 1
        except httpx.HTTPError as e:
            print(f"[W] Uncaught Exception Retry... {url} | {e}")
            await session.aclose()
            session = get_async_session()
            await asyncio.sleep(1)
            tries -= 1
        except Exception as e:
            print(f"[W] Uncaught Exception {url} | {e}")
            break
        if tries <= 0:
            print(f"[W] Tries Exceeded {url}")
            break
    await session.aclose()
    if tries <= 0:
        return None
    return data

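# Worker coroutine: pulls (domain, path, page) tuples from pages_queue, asks the
# wiki's MediaWiki api.php to parse the page (text, wikitext, links, categories,
# images, sections, ...), and forwards the decoded JSON payload to the writer
# via output_queue. A None item is the shutdown sentinel.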
async def HTMLWorker():
    loop = asyncio.get_running_loop()
    while True:
        data = await pages_queue.get()
        if data is None:
            break
        domain, path, page = data
        query_params = {
            "action": "parse",
            "format": "json",
            "page": page,
            "prop": "text|langlinks|categories|links|templates|images|externallinks|sections|revid|displaytitle|iwlinks|properties|parsewarnings|wikitext",
        }
        print(f"[I] HTMLW | {domain} {page} query.")
        response = await retry_url(
            f"https://{domain}{path}api.php?{urllib.parse.urlencode(query_params)}"
        )
        if response and response.status_code == 200:
            print(f"[I] HTMLW | {domain} {page} dumped.")
            await output_queue.put(
                {
                    "domain": domain,
                    "path": path,
                    "page": page,
                    "content": await loop.run_in_executor(
                        executor, orjson.loads, response.content
                    ),
                }
            )

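# Single writer coroutine: serializes each result dict with orjson (inside the
# process pool) and appends it to OUTPUT_JSONL as one JSON object per line.
# A None item is the shutdown sentinel.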
async def jsonl_writer():
    loop = asyncio.get_running_loop()
    async with aiofile.async_open(OUTPUT_JSONL, "wb") as f:
        while True:
            dict_data: dict = await output_queue.get()
            if dict_data is None:
                break
            print(f"[I] Dump: {dict_data['domain']}{dict_data['path']}{dict_data['page']}")
            bytes_data = await loop.run_in_executor(executor, orjson.dumps, dict_data)
            await f.write(bytes_data)
            await f.write(b"\n")

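# Producer / orchestrator. Judging by the fields read below, each line of
# INPUT_JSONL is expected to be a JSON object with at least "domain", "path"
# and "pages" keys. Wikis with five or fewer pages are skipped, every worker
# receives a None sentinel once the input is exhausted, worker progress is
# polled once a minute, and finally the writer is told to shut down.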
async def main():
    loop = asyncio.get_running_loop()
    workers = [loop.create_task(HTMLWorker()) for _ in range(CONCURRENT_WORKERS)]
    writer = loop.create_task(jsonl_writer())
    with open(INPUT_JSONL, "rb") as f:
        # Read and discard the first line of the input file.
        line = f.readline()
        for line in f:
            if line:
                domain_data = orjson.loads(line)
                page_count = len(domain_data["pages"])
                if page_count <= 5:
                    print(f"[I] Skip {domain_data['domain']} due to low page count.")
                    continue
                for page in domain_data["pages"]:
                    # The stored path has its last five characters trimmed
                    # before HTMLWorker joins it with api.php.
                    await pages_queue.put(
                        (domain_data["domain"], domain_data["path"][:-5], page)
                    )
    # One None sentinel per worker signals that no more pages are coming.
    for _ in range(CONCURRENT_WORKERS):
        await pages_queue.put(None)
    # Poll the worker tasks once a minute until they have all finished.
    while True:
        done_workers = 0
        for worker in workers:
            if worker.done():
                done_workers += 1
        if done_workers != CONCURRENT_WORKERS:
            print(f"\r{done_workers} / {CONCURRENT_WORKERS} are completed.")
            await asyncio.sleep(60)
        else:
            break

    # All workers are done, so the writer can be told to shut down too.
    await output_queue.put(None)
    print("Sent shutdown to Jsonl writer.")
    await asyncio.gather(writer)


if __name__ == "__main__":
    asyncio.run(main())