import asyncio
import os
import random
from collections import deque
import ssl
from urllib.parse import urlparse
import aiohttp
import polars as pl
import aiofiles
from aiohttp import TCPConnector
# from limiter import Limiter
import certifi
from tqdm.asyncio import tqdm_asyncio
# limit_downloads = Limiter(rate=20, capacity=1000, consume=1)
BATCH_SIZE = 100
BASE_DOWNLOAD_PATH = "/mnt/jupiter/openalex_extraction/openalex_2"
TIME_OUT = 60
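# Rotate across a few desktop User-Agent strings so requests don't all
# present an identical client fingerprint.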
USER_AGENTS = [
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Firefox/89.0",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:89.0) Gecko/20100101 Firefox/89.0",
]
def make_headers(referer: str) -> dict:
    """Build per-request headers with a randomly chosen User-Agent."""
    return {
        "User-Agent": random.choice(USER_AGENTS),
        "Referer": referer,
    }
async def get_request(session, queries: list, uuid: str) -> tuple[str, bytes | None]:
    """Try each candidate URL for one record and return (filename, content),
    or ("error", None) if every URL fails. TLS is configured on the session's
    connector in main(), so no per-request SSL context is needed."""
for query in queries:
try:
            async with session.get(url=query, headers=make_headers(query)) as response:
                response.raise_for_status()  # skip 4xx/5xx bodies instead of saving them as PDFs
                content = await response.read()
                # OpenAlex identifiers are URLs (e.g. https://openalex.org/W123);
                # keep just the path component as the file stem.
                work_id = urlparse(uuid).path.replace("/", "")
                filename = os.path.join(BASE_DOWNLOAD_PATH, f"{work_id}.pdf")
                file_number = 1
                # The exists() check avoids clobbering earlier downloads, but it
                # is not atomic across concurrent tasks.
                while os.path.exists(filename):
                    filename = os.path.join(
                        BASE_DOWNLOAD_PATH, f"{work_id}_{file_number}.pdf"
                    )
                    file_number += 1
# async with aiofiles.open(filename, "wb") as f:
# await f.write(content)
return filename, content
except Exception as e:
print(f"An error occurred with query {query}: {e}")
continue # Try the next query in the list
    # Every candidate URL failed for this record; signal failure to the caller.
print(f"All queries failed: {queries}")
return "error", None
async def get_batched(session, batch):
    """Download one record per (candidate URLs, identifier) pair, concurrently."""
tasks = []
for q in batch:
if q:
            task = asyncio.create_task(get_request(session, queries=q[0], uuid=q[1]))
tasks.append(task)
return await tqdm_asyncio.gather(
*tasks, desc="Collecting batch", leave=True, position=0
)
async def main(file_loc):
    # or: df = datasets.load_dataset(url, split="train").to_polars().lazy()
    df = pl.scan_parquet(file_loc)
    # Count rows without materializing the whole frame.
    total_rows = df.select(pl.len()).collect().item()
    # Bump start_index to resume a partially completed run.
    start_index = 0
    num_rows = total_rows - start_index
    df = df.slice(start_index, num_rows)
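    # Split the comma-separated pdf_url field into a list of candidate URLs,
    # giving each record fallback mirrors to try in order.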
df = (
df.with_columns(pl.col("pdf_url").str.split(","))
.select(["identifier", "pdf_url"])
.collect(streaming=True)
.iter_rows(named=True)
)
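    # Each queued entry pairs the candidate URL list with its record identifier.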
batches = deque()
output = []
    # Verify TLS against certifi's CA bundle (otherwise the certifi import is unused).
    ssl_context = ssl.create_default_context(cafile=certifi.where())
    ssl_context.check_hostname = True
    ssl_context.verify_mode = ssl.CERT_REQUIRED
timeout = aiohttp.ClientTimeout(total=TIME_OUT)
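    # Stream rows and flush a download batch every BATCH_SIZE records; opening
    # a fresh ClientSession per batch keeps connection pools from going stale.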
for row in df:
batches.append((row["pdf_url"], row["identifier"]))
if len(batches) == BATCH_SIZE:
async with aiohttp.ClientSession(
connector=TCPConnector(ssl=ssl_context, limit=50),
timeout=timeout,
) as session:
responses = await get_batched(session, batches)
for filename, content in responses:
if content:
with open(filename, "wb") as f:
f.write(content)
output.append(filename)
batches.clear()
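    # Flush any remaining rows that didn't fill a complete batch.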
if batches:
async with aiohttp.ClientSession(
connector=TCPConnector(ssl=ssl_context, limit=50), timeout=timeout
) as session:
responses = await get_batched(session, batches)
print("Saving Batch")
for filename, content in responses:
if content:
with open(filename, "wb") as f:
f.write(content)
output.append(filename)
print("Batch Saved")
return output
if __name__ == "__main__":
FILE_LOCATION = "/mnt/jupiter/openalex_extraction/openalex_extraction_2.parquet"
results = asyncio.run(main(FILE_LOCATION))
    print(results)